How to perform an AWS S3 multipart copy using golang
Source: stackoverflow
Date: 2024-03-17 17:15:31
**Summary:** AWS S3 multipart copy makes it possible to copy objects larger than 5 GB by splitting the copy into smaller parts, which works around the 5 GB limit on a single copy operation. This article walks through how to perform an S3 multipart copy with the Golang client libraries: initiating the multipart upload, copying each part, and completing the copy.
I was looking at the AWS golang documentation for the S3 CopyObject operation, and it contains the following detail about handling large objects:

"However, to copy an object greater than 5 GB, you must use the multipart upload Upload Part - Copy API. For more information, see Copying Objects Using the REST Multipart Upload API (https://docs.aws.amazon.com/AmazonS3/latest/dev/CopyingObjctsUsingRESTMPUapi.html)."

When I follow that link, it only contains code samples for Java and .NET.

Am I missing some documentation or example that shows how to copy an existing large file in S3 with the golang client?
Accepted answer
So it took some experimenting, but I finally got the multipart copy working:
//imports
import (
	"context"
	"errors"
	"fmt"
	"strconv"
	"strings"
	"time"

	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/s3"
	log "github.com/sirupsen/logrus"
)

//constant for the number of bytes in a 5 MB chunk
const max_part_size = 5 * 1024 * 1024

//helper function to build the string for the byte range to copy
func buildCopySourceRange(start int64, objectSize int64) string {
	end := start + max_part_size - 1
	//byte ranges are zero-indexed, so the last valid offset is objectSize-1
	if end > objectSize-1 {
		end = objectSize - 1
	}
	startRange := strconv.FormatInt(start, 10)
	stopRange := strconv.FormatInt(end, 10)
	return "bytes=" + startRange + "-" + stopRange
}

//function that starts the multipart upload, performs each part copy, and completes the copy
//fileSize is the size of the source object in bytes (for example from a HeadObject call)
func MultiPartCopy(sess *session.Session, fileSize int64, sourceBucket string, sourceKey string, destBucket string, destKey string) error {
	svc := s3.New(sess)

	ctx, cancelFn := context.WithTimeout(context.TODO(), 10*time.Minute)
	defer cancelFn()

	//struct for starting a multipart upload
	startInput := s3.CreateMultipartUploadInput{
		Bucket: &destBucket,
		Key:    &destKey,
	}

	//send the command to start the copy and keep the upload id, as it is needed later
	var uploadId string
	createOutput, err := svc.CreateMultipartUploadWithContext(ctx, &startInput)
	if err != nil {
		return err
	}
	if createOutput != nil {
		if createOutput.UploadId != nil {
			uploadId = *createOutput.UploadId
		}
	}
	if uploadId == "" {
		return errors.New("no upload id found in start upload request")
	}

	var i int64
	var partNumber int64 = 1
	copySource := "/" + sourceBucket + "/" + sourceKey
	parts := make([]*s3.CompletedPart, 0)
	numUploads := fileSize / max_part_size
	log.Infof("Will attempt upload in %d number of parts to %s", numUploads, destKey)
	for i = 0; i < fileSize; i += max_part_size {
		copyRange := buildCopySourceRange(i, fileSize)
		partInput := s3.UploadPartCopyInput{
			Bucket:          &destBucket,
			CopySource:      &copySource,
			CopySourceRange: &copyRange,
			Key:             &destKey,
			PartNumber:      &partNumber,
			UploadId:        &uploadId,
		}
		log.Debugf("Attempting to upload part %d range: %s", partNumber, copyRange)
		partResp, err := svc.UploadPartCopy(&partInput)
		if err != nil {
			log.Error("Attempting to abort upload")
			//Bucket and Key are required by the abort API in addition to the upload id
			abortIn := s3.AbortMultipartUploadInput{
				Bucket:   &destBucket,
				Key:      &destKey,
				UploadId: &uploadId,
			}
			//ignoring any errors with aborting the copy
			svc.AbortMultipartUploadRequest(&abortIn)
			return fmt.Errorf("error uploading part %d : %w", partNumber, err)
		}

		//copy the etag and part number from the response, as they are needed for completion
		if partResp != nil {
			partNum := partNumber
			etag := strings.Trim(*partResp.CopyPartResult.ETag, "\"")
			cPart := s3.CompletedPart{
				ETag:       &etag,
				PartNumber: &partNum,
			}
			parts = append(parts, &cPart)
			log.Debugf("Successfully uploaded part %d of %s", partNumber, uploadId)
		}
		partNumber++
		if partNumber%50 == 0 {
			log.Infof("Completed part %d of %d to %s", partNumber, numUploads, destKey)
		}
	}

	//create the struct for completing the upload
	mpu := s3.CompletedMultipartUpload{
		Parts: parts,
	}

	//complete the actual upload
	//the copy does not happen until the complete command is received
	complete := s3.CompleteMultipartUploadInput{
		Bucket:          &destBucket,
		Key:             &destKey,
		UploadId:        &uploadId,
		MultipartUpload: &mpu,
	}
	compOutput, err := svc.CompleteMultipartUpload(&complete)
	if err != nil {
		return fmt.Errorf("error completing upload: %w", err)
	}
	if compOutput != nil {
		log.Infof("Successfully copied Bucket: %s Key: %s to Bucket: %s Key: %s", sourceBucket, sourceKey, destBucket, destKey)
	}
	return nil
}
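As a usage note (not part of the original answer): MultiPartCopy above takes the source object's size, which the snippet does not show how to obtain. A minimal, hypothetical caller might look like the sketch below, using HeadObject to read the size first; the bucket names, keys, and region are placeholders for illustration only.

package main

import (
	"fmt"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/s3"
)

func main() {
	//placeholder buckets, keys, and region, shown only for illustration
	sourceBucket, sourceKey := "my-source-bucket", "big/object.bin"
	destBucket, destKey := "my-dest-bucket", "big/object-copy.bin"

	sess := session.Must(session.NewSession(&aws.Config{Region: aws.String("us-east-1")}))
	svc := s3.New(sess)

	//HeadObject returns the object size needed for the part range calculation
	head, err := svc.HeadObject(&s3.HeadObjectInput{
		Bucket: aws.String(sourceBucket),
		Key:    aws.String(sourceKey),
	})
	if err != nil {
		panic(err)
	}
	fileSize := aws.Int64Value(head.ContentLength)

	if err := MultiPartCopy(sess, fileSize, sourceBucket, sourceKey, destBucket, destKey); err != nil {
		panic(err)
	}
	fmt.Println("copy complete")
}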
The same approach as @Mike's answer, but using aws-sdk-go-v2:

import (
	"context"
	"errors"
	"fmt"
	"strconv"
	"strings"
	"time"

	"github.com/aws/aws-sdk-go-v2/service/s3"
	"github.com/aws/aws-sdk-go-v2/service/s3/types"

	"logger" //the answerer's own logging package; substitute your preferred logger
)

//constant for the number of bytes in a 5 MB chunk
const max_part_size = 5 * 1024 * 1024

var log *logger.Logger

//helper function to build the string for the byte range to copy
func buildCopySourceRange(start int64, objectSize int64) string {
	end := start + max_part_size - 1
	//byte ranges are zero-indexed, so the last valid offset is objectSize-1
	if end > objectSize-1 {
		end = objectSize - 1
	}
	startRange := strconv.FormatInt(start, 10)
	stopRange := strconv.FormatInt(end, 10)
	return "bytes=" + startRange + "-" + stopRange
}

//function that starts the multipart upload, performs each part copy, and completes the copy
func MultiPartCopy(svc *s3.Client, fileSize int64, sourceBucket string, sourceKey string, destBucket string, destKey string) error {
	log = logger.GetLogger()

	ctx, cancelFn := context.WithTimeout(context.TODO(), 10*time.Minute)
	defer cancelFn()

	//struct for starting a multipart upload
	startInput := s3.CreateMultipartUploadInput{
		Bucket: &destBucket,
		Key:    &destKey,
	}

	//send the command to start the copy and keep the upload id, as it is needed later
	var uploadId string
	createOutput, err := svc.CreateMultipartUpload(ctx, &startInput)
	if err != nil {
		return err
	}
	if createOutput != nil {
		if createOutput.UploadId != nil {
			uploadId = *createOutput.UploadId
		}
	}
	if uploadId == "" {
		return errors.New("no upload id found in start upload request")
	}

	var i int64
	var partNumber int32 = 1
	copySource := "/" + sourceBucket + "/" + sourceKey
	parts := make([]types.CompletedPart, 0)
	numUploads := fileSize / max_part_size
	log.Infof("Will attempt upload in %d number of parts to %s", numUploads, destKey)
	for i = 0; i < fileSize; i += max_part_size {
		copyRange := buildCopySourceRange(i, fileSize)
		partInput := s3.UploadPartCopyInput{
			Bucket:          &destBucket,
			CopySource:      &copySource,
			CopySourceRange: &copyRange,
			Key:             &destKey,
			//note: newer releases of the v2 s3 module make PartNumber a *int32 (use aws.Int32 there)
			PartNumber:      partNumber,
			UploadId:        &uploadId,
		}
		log.Debugf("Attempting to upload part %d range: %s", partNumber, copyRange)
		partResp, err := svc.UploadPartCopy(context.TODO(), &partInput)
		if err != nil {
			log.Error("Attempting to abort upload")
			//Bucket and Key are required by the abort API in addition to the upload id
			abortIn := s3.AbortMultipartUploadInput{
				Bucket:   &destBucket,
				Key:      &destKey,
				UploadId: &uploadId,
			}
			//ignoring any errors with aborting the copy
			svc.AbortMultipartUpload(context.TODO(), &abortIn)
			return fmt.Errorf("error uploading part %d : %w", partNumber, err)
		}

		//copy the etag and part number from the response, as they are needed for completion
		if partResp != nil {
			partNum := partNumber
			etag := strings.Trim(*partResp.CopyPartResult.ETag, "\"")
			cPart := types.CompletedPart{
				ETag:       &etag,
				PartNumber: partNum,
			}
			parts = append(parts, cPart)
			log.Debugf("Successfully uploaded part %d of %s", partNumber, uploadId)
		}
		partNumber++
		if partNumber%50 == 0 {
			log.Infof("Completed part %d of %d to %s", partNumber, numUploads, destKey)
		}
	}

	//create the struct for completing the upload
	mpu := types.CompletedMultipartUpload{
		Parts: parts,
	}

	//complete the actual upload
	//the copy does not happen until the complete command is received
	complete := s3.CompleteMultipartUploadInput{
		Bucket:          &destBucket,
		Key:             &destKey,
		UploadId:        &uploadId,
		MultipartUpload: &mpu,
	}
	compOutput, err := svc.CompleteMultipartUpload(context.TODO(), &complete)
	if err != nil {
		return fmt.Errorf("error completing upload: %w", err)
	}
	if compOutput != nil {
		log.Infof("Successfully copied Bucket: %s Key: %s to Bucket: %s Key: %s", sourceBucket, sourceKey, destBucket, destKey)
	}
	return nil
}
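Mirroring the v1 usage sketch above, a hypothetical caller of the v2 version might look like the following; the bucket names, keys, and region are placeholders, and the sketch assumes the same older release of the v2 s3 module that the answer targets (where HeadObject's ContentLength and PartNumber are plain values rather than pointers).

package main

import (
	"context"

	"github.com/aws/aws-sdk-go-v2/aws"
	"github.com/aws/aws-sdk-go-v2/config"
	"github.com/aws/aws-sdk-go-v2/service/s3"
)

func main() {
	ctx := context.TODO()

	//placeholder buckets and keys, shown only for illustration
	sourceBucket, sourceKey := "my-source-bucket", "big/object.bin"
	destBucket, destKey := "my-dest-bucket", "big/object-copy.bin"

	cfg, err := config.LoadDefaultConfig(ctx, config.WithRegion("us-east-1"))
	if err != nil {
		panic(err)
	}
	client := s3.NewFromConfig(cfg)

	//HeadObject supplies the object size that MultiPartCopy needs
	head, err := client.HeadObject(ctx, &s3.HeadObjectInput{
		Bucket: aws.String(sourceBucket),
		Key:    aws.String(sourceKey),
	})
	if err != nil {
		panic(err)
	}
	fileSize := head.ContentLength //newer releases return *int64; use aws.ToInt64(head.ContentLength) there

	if err := MultiPartCopy(client, fileSize, sourceBucket, sourceKey, destBucket, destKey); err != nil {
		panic(err)
	}
}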
@Mike one question: you used AbortMultipartUploadRequest, which does not exist in aws-sdk-go-v2, so I used AbortMultipartUpload instead; hopefully that does not make much of a difference?
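A note on that question: in aws-sdk-go v1 the XxxRequest methods only build a request object, and nothing is sent until Send() is called, so the abort line in the v1 answer never actually sends the abort request as written. AbortMultipartUpload, which exists in both SDK versions, sends the request immediately, and either way the call needs Bucket and Key in addition to UploadId. A minimal v1 sketch, reusing the variable names from the answer above:

//a minimal sketch of actually sending the abort with the v1 SDK;
//Bucket, Key, and UploadId are all required by the AbortMultipartUpload API
req, _ := svc.AbortMultipartUploadRequest(&s3.AbortMultipartUploadInput{
	Bucket:   &destBucket,
	Key:      &destKey,
	UploadId: &uploadId,
})
if err := req.Send(); err != nil {
	log.Errorf("abort failed: %v", err)
}

//or, equivalently, the one-shot form that also exists in v1:
//_, err := svc.AbortMultipartUpload(&abortIn)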