feat(139): support multipart upload (close: #7444) #7630

Merged · 2 commits · Dec 9, 2024
135 changes: 100 additions & 35 deletions drivers/139/driver.go
@@ -357,7 +357,10 @@ const (
TB
)

func getPartSize(size int64) int64 {
func (d *Yun139) getPartSize(size int64) int64 {
if d.CustomUploadPartSize != 0 {
return d.CustomUploadPartSize
}
// The cloud drive caps the number of parts per upload, so larger files need larger parts
if size/GB > 30 {
return 512 * MB
@@ -380,24 +383,51 @@ func (d *Yun139) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
return err
}
}
// return errs.NotImplement

partInfos := []PartInfo{}
var partSize = d.getPartSize(stream.GetSize())
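// Ceiling division: the number of parts needed to cover the whole stream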
part := (stream.GetSize() + partSize - 1) / partSize
if part == 0 {
part = 1
}
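// Pre-compute the number, size and offset of every part; upload URLs are requested separately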
for i := int64(0); i < part; i++ {
if utils.IsCanceled(ctx) {
return ctx.Err()
}
start := i * partSize
byteSize := stream.GetSize() - start
if byteSize > partSize {
byteSize = partSize
}
partNumber := i + 1
partInfo := PartInfo{
PartNumber: partNumber,
PartSize: byteSize,
ParallelHashCtx: ParallelHashCtx{
PartOffset: start,
},
}
partInfos = append(partInfos, partInfo)
}

// Only the first 100 partInfos are sent with the create request
firstPartInfos := partInfos
if len(firstPartInfos) > 100 {
firstPartInfos = firstPartInfos[:100]
}

// Get the upload info and the upload URLs for the first 100 parts
data := base.Json{
"contentHash": fullHash,
"contentHashAlgorithm": "SHA256",
"contentType": "application/octet-stream",
"parallelUpload": false,
"partInfos": []base.Json{{
"parallelHashCtx": base.Json{
"partOffset": 0,
},
"partNumber": 1,
"partSize": stream.GetSize(),
}},
"size": stream.GetSize(),
"parentFileId": dstDir.GetID(),
"name": stream.GetName(),
"type": "file",
"fileRenameMode": "auto_rename",
"partInfos": firstPartInfos,
"size": stream.GetSize(),
"parentFileId": dstDir.GetID(),
"name": stream.GetName(),
"type": "file",
"fileRenameMode": "auto_rename",
}
pathname := "/hcy/file/create"
var resp PersonalUploadResp
@@ -410,32 +440,67 @@ func (d *Yun139) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
return nil
}

// The create response already carries upload URLs for the first batch of parts
uploadPartInfos := resp.Data.PartInfos

// Fetch upload URLs for the remaining parts (index 100 onward), 100 at a time
for i := 100; i < len(partInfos); i += 100 {
end := i + 100
if end > len(partInfos) {
end = len(partInfos)
}
batchPartInfos := partInfos[i:end]
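// Request this batch's upload URLs with the fileId/uploadId returned by the create call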

moredata := base.Json{
"fileId": resp.Data.FileId,
"uploadId": resp.Data.UploadId,
"partInfos": batchPartInfos,
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
}
pathname := "/hcy/file/getUploadUrl"
var moreresp PersonalUploadUrlResp
_, err = d.personalPost(pathname, moredata, &moreresp)
if err != nil {
return err
}
uploadPartInfos = append(uploadPartInfos, moreresp.Data.PartInfos...)
}
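// uploadPartInfos now holds an upload URL for every part of the file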

// Progress
p := driver.NewProgress(stream.GetSize(), up)

// Update Progress
r := io.TeeReader(stream, p)
// Upload all parts
for _, uploadPartInfo := range uploadPartInfos {
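// Parts are read from the stream sequentially, so uploadPartInfos must be in ascending PartNumber order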
index := uploadPartInfo.PartNumber - 1
partSize := partInfos[index].PartSize
log.Debugf("[139] uploading part %+v/%+v", index, len(uploadPartInfos))
limitReader := io.LimitReader(stream, partSize)

req, err := http.NewRequest("PUT", resp.Data.PartInfos[0].UploadUrl, r)
if err != nil {
return err
}
req = req.WithContext(ctx)
req.Header.Set("Content-Type", "application/octet-stream")
req.Header.Set("Content-Length", fmt.Sprint(stream.GetSize()))
req.Header.Set("Origin", "https://yun.139.com")
req.Header.Set("Referer", "https://yun.139.com/")
req.ContentLength = stream.GetSize()
// Update Progress
r := io.TeeReader(limitReader, p)

res, err := base.HttpClient.Do(req)
if err != nil {
return err
}
req, err := http.NewRequest("PUT", uploadPartInfo.UploadUrl, r)
if err != nil {
return err
}
req = req.WithContext(ctx)
req.Header.Set("Content-Type", "application/octet-stream")
req.Header.Set("Content-Length", fmt.Sprint(partSize))
req.Header.Set("Origin", "https://yun.139.com")
req.Header.Set("Referer", "https://yun.139.com/")
req.ContentLength = partSize
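// An explicit ContentLength keeps the HTTP client from falling back to chunked transfer encoding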

_ = res.Body.Close()
log.Debugf("%+v", res)
if res.StatusCode != http.StatusOK {
return fmt.Errorf("unexpected status code: %d", res.StatusCode)
res, err := base.HttpClient.Do(req)
if err != nil {
return err
}
_ = res.Body.Close()
log.Debugf("[139] uploaded: %+v", res)
if res.StatusCode != http.StatusOK {
return fmt.Errorf("unexpected status code: %d", res.StatusCode)
}
}

data = base.Json{
@@ -496,7 +561,7 @@ func (d *Yun139) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
// Progress
p := driver.NewProgress(stream.GetSize(), up)

var partSize = getPartSize(stream.GetSize())
var partSize = d.getPartSize(stream.GetSize())
part := (stream.GetSize() + partSize - 1) / partSize
if part == 0 {
part = 1
5 changes: 3 additions & 2 deletions drivers/139/meta.go
@@ -9,8 +9,9 @@ type Addition struct {
//Account string `json:"account" required:"true"`
Authorization string `json:"authorization" type:"text" required:"true"`
driver.RootID
Type string `json:"type" type:"select" options:"personal,family,personal_new" default:"personal"`
CloudID string `json:"cloud_id"`
Type string `json:"type" type:"select" options:"personal,family,personal_new" default:"personal"`
CloudID string `json:"cloud_id"`
CustomUploadPartSize int64 `json:"custom_upload_part_size" type:"number" default:"0" help:"0 for auto"`
}

var config = driver.Config{
19 changes: 19 additions & 0 deletions drivers/139/types.go
@@ -196,6 +196,16 @@ type QueryContentListResp struct {
} `json:"data"`
}

// ParallelHashCtx carries the byte offset of a part within the file.
type ParallelHashCtx struct {
PartOffset int64 `json:"partOffset"`
}

// PartInfo describes one upload part: its 1-based number, its size in bytes, and its offset via ParallelHashCtx.
type PartInfo struct {
PartNumber int64 `json:"partNumber"`
PartSize int64 `json:"partSize"`
ParallelHashCtx ParallelHashCtx `json:"parallelHashCtx"`
}

type PersonalThumbnail struct {
Style string `json:"style"`
Url string `json:"url"`
@@ -235,6 +245,15 @@ type PersonalUploadResp struct {
}
}

// PersonalUploadUrlResp is the response of /hcy/file/getUploadUrl and carries the upload URLs for a batch of parts.
type PersonalUploadUrlResp struct {
BaseResp
Data struct {
FileId string `json:"fileId"`
UploadId string `json:"uploadId"`
PartInfos []PersonalPartInfo `json:"partInfos"`
}
}

type RefreshTokenResp struct {
XMLName xml.Name `xml:"root"`
Return string `xml:"return"`