package s3store

import (
	"bytes"
	"context"
	"fmt"
	"io/ioutil"
	"strings"
	"testing"
	"time"

	"github.com/golang/mock/gomock"
	"github.com/stretchr/testify/assert"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/awserr"
	"github.com/aws/aws-sdk-go/aws/request"
	"github.com/aws/aws-sdk-go/service/s3"
	"github.com/tus/tusd/v2/pkg/handler"
)

//go:generate mockgen -destination=./s3store_mock_test.go -package=s3store github.com/tus/tusd/pkg/s3store S3API

// Test interface implementations
var _ handler.DataStore = S3Store{}
var _ handler.TerminaterDataStore = S3Store{}
var _ handler.ConcaterDataStore = S3Store{}
var _ handler.LengthDeferrerDataStore = S3Store{}
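// TestNewUpload ensures that creating a new upload initiates a multipart
// upload and stores the upload's info object, with characters that are not
// allowed in S3 metadata headers replaced by question marks.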
func TestNewUpload(t *testing.T) {
	mockCtrl := gomock.NewController(t)
	defer mockCtrl.Finish()
	assert := assert.New(t)

	s3obj := NewMockS3API(mockCtrl)
	store := New("bucket", s3obj)

	assert.Equal("bucket", store.Bucket)
	assert.Equal(s3obj, store.Service)

	s1 := "hello"
	s2 := "men???hi"

	gomock.InOrder(
		s3obj.EXPECT().CreateMultipartUploadWithContext(context.Background(), &s3.CreateMultipartUploadInput{
			Bucket: aws.String("bucket"),
			Key:    aws.String("uploadId"),
			Metadata: map[string]*string{
				"foo": &s1,
				"bar": &s2,
			},
		}).Return(&s3.CreateMultipartUploadOutput{
			UploadId: aws.String("multipartId"),
		}, nil),
		s3obj.EXPECT().PutObjectWithContext(context.Background(), &s3.PutObjectInput{
			Bucket:        aws.String("bucket"),
			Key:           aws.String("uploadId.info"),
			Body:          bytes.NewReader([]byte(`{"ID":"uploadId+multipartId","Size":500,"SizeIsDeferred":false,"Offset":0,"MetaData":{"bar":"menü\r\nhi","foo":"hello"},"IsPartial":false,"IsFinal":false,"PartialUploads":null,"Storage":{"Bucket":"bucket","Key":"uploadId","Type":"s3store"}}`)),
			ContentLength: aws.Int64(int64(241)),
		}),
	)

	info := handler.FileInfo{
		ID:   "uploadId",
		Size: 500,
		MetaData: map[string]string{
			"foo": "hello",
			"bar": "menü\r\nhi",
		},
	}

	upload, err := store.NewUpload(context.Background(), info)
	assert.Nil(err)
	assert.NotNil(upload)
}
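// TestNewUploadWithObjectPrefix ensures that ObjectPrefix is prepended to
// the keys of both the upload object and its .info object.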
func TestNewUploadWithObjectPrefix(t *testing.T) {
	mockCtrl := gomock.NewController(t)
	defer mockCtrl.Finish()
	assert := assert.New(t)

	s3obj := NewMockS3API(mockCtrl)
	store := New("bucket", s3obj)
	store.ObjectPrefix = "my/uploaded/files"

	assert.Equal("bucket", store.Bucket)
	assert.Equal(s3obj, store.Service)

	s1 := "hello"
	s2 := "men?"

	gomock.InOrder(
		s3obj.EXPECT().CreateMultipartUploadWithContext(context.Background(), &s3.CreateMultipartUploadInput{
			Bucket: aws.String("bucket"),
			Key:    aws.String("my/uploaded/files/uploadId"),
			Metadata: map[string]*string{
				"foo": &s1,
				"bar": &s2,
			},
		}).Return(&s3.CreateMultipartUploadOutput{
			UploadId: aws.String("multipartId"),
		}, nil),
		s3obj.EXPECT().PutObjectWithContext(context.Background(), &s3.PutObjectInput{
			Bucket:        aws.String("bucket"),
			Key:           aws.String("my/uploaded/files/uploadId.info"),
			Body:          bytes.NewReader([]byte(`{"ID":"uploadId+multipartId","Size":500,"SizeIsDeferred":false,"Offset":0,"MetaData":{"bar":"menü","foo":"hello"},"IsPartial":false,"IsFinal":false,"PartialUploads":null,"Storage":{"Bucket":"bucket","Key":"my/uploaded/files/uploadId","Type":"s3store"}}`)),
			ContentLength: aws.Int64(int64(253)),
		}),
	)

	info := handler.FileInfo{
		ID:   "uploadId",
		Size: 500,
		MetaData: map[string]string{
			"foo": "hello",
			"bar": "menü",
		},
	}

	upload, err := store.NewUpload(context.Background(), info)
	assert.Nil(err)
	assert.NotNil(upload)
}
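// TestNewUploadWithMetadataObjectPrefix ensures that the .info object is
// stored under MetadataObjectPrefix, while the upload object itself still
// uses ObjectPrefix.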
func TestNewUploadWithMetadataObjectPrefix(t *testing.T) {
	mockCtrl := gomock.NewController(t)
	defer mockCtrl.Finish()
	assert := assert.New(t)

	s3obj := NewMockS3API(mockCtrl)
	store := New("bucket", s3obj)
	store.ObjectPrefix = "my/uploaded/files"
	store.MetadataObjectPrefix = "my/metadata"

	assert.Equal("bucket", store.Bucket)
	assert.Equal(s3obj, store.Service)

	s1 := "hello"
	s2 := "men?"

	gomock.InOrder(
		s3obj.EXPECT().CreateMultipartUploadWithContext(context.Background(), &s3.CreateMultipartUploadInput{
			Bucket: aws.String("bucket"),
			Key:    aws.String("my/uploaded/files/uploadId"),
			Metadata: map[string]*string{
				"foo": &s1,
				"bar": &s2,
			},
		}).Return(&s3.CreateMultipartUploadOutput{
			UploadId: aws.String("multipartId"),
		}, nil),
		s3obj.EXPECT().PutObjectWithContext(context.Background(), &s3.PutObjectInput{
			Bucket:        aws.String("bucket"),
			Key:           aws.String("my/metadata/uploadId.info"),
			Body:          bytes.NewReader([]byte(`{"ID":"uploadId+multipartId","Size":500,"SizeIsDeferred":false,"Offset":0,"MetaData":{"bar":"menü","foo":"hello"},"IsPartial":false,"IsFinal":false,"PartialUploads":null,"Storage":{"Bucket":"bucket","Key":"my/uploaded/files/uploadId","Type":"s3store"}}`)),
			ContentLength: aws.Int64(int64(253)),
		}),
	)

	info := handler.FileInfo{
		ID:   "uploadId",
		Size: 500,
		MetaData: map[string]string{
			"foo": "hello",
			"bar": "menü",
		},
	}

	upload, err := store.NewUpload(context.Background(), info)
	assert.Nil(err)
	assert.NotNil(upload)
}
// This test ensures that a newly created upload without any chunks can be
// directly finished. There are no calls to ListParts or HeadObject because
// the upload is not fetched from S3 first.
func TestEmptyUpload(t *testing.T) {
	mockCtrl := gomock.NewController(t)
	defer mockCtrl.Finish()
	assert := assert.New(t)

	s3obj := NewMockS3API(mockCtrl)
	store := New("bucket", s3obj)

	gomock.InOrder(
		s3obj.EXPECT().CreateMultipartUploadWithContext(context.Background(), &s3.CreateMultipartUploadInput{
			Bucket:   aws.String("bucket"),
			Key:      aws.String("uploadId"),
			Metadata: map[string]*string{},
		}).Return(&s3.CreateMultipartUploadOutput{
			UploadId: aws.String("multipartId"),
		}, nil),
		s3obj.EXPECT().PutObjectWithContext(context.Background(), &s3.PutObjectInput{
			Bucket:        aws.String("bucket"),
			Key:           aws.String("uploadId.info"),
			Body:          bytes.NewReader([]byte(`{"ID":"uploadId+multipartId","Size":0,"SizeIsDeferred":false,"Offset":0,"MetaData":null,"IsPartial":false,"IsFinal":false,"PartialUploads":null,"Storage":{"Bucket":"bucket","Key":"uploadId","Type":"s3store"}}`)),
			ContentLength: aws.Int64(int64(208)),
		}),
		s3obj.EXPECT().UploadPartWithContext(context.Background(), NewUploadPartInputMatcher(&s3.UploadPartInput{
			Bucket:     aws.String("bucket"),
			Key:        aws.String("uploadId"),
			UploadId:   aws.String("multipartId"),
			PartNumber: aws.Int64(1),
			Body:       bytes.NewReader([]byte("")),
		})).Return(&s3.UploadPartOutput{
			ETag: aws.String("etag"),
		}, nil),
		s3obj.EXPECT().CompleteMultipartUploadWithContext(context.Background(), &s3.CompleteMultipartUploadInput{
			Bucket:   aws.String("bucket"),
			Key:      aws.String("uploadId"),
			UploadId: aws.String("multipartId"),
			MultipartUpload: &s3.CompletedMultipartUpload{
				Parts: []*s3.CompletedPart{
					{
						ETag:       aws.String("etag"),
						PartNumber: aws.Int64(1),
					},
				},
			},
		}).Return(nil, nil),
	)

	info := handler.FileInfo{
		ID:   "uploadId",
		Size: 0,
	}

	upload, err := store.NewUpload(context.Background(), info)
	assert.Nil(err)
	assert.NotNil(upload)
	err = upload.FinishUpload(context.Background())
	assert.Nil(err)
}
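// TestNewUploadLargerMaxObjectSize ensures that creating an upload larger
// than MaxObjectSize is rejected with a descriptive error.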
func TestNewUploadLargerMaxObjectSize(t *testing.T) {
	mockCtrl := gomock.NewController(t)
	defer mockCtrl.Finish()
	assert := assert.New(t)

	s3obj := NewMockS3API(mockCtrl)
	store := New("bucket", s3obj)

	assert.Equal("bucket", store.Bucket)
	assert.Equal(s3obj, store.Service)

	info := handler.FileInfo{
		ID:   "uploadId",
		Size: store.MaxObjectSize + 1,
	}

	upload, err := store.NewUpload(context.Background(), info)
	assert.NotNil(err)
	assert.EqualError(err, fmt.Sprintf("s3store: upload size of %v bytes exceeds MaxObjectSize of %v bytes", info.Size, store.MaxObjectSize))
	assert.Nil(upload)
}
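// TestGetInfoNotFound ensures that handler.ErrNotFound is returned when
// neither the .info object, the multipart upload nor an incomplete .part
// object exists.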
func TestGetInfoNotFound(t *testing.T) {
	mockCtrl := gomock.NewController(t)
	defer mockCtrl.Finish()
	assert := assert.New(t)

	s3obj := NewMockS3API(mockCtrl)
	store := New("bucket", s3obj)

	s3obj.EXPECT().GetObjectWithContext(context.Background(), &s3.GetObjectInput{
		Bucket: aws.String("bucket"),
		Key:    aws.String("uploadId.info"),
	}).Return(nil, awserr.New("NoSuchKey", "The specified key does not exist.", nil))

	s3obj.EXPECT().ListPartsWithContext(context.Background(), &s3.ListPartsInput{
		Bucket:           aws.String("bucket"),
		Key:              aws.String("uploadId"),
		UploadId:         aws.String("multipartId"),
		PartNumberMarker: aws.Int64(0),
	}).Return(nil, awserr.New("NoSuchUpload", "Not found", nil))
	s3obj.EXPECT().HeadObjectWithContext(context.Background(), &s3.HeadObjectInput{
		Bucket: aws.String("bucket"),
		Key:    aws.String("uploadId.part"),
	}).Return(nil, awserr.New("NoSuchKey", "Not found", nil))

	upload, err := store.GetUpload(context.Background(), "uploadId+multipartId")
	assert.Nil(err)

	_, err = upload.GetInfo(context.Background())
	assert.Equal(handler.ErrNotFound, err)
}
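// TestGetInfo ensures that GetInfo assembles the upload's size, offset,
// metadata and storage details from the .info object and a paginated
// ListParts response.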
func TestGetInfo(t *testing.T) {
	mockCtrl := gomock.NewController(t)
	defer mockCtrl.Finish()
	assert := assert.New(t)

	s3obj := NewMockS3API(mockCtrl)
	store := New("bucket", s3obj)

	s3obj.EXPECT().GetObjectWithContext(context.Background(), &s3.GetObjectInput{
		Bucket: aws.String("bucket"),
		Key:    aws.String("uploadId.info"),
	}).Return(&s3.GetObjectOutput{
		Body: ioutil.NopCloser(bytes.NewReader([]byte(`{"ID":"uploadId+multipartId","Size":500,"Offset":0,"MetaData":{"bar":"menü","foo":"hello"},"IsPartial":false,"IsFinal":false,"PartialUploads":null,"Storage":{"Bucket":"bucket","Key":"my/uploaded/files/uploadId","Type":"s3store"}}`))),
	}, nil)
	s3obj.EXPECT().ListPartsWithContext(context.Background(), &s3.ListPartsInput{
		Bucket:           aws.String("bucket"),
		Key:              aws.String("uploadId"),
		UploadId:         aws.String("multipartId"),
		PartNumberMarker: aws.Int64(0),
	}).Return(&s3.ListPartsOutput{
		Parts: []*s3.Part{
			{
				PartNumber: aws.Int64(1),
				Size:       aws.Int64(100),
				ETag:       aws.String("etag-1"),
			},
			{
				PartNumber: aws.Int64(2),
				Size:       aws.Int64(200),
				ETag:       aws.String("etag-2"),
			},
		},
		NextPartNumberMarker: aws.Int64(2),
		// Simulate a truncated response, so s3store should send a second request
		IsTruncated: aws.Bool(true),
	}, nil)
	s3obj.EXPECT().ListPartsWithContext(context.Background(), &s3.ListPartsInput{
		Bucket:           aws.String("bucket"),
		Key:              aws.String("uploadId"),
		UploadId:         aws.String("multipartId"),
		PartNumberMarker: aws.Int64(2),
	}).Return(&s3.ListPartsOutput{
		Parts: []*s3.Part{
			{
				PartNumber: aws.Int64(3),
				Size:       aws.Int64(100),
				ETag:       aws.String("etag-3"),
			},
		},
	}, nil)
	s3obj.EXPECT().HeadObjectWithContext(context.Background(), &s3.HeadObjectInput{
		Bucket: aws.String("bucket"),
		Key:    aws.String("uploadId.part"),
	}).Return(nil, awserr.New("NoSuchKey", "Not found", nil))

	upload, err := store.GetUpload(context.Background(), "uploadId+multipartId")
	assert.Nil(err)

	info, err := upload.GetInfo(context.Background())
	assert.Nil(err)
	assert.Equal(int64(500), info.Size)
	assert.Equal(int64(400), info.Offset)
	assert.Equal("uploadId+multipartId", info.ID)
	assert.Equal("hello", info.MetaData["foo"])
	assert.Equal("menü", info.MetaData["bar"])
	assert.Equal("s3store", info.Storage["Type"])
	assert.Equal("bucket", info.Storage["Bucket"])
	assert.Equal("my/uploaded/files/uploadId", info.Storage["Key"])
}
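// TestGetInfoWithMetadataObjectPrefix ensures that the .info and .part
// objects are looked up under MetadataObjectPrefix.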
func TestGetInfoWithMetadataObjectPrefix(t *testing.T) {
	mockCtrl := gomock.NewController(t)
	defer mockCtrl.Finish()
	assert := assert.New(t)

	s3obj := NewMockS3API(mockCtrl)
	store := New("bucket", s3obj)
	store.MetadataObjectPrefix = "my/metadata"

	s3obj.EXPECT().GetObjectWithContext(context.Background(), &s3.GetObjectInput{
		Bucket: aws.String("bucket"),
		Key:    aws.String("my/metadata/uploadId.info"),
	}).Return(&s3.GetObjectOutput{
		Body: ioutil.NopCloser(bytes.NewReader([]byte(`{"ID":"uploadId+multipartId","Size":500,"Offset":0,"MetaData":{"bar":"menü","foo":"hello"},"IsPartial":false,"IsFinal":false,"PartialUploads":null,"Storage":{"Bucket":"bucket","Key":"my/uploaded/files/uploadId","Type":"s3store"}}`))),
	}, nil)
	s3obj.EXPECT().ListPartsWithContext(context.Background(), &s3.ListPartsInput{
		Bucket:           aws.String("bucket"),
		Key:              aws.String("uploadId"),
		UploadId:         aws.String("multipartId"),
		PartNumberMarker: aws.Int64(0),
	}).Return(&s3.ListPartsOutput{
		Parts: []*s3.Part{
			{
				PartNumber: aws.Int64(1),
				Size:       aws.Int64(100),
				ETag:       aws.String("etag-1"),
			},
			{
				PartNumber: aws.Int64(2),
				Size:       aws.Int64(200),
				ETag:       aws.String("etag-2"),
			},
		},
		NextPartNumberMarker: aws.Int64(2),
		// Simulate a truncated response, so s3store should send a second request
		IsTruncated: aws.Bool(true),
	}, nil)
	s3obj.EXPECT().ListPartsWithContext(context.Background(), &s3.ListPartsInput{
		Bucket:           aws.String("bucket"),
		Key:              aws.String("uploadId"),
		UploadId:         aws.String("multipartId"),
		PartNumberMarker: aws.Int64(2),
	}).Return(&s3.ListPartsOutput{
		Parts: []*s3.Part{
			{
				PartNumber: aws.Int64(3),
				Size:       aws.Int64(100),
				ETag:       aws.String("etag-3"),
			},
		},
	}, nil)
	s3obj.EXPECT().HeadObjectWithContext(context.Background(), &s3.HeadObjectInput{
		Bucket: aws.String("bucket"),
		Key:    aws.String("my/metadata/uploadId.part"),
	}).Return(nil, awserr.New("NoSuchKey", "Not found", nil))

	upload, err := store.GetUpload(context.Background(), "uploadId+multipartId")
	assert.Nil(err)

	info, err := upload.GetInfo(context.Background())
	assert.Nil(err)
	assert.Equal(int64(500), info.Size)
	assert.Equal(int64(400), info.Offset)
	assert.Equal("uploadId+multipartId", info.ID)
	assert.Equal("hello", info.MetaData["foo"])
	assert.Equal("menü", info.MetaData["bar"])
	assert.Equal("s3store", info.Storage["Type"])
	assert.Equal("bucket", info.Storage["Bucket"])
	assert.Equal("my/uploaded/files/uploadId", info.Storage["Key"])
}
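// TestGetInfoWithIncompletePart ensures that the size of an incomplete
// .part object is counted towards the upload's offset.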
func TestGetInfoWithIncompletePart(t *testing.T) {
	mockCtrl := gomock.NewController(t)
	defer mockCtrl.Finish()
	assert := assert.New(t)

	s3obj := NewMockS3API(mockCtrl)
	store := New("bucket", s3obj)

	s3obj.EXPECT().GetObjectWithContext(context.Background(), &s3.GetObjectInput{
		Bucket: aws.String("bucket"),
		Key:    aws.String("uploadId.info"),
	}).Return(&s3.GetObjectOutput{
		Body: ioutil.NopCloser(bytes.NewReader([]byte(`{"ID":"uploadId+multipartId","Size":500,"Offset":0,"MetaData":{},"IsPartial":false,"IsFinal":false,"PartialUploads":null,"Storage":null}`))),
	}, nil)
	s3obj.EXPECT().ListPartsWithContext(context.Background(), &s3.ListPartsInput{
		Bucket:           aws.String("bucket"),
		Key:              aws.String("uploadId"),
		UploadId:         aws.String("multipartId"),
		PartNumberMarker: aws.Int64(0),
	}).Return(&s3.ListPartsOutput{Parts: []*s3.Part{}}, nil)
	s3obj.EXPECT().HeadObjectWithContext(context.Background(), &s3.HeadObjectInput{
		Bucket: aws.String("bucket"),
		Key:    aws.String("uploadId.part"),
	}).Return(&s3.HeadObjectOutput{
		ContentLength: aws.Int64(10),
	}, nil)

	upload, err := store.GetUpload(context.Background(), "uploadId+multipartId")
	assert.Nil(err)

	info, err := upload.GetInfo(context.Background())
	assert.Nil(err)
	assert.Equal(int64(10), info.Offset)
	assert.Equal("uploadId+multipartId", info.ID)
}
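// TestGetInfoFinished ensures that an upload whose multipart upload no
// longer exists is reported as finished, i.e. its offset equals its size.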
func TestGetInfoFinished(t *testing.T) {
	mockCtrl := gomock.NewController(t)
	defer mockCtrl.Finish()
	assert := assert.New(t)

	s3obj := NewMockS3API(mockCtrl)
	store := New("bucket", s3obj)

	s3obj.EXPECT().GetObjectWithContext(context.Background(), &s3.GetObjectInput{
		Bucket: aws.String("bucket"),
		Key:    aws.String("uploadId.info"),
	}).Return(&s3.GetObjectOutput{
		Body: ioutil.NopCloser(bytes.NewReader([]byte(`{"ID":"uploadId","Size":500,"Offset":0,"MetaData":null,"IsPartial":false,"IsFinal":false,"PartialUploads":null,"Storage":null}`))),
	}, nil)
	s3obj.EXPECT().ListPartsWithContext(context.Background(), &s3.ListPartsInput{
		Bucket:           aws.String("bucket"),
		Key:              aws.String("uploadId"),
		UploadId:         aws.String("multipartId"),
		PartNumberMarker: aws.Int64(0),
	}).Return(nil, awserr.New("NoSuchUpload", "The specified upload does not exist.", nil))
	s3obj.EXPECT().HeadObjectWithContext(context.Background(), &s3.HeadObjectInput{
		Bucket: aws.String("bucket"),
		Key:    aws.String("uploadId.part"),
	}).Return(nil, awserr.New("NoSuchKey", "Not found", nil))

	upload, err := store.GetUpload(context.Background(), "uploadId+multipartId")
	assert.Nil(err)

	info, err := upload.GetInfo(context.Background())
	assert.Nil(err)
	assert.Equal(int64(500), info.Size)
	assert.Equal(int64(500), info.Offset)
}
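// TestGetReader ensures that GetReader streams the content of a finished upload.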
func TestGetReader(t *testing.T) {
	mockCtrl := gomock.NewController(t)
	defer mockCtrl.Finish()
	assert := assert.New(t)

	s3obj := NewMockS3API(mockCtrl)
	store := New("bucket", s3obj)

	s3obj.EXPECT().GetObjectWithContext(context.Background(), &s3.GetObjectInput{
		Bucket: aws.String("bucket"),
		Key:    aws.String("uploadId"),
	}).Return(&s3.GetObjectOutput{
		Body: ioutil.NopCloser(bytes.NewReader([]byte(`hello world`))),
	}, nil)

	upload, err := store.GetUpload(context.Background(), "uploadId+multipartId")
	assert.Nil(err)

	content, err := upload.GetReader(context.Background())
	assert.Nil(err)
	assert.Equal(ioutil.NopCloser(bytes.NewReader([]byte(`hello world`))), content)
}
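// TestGetReaderNotFound ensures that handler.ErrNotFound is returned when
// neither the object nor the multipart upload exists.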
func TestGetReaderNotFound(t *testing.T) {
	mockCtrl := gomock.NewController(t)
	defer mockCtrl.Finish()
	assert := assert.New(t)

	s3obj := NewMockS3API(mockCtrl)
	store := New("bucket", s3obj)

	gomock.InOrder(
		s3obj.EXPECT().GetObjectWithContext(context.Background(), &s3.GetObjectInput{
			Bucket: aws.String("bucket"),
			Key:    aws.String("uploadId"),
		}).Return(nil, awserr.New("NoSuchKey", "The specified key does not exist.", nil)),
		s3obj.EXPECT().ListPartsWithContext(context.Background(), &s3.ListPartsInput{
			Bucket:   aws.String("bucket"),
			Key:      aws.String("uploadId"),
			UploadId: aws.String("multipartId"),
			MaxParts: aws.Int64(0),
		}).Return(nil, awserr.New("NoSuchUpload", "The specified upload does not exist.", nil)),
	)

	upload, err := store.GetUpload(context.Background(), "uploadId+multipartId")
	assert.Nil(err)

	content, err := upload.GetReader(context.Background())
	assert.Nil(content)
	assert.Equal(handler.ErrNotFound, err)
}
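// TestGetReaderNotFinished ensures that reading from an upload that is
// still in progress is rejected.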
func TestGetReaderNotFinished(t *testing.T) {
	mockCtrl := gomock.NewController(t)
	defer mockCtrl.Finish()
	assert := assert.New(t)

	s3obj := NewMockS3API(mockCtrl)
	store := New("bucket", s3obj)

	gomock.InOrder(
		s3obj.EXPECT().GetObjectWithContext(context.Background(), &s3.GetObjectInput{
			Bucket: aws.String("bucket"),
			Key:    aws.String("uploadId"),
		}).Return(nil, awserr.New("NoSuchKey", "The specified key does not exist.", nil)),
		s3obj.EXPECT().ListPartsWithContext(context.Background(), &s3.ListPartsInput{
			Bucket:   aws.String("bucket"),
			Key:      aws.String("uploadId"),
			UploadId: aws.String("multipartId"),
			MaxParts: aws.Int64(0),
		}).Return(&s3.ListPartsOutput{
			Parts: []*s3.Part{},
		}, nil),
	)

	upload, err := store.GetUpload(context.Background(), "uploadId+multipartId")
	assert.Nil(err)

	content, err := upload.GetReader(context.Background())
	assert.Nil(content)
	assert.Equal("ERR_INCOMPLETE_UPLOAD: cannot stream non-finished upload", err.Error())
}
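// TestDeclareLength ensures that DeclareLength sets the upload's size and
// rewrites the .info object accordingly.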
func TestDeclareLength(t *testing.T) {
	mockCtrl := gomock.NewController(t)
	defer mockCtrl.Finish()
	assert := assert.New(t)

	s3obj := NewMockS3API(mockCtrl)
	store := New("bucket", s3obj)

	s3obj.EXPECT().GetObjectWithContext(context.Background(), &s3.GetObjectInput{
		Bucket: aws.String("bucket"),
		Key:    aws.String("uploadId.info"),
	}).Return(&s3.GetObjectOutput{
		Body: ioutil.NopCloser(bytes.NewReader([]byte(`{"ID":"uploadId+multipartId","Size":0,"SizeIsDeferred":true,"Offset":0,"MetaData":{},"IsPartial":false,"IsFinal":false,"PartialUploads":null,"Storage":{"Bucket":"bucket","Key":"uploadId","Type":"s3store"}}`))),
	}, nil)
	s3obj.EXPECT().ListPartsWithContext(context.Background(), &s3.ListPartsInput{
		Bucket:           aws.String("bucket"),
		Key:              aws.String("uploadId"),
		UploadId:         aws.String("multipartId"),
		PartNumberMarker: aws.Int64(0),
	}).Return(&s3.ListPartsOutput{
		Parts: []*s3.Part{},
	}, nil)
	s3obj.EXPECT().HeadObjectWithContext(context.Background(), &s3.HeadObjectInput{
		Bucket: aws.String("bucket"),
		Key:    aws.String("uploadId.part"),
	}).Return(nil, awserr.New("NotFound", "Not Found", nil))
	s3obj.EXPECT().PutObjectWithContext(context.Background(), &s3.PutObjectInput{
		Bucket:        aws.String("bucket"),
		Key:           aws.String("uploadId.info"),
		Body:          bytes.NewReader([]byte(`{"ID":"uploadId+multipartId","Size":500,"SizeIsDeferred":false,"Offset":0,"MetaData":{},"IsPartial":false,"IsFinal":false,"PartialUploads":null,"Storage":{"Bucket":"bucket","Key":"uploadId","Type":"s3store"}}`)),
		ContentLength: aws.Int64(int64(208)),
	})

	upload, err := store.GetUpload(context.Background(), "uploadId+multipartId")
	assert.Nil(err)

	err = store.AsLengthDeclarableUpload(upload).DeclareLength(context.Background(), 500)
	assert.Nil(err)
	info, err := upload.GetInfo(context.Background())
	assert.Nil(err)
	assert.Equal(int64(500), info.Size)
}
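// TestFinishUpload ensures that finishing an upload lists all parts,
// following pagination, and completes the multipart upload with their ETags.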
func TestFinishUpload(t *testing.T) {
	mockCtrl := gomock.NewController(t)
	defer mockCtrl.Finish()
	assert := assert.New(t)

	s3obj := NewMockS3API(mockCtrl)
	store := New("bucket", s3obj)

	s3obj.EXPECT().GetObjectWithContext(context.Background(), &s3.GetObjectInput{
		Bucket: aws.String("bucket"),
		Key:    aws.String("uploadId.info"),
	}).Return(&s3.GetObjectOutput{
		Body: ioutil.NopCloser(bytes.NewReader([]byte(`{"ID":"uploadId","Size":400,"Offset":0,"MetaData":null,"IsPartial":false,"IsFinal":false,"PartialUploads":null,"Storage":null}`))),
	}, nil)
	s3obj.EXPECT().ListPartsWithContext(context.Background(), &s3.ListPartsInput{
		Bucket:           aws.String("bucket"),
		Key:              aws.String("uploadId"),
		UploadId:         aws.String("multipartId"),
		PartNumberMarker: aws.Int64(0),
	}).Return(&s3.ListPartsOutput{
		Parts: []*s3.Part{
			{
				Size:       aws.Int64(100),
				ETag:       aws.String("etag-1"),
				PartNumber: aws.Int64(1),
			},
			{
				Size:       aws.Int64(200),
				ETag:       aws.String("etag-2"),
				PartNumber: aws.Int64(2),
			},
		},
		NextPartNumberMarker: aws.Int64(2),
		IsTruncated:          aws.Bool(true),
	}, nil)
	s3obj.EXPECT().ListPartsWithContext(context.Background(), &s3.ListPartsInput{
		Bucket:           aws.String("bucket"),
		Key:              aws.String("uploadId"),
		UploadId:         aws.String("multipartId"),
		PartNumberMarker: aws.Int64(2),
	}).Return(&s3.ListPartsOutput{
		Parts: []*s3.Part{
			{
				Size:       aws.Int64(100),
				ETag:       aws.String("etag-3"),
				PartNumber: aws.Int64(3),
			},
		},
	}, nil)
	s3obj.EXPECT().HeadObjectWithContext(context.Background(), &s3.HeadObjectInput{
		Bucket: aws.String("bucket"),
		Key:    aws.String("uploadId.part"),
	}).Return(nil, awserr.New("NotFound", "Not Found", nil))
	s3obj.EXPECT().CompleteMultipartUploadWithContext(context.Background(), &s3.CompleteMultipartUploadInput{
		Bucket:   aws.String("bucket"),
		Key:      aws.String("uploadId"),
		UploadId: aws.String("multipartId"),
		MultipartUpload: &s3.CompletedMultipartUpload{
			Parts: []*s3.CompletedPart{
				{
					ETag:       aws.String("etag-1"),
					PartNumber: aws.Int64(1),
				},
				{
					ETag:       aws.String("etag-2"),
					PartNumber: aws.Int64(2),
				},
				{
					ETag:       aws.String("etag-3"),
					PartNumber: aws.Int64(3),
				},
			},
		},
	}).Return(nil, nil)

	upload, err := store.GetUpload(context.Background(), "uploadId+multipartId")
	assert.Nil(err)

	err = upload.FinishUpload(context.Background())
	assert.Nil(err)
}
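// TestWriteChunk ensures that a chunk is split into multiple parts according
// to the configured part sizes and that a remainder smaller than MinPartSize
// is saved as an incomplete .part object.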
func TestWriteChunk(t *testing.T) {
	mockCtrl := gomock.NewController(t)
	defer mockCtrl.Finish()
	assert := assert.New(t)

	s3obj := NewMockS3API(mockCtrl)
	store := New("bucket", s3obj)
	store.MaxPartSize = 8
	store.MinPartSize = 4
	store.PreferredPartSize = 4
	store.MaxMultipartParts = 10000
	store.MaxObjectSize = 5 * 1024 * 1024 * 1024 * 1024

	// From GetInfo
	s3obj.EXPECT().GetObjectWithContext(context.Background(), &s3.GetObjectInput{
		Bucket: aws.String("bucket"),
		Key:    aws.String("uploadId.info"),
	}).Return(&s3.GetObjectOutput{
		Body: ioutil.NopCloser(bytes.NewReader([]byte(`{"ID":"uploadId","Size":500,"Offset":0,"MetaData":null,"IsPartial":false,"IsFinal":false,"PartialUploads":null,"Storage":null}`))),
	}, nil)
	s3obj.EXPECT().ListPartsWithContext(context.Background(), &s3.ListPartsInput{
		Bucket:           aws.String("bucket"),
		Key:              aws.String("uploadId"),
		UploadId:         aws.String("multipartId"),
		PartNumberMarker: aws.Int64(0),
	}).Return(&s3.ListPartsOutput{
		Parts: []*s3.Part{
			{
				Size:       aws.Int64(100),
				ETag:       aws.String("etag-1"),
				PartNumber: aws.Int64(1),
			},
			{
				Size:       aws.Int64(200),
				ETag:       aws.String("etag-2"),
				PartNumber: aws.Int64(2),
			},
		},
	}, nil)
	s3obj.EXPECT().HeadObjectWithContext(context.Background(), &s3.HeadObjectInput{
		Bucket: aws.String("bucket"),
		Key:    aws.String("uploadId.part"),
	}).Return(nil, awserr.New("NoSuchKey", "Not found", nil))

	// From WriteChunk
	s3obj.EXPECT().UploadPartWithContext(context.Background(), NewUploadPartInputMatcher(&s3.UploadPartInput{
		Bucket:        aws.String("bucket"),
		Key:           aws.String("uploadId"),
		UploadId:      aws.String("multipartId"),
		PartNumber:    aws.Int64(3),
		Body:          bytes.NewReader([]byte("1234")),
		ContentLength: aws.Int64(4),
	})).Return(&s3.UploadPartOutput{
		ETag: aws.String("etag-3"),
	}, nil)
	s3obj.EXPECT().UploadPartWithContext(context.Background(), NewUploadPartInputMatcher(&s3.UploadPartInput{
		Bucket:        aws.String("bucket"),
		Key:           aws.String("uploadId"),
		UploadId:      aws.String("multipartId"),
		PartNumber:    aws.Int64(4),
		Body:          bytes.NewReader([]byte("5678")),
		ContentLength: aws.Int64(4),
	})).Return(&s3.UploadPartOutput{
		ETag: aws.String("etag-4"),
	}, nil)
	s3obj.EXPECT().UploadPartWithContext(context.Background(), NewUploadPartInputMatcher(&s3.UploadPartInput{
		Bucket:        aws.String("bucket"),
		Key:           aws.String("uploadId"),
		UploadId:      aws.String("multipartId"),
		PartNumber:    aws.Int64(5),
		Body:          bytes.NewReader([]byte("90AB")),
		ContentLength: aws.Int64(4),
	})).Return(&s3.UploadPartOutput{
		ETag: aws.String("etag-5"),
	}, nil)
	s3obj.EXPECT().PutObjectWithContext(context.Background(), NewPutObjectInputMatcher(&s3.PutObjectInput{
		Bucket: aws.String("bucket"),
		Key:    aws.String("uploadId.part"),
		Body:   bytes.NewReader([]byte("CD")),
	})).Return(nil, nil)

	upload, err := store.GetUpload(context.Background(), "uploadId+multipartId")
	assert.Nil(err)

	bytesRead, err := upload.WriteChunk(context.Background(), 300, bytes.NewReader([]byte("1234567890ABCD")))
	assert.Nil(err)
	assert.Equal(int64(14), bytesRead)
}
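// TestWriteChunkWriteIncompletePartBecauseTooSmall ensures that a chunk
// smaller than MinPartSize is stored as an incomplete .part object instead
// of being uploaded as a part.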
func TestWriteChunkWriteIncompletePartBecauseTooSmall(t *testing.T) {
	mockCtrl := gomock.NewController(t)
	defer mockCtrl.Finish()
	assert := assert.New(t)

	s3obj := NewMockS3API(mockCtrl)
	store := New("bucket", s3obj)

	s3obj.EXPECT().GetObjectWithContext(context.Background(), &s3.GetObjectInput{
		Bucket: aws.String("bucket"),
		Key:    aws.String("uploadId.info"),
	}).Return(&s3.GetObjectOutput{
		Body: ioutil.NopCloser(bytes.NewReader([]byte(`{"ID":"uploadId","Size":500,"Offset":0,"MetaData":null,"IsPartial":false,"IsFinal":false,"PartialUploads":null,"Storage":null}`))),
	}, nil)
	s3obj.EXPECT().ListPartsWithContext(context.Background(), &s3.ListPartsInput{
		Bucket:           aws.String("bucket"),
		Key:              aws.String("uploadId"),
		UploadId:         aws.String("multipartId"),
		PartNumberMarker: aws.Int64(0),
	}).Return(&s3.ListPartsOutput{
		Parts: []*s3.Part{
			{
				Size:       aws.Int64(100),
				ETag:       aws.String("etag-1"),
				PartNumber: aws.Int64(1),
			},
			{
				Size:       aws.Int64(200),
				ETag:       aws.String("etag-2"),
				PartNumber: aws.Int64(2),
			},
		},
	}, nil)
	s3obj.EXPECT().HeadObjectWithContext(context.Background(), &s3.HeadObjectInput{
		Bucket: aws.String("bucket"),
		Key:    aws.String("uploadId.part"),
	}).Return(nil, awserr.New("NoSuchKey", "The specified key does not exist", nil))

	s3obj.EXPECT().PutObjectWithContext(context.Background(), NewPutObjectInputMatcher(&s3.PutObjectInput{
		Bucket: aws.String("bucket"),
		Key:    aws.String("uploadId.part"),
		Body:   bytes.NewReader([]byte("1234567890")),
	})).Return(nil, nil)

	upload, err := store.GetUpload(context.Background(), "uploadId+multipartId")
	assert.Nil(err)

	bytesRead, err := upload.WriteChunk(context.Background(), 300, bytes.NewReader([]byte("1234567890")))
	assert.Nil(err)
	assert.Equal(int64(10), bytesRead)
}
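// TestWriteChunkPrependsIncompletePart ensures that an existing incomplete
// .part object is downloaded, deleted and prepended to the incoming chunk
// before the parts are uploaded.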
func TestWriteChunkPrependsIncompletePart(t *testing.T) {
	mockCtrl := gomock.NewController(t)
	defer mockCtrl.Finish()
	assert := assert.New(t)

	s3obj := NewMockS3API(mockCtrl)
	store := New("bucket", s3obj)
	store.MaxPartSize = 8
	store.MinPartSize = 4
	store.PreferredPartSize = 4
	store.MaxMultipartParts = 10000
	store.MaxObjectSize = 5 * 1024 * 1024 * 1024 * 1024

	s3obj.EXPECT().GetObjectWithContext(context.Background(), &s3.GetObjectInput{
		Bucket: aws.String("bucket"),
		Key:    aws.String("uploadId.info"),
	}).Return(&s3.GetObjectOutput{
		Body: ioutil.NopCloser(bytes.NewReader([]byte(`{"ID":"uploadId","Size":5,"Offset":0,"MetaData":null,"IsPartial":false,"IsFinal":false,"PartialUploads":null,"Storage":null}`))),
	}, nil)
	s3obj.EXPECT().ListPartsWithContext(context.Background(), &s3.ListPartsInput{
		Bucket:           aws.String("bucket"),
		Key:              aws.String("uploadId"),
		UploadId:         aws.String("multipartId"),
		PartNumberMarker: aws.Int64(0),
	}).Return(&s3.ListPartsOutput{
		Parts: []*s3.Part{},
	}, nil)
	s3obj.EXPECT().HeadObjectWithContext(context.Background(), &s3.HeadObjectInput{
		Bucket: aws.String("bucket"),
		Key:    aws.String("uploadId.part"),
	}).Return(&s3.HeadObjectOutput{
		ContentLength: aws.Int64(3),
	}, nil)
	s3obj.EXPECT().GetObjectWithContext(context.Background(), &s3.GetObjectInput{
		Bucket: aws.String("bucket"),
		Key:    aws.String("uploadId.part"),
	}).Return(&s3.GetObjectOutput{
		ContentLength: aws.Int64(3),
		Body:          ioutil.NopCloser(bytes.NewReader([]byte("123"))),
	}, nil)
	s3obj.EXPECT().DeleteObjectWithContext(context.Background(), &s3.DeleteObjectInput{
		Bucket: aws.String(store.Bucket),
		Key:    aws.String("uploadId.part"),
	}).Return(&s3.DeleteObjectOutput{}, nil)

	s3obj.EXPECT().UploadPartWithContext(context.Background(), NewUploadPartInputMatcher(&s3.UploadPartInput{
		Bucket:        aws.String("bucket"),
		Key:           aws.String("uploadId"),
		UploadId:      aws.String("multipartId"),
		PartNumber:    aws.Int64(1),
		Body:          bytes.NewReader([]byte("1234")),
		ContentLength: aws.Int64(4),
	})).Return(&s3.UploadPartOutput{
		ETag: aws.String("etag-1"),
	}, nil)
	s3obj.EXPECT().UploadPartWithContext(context.Background(), NewUploadPartInputMatcher(&s3.UploadPartInput{
		Bucket:        aws.String("bucket"),
		Key:           aws.String("uploadId"),
		UploadId:      aws.String("multipartId"),
		PartNumber:    aws.Int64(2),
		Body:          bytes.NewReader([]byte("5")),
		ContentLength: aws.Int64(1),
	})).Return(&s3.UploadPartOutput{
		ETag: aws.String("etag-2"),
	}, nil)

	upload, err := store.GetUpload(context.Background(), "uploadId+multipartId")
	assert.Nil(err)

	bytesRead, err := upload.WriteChunk(context.Background(), 3, bytes.NewReader([]byte("45")))
	assert.Nil(err)
	assert.Equal(int64(2), bytesRead)
}
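// TestWriteChunkPrependsIncompletePartAndWritesANewIncompletePart ensures
// that, after prepending an incomplete part, any remainder that is still too
// small is written back as a new incomplete .part object.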
func TestWriteChunkPrependsIncompletePartAndWritesANewIncompletePart(t *testing.T) {
	mockCtrl := gomock.NewController(t)
	defer mockCtrl.Finish()
	assert := assert.New(t)

	s3obj := NewMockS3API(mockCtrl)
	store := New("bucket", s3obj)
	store.MaxPartSize = 8
	store.MinPartSize = 4
	store.PreferredPartSize = 4
	store.MaxMultipartParts = 10000
	store.MaxObjectSize = 5 * 1024 * 1024 * 1024 * 1024

	s3obj.EXPECT().GetObjectWithContext(context.Background(), &s3.GetObjectInput{
		Bucket: aws.String("bucket"),
		Key:    aws.String("uploadId.info"),
	}).Return(&s3.GetObjectOutput{
		Body: ioutil.NopCloser(bytes.NewReader([]byte(`{"ID":"uploadId","Size":10,"Offset":0,"MetaData":null,"IsPartial":false,"IsFinal":false,"PartialUploads":null,"Storage":null}`))),
	}, nil)
	s3obj.EXPECT().ListPartsWithContext(context.Background(), &s3.ListPartsInput{
		Bucket:           aws.String("bucket"),
		Key:              aws.String("uploadId"),
		UploadId:         aws.String("multipartId"),
		PartNumberMarker: aws.Int64(0),
	}).Return(&s3.ListPartsOutput{Parts: []*s3.Part{}}, nil)
	s3obj.EXPECT().HeadObjectWithContext(context.Background(), &s3.HeadObjectInput{
		Bucket: aws.String("bucket"),
		Key:    aws.String("uploadId.part"),
	}).Return(&s3.HeadObjectOutput{
		ContentLength: aws.Int64(3),
	}, nil)
	s3obj.EXPECT().GetObjectWithContext(context.Background(), &s3.GetObjectInput{
		Bucket: aws.String("bucket"),
		Key:    aws.String("uploadId.part"),
	}).Return(&s3.GetObjectOutput{
		ContentLength: aws.Int64(3),
		Body:          ioutil.NopCloser(bytes.NewReader([]byte("123"))),
	}, nil)
	s3obj.EXPECT().DeleteObjectWithContext(context.Background(), &s3.DeleteObjectInput{
		Bucket: aws.String(store.Bucket),
		Key:    aws.String("uploadId.part"),
	}).Return(&s3.DeleteObjectOutput{}, nil)

	s3obj.EXPECT().UploadPartWithContext(context.Background(), NewUploadPartInputMatcher(&s3.UploadPartInput{
		Bucket:        aws.String("bucket"),
		Key:           aws.String("uploadId"),
		UploadId:      aws.String("multipartId"),
		PartNumber:    aws.Int64(1),
		Body:          bytes.NewReader([]byte("1234")),
		ContentLength: aws.Int64(4),
	})).Return(&s3.UploadPartOutput{
		ETag: aws.String("etag-1"),
	}, nil)
	s3obj.EXPECT().PutObjectWithContext(context.Background(), NewPutObjectInputMatcher(&s3.PutObjectInput{
		Bucket: aws.String("bucket"),
		Key:    aws.String("uploadId.part"),
		Body:   bytes.NewReader([]byte("5")),
	})).Return(nil, nil)

	upload, err := store.GetUpload(context.Background(), "uploadId+multipartId")
	assert.Nil(err)

	bytesRead, err := upload.WriteChunk(context.Background(), 3, bytes.NewReader([]byte("45")))
	assert.Nil(err)
	assert.Equal(int64(2), bytesRead)
}
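// TestWriteChunkAllowTooSmallLast ensures that the last chunk of an upload
// may be smaller than MinPartSize.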
func TestWriteChunkAllowTooSmallLast(t *testing.T) {
	mockCtrl := gomock.NewController(t)
	defer mockCtrl.Finish()
	assert := assert.New(t)

	s3obj := NewMockS3API(mockCtrl)
	store := New("bucket", s3obj)
	store.MinPartSize = 20

	s3obj.EXPECT().GetObjectWithContext(context.Background(), &s3.GetObjectInput{
		Bucket: aws.String("bucket"),
		Key:    aws.String("uploadId.info"),
	}).Return(&s3.GetObjectOutput{
		Body: ioutil.NopCloser(bytes.NewReader([]byte(`{"ID":"uploadId","Size":500,"Offset":0,"MetaData":null,"IsPartial":false,"IsFinal":false,"PartialUploads":null,"Storage":null}`))),
	}, nil)
	s3obj.EXPECT().ListPartsWithContext(context.Background(), &s3.ListPartsInput{
		Bucket:           aws.String("bucket"),
		Key:              aws.String("uploadId"),
		UploadId:         aws.String("multipartId"),
		PartNumberMarker: aws.Int64(0),
	}).Return(&s3.ListPartsOutput{
		Parts: []*s3.Part{
			{
				PartNumber: aws.Int64(1),
				Size:       aws.Int64(400),
				ETag:       aws.String("etag-1"),
			},
			{
				PartNumber: aws.Int64(2),
				Size:       aws.Int64(90),
				ETag:       aws.String("etag-2"),
			},
		},
	}, nil)
	s3obj.EXPECT().HeadObjectWithContext(context.Background(), &s3.HeadObjectInput{
		Bucket: aws.String("bucket"),
		Key:    aws.String("uploadId.part"),
	}).Return(nil, awserr.New("AccessDenied", "Access Denied.", nil))
	s3obj.EXPECT().UploadPartWithContext(context.Background(), NewUploadPartInputMatcher(&s3.UploadPartInput{
		Bucket:        aws.String("bucket"),
		Key:           aws.String("uploadId"),
		UploadId:      aws.String("multipartId"),
		PartNumber:    aws.Int64(3),
		Body:          bytes.NewReader([]byte("1234567890")),
		ContentLength: aws.Int64(10),
	})).Return(&s3.UploadPartOutput{
		ETag: aws.String("etag-3"),
	}, nil)

	upload, err := store.GetUpload(context.Background(), "uploadId+multipartId")
	assert.Nil(err)

	// 10 bytes are missing for the upload to be finished (offset at 490 for a
	// 500-byte file) but the minimum chunk size is higher (20). The chunk is
	// still uploaded since the last part may be smaller than the minimum.
	bytesRead, err := upload.WriteChunk(context.Background(), 490, bytes.NewReader([]byte("1234567890")))
	assert.Nil(err)
	assert.Equal(int64(10), bytesRead)
}
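// TestTerminate ensures that terminating an upload aborts the multipart
// upload and deletes the upload, .part and .info objects.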
func TestTerminate(t *testing.T) {
	mockCtrl := gomock.NewController(t)
	defer mockCtrl.Finish()
	assert := assert.New(t)

	s3obj := NewMockS3API(mockCtrl)
	store := New("bucket", s3obj)

	// Order is not important in this situation.
	s3obj.EXPECT().AbortMultipartUploadWithContext(context.Background(), &s3.AbortMultipartUploadInput{
		Bucket:   aws.String("bucket"),
		Key:      aws.String("uploadId"),
		UploadId: aws.String("multipartId"),
	}).Return(nil, nil)

	s3obj.EXPECT().DeleteObjectsWithContext(context.Background(), &s3.DeleteObjectsInput{
		Bucket: aws.String("bucket"),
		Delete: &s3.Delete{
			Objects: []*s3.ObjectIdentifier{
				{
					Key: aws.String("uploadId"),
				},
				{
					Key: aws.String("uploadId.part"),
				},
				{
					Key: aws.String("uploadId.info"),
				},
			},
			Quiet: aws.Bool(true),
		},
	}).Return(&s3.DeleteObjectsOutput{}, nil)

	upload, err := store.GetUpload(context.Background(), "uploadId+multipartId")
	assert.Nil(err)

	err = store.AsTerminatableUpload(upload).Terminate(context.Background())
	assert.Nil(err)
}
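// TestTerminateWithErrors ensures that NoSuchUpload errors are ignored while
// errors reported by DeleteObjects are surfaced by Terminate.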
func TestTerminateWithErrors(t *testing.T) {
	mockCtrl := gomock.NewController(t)
	defer mockCtrl.Finish()
	assert := assert.New(t)

	s3obj := NewMockS3API(mockCtrl)
	store := New("bucket", s3obj)

	// Order is not important in this situation.
	// NoSuchUpload errors should be ignored
	s3obj.EXPECT().AbortMultipartUploadWithContext(context.Background(), &s3.AbortMultipartUploadInput{
		Bucket:   aws.String("bucket"),
		Key:      aws.String("uploadId"),
		UploadId: aws.String("multipartId"),
	}).Return(nil, awserr.New("NoSuchUpload", "The specified upload does not exist.", nil))

	s3obj.EXPECT().DeleteObjectsWithContext(context.Background(), &s3.DeleteObjectsInput{
		Bucket: aws.String("bucket"),
		Delete: &s3.Delete{
			Objects: []*s3.ObjectIdentifier{
				{
					Key: aws.String("uploadId"),
				},
				{
					Key: aws.String("uploadId.part"),
				},
				{
					Key: aws.String("uploadId.info"),
				},
			},
			Quiet: aws.Bool(true),
		},
	}).Return(&s3.DeleteObjectsOutput{
		Errors: []*s3.Error{
			{
				Code:    aws.String("hello"),
				Key:     aws.String("uploadId"),
				Message: aws.String("it's me."),
			},
		},
	}, nil)

	upload, err := store.GetUpload(context.Background(), "uploadId+multipartId")
	assert.Nil(err)

	err = store.AsTerminatableUpload(upload).Terminate(context.Background())
	assert.Equal("Multiple errors occurred:\n\tAWS S3 Error (hello) for object uploadId: it's me.\n", err.Error())
}
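// TestConcatUploadsUsingMultipart ensures that partial uploads larger than
// MinPartSize are concatenated server-side using UploadPartCopy.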
func TestConcatUploadsUsingMultipart(t *testing.T) {
	mockCtrl := gomock.NewController(t)
	defer mockCtrl.Finish()
	assert := assert.New(t)

	s3obj := NewMockS3API(mockCtrl)
	store := New("bucket", s3obj)
	store.MinPartSize = 100

	// Calls from NewUpload
	s3obj.EXPECT().CreateMultipartUploadWithContext(context.Background(), &s3.CreateMultipartUploadInput{
		Bucket:   aws.String("bucket"),
		Key:      aws.String("uploadId"),
		Metadata: map[string]*string{},
	}).Return(&s3.CreateMultipartUploadOutput{
		UploadId: aws.String("multipartId"),
	}, nil)
	s3obj.EXPECT().PutObjectWithContext(context.Background(), &s3.PutObjectInput{
		Bucket:        aws.String("bucket"),
		Key:           aws.String("uploadId.info"),
		Body:          bytes.NewReader([]byte(`{"ID":"uploadId+multipartId","Size":0,"SizeIsDeferred":false,"Offset":0,"MetaData":null,"IsPartial":false,"IsFinal":true,"PartialUploads":["aaa+AAA","bbb+BBB","ccc+CCC"],"Storage":{"Bucket":"bucket","Key":"uploadId","Type":"s3store"}}`)),
		ContentLength: aws.Int64(int64(234)),
	})

	// Calls from ConcatUploads
	s3obj.EXPECT().UploadPartCopyWithContext(context.Background(), &s3.UploadPartCopyInput{
		Bucket:     aws.String("bucket"),
		Key:        aws.String("uploadId"),
		UploadId:   aws.String("multipartId"),
		CopySource: aws.String("bucket/aaa"),
		PartNumber: aws.Int64(1),
	}).Return(&s3.UploadPartCopyOutput{
		CopyPartResult: &s3.CopyPartResult{
			ETag: aws.String("etag-1"),
		},
	}, nil)

	s3obj.EXPECT().UploadPartCopyWithContext(context.Background(), &s3.UploadPartCopyInput{
		Bucket:     aws.String("bucket"),
		Key:        aws.String("uploadId"),
		UploadId:   aws.String("multipartId"),
		CopySource: aws.String("bucket/bbb"),
		PartNumber: aws.Int64(2),
	}).Return(&s3.UploadPartCopyOutput{
		CopyPartResult: &s3.CopyPartResult{
			ETag: aws.String("etag-2"),
		},
	}, nil)

	s3obj.EXPECT().UploadPartCopyWithContext(context.Background(), &s3.UploadPartCopyInput{
		Bucket:     aws.String("bucket"),
		Key:        aws.String("uploadId"),
		UploadId:   aws.String("multipartId"),
		CopySource: aws.String("bucket/ccc"),
		PartNumber: aws.Int64(3),
	}).Return(&s3.UploadPartCopyOutput{
		CopyPartResult: &s3.CopyPartResult{
			ETag: aws.String("etag-3"),
		},
	}, nil)

	// Calls from FinishUpload
	s3obj.EXPECT().CompleteMultipartUploadWithContext(context.Background(), &s3.CompleteMultipartUploadInput{
		Bucket:   aws.String("bucket"),
		Key:      aws.String("uploadId"),
		UploadId: aws.String("multipartId"),
		MultipartUpload: &s3.CompletedMultipartUpload{
			Parts: []*s3.CompletedPart{
				{
					ETag:       aws.String("etag-1"),
					PartNumber: aws.Int64(1),
				},
				{
					ETag:       aws.String("etag-2"),
					PartNumber: aws.Int64(2),
				},
				{
					ETag:       aws.String("etag-3"),
					PartNumber: aws.Int64(3),
				},
			},
		},
	}).Return(nil, nil)

	info := handler.FileInfo{
		ID:      "uploadId",
		IsFinal: true,
		PartialUploads: []string{
			"aaa+AAA",
			"bbb+BBB",
			"ccc+CCC",
		},
	}

	upload, err := store.NewUpload(context.Background(), info)
	assert.Nil(err)

	uploadA, err := store.GetUpload(context.Background(), "aaa+AAA")
	assert.Nil(err)
	uploadB, err := store.GetUpload(context.Background(), "bbb+BBB")
	assert.Nil(err)
	uploadC, err := store.GetUpload(context.Background(), "ccc+CCC")
	assert.Nil(err)

	// All uploads have a size larger than the MinPartSize, so an S3 multipart upload is used for concatenation.
	uploadA.(*s3Upload).info = &handler.FileInfo{Size: 500}
	uploadB.(*s3Upload).info = &handler.FileInfo{Size: 500}
	uploadC.(*s3Upload).info = &handler.FileInfo{Size: 500}

	err = store.AsConcatableUpload(upload).ConcatUploads(context.Background(), []handler.Upload{
		uploadA,
		uploadB,
		uploadC,
	})
	assert.Nil(err)
}
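
// TestConcatUploadsUsingDownload ensures that ConcatUploads falls back to
// downloading the partial uploads and re-uploading the concatenated object
// in a single PutObject call when the partial uploads are smaller than
// MinPartSize. The now unneeded multipart upload is aborted asynchronously
// afterwards.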
func TestConcatUploadsUsingDownload(t *testing.T) {
	mockCtrl := gomock.NewController(t)
	defer mockCtrl.Finish()
	assert := assert.New(t)

	s3obj := NewMockS3API(mockCtrl)
	store := New("bucket", s3obj)
	store.MinPartSize = 100

	gomock.InOrder(
		s3obj.EXPECT().GetObjectWithContext(context.Background(), &s3.GetObjectInput{
			Bucket: aws.String("bucket"),
			Key:    aws.String("aaa"),
		}).Return(&s3.GetObjectOutput{
			Body: ioutil.NopCloser(bytes.NewReader([]byte("aaa"))),
		}, nil),
		s3obj.EXPECT().GetObjectWithContext(context.Background(), &s3.GetObjectInput{
			Bucket: aws.String("bucket"),
			Key:    aws.String("bbb"),
		}).Return(&s3.GetObjectOutput{
			Body: ioutil.NopCloser(bytes.NewReader([]byte("bbbb"))),
		}, nil),
		s3obj.EXPECT().GetObjectWithContext(context.Background(), &s3.GetObjectInput{
			Bucket: aws.String("bucket"),
			Key:    aws.String("ccc"),
		}).Return(&s3.GetObjectOutput{
			Body: ioutil.NopCloser(bytes.NewReader([]byte("ccccc"))),
		}, nil),
		s3obj.EXPECT().PutObjectWithContext(context.Background(), NewPutObjectInputMatcher(&s3.PutObjectInput{
			Bucket: aws.String("bucket"),
			Key:    aws.String("uploadId"),
			Body:   bytes.NewReader([]byte("aaabbbbccccc")),
		})),
		s3obj.EXPECT().AbortMultipartUploadWithContext(context.Background(), &s3.AbortMultipartUploadInput{
			Bucket:   aws.String("bucket"),
			Key:      aws.String("uploadId"),
			UploadId: aws.String("multipartId"),
		}).Return(nil, nil),
	)

	upload, err := store.GetUpload(context.Background(), "uploadId+multipartId")
	assert.Nil(err)

	uploadA, err := store.GetUpload(context.Background(), "aaa+AAA")
	assert.Nil(err)
	uploadB, err := store.GetUpload(context.Background(), "bbb+BBB")
	assert.Nil(err)
	uploadC, err := store.GetUpload(context.Background(), "ccc+CCC")
	assert.Nil(err)

	// All uploads have a size smaller than the MinPartSize, so the files are downloaded for concatenation.
	uploadA.(*s3Upload).info = &handler.FileInfo{Size: 3}
	uploadB.(*s3Upload).info = &handler.FileInfo{Size: 4}
	uploadC.(*s3Upload).info = &handler.FileInfo{Size: 5}

	err = store.AsConcatableUpload(upload).ConcatUploads(context.Background(), []handler.Upload{
		uploadA,
		uploadB,
		uploadC,
	})
	assert.Nil(err)

	// Wait a short delay so the asynchronous call to AbortMultipartUploadWithContext can also occur.
	<-time.After(10 * time.Millisecond)
}
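
// s3APIWithTempFileAssertion wraps the generated MockS3API. Embedding the
// mock lets every other S3API method fall through to gomock, while
// UploadPartWithContext is overridden to assert that tusd's temporary files
// exist in tempDir before failing the part upload with an error.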
type s3APIWithTempFileAssertion struct {
	*MockS3API
	assert  *assert.Assertions
	tempDir string
}

func (s s3APIWithTempFileAssertion) UploadPartWithContext(context.Context, *s3.UploadPartInput, ...request.Option) (*s3.UploadPartOutput, error) {
	assert := s.assert

	// Make sure that there are temporary files from tusd in here.
	files, err := ioutil.ReadDir(s.tempDir)
	assert.Nil(err)
	for _, file := range files {
		assert.True(strings.HasPrefix(file.Name(), "tusd-s3-tmp-"))
	}

	assert.GreaterOrEqual(len(files), 1)
	assert.LessOrEqual(len(files), 3)

	return nil, fmt.Errorf("not now")
}

// This test ensures that the S3Store will clean up all files that it creates during
// a call to WriteChunk, even if an error occurs during that invocation.
// Here, we provide 14 bytes to WriteChunk and since the PartSize is set to 10,
// it will split the input into two parts (10 bytes and 4 bytes).
// Inside the first call to UploadPartWithContext, we assert that the temporary files
// for both parts have been created and we return an error.
// In the end, we assert that the error bubbled up and that all temporary files have
// been cleaned up.
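// For illustration, with a part size of 10 bytes the 14-byte input
// "1234567890ABCD" is split into "1234567890" and "ABCD".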
func TestWriteChunkCleansUpTempFiles(t *testing.T) {
	mockCtrl := gomock.NewController(t)
	defer mockCtrl.Finish()
	assert := assert.New(t)

	// Create a temporary directory, so no files get mixed in.
	tempDir, err := ioutil.TempDir("", "tusd-s3-cleanup-tests-")
	assert.Nil(err)

	s3obj := NewMockS3API(mockCtrl)
	s3api := s3APIWithTempFileAssertion{
		MockS3API: s3obj,
		assert:    assert,
		tempDir:   tempDir,
	}
	store := New("bucket", s3api)
	store.MaxPartSize = 10
	store.MinPartSize = 10
	store.PreferredPartSize = 10
	store.MaxMultipartParts = 10000
	store.MaxObjectSize = 5 * 1024 * 1024 * 1024 * 1024
	store.TemporaryDirectory = tempDir

	// The usual S3 calls for retrieving the upload
	s3obj.EXPECT().GetObjectWithContext(context.Background(), &s3.GetObjectInput{
		Bucket: aws.String("bucket"),
		Key:    aws.String("uploadId.info"),
	}).Return(&s3.GetObjectOutput{
		Body: ioutil.NopCloser(bytes.NewReader([]byte(`{"ID":"uploadId","Size":14,"Offset":0,"MetaData":null,"IsPartial":false,"IsFinal":false,"PartialUploads":null,"Storage":null}`))),
	}, nil)
	s3obj.EXPECT().ListPartsWithContext(context.Background(), &s3.ListPartsInput{
		Bucket:           aws.String("bucket"),
		Key:              aws.String("uploadId"),
		UploadId:         aws.String("multipartId"),
		PartNumberMarker: aws.Int64(0),
	}).Return(&s3.ListPartsOutput{
		Parts: []*s3.Part{},
	}, nil)
	s3obj.EXPECT().HeadObjectWithContext(context.Background(), &s3.HeadObjectInput{
		Bucket: aws.String("bucket"),
		Key:    aws.String("uploadId.part"),
	}).Return(nil, awserr.New("NoSuchKey", "Not found", nil))

	// No calls to s3obj.EXPECT().UploadPartWithContext since that is handled by s3APIWithTempFileAssertion

	upload, err := store.GetUpload(context.Background(), "uploadId+multipartId")
	assert.Nil(err)

	bytesRead, err := upload.WriteChunk(context.Background(), 0, bytes.NewReader([]byte("1234567890ABCD")))
	assert.NotNil(err)
	assert.Equal(err.Error(), "not now")
	assert.Equal(int64(0), bytesRead)

	files, err := ioutil.ReadDir(tempDir)
	assert.Nil(err)
	assert.Equal(len(files), 0)
}