diff --git a/s3store/s3store_test.go b/s3store/s3store_test.go
index 969b7f7..5541944 100644
--- a/s3store/s3store_test.go
+++ b/s3store/s3store_test.go
@@ -214,7 +214,6 @@ func TestNewUpload(t *testing.T) {
 	assert.Equal("uploadId+multipartId", id)
 }
 
-/*
 func TestNewUploadLargerMaxObjectSize(t *testing.T) {
 	mockCtrl := gomock.NewController(t)
 	defer mockCtrl.Finish()
@@ -226,42 +225,16 @@ func TestNewUploadLargerMaxObjectSize(t *testing.T) {
 	assert.Equal("bucket", store.Bucket)
 	assert.Equal(s3obj, store.Service)
 
-	s1 := "hello"
-	s2 := "menü"
-
-	gomock.InOrder(
-		s3obj.EXPECT().CreateMultipartUpload(&s3.CreateMultipartUploadInput{
-			Bucket: aws.String("bucket"),
-			Key:    aws.String("uploadId"),
-			Metadata: map[string]*string{
-				"foo": &s1,
-				"bar": &s2,
-			},
-		}).Return(&s3.CreateMultipartUploadOutput{
-			UploadId: aws.String("multipartId"),
-		}, nil),
-		s3obj.EXPECT().PutObject(&s3.PutObjectInput{
-			Bucket:        aws.String("bucket"),
-			Key:           aws.String("uploadId.info"),
-			Body:          bytes.NewReader([]byte(`{"ID":"uploadId+multipartId","Size":500,"Offset":0,"MetaData":{"bar":"menü","foo":"hello"},"IsPartial":false,"IsFinal":false,"PartialUploads":null}`)),
-			ContentLength: aws.Int64(int64(148)),
-		}),
-	)
-
 	info := tusd.FileInfo{
 		ID:   "uploadId",
-		Size: 500,
-		MetaData: map[string]string{
-			"foo": "hello",
-			"bar": "menü",
-		},
+		Size: store.MaxObjectSize + 1,
 	}
 
 	id, err := store.NewUpload(info)
-	assert.Nil(err)
-	assert.Equal("uploadId+multipartId", id)
+	assert.NotNil(err)
+	assert.EqualError(err, fmt.Sprintf("s3store: upload size of %v bytes exceeds MaxObjectSize of %v bytes", info.Size, store.MaxObjectSize))
+	assert.Equal("", id)
 }
-*/
 
 func TestGetInfoNotFound(t *testing.T) {
 	mockCtrl := gomock.NewController(t)
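
The error string asserted by the reworked test implies a size guard near the top of S3Store.NewUpload in s3store.go. Below is a minimal sketch of that guard, assuming MaxObjectSize is an int64 field on S3Store and that the check runs before CreateMultipartUpload is issued; it is not taken verbatim from the implementation:

	// NewUpload rejects uploads whose declared size exceeds the configured
	// MaxObjectSize before any S3 requests are made (sketch, not the actual code).
	func (store S3Store) NewUpload(info tusd.FileInfo) (id string, err error) {
		if info.Size > store.MaxObjectSize {
			return "", fmt.Errorf("s3store: upload size of %v bytes exceeds MaxObjectSize of %v bytes", info.Size, store.MaxObjectSize)
		}
		// ... otherwise proceed with CreateMultipartUpload and writing the .info object ...
		return id, nil
	}

With this guard in place, the test no longer needs the CreateMultipartUpload and PutObject mock expectations, since NewUpload returns before touching S3 — which is why those gomock.InOrder calls are removed above.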