diff --git a/pkg/s3store/s3store_test.go b/pkg/s3store/s3store_test.go
index 1c866d5..88ec09a 100644
--- a/pkg/s3store/s3store_test.go
+++ b/pkg/s3store/s3store_test.go
@@ -4,7 +4,6 @@ import (
 	"bytes"
 	"context"
 	"fmt"
-	"io"
 	"io/ioutil"
 	"strings"
 	"testing"
@@ -764,72 +763,6 @@ func TestWriteChunk(t *testing.T) {
 	assert.Equal(int64(14), bytesRead)
 }
 
-// TestWriteChunkWithUnexpectedEOF ensures that WriteChunk does not error out
-// if the io.Reader returns an io.ErrUnexpectedEOF. This happens when a HTTP
-// PATCH request gets interrupted.
-func TestWriteChunkWithUnexpectedEOF(t *testing.T) {
-	mockCtrl := gomock.NewController(t)
-	defer mockCtrl.Finish()
-	assert := assert.New(t)
-
-	s3obj := NewMockS3API(mockCtrl)
-	store := New("bucket", s3obj)
-	store.MaxPartSize = 500
-	store.MinPartSize = 100
-	store.PreferredPartSize = 100
-	store.MaxMultipartParts = 10000
-	store.MaxObjectSize = 5 * 1024 * 1024 * 1024 * 1024
-
-	s3obj.EXPECT().GetObjectWithContext(context.Background(), &s3.GetObjectInput{
-		Bucket: aws.String("bucket"),
-		Key:    aws.String("uploadId.info"),
-	}).Return(&s3.GetObjectOutput{
-		Body: ioutil.NopCloser(bytes.NewReader([]byte(`{"ID":"uploadId","Size":500,"Offset":0,"MetaData":null,"IsPartial":false,"IsFinal":false,"PartialUploads":null,"Storage":null}`))),
-	}, nil)
-	s3obj.EXPECT().ListPartsWithContext(context.Background(), &s3.ListPartsInput{
-		Bucket:           aws.String("bucket"),
-		Key:              aws.String("uploadId"),
-		UploadId:         aws.String("multipartId"),
-		PartNumberMarker: aws.Int64(0),
-	}).Return(&s3.ListPartsOutput{
-		Parts: []*s3.Part{
-			{
-				Size: aws.Int64(100),
-			},
-			{
-				Size: aws.Int64(200),
-			},
-		},
-	}, nil).Times(2)
-	s3obj.EXPECT().GetObjectWithContext(context.Background(), &s3.GetObjectInput{
-		Bucket: aws.String("bucket"),
-		Key:    aws.String("uploadId.part"),
-	}).Return(&s3.GetObjectOutput{}, awserr.New("NoSuchKey", "Not found", nil))
-	s3obj.EXPECT().GetObjectWithContext(context.Background(), &s3.GetObjectInput{
-		Bucket: aws.String("bucket"),
-		Key:    aws.String("uploadId.part"),
-	}).Return(&s3.GetObjectOutput{}, awserr.New("NoSuchKey", "The specified key does not exist.", nil))
-	s3obj.EXPECT().PutObjectWithContext(context.Background(), NewPutObjectInputMatcher(&s3.PutObjectInput{
-		Bucket: aws.String("bucket"),
-		Key:    aws.String("uploadId.part"),
-		Body:   bytes.NewReader([]byte("1234567890ABCD")),
-	})).Return(nil, nil)
-
-	reader, writer := io.Pipe()
-
-	go func() {
-		writer.Write([]byte("1234567890ABCD"))
-		writer.CloseWithError(io.ErrUnexpectedEOF)
-	}()
-
-	upload, err := store.GetUpload(context.Background(), "uploadId+multipartId")
-	assert.Nil(err)
-
-	bytesRead, err := upload.WriteChunk(context.Background(), 300, reader)
-	assert.Nil(err)
-	assert.Equal(int64(14), bytesRead)
-}
-
 func TestWriteChunkWriteIncompletePartBecauseTooSmall(t *testing.T) {
 	mockCtrl := gomock.NewController(t)
 	defer mockCtrl.Finish()