diff --git a/cmd/tusd/cli/composer.go b/cmd/tusd/cli/composer.go
index 356cf8f..ecc87a1 100644
--- a/cmd/tusd/cli/composer.go
+++ b/cmd/tusd/cli/composer.go
@@ -42,13 +42,11 @@ func CreateComposer() {
 		}
 
 		if Flags.S3Endpoint == "" {
-
 			if Flags.S3TransferAcceleration {
 				stdout.Printf("Using 's3://%s' as S3 bucket for storage with AWS S3 Transfer Acceleration enabled.\n", Flags.S3Bucket)
 			} else {
 				stdout.Printf("Using 's3://%s' as S3 bucket for storage.\n", Flags.S3Bucket)
 			}
-
 		} else {
 			stdout.Printf("Using '%s/%s' as S3 endpoint and bucket for storage.\n", Flags.S3Endpoint, Flags.S3Bucket)
@@ -61,6 +59,7 @@ func CreateComposer() {
 		store.ObjectPrefix = Flags.S3ObjectPrefix
 		store.PreferredPartSize = Flags.S3PartSize
 		store.DisableContentHashes = Flags.S3DisableContentHashes
+		store.SetConcurrentPartUploads(Flags.S3ConcurrentPartUploads)
 		store.UseIn(Composer)
 
 		locker := memorylocker.New()
diff --git a/pkg/s3store/s3store.go b/pkg/s3store/s3store.go
index da4944c..a44b59a 100644
--- a/pkg/s3store/s3store.go
+++ b/pkg/s3store/s3store.go
@@ -198,8 +198,8 @@ func New(bucket string, service S3API) S3Store {
 	}
 }
 
-// S3ConcurrentPartUploads changes the limit on how many concurrent part uploads to S3 are allowed.
-func (store *S3Store) S3ConcurrentPartUploads(limit int) {
+// SetConcurrentPartUploads changes the limit on how many concurrent part uploads to S3 are allowed.
+func (store *S3Store) SetConcurrentPartUploads(limit int) {
 	store.uploadSemaphore = semaphore.New(limit)
 }
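
For context, a minimal sketch of how library code (rather than the CLI flag wired up in CreateComposer above) might use the renamed setter. It assumes the pkg/ import paths shown in the diff and an aws-sdk-go v1 S3 client for s3store.New; the function name, region, bucket, and concurrency value are illustrative, not part of this change.

package main

import (
	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/s3"

	"github.com/tus/tusd/pkg/handler"
	"github.com/tus/tusd/pkg/s3store"
)

// newS3Composer is a hypothetical helper showing the renamed API in use.
func newS3Composer() *handler.StoreComposer {
	// Illustrative SDK setup; region and bucket are placeholders.
	sess := session.Must(session.NewSession(aws.NewConfig().WithRegion("us-east-1")))
	store := s3store.New("example-bucket", s3.New(sess))

	// Adjust how many parts of a single upload may be sent to S3 in
	// parallel, mirroring what the flag-driven call in CreateComposer does.
	store.SetConcurrentPartUploads(20)

	composer := handler.NewStoreComposer()
	store.UseIn(composer)
	return composer
}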