Merge pull request #182 from acj/acj/add-upload-defer-length-support

Implement Upload-Defer-Length Extension
commit d9d0f7c4e7 by Marius, 2018-06-04 20:24:15 +02:00, committed by GitHub (GPG Key ID: 4AEE18F83AFDEB23)
13 changed files with 418 additions and 40 deletions


@@ -16,6 +16,8 @@ type StoreComposer struct {
 	GetReader          GetReaderDataStore
 	UsesConcater       bool
 	Concater           ConcaterDataStore
+	UsesLengthDeferrer bool
+	LengthDeferrer     LengthDeferrerDataStore
 }

 // NewStoreComposer creates a new and empty store composer.
@@ -45,6 +47,9 @@ func newStoreComposerFromDataStore(store DataStore) *StoreComposer {
 	if mod, ok := store.(ConcaterDataStore); ok {
 		composer.UseConcater(mod)
 	}
+	if mod, ok := store.(LengthDeferrerDataStore); ok {
+		composer.UseLengthDeferrer(mod)
+	}

 	return composer
 }
@@ -90,6 +95,12 @@ func (store *StoreComposer) Capabilities() string {
 	} else {
 		str += "✗"
 	}
+	str += ` LengthDeferrer: `
+	if store.UsesLengthDeferrer {
+		str += "✓"
+	} else {
+		str += "✗"
+	}

 	return str
 }
@@ -120,3 +131,8 @@ func (store *StoreComposer) UseConcater(ext ConcaterDataStore) {
 	store.UsesConcater = ext != nil
 	store.Concater = ext
 }
+
+func (store *StoreComposer) UseLengthDeferrer(ext LengthDeferrerDataStore) {
+	store.UsesLengthDeferrer = ext != nil
+	store.LengthDeferrer = ext
+}
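The new setter follows the same pattern as the existing Use* methods: it flips the capability flag and stores the extension. A minimal sketch of what that enables, assuming the github.com/tus/tusd import path of this era and a made-up stub store:

```go
package main

import (
	"fmt"

	"github.com/tus/tusd"
)

// stubDeferrer is a hypothetical store that supports nothing except
// declaring an upload's length; it exists only for this example.
type stubDeferrer struct{}

func (stubDeferrer) DeclareLength(id string, length int64) error { return nil }

func main() {
	composer := tusd.NewStoreComposer()

	// UseLengthDeferrer sets UsesLengthDeferrer and LengthDeferrer,
	// exactly like the setter added in this diff.
	composer.UseLengthDeferrer(stubDeferrer{})

	// Capabilities() now reports the flag, e.g. "... LengthDeferrer: ✓".
	fmt.Println(composer.Capabilities())
}
```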


@@ -31,6 +31,7 @@ type StoreComposer struct {
 	USE_FIELD(Locker)
 	USE_FIELD(GetReader)
 	USE_FIELD(Concater)
+	USE_FIELD(LengthDeferrer)
 }

 // NewStoreComposer creates a new and empty store composer.
@@ -50,6 +51,7 @@ func newStoreComposerFromDataStore(store DataStore) *StoreComposer {
 	USE_FROM(Locker)
 	USE_FROM(GetReader)
 	USE_FROM(Concater)
+	USE_FROM(LengthDeferrer)

 	return composer
 }
@@ -70,6 +72,7 @@ func (store *StoreComposer) Capabilities() string {
 	USE_CAP(Locker)
 	USE_CAP(GetReader)
 	USE_CAP(Concater)
+	USE_CAP(LengthDeferrer)

 	return str
 }
@@ -85,3 +88,4 @@ USE_FUNC(Finisher)
 USE_FUNC(Locker)
 USE_FUNC(GetReader)
 USE_FUNC(Concater)
+USE_FUNC(LengthDeferrer)


@@ -10,6 +10,8 @@ type FileInfo struct {
 	ID string
 	// Total file size in bytes specified in the NewUpload call
 	Size int64
+	// Indicates whether the total file size is deferred until later
+	SizeIsDeferred bool
 	// Offset in bytes (zero-based)
 	Offset int64
 	MetaData MetaData
@@ -112,3 +114,11 @@ type ConcaterDataStore interface {
 	// must be respected during concatenation.
 	ConcatUploads(destination string, partialUploads []string) error
 }
+
+// LengthDeferrerDataStore is the interface that must be implemented if the
+// creation-defer-length extension should be enabled. The extension enables a
+// client to upload files when their total size is not yet known. Instead, the
+// client must send the total size as soon as it becomes known.
+type LengthDeferrerDataStore interface {
+	DeclareLength(id string, length int64) error
+}
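For orientation, the client-side flow this interface ultimately serves (per the tus creation-defer-length extension) is: create the upload with Upload-Defer-Length: 1 and no Upload-Length, then supply Upload-Length on a later PATCH once the size is known. A hedged sketch using only net/http; the endpoint URL and sizes are invented for the example:

```go
package main

import (
	"log"
	"net/http"
	"strings"
)

func main() {
	// 1. Create an upload whose total size is not yet known.
	req, _ := http.NewRequest("POST", "http://localhost:1080/files/", nil)
	req.Header.Set("Tus-Resumable", "1.0.0")
	req.Header.Set("Upload-Defer-Length", "1")
	res, err := http.DefaultClient.Do(req)
	if err != nil {
		log.Fatal(err)
	}
	uploadURL := res.Header.Get("Location")

	// 2. Once the size becomes known, declare it alongside a chunk.
	//    On the server this is what ends up calling DeclareLength.
	req, _ = http.NewRequest("PATCH", uploadURL, strings.NewReader("hello"))
	req.Header.Set("Tus-Resumable", "1.0.0")
	req.Header.Set("Content-Type", "application/offset+octet-stream")
	req.Header.Set("Upload-Offset", "0")
	req.Header.Set("Upload-Length", "5")
	if _, err := http.DefaultClient.Do(req); err != nil {
		log.Fatal(err)
	}
}
```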


@@ -55,6 +55,7 @@ func (store FileStore) UseIn(composer *tusd.StoreComposer) {
 	composer.UseTerminater(store)
 	composer.UseLocker(store)
 	composer.UseConcater(store)
+	composer.UseLengthDeferrer(store)
 }

 func (store FileStore) NewUpload(info tusd.FileInfo) (id string, err error) {
@@ -142,6 +143,16 @@ func (store FileStore) ConcatUploads(dest string, uploads []string) (err error)
 	return
 }

+func (store FileStore) DeclareLength(id string, length int64) error {
+	info, err := store.GetInfo(id)
+	if err != nil {
+		return err
+	}
+	info.Size = length
+	info.SizeIsDeferred = false
+	return store.writeInfo(id, info)
+}
+
 func (store FileStore) LockUpload(id string) error {
 	lock, err := store.newLock(id)
 	if err != nil {


@@ -18,6 +18,7 @@ var _ tusd.GetReaderDataStore = FileStore{}
 var _ tusd.TerminaterDataStore = FileStore{}
 var _ tusd.LockerDataStore = FileStore{}
 var _ tusd.ConcaterDataStore = FileStore{}
+var _ tusd.LengthDeferrerDataStore = FileStore{}

 func TestFilestore(t *testing.T) {
 	a := assert.New(t)
@@ -146,3 +147,28 @@ func TestConcatUploads(t *testing.T) {
 	a.Equal("abcdefghi", string(content))
 	reader.(io.Closer).Close()
 }
+
+func TestDeclareLength(t *testing.T) {
+	a := assert.New(t)
+
+	tmp, err := ioutil.TempDir("", "tusd-filestore-declare-length-")
+	a.NoError(err)
+
+	store := FileStore{tmp}
+
+	originalInfo := tusd.FileInfo{Size: 0, SizeIsDeferred: true}
+	id, err := store.NewUpload(originalInfo)
+	a.NoError(err)
+
+	info, err := store.GetInfo(id)
+	a.Equal(info.Size, originalInfo.Size)
+	a.Equal(info.SizeIsDeferred, originalInfo.SizeIsDeferred)
+
+	size := int64(100)
+	err = store.DeclareLength(id, size)
+	a.NoError(err)
+
+	updatedInfo, err := store.GetInfo(id)
+	a.Equal(updatedInfo.Size, size)
+	a.False(updatedInfo.SizeIsDeferred)
+}


@@ -4,10 +4,9 @@
 package tusd_test

 import (
-	io "io"
-
 	gomock "github.com/golang/mock/gomock"
 	tusd "github.com/tus/tusd"
+	io "io"
 )

 // Mock of FullDataStore interface
@@ -105,6 +104,16 @@ func (_mr *_MockFullDataStoreRecorder) FinishUpload(arg0 interface{}) *gomock.Ca
 	return _mr.mock.ctrl.RecordCall(_mr.mock, "FinishUpload", arg0)
 }

+func (_m *MockFullDataStore) DeclareLength(id string, length int64) error {
+	ret := _m.ctrl.Call(_m, "DeclareLength", id, length)
+	ret0, _ := ret[0].(error)
+	return ret0
+}
+
+func (_mr *_MockFullDataStoreRecorder) DeclareLength(arg0, arg1 interface{}) *gomock.Call {
+	return _mr.mock.ctrl.RecordCall(_mr.mock, "DeclareLength", arg0, arg1)
+}
+
 // Mock of Locker interface
 type MockLocker struct {
 	ctrl *gomock.Controller


@@ -81,4 +81,52 @@ func TestHead(t *testing.T) {
 			t.Errorf("Expected empty body for failed HEAD request")
 		}
 	})
+
+	SubTest(t, "DeferLengthHeader", func(t *testing.T, store *MockFullDataStore) {
+		store.EXPECT().GetInfo("yes").Return(FileInfo{
+			SizeIsDeferred: true,
+			Size:           0,
+		}, nil)
+
+		handler, _ := NewHandler(Config{
+			DataStore: store,
+		})
+
+		(&httpTest{
+			Method: "HEAD",
+			URL:    "yes",
+			ReqHeader: map[string]string{
+				"Tus-Resumable": "1.0.0",
+			},
+			Code: http.StatusOK,
+			ResHeader: map[string]string{
+				"Upload-Defer-Length": "1",
+			},
+		}).Run(handler, t)
+	})
+
+	SubTest(t, "NoDeferLengthHeader", func(t *testing.T, store *MockFullDataStore) {
+		gomock.InOrder(
+			store.EXPECT().GetInfo("yes").Return(FileInfo{
+				SizeIsDeferred: false,
+				Size:           10,
+			}, nil),
+		)
+
+		handler, _ := NewHandler(Config{
+			DataStore: store,
+		})
+
+		(&httpTest{
+			Method: "HEAD",
+			URL:    "yes",
+			ReqHeader: map[string]string{
+				"Tus-Resumable": "1.0.0",
+			},
+			Code: http.StatusOK,
+			ResHeader: map[string]string{
+				"Upload-Defer-Length": "",
+			},
+		}).Run(handler, t)
+	})
 }
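These two subtests pin down the HEAD contract: while the size is deferred the response carries Upload-Defer-Length: 1 and no Upload-Length; once the size is known, only Upload-Length is set. A hedged client-side sketch of reading that contract (illustrative helper, minimal error handling):

```go
package tusclient

import (
	"net/http"
	"strconv"
)

// uploadStatus reports the current offset of a tus upload and whether
// its total length is still deferred, based on the HEAD headers above.
func uploadStatus(uploadURL string) (offset int64, deferred bool, err error) {
	req, err := http.NewRequest("HEAD", uploadURL, nil)
	if err != nil {
		return 0, false, err
	}
	req.Header.Set("Tus-Resumable", "1.0.0")

	res, err := http.DefaultClient.Do(req)
	if err != nil {
		return 0, false, err
	}
	defer res.Body.Close()

	offset, err = strconv.ParseInt(res.Header.Get("Upload-Offset"), 10, 64)
	deferred = res.Header.Get("Upload-Defer-Length") == "1"
	return offset, deferred, err
}
```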


@@ -257,6 +257,132 @@ func TestPatch(t *testing.T) {
 		}).Run(handler, t)
 	})

+	SubTest(t, "DeclareLengthOnFinalChunk", func(t *testing.T, store *MockFullDataStore) {
+		gomock.InOrder(
+			store.EXPECT().GetInfo("yes").Return(FileInfo{
+				ID:             "yes",
+				Offset:         5,
+				Size:           0,
+				SizeIsDeferred: true,
+			}, nil),
+			store.EXPECT().DeclareLength("yes", int64(20)),
+			store.EXPECT().WriteChunk("yes", int64(5), NewReaderMatcher("hellothisismore")).Return(int64(15), nil),
+			store.EXPECT().FinishUpload("yes"),
+		)
+
+		handler, _ := NewHandler(Config{
+			DataStore: store,
+			MaxSize:   20,
+		})
+
+		body := strings.NewReader("hellothisismore")
+
+		(&httpTest{
+			Method: "PATCH",
+			URL:    "yes",
+			ReqHeader: map[string]string{
+				"Tus-Resumable": "1.0.0",
+				"Content-Type":  "application/offset+octet-stream",
+				"Upload-Offset": "5",
+				"Upload-Length": "20",
+			},
+			ReqBody: body,
+			Code:    http.StatusNoContent,
+			ResHeader: map[string]string{
+				"Upload-Offset": "20",
+			},
+		}).Run(handler, t)
+	})
+
+	SubTest(t, "DeclareLengthAfterFinalChunk", func(t *testing.T, store *MockFullDataStore) {
+		gomock.InOrder(
+			store.EXPECT().GetInfo("yes").Return(FileInfo{
+				ID:             "yes",
+				Offset:         20,
+				Size:           0,
+				SizeIsDeferred: true,
+			}, nil),
+			store.EXPECT().DeclareLength("yes", int64(20)),
+			store.EXPECT().FinishUpload("yes"),
+		)
+
+		handler, _ := NewHandler(Config{
+			DataStore: store,
+			MaxSize:   20,
+		})
+
+		(&httpTest{
+			Method: "PATCH",
+			URL:    "yes",
+			ReqHeader: map[string]string{
+				"Tus-Resumable": "1.0.0",
+				"Content-Type":  "application/offset+octet-stream",
+				"Upload-Offset": "20",
+				"Upload-Length": "20",
+			},
+			ReqBody:   nil,
+			Code:      http.StatusNoContent,
+			ResHeader: map[string]string{},
+		}).Run(handler, t)
+	})
+
+	SubTest(t, "DeclareLengthOnNonFinalChunk", func(t *testing.T, store *MockFullDataStore) {
+		gomock.InOrder(
+			store.EXPECT().GetInfo("yes").Return(FileInfo{
+				ID:             "yes",
+				Offset:         5,
+				Size:           0,
+				SizeIsDeferred: true,
+			}, nil),
+			store.EXPECT().DeclareLength("yes", int64(20)),
+			store.EXPECT().WriteChunk("yes", int64(5), NewReaderMatcher("hello")).Return(int64(5), nil),
+			store.EXPECT().GetInfo("yes").Return(FileInfo{
+				ID:             "yes",
+				Offset:         10,
+				Size:           20,
+				SizeIsDeferred: false,
+			}, nil),
+			store.EXPECT().WriteChunk("yes", int64(10), NewReaderMatcher("thisismore")).Return(int64(10), nil),
+			store.EXPECT().FinishUpload("yes"),
+		)
+
+		handler, _ := NewHandler(Config{
+			DataStore: store,
+			MaxSize:   20,
+		})
+
+		(&httpTest{
+			Method: "PATCH",
+			URL:    "yes",
+			ReqHeader: map[string]string{
+				"Tus-Resumable": "1.0.0",
+				"Content-Type":  "application/offset+octet-stream",
+				"Upload-Offset": "5",
+				"Upload-Length": "20",
+			},
+			ReqBody: strings.NewReader("hello"),
+			Code:    http.StatusNoContent,
+			ResHeader: map[string]string{
+				"Upload-Offset": "10",
+			},
+		}).Run(handler, t)
+
+		(&httpTest{
+			Method: "PATCH",
+			URL:    "yes",
+			ReqHeader: map[string]string{
+				"Tus-Resumable": "1.0.0",
+				"Content-Type":  "application/offset+octet-stream",
+				"Upload-Offset": "10",
+			},
+			ReqBody: strings.NewReader("thisismore"),
+			Code:    http.StatusNoContent,
+			ResHeader: map[string]string{
+				"Upload-Offset": "20",
+			},
+		}).Run(handler, t)
+	})
+
 	SubTest(t, "Locker", func(t *testing.T, store *MockFullDataStore) {
 		ctrl := gomock.NewController(t)
 		defer ctrl.Finish()


@@ -123,6 +123,54 @@ func TestPost(t *testing.T) {
 		}).Run(handler, t)
 	})

+	SubTest(t, "UploadLengthAndUploadDeferLengthFail", func(t *testing.T, store *MockFullDataStore) {
+		handler, _ := NewHandler(Config{
+			DataStore: store,
+		})
+
+		(&httpTest{
+			Method: "POST",
+			URL:    "",
+			ReqHeader: map[string]string{
+				"Tus-Resumable":       "1.0.0",
+				"Upload-Length":       "10",
+				"Upload-Defer-Length": "1",
+			},
+			Code: http.StatusBadRequest,
+		}).Run(handler, t)
+	})
+
+	SubTest(t, "NeitherUploadLengthNorUploadDeferLengthFail", func(t *testing.T, store *MockFullDataStore) {
+		handler, _ := NewHandler(Config{
+			DataStore: store,
+		})
+
+		(&httpTest{
+			Method: "POST",
+			URL:    "",
+			ReqHeader: map[string]string{
+				"Tus-Resumable": "1.0.0",
+			},
+			Code: http.StatusBadRequest,
+		}).Run(handler, t)
+	})
+
+	SubTest(t, "InvalidUploadDeferLengthFail", func(t *testing.T, store *MockFullDataStore) {
+		handler, _ := NewHandler(Config{
+			DataStore: store,
+		})
+
+		(&httpTest{
+			Method: "POST",
+			URL:    "",
+			ReqHeader: map[string]string{
+				"Tus-Resumable":       "1.0.0",
+				"Upload-Defer-Length": "bad",
+			},
+			Code: http.StatusBadRequest,
+		}).Run(handler, t)
+	})
+
 	SubTest(t, "ForwardHeaders", func(t *testing.T, store *MockFullDataStore) {
 		SubTest(t, "IgnoreXForwarded", func(t *testing.T, store *MockFullDataStore) {
 			store.EXPECT().NewUpload(FileInfo{


@@ -271,6 +271,7 @@ func (store S3Store) WriteChunk(id string, offset int64, src io.Reader) (int64,
 			return bytesUploaded, nil
 		}

+		if !info.SizeIsDeferred {
 			if (size - offset) <= optimalPartSize {
 				if (size - offset) != n {
 					return bytesUploaded, nil
@@ -278,6 +279,7 @@ func (store S3Store) WriteChunk(id string, offset int64, src io.Reader) (int64,
 			} else if n < optimalPartSize {
 				return bytesUploaded, nil
 			}
+		}

 		// Seek to the beginning of the file
 		file.Seek(0, 0)
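As the guard reads, the early returns that hold back undersized parts only make sense when the total size is known; with a deferred length there is no bound to compare against, so every buffered part gets uploaded. A standalone sketch of that decision, using illustrative names rather than the S3Store internals:

```go
package main

import "fmt"

// shouldUploadPart mirrors the guard added above: with a known total
// size, undersized non-final parts are held back; with a deferred
// size there is no way to tell, so the part is always uploaded.
func shouldUploadPart(sizeIsDeferred bool, size, offset, n, optimalPartSize int64) bool {
	if sizeIsDeferred {
		return true
	}
	remaining := size - offset
	if remaining <= optimalPartSize {
		// Final part: upload only if we actually buffered all of it.
		return remaining == n
	}
	// Non-final part: upload only once it reaches the optimal size.
	return n >= optimalPartSize
}

func main() {
	fmt.Println(shouldUploadPart(true, 0, 0, 100, 5*1024*1024))  // true: deferred, always ship
	fmt.Println(shouldUploadPart(false, 10, 0, 5, 5*1024*1024))  // false: final part incomplete
	fmt.Println(shouldUploadPart(false, 10, 0, 10, 5*1024*1024)) // true: full final part
}
```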


@@ -52,8 +52,8 @@ func TestNewUpload(t *testing.T) {
 		s3obj.EXPECT().PutObject(&s3.PutObjectInput{
 			Bucket: aws.String("bucket"),
 			Key:    aws.String("uploadId.info"),
-			Body:   bytes.NewReader([]byte(`{"ID":"uploadId+multipartId","Size":500,"Offset":0,"MetaData":{"bar":"menü","foo":"hello"},"IsPartial":false,"IsFinal":false,"PartialUploads":null}`)),
-			ContentLength: aws.Int64(int64(148)),
+			Body:   bytes.NewReader([]byte(`{"ID":"uploadId+multipartId","Size":500,"SizeIsDeferred":false,"Offset":0,"MetaData":{"bar":"menü","foo":"hello"},"IsPartial":false,"IsFinal":false,"PartialUploads":null}`)),
+			ContentLength: aws.Int64(int64(171)),
 		}),
 	)
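The two changed literals are consistent with each other: the serialized info gains the 23-byte fragment "SizeIsDeferred":false, (including the trailing comma), and 148 + 23 = 171.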


@@ -5,6 +5,7 @@ import (
 	"errors"
 	"io"
 	"log"
+	"math"
 	"net"
 	"net/http"
 	"os"
@@ -15,6 +16,8 @@ import (
 	"time"
 )

+const UploadLengthDeferred = "1"
+
 var (
 	reExtractFileID = regexp.MustCompile(`([^/]+)\/?$`)
 	reForwardedHost = regexp.MustCompile(`host=([^,]+)`)
@@ -60,6 +63,8 @@ var (
 	ErrUploadNotFinished = NewHTTPError(errors.New("one of the partial uploads is not finished"), http.StatusBadRequest)
 	ErrInvalidConcat     = NewHTTPError(errors.New("invalid Upload-Concat header"), http.StatusBadRequest)
 	ErrModifyFinal       = NewHTTPError(errors.New("modifying a final upload is not allowed"), http.StatusForbidden)
+	ErrUploadLengthAndUploadDeferLength = NewHTTPError(errors.New("provided both Upload-Length and Upload-Defer-Length"), http.StatusBadRequest)
+	ErrInvalidUploadDeferLength         = NewHTTPError(errors.New("invalid Upload-Defer-Length header"), http.StatusBadRequest)
 )

 // UnroutedHandler exposes methods to handle requests as part of the tus protocol,
@@ -122,6 +127,9 @@ func NewUnroutedHandler(config Config) (*UnroutedHandler, error) {
 	if config.StoreComposer.UsesConcater {
 		extensions += ",concatenation"
 	}
+	if config.StoreComposer.UsesLengthDeferrer {
+		extensions += ",creation-defer-length"
+	}

 	handler := &UnroutedHandler{
 		config: config,
@@ -241,6 +249,7 @@ func (handler *UnroutedHandler) PostFile(w http.ResponseWriter, r *http.Request)
 	// uploads the size is sum of all sizes of these files (no need for
 	// Upload-Length header)
 	var size int64
+	var sizeIsDeferred bool
 	if isFinal {
 		// A final upload must not contain a chunk within the creation request
 		if containsChunk {
@@ -254,9 +263,11 @@ func (handler *UnroutedHandler) PostFile(w http.ResponseWriter, r *http.Request)
 			return
 		}
 	} else {
-		size, err = strconv.ParseInt(r.Header.Get("Upload-Length"), 10, 64)
-		if err != nil || size < 0 {
-			handler.sendError(w, r, ErrInvalidUploadLength)
+		uploadLengthHeader := r.Header.Get("Upload-Length")
+		uploadDeferLengthHeader := r.Header.Get("Upload-Defer-Length")
+		size, sizeIsDeferred, err = handler.validateNewUploadLengthHeaders(uploadLengthHeader, uploadDeferLengthHeader)
+		if err != nil {
+			handler.sendError(w, r, err)
 			return
 		}
 	}
@@ -272,6 +283,7 @@ func (handler *UnroutedHandler) PostFile(w http.ResponseWriter, r *http.Request)
 	info := FileInfo{
 		Size:           size,
+		SizeIsDeferred: sizeIsDeferred,
 		MetaData:       meta,
 		IsPartial:      isPartial,
 		IsFinal:        isFinal,
@@ -325,7 +337,7 @@ func (handler *UnroutedHandler) PostFile(w http.ResponseWriter, r *http.Request)
 			handler.sendError(w, r, err)
 			return
 		}
-	} else if size == 0 {
+	} else if !sizeIsDeferred && size == 0 {
 		// Directly finish the upload if the upload is empty (i.e. has a size of 0).
 		// This statement is in an else-if block to avoid causing duplicate calls
 		// to finishUploadIfComplete if an upload is empty and contains a chunk.
@@ -380,8 +392,13 @@ func (handler *UnroutedHandler) HeadFile(w http.ResponseWriter, r *http.Request)
 		w.Header().Set("Upload-Metadata", SerializeMetadataHeader(info.MetaData))
 	}

+	if info.SizeIsDeferred {
+		w.Header().Set("Upload-Defer-Length", UploadLengthDeferred)
+	} else {
+		w.Header().Set("Upload-Length", strconv.FormatInt(info.Size, 10))
+	}
+
 	w.Header().Set("Cache-Control", "no-store")
-	w.Header().Set("Upload-Length", strconv.FormatInt(info.Size, 10))
 	w.Header().Set("Upload-Offset", strconv.FormatInt(info.Offset, 10))
 	handler.sendResp(w, r, http.StatusOK)
 }
@@ -437,12 +454,35 @@ func (handler *UnroutedHandler) PatchFile(w http.ResponseWriter, r *http.Request
 	}

 	// Do not proxy the call to the data store if the upload is already completed
-	if info.Offset == info.Size {
+	if !info.SizeIsDeferred && info.Offset == info.Size {
 		w.Header().Set("Upload-Offset", strconv.FormatInt(offset, 10))
 		handler.sendResp(w, r, http.StatusNoContent)
 		return
 	}
+	if r.Header.Get("Upload-Length") != "" {
+		if handler.composer.UsesLengthDeferrer {
+			if info.SizeIsDeferred {
+				uploadLength, err := strconv.ParseInt(r.Header.Get("Upload-Length"), 10, 64)
+				if err != nil || uploadLength < 0 || uploadLength < info.Offset || uploadLength > handler.config.MaxSize {
+					handler.sendError(w, r, ErrInvalidUploadLength)
+					return
+				}
+				info.Size = uploadLength
+				info.SizeIsDeferred = false
+				if err := handler.composer.LengthDeferrer.DeclareLength(id, info.Size); err != nil {
+					handler.sendError(w, r, err)
+					return
+				}
+			} else {
+				handler.sendError(w, r, ErrInvalidUploadLength)
+				return
+			}
+		} else {
+			handler.sendError(w, r, ErrNotImplemented)
+			return
+		}
+	}

 	if err := handler.writeChunk(id, info, w, r); err != nil {
 		handler.sendError(w, r, err)
 		return
@@ -460,11 +500,23 @@ func (handler *UnroutedHandler) writeChunk(id string, info FileInfo, w http.Resp
 	offset := info.Offset

 	// Test if this upload fits into the file's size
-	if offset+length > info.Size {
+	if !info.SizeIsDeferred && offset+length > info.Size {
 		return ErrSizeExceeded
 	}

 	maxSize := info.Size - offset
+	// If the upload's length is deferred and the PATCH request does not contain the Content-Length
+	// header (which is allowed if 'Transfer-Encoding: chunked' is used), we still need to set limits for
+	// the body size.
+	if info.SizeIsDeferred {
+		if handler.config.MaxSize > 0 {
+			// Ensure that the upload does not exceed the maximum upload size
+			maxSize = handler.config.MaxSize - offset
+		} else {
+			// If no upload limit is given, we allow arbitrary sizes
+			maxSize = math.MaxInt64
+		}
+	}
 	if length > 0 {
 		maxSize = length
 	}
@@ -507,7 +559,7 @@ func (handler *UnroutedHandler) writeChunk(id string, info FileInfo, w http.Resp
 // function and send the necessary message on the CompleteUpload channel.
 func (handler *UnroutedHandler) finishUploadIfComplete(info FileInfo) error {
 	// If the upload is completed, ...
-	if info.Offset == info.Size {
+	if !info.SizeIsDeferred && info.Offset == info.Size {
 		// ... allow custom mechanism to finish and cleanup the upload
 		if handler.composer.UsesFinisher {
 			if err := handler.composer.Finisher.FinishUpload(info.ID); err != nil {
@@ -836,7 +888,7 @@ func (handler *UnroutedHandler) sizeOfUploads(ids []string) (size int64, err err
 			return size, err
 		}

-		if info.Offset != info.Size {
+		if info.SizeIsDeferred || info.Offset != info.Size {
 			err = ErrUploadNotFinished
 			return size, err
 		}
@@ -847,6 +899,31 @@ func (handler *UnroutedHandler) sizeOfUploads(ids []string) (size int64, err err
 	return
 }

+// Verify that the Upload-Length and Upload-Defer-Length headers are acceptable for creating
+// a new upload.
+func (handler *UnroutedHandler) validateNewUploadLengthHeaders(uploadLengthHeader string, uploadDeferLengthHeader string) (uploadLength int64, uploadLengthDeferred bool, err error) {
+	haveBothLengthHeaders := uploadLengthHeader != "" && uploadDeferLengthHeader != ""
+	haveInvalidDeferHeader := uploadDeferLengthHeader != "" && uploadDeferLengthHeader != UploadLengthDeferred
+	lengthIsDeferred := uploadDeferLengthHeader == UploadLengthDeferred
+
+	if lengthIsDeferred && !handler.composer.UsesLengthDeferrer {
+		err = ErrNotImplemented
+	} else if haveBothLengthHeaders {
+		err = ErrUploadLengthAndUploadDeferLength
+	} else if haveInvalidDeferHeader {
+		err = ErrInvalidUploadDeferLength
+	} else if lengthIsDeferred {
+		uploadLengthDeferred = true
+	} else {
+		uploadLength, err = strconv.ParseInt(uploadLengthHeader, 10, 64)
+		if err != nil || uploadLength < 0 {
+			err = ErrInvalidUploadLength
+		}
+	}
+
+	return
+}
+
 // ParseMetadataHeader parses the Upload-Metadata header as defined in the
 // File Creation extension.
 // e.g. Upload-Metadata: name bHVucmpzLnBuZw==,type aW1hZ2UvcG5n
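Spelled out, the helper's decision table is:

Upload-Length  Upload-Defer-Length  Result
"10"           (absent)             uploadLength = 10
(absent)       "1"                  length deferred (ErrNotImplemented if the store lacks a LengthDeferrer)
"10"           "1"                  ErrUploadLengthAndUploadDeferLength
(absent)       "bad"                ErrInvalidUploadDeferLength
(absent)       (absent)             ErrInvalidUploadLength (the empty header fails ParseInt)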


@@ -27,6 +27,7 @@ type FullDataStore interface {
 	tusd.ConcaterDataStore
 	tusd.GetReaderDataStore
 	tusd.FinisherDataStore
+	tusd.LengthDeferrerDataStore
 }

 type Locker interface {
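Taken together, the change means a server built on the file store advertises creation-defer-length automatically. A hedged end-to-end sketch; the import paths follow the tusd 0.x layout of this era, and the upload directory, max size, and port are invented for the example:

```go
package main

import (
	"log"
	"net/http"

	"github.com/tus/tusd"
	"github.com/tus/tusd/filestore"
)

func main() {
	store := filestore.New("./uploads")

	composer := tusd.NewStoreComposer()
	store.UseIn(composer) // registers LengthDeferrer alongside the other capabilities

	handler, err := tusd.NewHandler(tusd.Config{
		BasePath:      "/files/",
		StoreComposer: composer,
		MaxSize:       1024 * 1024 * 1024, // bounds deferred-length uploads, see writeChunk
	})
	if err != nil {
		log.Fatal(err)
	}

	// An OPTIONS request on /files/ now lists creation-defer-length
	// in the Tus-Extension header.
	http.Handle("/files/", http.StripPrefix("/files/", handler))
	log.Fatal(http.ListenAndServe(":1080", nil))
}
```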