package filestore

import (
	"io"
	"io/ioutil"
	"os"
	"strings"
	"testing"

	"github.com/stretchr/testify/assert"

	"github.com/tus/tusd"
)

// Test interface implementation of Filestore
var _ tusd.DataStore = FileStore{}
var _ tusd.GetReaderDataStore = FileStore{}
var _ tusd.TerminaterDataStore = FileStore{}
var _ tusd.LockerDataStore = FileStore{}
var _ tusd.ConcaterDataStore = FileStore{}

func TestFilestore(t *testing.T) {
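	// The whole upload lifecycle is exercised against a fresh temporary
	// directory: create, inspect, write, read, and finally terminate.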
	a := assert.New(t)

	tmp, err := ioutil.TempDir("", "tusd-filestore-")
	a.NoError(err)

	store := FileStore{tmp}

	// Create new upload
	id, err := store.NewUpload(tusd.FileInfo{
		Size: 42,
		MetaData: map[string]string{
			"hello": "world",
		},
	})
	a.NoError(err)
	a.NotEqual("", id)

	// Check info without writing
	info, err := store.GetInfo(id)
	a.NoError(err)
	a.EqualValues(42, info.Size)
	a.EqualValues(0, info.Offset)
	a.Equal(tusd.MetaData{"hello": "world"}, info.MetaData)

	// Write data to upload
|
2015-03-23 18:02:12 +00:00
|
|
|
bytesWritten, err := store.WriteChunk(id, 0, strings.NewReader("hello world"))
|
2016-01-20 15:40:13 +00:00
|
|
|
a.NoError(err)
|
|
|
|
a.EqualValues(len("hello world"), bytesWritten)
|
2015-02-12 15:06:15 +00:00
|
|
|
|
|
|
|
// Check new offset
|
|
|
|
info, err = store.GetInfo(id)
|
2016-01-20 15:40:13 +00:00
|
|
|
a.NoError(err)
|
|
|
|
a.EqualValues(42, info.Size)
|
|
|
|
a.EqualValues(11, info.Offset)
|
2015-02-12 15:06:15 +00:00
|
|
|
|
|
|
|
// Read content
	reader, err := store.GetReader(id)
	a.NoError(err)

	content, err := ioutil.ReadAll(reader)
	a.NoError(err)
	a.Equal("hello world", string(content))
	reader.(io.Closer).Close()

	// Terminate upload
	a.NoError(store.Terminate(id))

	// Test if upload is deleted
	_, err = store.GetInfo(id)
	a.True(os.IsNotExist(err))
}

func TestFileLocker(t *testing.T) {
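	// FileStore itself satisfies tusd.LockerDataStore (see the assertions
	// above), so it is used directly as the lock provider here.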
	a := assert.New(t)

	dir, err := ioutil.TempDir("", "tusd-file-locker")
	a.NoError(err)

	var locker tusd.LockerDataStore
	locker = FileStore{dir}
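
	// A second LockUpload on a held lock must fail with ErrFileLocked,
	// while UnlockUpload is expected to succeed even when called twice.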
	a.NoError(locker.LockUpload("one"))
	a.Equal(tusd.ErrFileLocked, locker.LockUpload("one"))
	a.NoError(locker.UnlockUpload("one"))
	a.NoError(locker.UnlockUpload("one"))
}

func TestConcatUploads(t *testing.T) {
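	// ConcatUploads merges several finished partial uploads into one final
	// upload whose offset ends up equal to its total size.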
	a := assert.New(t)

	tmp, err := ioutil.TempDir("", "tusd-filestore-concat-")
	a.NoError(err)

	store := FileStore{tmp}

	// Create new upload to hold concatenated upload
	finId, err := store.NewUpload(tusd.FileInfo{Size: 9})
	a.NoError(err)
	a.NotEqual("", finId)

	// Create three uploads for concatenating
	ids := make([]string, 3)
	contents := []string{
		"abc",
		"def",
		"ghi",
	}
	for i := 0; i < 3; i++ {
		id, err := store.NewUpload(tusd.FileInfo{Size: 3})
		a.NoError(err)

		n, err := store.WriteChunk(id, 0, strings.NewReader(contents[i]))
		a.NoError(err)
		a.EqualValues(3, n)

		ids[i] = id
	}
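
	// Concatenate the three partial uploads into the final upload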
	err = store.ConcatUploads(finId, ids)
	a.NoError(err)

	// Check offset
	info, err := store.GetInfo(finId)
	a.NoError(err)
	a.EqualValues(9, info.Size)
	a.EqualValues(9, info.Offset)

	// Read content
	reader, err := store.GetReader(finId)
	a.NoError(err)

	content, err := ioutil.ReadAll(reader)
	a.NoError(err)
	a.Equal("abcdefghi", string(content))
	reader.(io.Closer).Close()
}