2017-09-16 18:49:09 +00:00
|
|
|
// Package gcsstore provides a Google cloud storage based backend.
|
|
|
|
//
|
|
|
|
// GCSStore is a storage backend that uses the GCSAPI interface in order to store uploads
|
|
|
|
// on GCS. Uploads will be represented by two files in GCS; the data file will be stored
|
|
|
|
// as an extensionless object [uid] and the JSON info file will be stored as [uid].info.
|
|
|
|
// In order to store uploads on GCS, make sure to specify the appropriate Google service
|
|
|
|
// account file path in the GCS_SERVICE_ACCOUNT_FILE environment variable. Also make sure that
|
|
|
|
// this service account file has the "https://www.googleapis.com/auth/devstorage.read_write"
|
|
|
|
// scope enabled so you can read and write data to the storage buckets associated with the
|
|
|
|
// service account file.
|
|
|
|
package gcsstore
|
|
|
|
|
|
|
|
import (
|
|
|
|
"bytes"
|
|
|
|
"encoding/json"
|
|
|
|
"fmt"
|
|
|
|
"io"
|
|
|
|
"strconv"
|
|
|
|
"strings"
|
2018-07-25 14:25:59 +00:00
|
|
|
"sync"
|
|
|
|
"sync/atomic"
|
2017-09-16 18:49:09 +00:00
|
|
|
|
2019-05-22 13:54:14 +00:00
|
|
|
"golang.org/x/net/context"
|
|
|
|
|
2018-05-25 10:14:16 +00:00
|
|
|
"cloud.google.com/go/storage"
|
2017-09-16 18:49:09 +00:00
|
|
|
"github.com/tus/tusd"
|
|
|
|
"github.com/tus/tusd/uid"
|
|
|
|
)
|
|
|
|
|
|
|
|
// GCSStore is a storage backend for tusd that keeps uploads in a Google
// Cloud Storage bucket. See the tusd.DataStore interface for documentation
// about the different methods.
type GCSStore struct {
	// Bucket specifies the GCS bucket that uploads will be stored in.
	Bucket string

	// ObjectPrefix is prepended to the name of each GCS object that is created.
	// It can be used to create a pseudo-directory structure in the bucket,
	// e.g. "path/to/my/uploads".
	ObjectPrefix string

	// Service specifies an interface used to communicate with the Google
	// cloud storage backend. Implementation can be seen in gcsservice file.
	Service GCSAPI
}
|
|
|
|
|
|
|
|
// New constructs a new GCS storage backend using the supplied GCS bucket name
|
|
|
|
// and service object.
|
2019-05-22 13:57:32 +00:00
|
|
|
func New(bucket string, objPrefix string, service GCSAPI) GCSStore {
|
2017-09-16 18:49:09 +00:00
|
|
|
return GCSStore{
|
2019-05-22 13:57:32 +00:00
|
|
|
Bucket: bucket,
|
|
|
|
ObjectPrefix: objPrefix,
|
|
|
|
Service: service,
|
2017-09-16 18:49:09 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// UseIn registers this store's abilities with the given composer: core
// storage, terminating, finishing and reading uploads.
func (store GCSStore) UseIn(composer *tusd.StoreComposer) {
	composer.UseCore(store)
	composer.UseTerminater(store)
	composer.UseFinisher(store)
	composer.UseGetReader(store)
}
|
|
|
|
|
|
|
|
func (store GCSStore) NewUpload(info tusd.FileInfo) (id string, err error) {
|
|
|
|
if info.ID == "" {
|
|
|
|
info.ID = uid.Uid()
|
|
|
|
}
|
|
|
|
|
2018-07-25 14:25:59 +00:00
|
|
|
ctx := context.Background()
|
2019-05-22 15:04:17 +00:00
|
|
|
err = store.writeInfo(ctx, store.keyWithPrefix(info.ID), info)
|
2017-09-16 18:49:09 +00:00
|
|
|
if err != nil {
|
|
|
|
return info.ID, err
|
|
|
|
}
|
|
|
|
|
|
|
|
return info.ID, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
func (store GCSStore) WriteChunk(id string, offset int64, src io.Reader) (int64, error) {
|
2019-05-22 15:04:17 +00:00
|
|
|
prefix := fmt.Sprintf("%s_", store.keyWithPrefix(id))
|
2017-09-16 18:49:09 +00:00
|
|
|
filterParams := GCSFilterParams{
|
|
|
|
Bucket: store.Bucket,
|
|
|
|
Prefix: prefix,
|
|
|
|
}
|
|
|
|
|
2018-07-25 14:25:59 +00:00
|
|
|
ctx := context.Background()
|
|
|
|
names, err := store.Service.FilterObjects(ctx, filterParams)
|
2017-09-16 18:49:09 +00:00
|
|
|
if err != nil {
|
|
|
|
return 0, err
|
|
|
|
}
|
|
|
|
|
|
|
|
maxIdx := -1
|
|
|
|
|
|
|
|
for _, name := range names {
|
|
|
|
split := strings.Split(name, "_")
|
|
|
|
idx, err := strconv.Atoi(split[len(split)-1])
|
|
|
|
if err != nil {
|
|
|
|
return 0, err
|
|
|
|
}
|
|
|
|
|
|
|
|
if idx > maxIdx {
|
|
|
|
maxIdx = idx
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-05-22 15:04:17 +00:00
|
|
|
cid := fmt.Sprintf("%s_%d", store.keyWithPrefix(id), maxIdx+1)
|
2017-09-16 18:49:09 +00:00
|
|
|
objectParams := GCSObjectParams{
|
|
|
|
Bucket: store.Bucket,
|
|
|
|
ID: cid,
|
|
|
|
}
|
|
|
|
|
2018-07-25 14:25:59 +00:00
|
|
|
n, err := store.Service.WriteObject(ctx, objectParams, src)
|
2017-09-16 18:49:09 +00:00
|
|
|
if err != nil {
|
|
|
|
return 0, err
|
|
|
|
}
|
|
|
|
|
|
|
|
return n, err
|
|
|
|
}
|
|
|
|
|
2018-07-25 14:25:59 +00:00
|
|
|
// CONCURRENT_SIZE_REQUESTS bounds how many GetObjectSize requests GetInfo
// issues to GCS in parallel when summing up the sizes of an upload's objects.
const CONCURRENT_SIZE_REQUESTS = 32
|
|
|
|
|
2017-09-16 18:49:09 +00:00
|
|
|
func (store GCSStore) GetInfo(id string) (tusd.FileInfo, error) {
|
|
|
|
info := tusd.FileInfo{}
|
2019-05-22 15:04:17 +00:00
|
|
|
i := fmt.Sprintf("%s.info", store.keyWithPrefix(id))
|
2017-09-16 18:49:09 +00:00
|
|
|
|
|
|
|
params := GCSObjectParams{
|
|
|
|
Bucket: store.Bucket,
|
|
|
|
ID: i,
|
|
|
|
}
|
|
|
|
|
2018-07-25 16:51:07 +00:00
|
|
|
ctx := context.Background()
|
2018-07-25 14:25:59 +00:00
|
|
|
r, err := store.Service.ReadObject(ctx, params)
|
2017-09-16 18:49:09 +00:00
|
|
|
if err != nil {
|
2018-05-25 10:14:16 +00:00
|
|
|
if err == storage.ErrObjectNotExist {
|
|
|
|
return info, tusd.ErrNotFound
|
|
|
|
}
|
2017-09-16 18:49:09 +00:00
|
|
|
return info, err
|
|
|
|
}
|
|
|
|
|
|
|
|
buf := make([]byte, r.Size())
|
|
|
|
_, err = r.Read(buf)
|
|
|
|
if err != nil {
|
|
|
|
return info, err
|
|
|
|
}
|
|
|
|
|
|
|
|
if err := json.Unmarshal(buf, &info); err != nil {
|
|
|
|
return info, err
|
|
|
|
}
|
|
|
|
|
2019-05-22 15:04:17 +00:00
|
|
|
prefix := fmt.Sprintf("%s", store.keyWithPrefix(id))
|
2017-09-16 18:49:09 +00:00
|
|
|
filterParams := GCSFilterParams{
|
|
|
|
Bucket: store.Bucket,
|
|
|
|
Prefix: prefix,
|
|
|
|
}
|
|
|
|
|
2018-07-25 14:25:59 +00:00
|
|
|
names, err := store.Service.FilterObjects(ctx, filterParams)
|
2017-09-16 18:49:09 +00:00
|
|
|
if err != nil {
|
|
|
|
return info, err
|
|
|
|
}
|
|
|
|
|
|
|
|
var offset int64 = 0
|
2018-07-25 14:25:59 +00:00
|
|
|
var firstError error = nil
|
|
|
|
var wg sync.WaitGroup
|
|
|
|
|
|
|
|
sem := make(chan struct{}, CONCURRENT_SIZE_REQUESTS)
|
|
|
|
errChan := make(chan error)
|
2018-07-25 16:51:07 +00:00
|
|
|
ctxCancel, cancel := context.WithCancel(ctx)
|
|
|
|
defer cancel()
|
2018-07-25 14:25:59 +00:00
|
|
|
|
|
|
|
go func() {
|
|
|
|
for err := range errChan {
|
|
|
|
if err != context.Canceled && firstError == nil {
|
|
|
|
firstError = err
|
|
|
|
cancel()
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}()
|
|
|
|
|
2017-09-16 18:49:09 +00:00
|
|
|
for _, name := range names {
|
2018-07-25 14:25:59 +00:00
|
|
|
sem <- struct{}{}
|
|
|
|
wg.Add(1)
|
2017-09-16 18:49:09 +00:00
|
|
|
params = GCSObjectParams{
|
|
|
|
Bucket: store.Bucket,
|
|
|
|
ID: name,
|
|
|
|
}
|
|
|
|
|
2018-07-25 16:51:07 +00:00
|
|
|
go func(params GCSObjectParams) {
|
2018-07-25 14:25:59 +00:00
|
|
|
defer func() {
|
|
|
|
<-sem
|
|
|
|
wg.Done()
|
|
|
|
}()
|
2017-09-16 18:49:09 +00:00
|
|
|
|
2018-07-25 16:51:07 +00:00
|
|
|
size, err := store.Service.GetObjectSize(ctxCancel, params)
|
2018-07-25 14:25:59 +00:00
|
|
|
|
|
|
|
if err != nil {
|
|
|
|
errChan <- err
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
atomic.AddInt64(&offset, size)
|
2018-07-25 16:51:07 +00:00
|
|
|
}(params)
|
2018-07-25 14:25:59 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
wg.Wait()
|
|
|
|
close(errChan)
|
|
|
|
|
|
|
|
if firstError != nil {
|
|
|
|
return info, firstError
|
2017-09-16 18:49:09 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
info.Offset = offset
|
2019-05-22 15:04:17 +00:00
|
|
|
err = store.writeInfo(ctx, store.keyWithPrefix(id), info)
|
2017-09-16 18:49:09 +00:00
|
|
|
if err != nil {
|
|
|
|
return info, err
|
|
|
|
}
|
|
|
|
|
|
|
|
return info, nil
|
|
|
|
}
|
|
|
|
|
2018-07-25 14:25:59 +00:00
|
|
|
func (store GCSStore) writeInfo(ctx context.Context, id string, info tusd.FileInfo) error {
|
2017-09-16 18:49:09 +00:00
|
|
|
data, err := json.Marshal(info)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
r := bytes.NewReader(data)
|
|
|
|
|
|
|
|
i := fmt.Sprintf("%s.info", id)
|
|
|
|
params := GCSObjectParams{
|
|
|
|
Bucket: store.Bucket,
|
|
|
|
ID: i,
|
|
|
|
}
|
|
|
|
|
2018-07-25 14:25:59 +00:00
|
|
|
_, err = store.Service.WriteObject(ctx, params, r)
|
2017-09-16 18:49:09 +00:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
func (store GCSStore) FinishUpload(id string) error {
|
2019-05-22 15:04:17 +00:00
|
|
|
prefix := fmt.Sprintf("%s_", store.keyWithPrefix(id))
|
2017-09-16 18:49:09 +00:00
|
|
|
filterParams := GCSFilterParams{
|
|
|
|
Bucket: store.Bucket,
|
|
|
|
Prefix: prefix,
|
|
|
|
}
|
|
|
|
|
2018-07-25 14:25:59 +00:00
|
|
|
ctx := context.Background()
|
|
|
|
names, err := store.Service.FilterObjects(ctx, filterParams)
|
2017-09-16 18:49:09 +00:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
composeParams := GCSComposeParams{
|
|
|
|
Bucket: store.Bucket,
|
2019-05-22 15:04:17 +00:00
|
|
|
Destination: store.keyWithPrefix(id),
|
2017-09-16 18:49:09 +00:00
|
|
|
Sources: names,
|
|
|
|
}
|
|
|
|
|
2018-07-25 14:25:59 +00:00
|
|
|
err = store.Service.ComposeObjects(ctx, composeParams)
|
2017-09-16 18:49:09 +00:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
2018-07-25 14:25:59 +00:00
|
|
|
err = store.Service.DeleteObjectsWithFilter(ctx, filterParams)
|
2017-09-16 18:49:09 +00:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
info, err := store.GetInfo(id)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
objectParams := GCSObjectParams{
|
|
|
|
Bucket: store.Bucket,
|
2019-05-22 15:04:17 +00:00
|
|
|
ID: store.keyWithPrefix(id),
|
2017-09-16 18:49:09 +00:00
|
|
|
}
|
|
|
|
|
2018-07-25 14:25:59 +00:00
|
|
|
err = store.Service.SetObjectMetadata(ctx, objectParams, info.MetaData)
|
2017-09-16 18:49:09 +00:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
func (store GCSStore) Terminate(id string) error {
|
|
|
|
filterParams := GCSFilterParams{
|
|
|
|
Bucket: store.Bucket,
|
2019-05-22 15:04:17 +00:00
|
|
|
Prefix: store.keyWithPrefix(id),
|
2017-09-16 18:49:09 +00:00
|
|
|
}
|
|
|
|
|
2018-07-25 14:25:59 +00:00
|
|
|
ctx := context.Background()
|
|
|
|
err := store.Service.DeleteObjectsWithFilter(ctx, filterParams)
|
2017-09-16 18:49:09 +00:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
func (store GCSStore) GetReader(id string) (io.Reader, error) {
|
|
|
|
params := GCSObjectParams{
|
|
|
|
Bucket: store.Bucket,
|
2019-05-22 15:04:17 +00:00
|
|
|
ID: store.keyWithPrefix(id),
|
2017-09-16 18:49:09 +00:00
|
|
|
}
|
|
|
|
|
2018-07-25 14:25:59 +00:00
|
|
|
ctx := context.Background()
|
|
|
|
r, err := store.Service.ReadObject(ctx, params)
|
2017-09-16 18:49:09 +00:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
return r, nil
|
|
|
|
}
|
2019-05-22 15:04:17 +00:00
|
|
|
|
|
|
|
func (store GCSStore) keyWithPrefix(key string) string {
|
|
|
|
prefix := store.ObjectPrefix
|
|
|
|
if prefix != "" && !strings.HasSuffix(prefix, "/") {
|
|
|
|
prefix += "/"
|
|
|
|
}
|
|
|
|
return prefix + key
|
|
|
|
}
|