fix: don't rely on content length; instead use a basic heuristic by reading 1 byte past the max upload. If we haven't hit the limit and the sizes don't match, then error; otherwise take an optimistic stance

This commit is contained in:
Derrick Hammer 2024-03-05 12:38:41 -05:00
parent c0fa8d4ea3
commit a5c1356847
Signed by: pcfreak30
GPG Key ID: C997C339BE476FF2
1 changed file with 7 additions and 13 deletions

View File

@ -1085,31 +1085,25 @@ func (s *S5API) pinEntity(ctx context.Context, userId uint, cid *encoding.CID) e
}
}(res.Body)
contentLengthStr := res.Header.Get("Content-Length")
if contentLengthStr == "" {
err = dlUriProvider.Downvote(location)
if err != nil {
s.logger.Error("Error downvoting location", zap.Error(err))
return nil, false
}
return nil, false
}
contentLength, err := strconv.ParseInt(contentLengthStr, 10, 64)
// Use io.LimitedReader to limit the download size and attempt to detect if there's more data.
limitedReader := &io.LimitedReader{R: res.Body, N: int64(s.config.Config().Core.PostUploadLimit + 1)}
data, err := io.ReadAll(limitedReader)
if err != nil {
return nil, false
}
if !isCidManifest(cid) {
if uint64(contentLength) != cid.Size {
if limitedReader.N >= 0 && uint64(len(data)) != cid.Size {
return nil, false
}
} else {
data, err := io.ReadAll(res.Body)
dataCont, err := io.ReadAll(res.Body)
if err != nil {
return nil, false
}
data = append(data, dataCont...)
proof, err := s.storage.HashObject(ctx, bytes.NewReader(data))
if err != nil {
return nil, false