portal/bao/bao.go

package bao

import (
	"bytes"
	_ "embed"
	"errors"
	"io"
	"time"

	"github.com/samber/lo"
	"go.uber.org/zap"
"lukechampine.com/blake3/bao"
)
var _ io.ReadCloser = (*Verifier)(nil)
var _ io.WriterAt = (*proofWriter)(nil)
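
// ErrVerifyFailed is returned by Verifier.Read when data from the underlying
// reader does not match the Bao proof.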
var ErrVerifyFailed = errors.New("verification failed")
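
// groupLog is the log2 group size passed to bao.Encode and bao.EncodedSize;
// groupChunks (2^groupLog) is used as the read buffer size and as the group
// argument to bao.VerifyChunk.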
const groupLog = 8
const groupChunks = 1 << groupLog
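
// Verifier wraps an io.ReadCloser and verifies everything it reads against a
// Bao proof, buffering verified bytes until the caller consumes them.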
type Verifier struct {
	r          io.ReadCloser
	proof      Result
	read       uint64
	buffer     *bytes.Buffer
	logger     *zap.Logger
	readTime   []time.Duration
	verifyTime time.Duration
}
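
// Result holds the output of Hash: the BLAKE3 root hash, the Bao outboard
// proof, and the length of the hashed data.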
type Result struct {
	Hash   []byte
	Proof  []byte
	Length uint
}
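
// Read implements io.Reader. It returns only data that has been verified
// against the proof, pulling groupChunks-sized reads from the underlying
// source as needed.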
func (v *Verifier) Read(p []byte) (int, error) {
	// Initial attempt to read from the buffer
	n, err := v.buffer.Read(p)
	if n == len(p) {
		// If the buffer already had enough data to fulfill the request, return immediately
		return n, nil
	} else if err != nil && err != io.EOF {
		// For errors other than EOF, return the error immediately
		return n, err
	}
	buf := make([]byte, groupChunks)

	// Continue reading from the source and verifying until we have enough data or hit an error
	for v.buffer.Len() < len(p)-n {
		readStart := time.Now()
		bytesRead, err := io.ReadFull(v.r, buf)
		if err != nil && err != io.EOF && err != io.ErrUnexpectedEOF {
			return n, err // Return any read error immediately
		}
		readEnd := time.Now()
		v.readTime = append(v.readTime, readEnd.Sub(readStart))

		timeStart := time.Now()
		if bytesRead > 0 {
			if status := bao.VerifyChunk(buf[:bytesRead], v.proof.Proof, groupChunks, v.read, [32]byte(v.proof.Hash)); !status {
				return n, errors.Join(ErrVerifyFailed, err)
			}
			v.read += uint64(bytesRead)
			v.buffer.Write(buf[:bytesRead]) // Append new data to the buffer
		}
		timeEnd := time.Now()
		v.verifyTime += timeEnd.Sub(timeStart)

		if err == io.EOF {
			// If EOF, break the loop as no more data can be read
			break
		}
	}

	if len(v.readTime) > 0 {
		averageReadTime := lo.Reduce(v.readTime, func(acc time.Duration, cur time.Duration, _ int) time.Duration {
			return acc + cur
		}, time.Duration(0)) / time.Duration(len(v.readTime))
		v.logger.Debug("Read time", zap.Duration("average", averageReadTime))
	}
	if groups := v.read / groupChunks; groups > 0 { // avoid division by zero on short reads
		averageVerifyTime := v.verifyTime / time.Duration(groups)
		v.logger.Debug("Verification time", zap.Duration("average", averageVerifyTime))
	}
	// Read any remaining data from the buffer; report EOF once nothing is left.
	additionalBytes, _ := v.buffer.Read(p[n:])
	if n+additionalBytes == 0 {
		return 0, io.EOF
	}
	return n + additionalBytes, nil
}
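
// Close closes the underlying reader.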
func (v *Verifier) Close() error {
	return v.r.Close()
}
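
// Hash computes the BLAKE3 root hash and Bao outboard proof of r, which is
// expected to contain exactly size bytes.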
func Hash(r io.Reader, size uint64) (*Result, error) {
	reader := newSizeReader(r)
	writer := newProofWriter(int(size))
	hash, err := bao.Encode(writer, reader, int64(size), groupLog, true)
	if err != nil {
		return nil, err
	}
	return &Result{
		Hash:   hash[:],
		Proof:  writer.buf,
		Length: uint(size),
	}, nil
}
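
// NewVerifier returns a Verifier that reads from r and checks the data
// against proof, logging read and verification timings to logger.
//
// Illustrative sketch (not from the original source; assumes f is an
// io.ReadSeeker holding size bytes and logger is a *zap.Logger):
//
//	result, _ := Hash(f, size)
//	f.Seek(0, io.SeekStart)
//	v := NewVerifier(io.NopCloser(f), *result, logger)
//	defer v.Close()
//	data, err := io.ReadAll(v) // err wraps ErrVerifyFailed on tampered data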
func NewVerifier(r io.ReadCloser, proof Result, logger *zap.Logger) *Verifier {
	return &Verifier{
		r:      r,
		proof:  proof,
		buffer: new(bytes.Buffer),
		logger: logger,
	}
}
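
// proofWriter collects the Bao outboard encoding produced by bao.Encode into
// a preallocated in-memory buffer.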
type proofWriter struct {
	buf []byte
}
func (p proofWriter) WriteAt(b []byte, off int64) (n int, err error) {
	if copy(p.buf[off:], b) != len(b) {
		panic("bad buffer size")
	}
	return len(b), nil
}
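
// newProofWriter allocates a proofWriter sized for the outboard encoding of
// size bytes of data.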
func newProofWriter(size int) *proofWriter {
	return &proofWriter{
		buf: make([]byte, bao.EncodedSize(size, groupLog, true)),
	}
}
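
// sizeReader wraps an io.Reader and tracks the total number of bytes read
// from it.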
type sizeReader struct {
	reader io.Reader
	read   int64
}
func (s *sizeReader) Read(p []byte) (int, error) {
	n, err := s.reader.Read(p)
	s.read += int64(n)
	return n, err
}
func newSizeReader(r io.Reader) *sizeReader {
	return &sizeReader{
		reader: r,
		read:   0,
	}
}