archiver: convert buffer pool to use sync.Pool

Michael Eischer 2025-10-13 22:08:58 +02:00
parent dd6cb0dd8e
commit 7f6fdcc52c
2 changed files with 14 additions and 27 deletions
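
The previous pool handed out buffers through a bounded channel whose capacity had to be guessed up front (one slot per file worker plus one per GOMAXPROCS-derived blob worker, as removed in the second file below). A sync.Pool needs no such sizing: Get falls back to the pool's New function when the pool is empty, and the runtime may drop idle entries during garbage collection. A minimal standalone illustration of that behavior, not part of the commit (the scratch type is made up for this sketch):

package main

import (
	"fmt"
	"sync"
)

// scratch stands in for the archiver's buffer type; the name is invented
// purely for this illustration.
type scratch struct{ data []byte }

func main() {
	// No capacity to choose: Get calls New when the pool is empty, and the
	// garbage collector is free to reclaim idle objects on its own.
	pool := sync.Pool{New: func() any {
		return &scratch{data: make([]byte, 16)}
	}}

	s := pool.Get().(*scratch) // first call allocates via New
	s.data[0] = 42
	pool.Put(s) // hand the object back for possible reuse

	t := pool.Get().(*scratch) // may or may not be the same object
	fmt.Println(len(t.data))   // 16
}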

View file

@@ -1,5 +1,7 @@
package archiver
import "sync"
// buffer is a reusable buffer. After the buffer has been used, Release should
// be called so the underlying slice is put back into the pool.
type buffer struct {
@@ -14,41 +16,32 @@ func (b *buffer) Release() {
return
}
select {
case pool.ch <- b:
default:
}
pool.pool.Put(b)
}
// bufferPool implements a limited set of reusable buffers.
type bufferPool struct {
ch chan *buffer
pool sync.Pool
defaultSize int
}
// newBufferPool initializes a new buffer pool. The pool stores at most max
// items. New buffers are created with defaultSize. Buffers that have grown
// larger are not put back.
func newBufferPool(max int, defaultSize int) *bufferPool {
func newBufferPool(defaultSize int) *bufferPool {
b := &bufferPool{
ch: make(chan *buffer, max),
defaultSize: defaultSize,
}
b.pool = sync.Pool{New: func() any {
return &buffer{
Data: make([]byte, defaultSize),
pool: b,
}
}}
return b
}
// Get returns a new buffer, either from the pool or newly allocated.
func (pool *bufferPool) Get() *buffer {
select {
case buf := <-pool.ch:
return buf
default:
}
b := &buffer{
Data: make([]byte, pool.defaultSize),
pool: pool,
}
return b
return pool.pool.Get().(*buffer)
}
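
Because the rendered diff above interleaves removed and added lines without +/- markers, here is a consolidated sketch of how the converted pool could read after this commit. It assumes the channel-based lines are the removals and the sync.Pool lines the additions; the Release guard and the buffer struct fields are inferred from context the hunks do not show, so the real file may differ slightly.

package archiver

import "sync"

// buffer is a reusable buffer. After the buffer has been used, Release should
// be called so the underlying slice is put back into the pool.
type buffer struct {
	Data []byte
	pool *bufferPool
}

// Release returns the buffer to its pool. The guard below is inferred from
// context; only its trailing "return" is visible in the hunk.
func (b *buffer) Release() {
	pool := b.pool
	if pool == nil || cap(b.Data) > pool.defaultSize {
		return
	}
	pool.pool.Put(b)
}

// bufferPool hands out reusable buffers backed by a sync.Pool.
type bufferPool struct {
	pool        sync.Pool
	defaultSize int
}

// newBufferPool initializes a new buffer pool. New buffers are created with
// defaultSize; buffers that have grown larger are not put back.
func newBufferPool(defaultSize int) *bufferPool {
	b := &bufferPool{defaultSize: defaultSize}
	b.pool = sync.Pool{New: func() any {
		return &buffer{
			Data: make([]byte, defaultSize),
			pool: b,
		}
	}}
	return b
}

// Get returns a buffer, either reused from the pool or freshly allocated by New.
func (pool *bufferPool) Get() *buffer {
	return pool.pool.Get().(*buffer)
}

Dropping the fixed capacity is what lets the file saver below stop deriving a pool size from runtime.GOMAXPROCS.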

View file

@@ -4,7 +4,6 @@ import (
"context"
"fmt"
"io"
"runtime"
"sync"
"github.com/restic/chunker"
@@ -34,16 +33,11 @@ type fileSaver struct {
// started, it is stopped when ctx is cancelled.
func newFileSaver(ctx context.Context, wg *errgroup.Group, uploader restic.BlobSaverAsync, pol chunker.Pol, fileWorkers uint) *fileSaver {
ch := make(chan saveFileJob)
// TODO find a way to get rid of this parameter
blobWorkers := uint(runtime.GOMAXPROCS(0))
debug.Log("new file saver with %v file workers and %v blob workers", fileWorkers, blobWorkers)
poolSize := fileWorkers + blobWorkers
debug.Log("new file saver with %v file workers", fileWorkers)
s := &fileSaver{
uploader: uploader,
saveFilePool: newBufferPool(int(poolSize), chunker.MaxSize),
saveFilePool: newBufferPool(chunker.MaxSize),
pol: pol,
ch: ch,