// findPacker returns a packer for a new blob of size bytes. Either a new one is // created or one is returned that already has some blobs. func (r *packerManager) findPacker(size uint) (packer *pack.Packer, err error) { r.pm.Lock() defer r.pm.Unlock() // search for a suitable packer if len(r.packs) > 0 { debug.Log("searching packer for %d bytes\n", size) for i, p := range r.packs { if p.Size()+size < maxPackSize { debug.Log("found packer %v", p) // remove from list r.packs = append(r.packs[:i], r.packs[i+1:]...) return p, nil } } } // no suitable packer found, return new debug.Log("create new pack for %d bytes", size) tmpfile, err := ioutil.TempFile("", "restic-temp-pack-") if err != nil { return nil, errors.Wrap(err, "ioutil.TempFile") } return pack.NewPacker(r.key, tmpfile), nil }
// findPacker returns a packer for a new blob of size bytes. Either a new one is // created or one is returned that already has some blobs. func (r *packerManager) findPacker(size uint) (*pack.Packer, error) { r.pm.Lock() defer r.pm.Unlock() // search for a suitable packer if len(r.packs) > 0 { debug.Log("Repo.findPacker", "searching packer for %d bytes\n", size) for i, p := range r.packs { if p.Size()+size < maxPackSize { debug.Log("Repo.findPacker", "found packer %v", p) // remove from list r.packs = append(r.packs[:i], r.packs[i+1:]...) return p, nil } } } // no suitable packer found, return new debug.Log("Repo.findPacker", "create new pack for %d bytes", size) return pack.NewPacker(r.key, nil), nil }
func newPack(t testing.TB, k *crypto.Key, lengths []int) ([]Buf, []byte, uint) { bufs := []Buf{} for _, l := range lengths { b := make([]byte, l) _, err := io.ReadFull(rand.Reader, b) OK(t, err) h := sha256.Sum256(b) bufs = append(bufs, Buf{data: b, id: h}) } // pack blobs p := pack.NewPacker(k, nil) for _, b := range bufs { p.Add(restic.TreeBlob, b.id, b.data) } _, err := p.Finalize() OK(t, err) packData := p.Writer().(*bytes.Buffer).Bytes() return bufs, packData, p.Size() }
// TestCreatePack writes random blobs into a pack, verifies the serialized
// size against a hand-computed expectation, then unpacks the result and
// checks every blob round-trips intact.
func TestCreatePack(t *testing.T) {
	type Buf struct {
		data []byte
		id   backend.ID
	}

	// NOTE(review): `lengths` is not defined in this function — presumably a
	// package-level test fixture listing the blob sizes to generate; confirm.
	bufs := []Buf{}
	for _, l := range lengths {
		b := make([]byte, l)
		_, err := io.ReadFull(rand.Reader, b)
		OK(t, err)
		// blob ID is the SHA-256 of the random content
		h := sha256.Sum256(b)
		bufs = append(bufs, Buf{data: b, id: h})
	}

	// create random keys
	k := crypto.NewRandomKey()

	// pack blobs
	p := pack.NewPacker(k, nil)
	for _, b := range bufs {
		p.Add(pack.Tree, b.id, bytes.NewReader(b.data))
	}
	packData, err := p.Finalize()
	OK(t, err)

	// Compute the expected on-disk size: raw blob bytes plus the pack header.
	written := 0
	for _, l := range lengths {
		written += l
	}
	// header length
	written += binary.Size(uint32(0))
	// header: one (type, length, ID) record per blob
	written += len(lengths) * (binary.Size(pack.BlobType(0)) + binary.Size(uint32(0)) + backend.IDSize)
	// header crypto
	written += crypto.Extension

	// check length
	Equals(t, written, len(packData))
	Equals(t, uint(written), p.Size())

	// read and parse it again
	rd := bytes.NewReader(packData)
	np, err := pack.NewUnpacker(k, rd)
	OK(t, err)
	Equals(t, len(np.Entries), len(bufs))

	// Verify each entry's ID and content against the original blobs, in order.
	for i, b := range bufs {
		e := np.Entries[i]
		Equals(t, b.id, e.ID)

		brd, err := e.GetReader(rd)
		OK(t, err)
		data, err := ioutil.ReadAll(brd)
		OK(t, err)
		Assert(t, bytes.Equal(b.data, data), "data for blob %v doesn't match", i)
	}
}