func TestIndexUnserialize(t *testing.T) {
	oldIdx := backend.IDs{ParseID("ed54ae36197f4745ebc4b54d10e0f623eaaaedd03013eb7ae90df881b7781452")}

	idx, err := repository.DecodeIndex(bytes.NewReader(docExample))
	OK(t, err)

	for _, test := range exampleTests {
		blob, err := idx.Lookup(test.id)
		OK(t, err)

		Equals(t, test.packID, blob.PackID)
		Equals(t, test.tpe, blob.Type)
		Equals(t, test.offset, blob.Offset)
		Equals(t, test.length, blob.Length)
	}

	Equals(t, oldIdx, idx.Supersedes())

	blobs := idx.ListPack(exampleLookupTest.packID)
	if len(blobs) != len(exampleLookupTest.blobs) {
		t.Fatalf("expected %d blobs in pack, got %d", len(exampleLookupTest.blobs), len(blobs))
	}

	for _, blob := range blobs {
		if !exampleLookupTest.blobs.Has(blob.ID) {
			t.Errorf("unexpected blob %v found", blob.ID.Str())
		}
	}
}
func TestIndexUnserialize(t *testing.T) {
	idx, err := repository.DecodeIndex(bytes.NewReader(docExample))
	OK(t, err)

	for _, test := range exampleTests {
		packID, tpe, offset, length, err := idx.Lookup(test.id)
		OK(t, err)

		Equals(t, test.packID, packID)
		Equals(t, test.tpe, tpe)
		Equals(t, test.offset, offset)
		Equals(t, test.length, length)
	}
}
func TestIndexUnserialize(t *testing.T) {
	oldIdx := backend.IDs{ParseID("ed54ae36197f4745ebc4b54d10e0f623eaaaedd03013eb7ae90df881b7781452")}

	idx, err := repository.DecodeIndex(bytes.NewReader(docExample))
	OK(t, err)

	for _, test := range exampleTests {
		packID, tpe, offset, length, err := idx.Lookup(test.id)
		OK(t, err)

		Equals(t, test.packID, packID)
		Equals(t, test.tpe, tpe)
		Equals(t, test.offset, offset)
		Equals(t, test.length, length)
	}

	Equals(t, oldIdx, idx.Supersedes())
}
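// The unserialize tests above rely on package-level fixtures that are not part
// of this listing: docExample (the raw JSON index document handed to
// DecodeIndex), exampleTests (the blob entries expected in that document), and
// exampleLookupTest (one pack ID plus the set of blob IDs stored in that pack).
// Below is a minimal sketch of the shapes these fixtures are assumed to have;
// the field types (backend.IDSet in particular) are assumptions, and all
// concrete IDs and values are elided because they must match docExample.
var exampleTests = []struct {
	id, packID     backend.ID
	tpe            pack.BlobType
	offset, length uint
}{
	// entries elided: one entry per blob listed in docExample
}

var exampleLookupTest = struct {
	packID backend.ID
	blobs  backend.IDSet
}{
	// packID and blobs elided: they must name one pack from docExample
	// and the IDs of the blobs stored in that pack
}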
func TestIndexSerialize(t *testing.T) {
	type testEntry struct {
		id             backend.ID
		pack           backend.ID
		tpe            pack.BlobType
		offset, length uint
	}

	tests := []testEntry{}

	idx := repository.NewIndex()

	// create 50 packs with 20 blobs each
	for i := 0; i < 50; i++ {
		packID := randomID()

		pos := uint(0)
		for j := 0; j < 20; j++ {
			id := randomID()
			length := uint(i*100 + j)
			idx.Store(pack.Data, id, packID, pos, length)

			tests = append(tests, testEntry{
				id:     id,
				pack:   packID,
				tpe:    pack.Data,
				offset: pos,
				length: length,
			})

			pos += length
		}
	}

	wr := bytes.NewBuffer(nil)
	err := idx.Encode(wr)
	OK(t, err)

	idx2, err := repository.DecodeIndex(wr)
	OK(t, err)
	Assert(t, idx2 != nil, "nil returned for decoded index")

	wr2 := bytes.NewBuffer(nil)
	err = idx2.Encode(wr2)
	OK(t, err)

	for _, testBlob := range tests {
		packID, tpe, offset, length, err := idx.Lookup(testBlob.id)
		OK(t, err)

		Equals(t, testBlob.pack, packID)
		Equals(t, testBlob.tpe, tpe)
		Equals(t, testBlob.offset, offset)
		Equals(t, testBlob.length, length)

		packID, tpe, offset, length, err = idx2.Lookup(testBlob.id)
		OK(t, err)

		Equals(t, testBlob.pack, packID)
		Equals(t, testBlob.tpe, tpe)
		Equals(t, testBlob.offset, offset)
		Equals(t, testBlob.length, length)
	}

	// add more blobs to idx2
	newtests := []testEntry{}
	for i := 0; i < 10; i++ {
		packID := randomID()

		pos := uint(0)
		for j := 0; j < 10; j++ {
			id := randomID()
			length := uint(i*100 + j)
			idx2.Store(pack.Data, id, packID, pos, length)

			newtests = append(newtests, testEntry{
				id:     id,
				pack:   packID,
				tpe:    pack.Data,
				offset: pos,
				length: length,
			})

			pos += length
		}
	}

	// serialize idx2, unserialize to idx3
	wr3 := bytes.NewBuffer(nil)
	err = idx2.Encode(wr3)
	OK(t, err)

	idx3, err := repository.DecodeIndex(wr3)
	OK(t, err)
	Assert(t, idx3 != nil, "nil returned for decoded index")

	// all old blobs must not be present in the index
	for _, testBlob := range tests {
		_, _, _, _, err := idx3.Lookup(testBlob.id)
		Assert(t, err != nil, "found old id %v in serialized index", testBlob.id.Str())
	}

	// all new blobs must be in the index
	for _, testBlob := range newtests {
		packID, tpe, offset, length, err := idx3.Lookup(testBlob.id)
		OK(t, err)

		Equals(t, testBlob.pack, packID)
		Equals(t, testBlob.tpe, tpe)
		Equals(t, testBlob.offset, offset)
		Equals(t, testBlob.length, length)
	}
}
func TestIndexSerialize(t *testing.T) {
	type testEntry struct {
		id             backend.ID
		pack           backend.ID
		tpe            pack.BlobType
		offset, length uint
	}

	tests := []testEntry{}

	idx := repository.NewIndex()

	// create 50 packs with 20 blobs each
	for i := 0; i < 50; i++ {
		packID := randomID()

		pos := uint(0)
		for j := 0; j < 20; j++ {
			id := randomID()
			length := uint(i*100 + j)
			idx.Store(repository.PackedBlob{
				Type:   pack.Data,
				ID:     id,
				PackID: packID,
				Offset: pos,
				Length: length,
			})

			tests = append(tests, testEntry{
				id:     id,
				pack:   packID,
				tpe:    pack.Data,
				offset: pos,
				length: length,
			})

			pos += length
		}
	}

	wr := bytes.NewBuffer(nil)
	err := idx.Encode(wr)
	OK(t, err)

	idx2, err := repository.DecodeIndex(wr)
	OK(t, err)
	Assert(t, idx2 != nil, "nil returned for decoded index")

	wr2 := bytes.NewBuffer(nil)
	err = idx2.Encode(wr2)
	OK(t, err)

	for _, testBlob := range tests {
		result, err := idx.Lookup(testBlob.id)
		OK(t, err)

		Equals(t, testBlob.pack, result.PackID)
		Equals(t, testBlob.tpe, result.Type)
		Equals(t, testBlob.offset, result.Offset)
		Equals(t, testBlob.length, result.Length)

		result2, err := idx2.Lookup(testBlob.id)
		OK(t, err)

		Equals(t, testBlob.pack, result2.PackID)
		Equals(t, testBlob.tpe, result2.Type)
		Equals(t, testBlob.offset, result2.Offset)
		Equals(t, testBlob.length, result2.Length)
	}

	// add more blobs to idx
	newtests := []testEntry{}
	for i := 0; i < 10; i++ {
		packID := randomID()

		pos := uint(0)
		for j := 0; j < 10; j++ {
			id := randomID()
			length := uint(i*100 + j)
			idx.Store(repository.PackedBlob{
				Type:   pack.Data,
				ID:     id,
				PackID: packID,
				Offset: pos,
				Length: length,
			})

			newtests = append(newtests, testEntry{
				id:     id,
				pack:   packID,
				tpe:    pack.Data,
				offset: pos,
				length: length,
			})

			pos += length
		}
	}

	// serialize idx, unserialize to idx3
	wr3 := bytes.NewBuffer(nil)
	err = idx.Finalize(wr3)
	OK(t, err)

	Assert(t, idx.Final(), "index not final after encoding")

	id := randomID()
	OK(t, idx.SetID(id))

	id2, err := idx.ID()
	OK(t, err)
	Assert(t, id2.Equal(id), "wrong ID returned: want %v, got %v", id, id2)

	idx3, err := repository.DecodeIndex(wr3)
	OK(t, err)
	Assert(t, idx3 != nil, "nil returned for decoded index")
	Assert(t, idx3.Final(), "decoded index is not final")

	// all new blobs must be in the index
	for _, testBlob := range newtests {
		blob, err := idx3.Lookup(testBlob.id)
		OK(t, err)

		Equals(t, testBlob.pack, blob.PackID)
		Equals(t, testBlob.tpe, blob.Type)
		Equals(t, testBlob.offset, blob.Offset)
		Equals(t, testBlob.length, blob.Length)
	}
}
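// The serialize tests above call a randomID() helper that is not part of this
// listing. A minimal sketch, assuming it simply fills a backend.ID with random
// bytes (and that "crypto/rand" and "io" are imported); the real helper may
// generate its IDs differently.
func randomID() backend.ID {
	id := backend.ID{}
	// read exactly len(id) random bytes into the ID; a short read is an error
	_, err := io.ReadFull(rand.Reader, id[:])
	if err != nil {
		panic(err)
	}
	return id
}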