// deserializeHash reads exactly hash.ByteLen bytes from reader and returns
// the corresponding hash.Hash.
func deserializeHash(reader io.Reader) hash.Hash {
	digest := hash.Digest{}
	n, err := io.ReadFull(reader, digest[:])
	d.Chk.NoError(err)
	d.Chk.True(int(hash.ByteLen) == n)
	return hash.New(digest)
}
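// For illustration only: a minimal sketch of the write-side counterpart to
// deserializeHash. The name serializeHash is hypothetical and not taken from
// the source; the sketch assumes only hash.Hash.Digest() (used elsewhere in
// this code) and a standard io.Writer.
func serializeHash(writer io.Writer, h hash.Hash) {
	digest := h.Digest()
	n, err := writer.Write(digest[:])
	d.Chk.NoError(err)
	d.Chk.True(int(hash.ByteLen) == n)
}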
// fromDbKey splits a database key into its ref-height prefix (a big-endian
// uint64) and the hash digest that follows it.
func fromDbKey(key []byte) (uint64, hash.Hash) {
	refHeight := uint64(0)
	r := bytes.NewReader(key)
	err := binary.Read(r, binary.BigEndian, &refHeight)
	d.Chk.NoError(err)
	digest := hash.Digest{}
	err = binary.Read(r, binary.BigEndian, &digest)
	d.Chk.NoError(err)
	return refHeight, hash.New(digest)
}
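// For illustration only: a hypothetical toDbKey sketch showing the key layout
// fromDbKey expects (big-endian uint64 ref-height followed by the raw digest
// bytes). The function name is an assumption, not taken from the source.
func toDbKey(refHeight uint64, h hash.Hash) []byte {
	buf := &bytes.Buffer{}
	err := binary.Write(buf, binary.BigEndian, refHeight)
	d.Chk.NoError(err)
	digest := h.Digest()
	err = binary.Write(buf, binary.BigEndian, digest)
	d.Chk.NoError(err)
	return buf.Bytes()
}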
// deserializeChunk reads one length-prefixed chunk from reader. It returns
// (EmptyChunk, false) on a clean EOF at the start of a chunk and fails (via
// d.Chk) on any other error or on a hash mismatch.
func deserializeChunk(reader io.Reader) (Chunk, bool) {
	digest := hash.Sha1Digest{}
	n, err := io.ReadFull(reader, digest[:])
	if err == io.EOF {
		return EmptyChunk, false
	}
	d.Chk.NoError(err)
	d.Chk.True(int(sha1.Size) == n)
	h := hash.New(digest)

	chunkSize := uint32(0)
	err = binary.Read(reader, binary.BigEndian, &chunkSize)
	d.Chk.NoError(err)

	w := NewChunkWriter()
	n2, err := io.CopyN(w, reader, int64(chunkSize))
	d.Chk.NoError(err)
	d.Chk.True(int64(chunkSize) == n2)
	c := w.Chunk()
	d.Chk.True(h == c.Hash(), "%s != %s", h, c.Hash())
	return c, true
}
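// For illustration only: a hypothetical serializeChunk sketch matching the
// format deserializeChunk reads (raw digest, big-endian uint32 length, then
// the chunk bytes). The name serializeChunk and the Chunk.Data() accessor are
// assumptions; only Hash() appears in the surrounding code.
func serializeChunk(writer io.Writer, c Chunk) {
	digest := c.Hash().Digest()
	_, err := writer.Write(digest[:])
	d.Chk.NoError(err)
	data := c.Data()
	err = binary.Write(writer, binary.BigEndian, uint32(len(data)))
	d.Chk.NoError(err)
	_, err = writer.Write(data)
	d.Chk.NoError(err)
}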
func TestEnsureHash(t *testing.T) {
	assert := assert.New(t)
	vs := NewTestValueStore()
	count := byte(1)
	mockGetRef := func(v Value) hash.Hash {
		d := hash.Digest{}
		d[0] = count
		count++
		return hash.New(d)
	}
	testRef := func(r hash.Hash, expected byte) {
		d := r.Digest()
		assert.Equal(expected, d[0])
		for i := 1; i < len(d); i++ {
			assert.Equal(byte(0), d[i])
		}
	}

	getHashOverride = mockGetRef
	defer func() {
		getHashOverride = nil
	}()

	bl := newBlob(newBlobLeafSequence(nil, []byte("hi")))
	cb := newBlob(newBlobMetaSequence([]metaTuple{{Ref{}, newOrderedKey(Number(2)), 2, bl}}, vs))

	ll := newList(newListLeafSequence(nil, String("foo")))
	lt := MakeListType(StringType)
	cl := newList(newIndexedMetaSequence([]metaTuple{{Ref{}, newOrderedKey(Number(1)), 1, ll}}, lt, vs))

	newStringOrderedKey := func(s string) orderedKey {
		return newOrderedKey(String(s))
	}

	ml := newMap(newMapLeafSequence(nil, mapEntry{String("foo"), String("bar")}))
	cm := newMap(newOrderedMetaSequence([]metaTuple{{Ref{}, newStringOrderedKey("foo"), 1, ml}}, MakeMapType(StringType, StringType), vs))

	sl := newSet(newSetLeafSequence(nil, String("foo")))
	cps := newSet(newOrderedMetaSequence([]metaTuple{{Ref{}, newStringOrderedKey("foo"), 1, sl}}, MakeSetType(StringType), vs))

	count = byte(1)
	values := []Value{
		newBlob(newBlobLeafSequence(nil, []byte{})),
		cb,
		newList(newListLeafSequence(nil, String("bar"))),
		cl,
		cm,
		newMap(newMapLeafSequence(nil)),
		cps,
		newSet(newSetLeafSequence(nil)),
	}

	// Composite values cache their hash: repeated Hash() calls must return
	// the same (mocked) digest rather than invoking the override again.
	for i := 0; i < 2; i++ {
		for j, v := range values {
			testRef(v.Hash(), byte(j+1))
		}
	}

	// An explicitly assigned hash replaces the cached one.
	for _, v := range values {
		expected := byte(0x42)
		assignHash(v.(hashCacher), hash.New(hash.Digest{0: expected}))
		testRef(v.Hash(), expected)
	}

	// Primitive values do not cache their hash, so each Hash() call invokes
	// the (mocked) hash function again.
	count = byte(1)
	values = []Value{
		Bool(false),
		Number(0),
		String(""),
	}
	for i := 0; i < 2; i++ {
		for j, v := range values {
			testRef(v.Hash(), byte(i*len(values)+(j+1)))
		}
	}
}
// readHash (SHA-1-specific variant): decodes a sha1.Size digest at the
// current offset and advances the offset past it.
func (b *binaryNomsReader) readHash() hash.Hash {
	digest := hash.Sha1Digest{}
	copy(digest[:], b.buff[b.offset:b.offset+sha1.Size])
	b.offset += sha1.Size
	return hash.New(digest)
}
// readHash (hash.ByteLen variant): decodes a hash.ByteLen-sized digest at the
// current offset and advances the offset past it.
func (b *binaryNomsReader) readHash() hash.Hash {
	digest := hash.Digest{}
	copy(digest[:], b.buff[b.offset:b.offset+hash.ByteLen])
	b.offset += hash.ByteLen
	return hash.New(digest)
}
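// For illustration only: a sketch of the symmetric write operation for the
// hash.ByteLen variant above. The binaryNomsWriter receiver and its
// ensureCapacity helper are assumptions about the write-side type; only the
// buff/offset layout is taken from readHash.
func (b *binaryNomsWriter) writeHash(h hash.Hash) {
	b.ensureCapacity(hash.ByteLen)
	digest := h.Digest()
	copy(b.buff[b.offset:], digest[:])
	b.offset += hash.ByteLen
}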