func TestPathParseErrors(t *testing.T) {
	assert := assert.New(t)

	test := func(str, expectError string) {
		p, err := ParsePath(str)
		assert.Equal(Path{}, p)
		if err != nil {
			assert.Equal(expectError, err.Error())
		} else {
			assert.Fail("Expected " + expectError)
		}
	}

	test("", "Empty path")
	test(".", "Invalid field: ")
	test("[", "Path ends in [")
	test("]", "] is missing opening [")
	test(".#", "Invalid field: #")
	test(". ", "Invalid field: ")
	test(". invalid.field", "Invalid field: invalid.field")
	test(".foo.", "Invalid field: ")
	test(".foo.#invalid.field", "Invalid field: #invalid.field")
	test(".foo!", "Invalid operator: !")
	test(".foo!bar", "Invalid operator: !")
	test(".foo#", "Invalid operator: #")
	test(".foo#bar", "Invalid operator: #")
	test(".foo[", "Path ends in [")
	test(".foo[.bar", "[ is missing closing ]")
	test(".foo]", "] is missing opening [")
	test(".foo].bar", "] is missing opening [")
	test(".foo[]", "Empty index value")
	test(".foo[[]", "Invalid index: [")
	test(".foo[[]]", "Invalid index: [")
	test(".foo[42.1.2]", "Invalid index: 42.1.2")
	test(".foo[1f4]", "Invalid index: 1f4")
	test(".foo[hello]", "Invalid index: hello")
	test(".foo['hello']", "Invalid index: 'hello'")
	test(`.foo[\]`, `Invalid index: \`)
	test(`.foo[\\]`, `Invalid index: \\`)
	test(`.foo["hello]`, "[ is missing closing ]")
	test(`.foo["hello`, "[ is missing closing ]")
	test(`.foo["`, "[ is missing closing ]")
	test(`.foo["\`, "[ is missing closing ]")
	test(`.foo["]`, "[ is missing closing ]")
	test(".foo[#]", "Invalid hash: ")
	test(".foo[#invalid]", "Invalid hash: invalid")
	test(`.foo["hello\nworld"]`, `Only " and \ can be escaped`)
	test(".foo[42]bar", "Invalid operator: b")
	test("#foo", "Invalid operator: #")
	test("!foo", "Invalid operator: !")
	test("@foo", "Invalid operator: @")
	test("@key", "Invalid operator: @")
	test(fmt.Sprintf(".foo[#%s]@soup", hash.FromData([]byte{42}).String()), "Unsupported annotation: @soup")
}
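// Hedged companion sketch, not part of the original suite: it assumes a well-formed
// path such as ".foo[42]" parses without error and yields a non-empty Path. Treat
// that assumption as illustrative rather than documented behavior.
func TestPathParseValidSketch(t *testing.T) {
	assert := assert.New(t)
	p, err := ParsePath(".foo[42]")
	assert.NoError(err)
	assert.NotEqual(Path{}, p)
}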
func hashFromString(s string) hash.Hash {
	return hash.FromData([]byte(s))
}
func encodeForOID(t *Type, buf nomsWriter, allowUnresolvedCycles bool, root *Type, parentStructTypes []*Type) {
	// Most types are encoded in a straightforward fashion.
	switch desc := t.Desc.(type) {
	case CycleDesc:
		if allowUnresolvedCycles {
			buf.writeUint8(uint8(desc.Kind()))
			buf.writeUint32(uint32(desc))
		} else {
			panic("found an unexpected unresolved cycle")
		}
	case PrimitiveDesc:
		buf.writeUint8(uint8(desc.Kind()))
	case CompoundDesc:
		switch k := desc.Kind(); k {
		case ListKind, MapKind, RefKind, SetKind:
			buf.writeUint8(uint8(k))
			buf.writeUint32(uint32(len(desc.ElemTypes)))
			for _, tt := range desc.ElemTypes {
				encodeForOID(tt, buf, allowUnresolvedCycles, root, parentStructTypes)
			}
		case UnionKind:
			buf.writeUint8(uint8(k))
			if t == root {
				// If this is where we started, we don't need to keep going.
				return
			}
			buf.writeUint32(uint32(len(desc.ElemTypes)))
			// This is the only subtle case: encode each member type, hash it, drop
			// duplicates, and XOR the digests together to form an order-independent
			// encoding.
			mbuf := newBinaryNomsWriter()
			oids := make(map[hash.Hash]struct{})
			for _, tt := range desc.ElemTypes {
				mbuf.reset()
				encodeForOID(tt, mbuf, allowUnresolvedCycles, root, parentStructTypes)
				oids[hash.FromData(mbuf.data())] = struct{}{}
			}
			data := make([]byte, hash.ByteLen)
			for o := range oids {
				digest := o.Digest()
				for i := 0; i < len(data); i++ {
					data[i] ^= digest[i]
				}
			}
			buf.writeBytes(data)
		default:
			panic("unknown compound type")
		}
	case StructDesc:
		idx, found := indexOfType(t, parentStructTypes)
		if found {
			buf.writeUint8(uint8(CycleKind))
			buf.writeUint32(uint32(len(parentStructTypes)) - 1 - idx)
			return
		}
		buf.writeUint8(uint8(StructKind))
		buf.writeString(desc.Name)
		parentStructTypes = append(parentStructTypes, t)
		for _, field := range desc.fields {
			buf.writeString(field.name)
			encodeForOID(field.t, buf, allowUnresolvedCycles, root, parentStructTypes)
		}
	}
}
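// The union case above relies on XOR being commutative and associative, so combining
// the member digests in any iteration order produces identical bytes. The helper below
// is a hypothetical, self-contained sketch of that combination step, added only to
// make the property explicit; it is not called by encodeForOID.
func xorDigestsSketch(members []hash.Hash) []byte {
	data := make([]byte, hash.ByteLen)
	for _, h := range members {
		digest := h.Digest()
		// XOR each digest byte into the accumulator; iteration order does not matter.
		for i := 0; i < len(data); i++ {
			data[i] ^= digest[i]
		}
	}
	return data
}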
func generateOID(t *Type, allowUnresolvedCycles bool) {
	buf := newBinaryNomsWriter()
	encodeForOID(t, buf, allowUnresolvedCycles, t, nil)
	oid := hash.FromData(buf.data())
	t.oid = &oid
}
// NewChunk creates a new Chunk backed by data. The returned Chunk takes ownership of
// the data slice, so callers must not modify it afterwards.
func NewChunk(data []byte) Chunk {
	r := hash.FromData(data)
	return Chunk{r, data}
}
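// Hedged usage sketch: it assumes Chunk exposes a Hash() accessor for the hash field
// set by NewChunk (an assumption, not confirmed by this file). The point is simply
// that a chunk's hash is the hash of its backing bytes.
func newChunkSketch() {
	data := []byte("hello world")
	c := NewChunk(data)
	if c.Hash() != hash.FromData(data) {
		panic("a chunk's hash should be the hash of its data")
	}
}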