// Issue 40 (https://github.com/google/badwolf/issues/40)
func TestDataAccessSimpleFetchIssue40(t *testing.T) {
	testBindings, ctx := []string{"?item", "?t"}, context.Background()
	n, err := node.Parse("/room<Bedroom>")
	if err != nil {
		t.Fatalf("node.Parse failed to parse \"/room<Bedroom>\", %v", err)
	}
	cls := &semantic.GraphClause{
		SBinding:       "?item",
		PID:            "in",
		PAnchorBinding: "?t",
		O:              triple.NewNodeObject(n),
	}
	g, err := getTestStore(t, testTemporalTriples).Graph(ctx, "?test")
	if err != nil {
		t.Fatal(err)
	}
	tbl, err := simpleFetch(ctx, []storage.Graph{g}, cls, &storage.LookupOptions{}, 0)
	if err != nil {
		t.Errorf("simpleFetch failed with error %v", err)
	}
	if got, want := len(tbl.Bindings()), len(testBindings); got != want {
		t.Errorf("simpleFetch returned a table with the wrong bindings set; got %v, want %v", got, want)
	}
	if got, want := tbl.NumRows(), 1; got != want {
		t.Errorf("simpleFetch returned the wrong number of rows; got %d, want %d\n%s", got, want, tbl)
	}
	for _, r := range tbl.Rows() {
		if got, want := len(r), len(testBindings); got != want {
			t.Errorf("simpleFetch returned row %v with the incorrect number of bindings; got %d, want %d", r, got, want)
		}
	}
}
func TestDataAccessUnfeasibleSimpleExist(t *testing.T) {
	ctx := context.Background()
	g, err := getTestStore(t, testImmutatbleTriples).Graph(ctx, "?test")
	if err != nil {
		t.Fatal(err)
	}
	unknown, err := node.Parse("/unknown<unknown>")
	if err != nil {
		t.Fatal(err)
	}
	tt := getTestTriples(t, testImmutatbleTriples)
	s, p, o := unknown, tt[0].Predicate(), tt[0].Object()
	clsNotOK := &semantic.GraphClause{
		S: s,
		P: p,
		O: o,
	}
	tplNotOK, err := triple.New(s, p, o)
	if err != nil {
		t.Fatal(err)
	}
	unfeasible, tbl, err := simpleExist(ctx, []storage.Graph{g}, clsNotOK, tplNotOK)
	if err != nil {
		t.Errorf("simpleExist should not have failed with error %v", err)
	}
	if !unfeasible {
		t.Error("simpleExist should have returned an unfeasible table instead")
	}
	if got, want := tbl.NumRows(), 0; got != want {
		t.Errorf("simpleExist failed to return the right number of rows; got %d, want %d", got, want)
	}
}
// dataAccumulator creates an element hook that accumulates the elements of a
// triple and adds the triple to the Statement once it is fully formed.
func dataAccumulator(b literal.Builder) ElementHook {
	var (
		hook ElementHook
		s    *node.Node
		p    *predicate.Predicate
		o    *triple.Object
	)
	hook = func(st *Statement, ce ConsumedElement) (ElementHook, error) {
		if ce.IsSymbol() {
			return hook, nil
		}
		tkn := ce.Token()
		if tkn.Type != lexer.ItemNode && tkn.Type != lexer.ItemPredicate && tkn.Type != lexer.ItemLiteral {
			return hook, nil
		}
		if s == nil {
			if tkn.Type != lexer.ItemNode {
				return nil, fmt.Errorf("hook.DataAccumulator requires a node to create a subject, got %v instead", tkn)
			}
			tmp, err := node.Parse(tkn.Text)
			if err != nil {
				return nil, err
			}
			s = tmp
			return hook, nil
		}
		if p == nil {
			if tkn.Type != lexer.ItemPredicate {
				return nil, fmt.Errorf("hook.DataAccumulator requires a predicate token to create a predicate, got %v instead", tkn)
			}
			tmp, err := predicate.Parse(tkn.Text)
			if err != nil {
				return nil, err
			}
			p = tmp
			return hook, nil
		}
		if o == nil {
			tmp, err := triple.ParseObject(tkn.Text, b)
			if err != nil {
				return nil, err
			}
			o = tmp
			trpl, err := triple.New(s, p, o)
			if err != nil {
				return nil, err
			}
			st.AddData(trpl)
			s, p, o = nil, nil, nil
			return hook, nil
		}
		return nil, fmt.Errorf("hook.DataAccumulator has failed to flush the triple %s, %s, %s", s, p, o)
	}
	return hook
}
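// The sketch below drives dataAccumulator by hand to show the token sequence
// it expects: a subject node, a predicate, then an object. It is a minimal,
// hypothetical usage sketch (exampleDataAccumulatorUsage is not part of the
// package), assuming the ConsumedElement helpers and lexer item types used
// in this package's tests.
func exampleDataAccumulatorUsage(st *Statement) error {
	hook := dataAccumulator(literal.DefaultBuilder())
	for _, tkn := range []*lexer.Token{
		{Type: lexer.ItemNode, Text: "/u<joe>"},
		{Type: lexer.ItemPredicate, Text: `"knows"@[]`},
		{Type: lexer.ItemNode, Text: "/u<mary>"},
	} {
		var err error
		// Each call consumes one element; after the object token the hook
		// builds the triple, adds it to st via AddData, and resets its state.
		if hook, err = hook(st, NewConsumedToken(tkn)); err != nil {
			return err
		}
	}
	return nil
}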
// ToNode converts the node token found by the lexer into a BadWolf node.
func ToNode(ce ConsumedElement) (*node.Node, error) {
	if ce.IsSymbol() {
		return nil, fmt.Errorf("semantic.ToNode cannot convert symbol %v to a node", ce)
	}
	tkn := ce.Token()
	if tkn.Type != lexer.ItemNode {
		return nil, fmt.Errorf("semantic.ToNode cannot convert token type %s to a node", tkn.Type)
	}
	return node.Parse(tkn.Text)
}
func getTestData(t *testing.T) (*node.Node, *predicate.Predicate, *Object) {
	s, err := node.Parse("/some/type<some id>")
	if err != nil {
		t.Fatalf("Failed to create test node: %v", err)
	}
	p, err := predicate.Parse("\"foo\"@[]")
	if err != nil {
		t.Fatalf("Failed to create test predicate: %v", err)
	}
	o := NewNodeObject(s)
	return s, p, o
}
// ParseObject attempts to parse an object, trying node, literal, and
// predicate syntax in that order.
func ParseObject(s string, b literal.Builder) (*Object, error) {
	n, err := node.Parse(s)
	if err != nil {
		l, err := b.Parse(s)
		if err != nil {
			o, err := predicate.Parse(s)
			if err != nil {
				return nil, err
			}
			return NewPredicateObject(o), nil
		}
		return NewLiteralObject(l), nil
	}
	return NewNodeObject(n), nil
}
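// A minimal sketch of the fallback order implemented above
// (exampleParseObjectUsage is a hypothetical helper, not part of the
// package): each input resolves to a different object kind.
func exampleParseObjectUsage() {
	b := literal.DefaultBuilder()
	for _, s := range []string{
		"/foo<bar>",         // parses as a node object
		`"true"^^type:bool`, // falls back to a literal object
		`"foo"@[]`,          // falls back to a predicate object
	} {
		if o, err := ParseObject(s, b); err == nil {
			fmt.Println(o)
		}
	}
}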
func testNodeTemporalPredicateLiteral(t *testing.T) (*node.Node, *predicate.Predicate, *literal.Literal) {
	n, err := node.Parse(`/foo<bar>`)
	if err != nil {
		t.Fatal(err)
	}
	p, err := predicate.Parse(`"bar"@[1975-01-01T00:01:01.999999999Z]`)
	if err != nil {
		t.Fatal(err)
	}
	l, err := literal.DefaultBuilder().Parse(`"true"^^type:bool`)
	if err != nil {
		t.Fatal(err)
	}
	return n, p, l
}
func testNodePredicateLiteral(t *testing.T) (*node.Node, *predicate.Predicate, *literal.Literal) {
	n, err := node.Parse(`/foo<bar>`)
	if err != nil {
		t.Fatal(err)
	}
	p, err := predicate.Parse(`"foo"@[]`)
	if err != nil {
		t.Fatal(err)
	}
	l, err := literal.DefaultBuilder().Parse(`"true"^^type:bool`)
	if err != nil {
		t.Fatal(err)
	}
	return n, p, l
}
// inferCell builds a Cell out of the provided string, trying node, predicate,
// literal, and timestamp syntax in that order and falling back to a plain
// string cell.
func inferCell(s string) *table.Cell {
	if n, err := node.Parse(s); err == nil {
		return &table.Cell{N: n}
	}
	if p, err := predicate.Parse(s); err == nil {
		return &table.Cell{P: p}
	}
	if l, err := literal.DefaultBuilder().Parse(s); err == nil {
		return &table.Cell{L: l}
	}
	if t, err := time.Parse(time.RFC3339Nano, s); err == nil {
		return &table.Cell{T: &t}
	}
	return &table.Cell{S: table.CellString(s)}
}
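// A minimal sketch of the inference order above (exampleInferCellUsage is a
// hypothetical helper with hypothetical inputs): each string lands in a
// different Cell field.
func exampleInferCellUsage() []*table.Cell {
	return []*table.Cell{
		inferCell("/foo<bar>"),                      // Cell.N, a node
		inferCell(`"foo"@[]`),                       // Cell.P, a predicate
		inferCell(`"true"^^type:bool`),              // Cell.L, a literal
		inferCell("2015-07-19T13:12:04.669618843Z"), // Cell.T, a timestamp
		inferCell("anything else"),                  // Cell.S, a plain string
	}
}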
// Parse processes the provided text and tries to create a triple. It assumes
// that the provided text contains only one triple.
func Parse(line string, b literal.Builder) (*Triple, error) {
	raw := strings.TrimSpace(line)
	idxp := pSplit.FindIndex([]byte(raw))
	idxo := oSplit.FindIndex([]byte(raw))
	if len(idxp) == 0 || len(idxo) == 0 {
		return nil, fmt.Errorf("triple.Parse could not split s p o out of %s", raw)
	}
	ss, sp, so := raw[0:idxp[0]+1], raw[idxp[1]-1:idxo[0]+1], raw[idxo[1]-1:]
	s, err := node.Parse(ss)
	if err != nil {
		return nil, fmt.Errorf("triple.Parse failed to parse subject %s with error %v", ss, err)
	}
	p, err := predicate.Parse(sp)
	if err != nil {
		return nil, fmt.Errorf("triple.Parse failed to parse predicate %s with error %v", sp, err)
	}
	o, err := ParseObject(so, b)
	if err != nil {
		return nil, fmt.Errorf("triple.Parse failed to parse object %s with error %v", so, err)
	}
	return New(s, p, o)
}
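// A minimal sketch of parsing one whitespace-separated triple line with the
// function above (exampleTripleParseUsage and its input are hypothetical;
// the exact separators accepted depend on the pSplit and oSplit expressions):
func exampleTripleParseUsage() (*Triple, error) {
	line := `/u<joe> "knows"@[] /u<mary>`
	return Parse(line, literal.DefaultBuilder())
}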
func TestWhereObjectClauseHook(t *testing.T) {
	st := &Statement{}
	f := whereObjectClause()
	st.ResetWorkingGraphClause()
	node, err := node.Parse("/_<foo>")
	if err != nil {
		t.Fatalf("node.Parse failed with error %v", err)
	}
	n := triple.NewNodeObject(node)
	pred, err := predicate.Parse(`"foo"@[2015-07-19T13:12:04.669618843-07:00]`)
	if err != nil {
		t.Fatalf("predicate.Parse failed with error %v", err)
	}
	p := triple.NewPredicateObject(pred)
	tlb, err := time.Parse(time.RFC3339Nano, `2015-07-19T13:12:04.669618843-07:00`)
	if err != nil {
		t.Fatalf("time.Parse failed to parse valid lower time bound with error %v", err)
	}
	tub, err := time.Parse(time.RFC3339Nano, `2016-07-19T13:12:04.669618843-07:00`)
	if err != nil {
		t.Fatalf("time.Parse failed to parse valid upper time bound with error %v", err)
	}
	l, err := triple.ParseObject(`"1"^^type:int64`, literal.DefaultBuilder())
	if err != nil {
		t.Fatalf("triple.ParseObject should never fail to parse %s with error %v", `"1"^^type:int64`, err)
	}
	runTabulatedClauseHookTest(t, "semantic.whereObjectClause", f, []testClauseTable{
		{
			valid: true,
			id:    "node_example",
			ces: []ConsumedElement{
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemNode,
					Text: "/_<foo>",
				}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemAs,
					Text: "as",
				}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemBinding,
					Text: "?bar",
				}),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemType,
					Text: "type",
				}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemBinding,
					Text: "?bar2",
				}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemID,
					Text: "id",
				}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemBinding,
					Text: "?bar3",
				}),
				NewConsumedSymbol("FOO"),
			},
			want: &GraphClause{
				O:          n,
				OAlias:     "?bar",
				OTypeAlias: "?bar2",
				OIDAlias:   "?bar3",
			},
		},
		{
			valid: true,
			id:    "binding_example",
			ces: []ConsumedElement{
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemBinding,
					Text: "?foo",
				}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemAs,
					Text: "as",
				}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemBinding,
					Text: "?bar",
				}),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemType,
					Text: "type",
				}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemBinding,
					Text: "?bar2",
				}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemID,
					Text: "id",
				}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemBinding,
					Text: "?bar3",
				}),
				NewConsumedSymbol("FOO"),
			},
			want: &GraphClause{
				OBinding:   "?foo",
				OAlias:     "?bar",
				OTypeAlias: "?bar2",
				OIDAlias:   "?bar3",
			},
		},
		{
			valid: true,
			id:    "valid predicate",
			ces: []ConsumedElement{
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemPredicate,
					Text: `"foo"@[2015-07-19T13:12:04.669618843-07:00]`,
				}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemAs,
					Text: "as",
				}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemBinding,
					Text: "?bar",
				}),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemID,
					Text: "id",
				}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemBinding,
					Text: "?bar2",
				}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemAt,
					Text: "at",
				}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemBinding,
					Text: "?bar3",
				}),
				NewConsumedSymbol("FOO"),
			},
			want: &GraphClause{
				O:            p,
				OAlias:       "?bar",
				OIDAlias:     "?bar2",
				OAnchorAlias: "?bar3",
				OTemporal:    true,
			},
		},
		{
			valid: true,
			id:    "valid predicate with binding",
			ces: []ConsumedElement{
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemPredicate,
					Text: `"foo"@[?foo]`,
				}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemAs,
					Text: "as",
				}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemBinding,
					Text: "?bar",
				}),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemID,
					Text: "id",
				}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemBinding,
					Text: "?bar2",
				}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemAt,
					Text: "at",
				}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemBinding,
					Text: "?bar3",
				}),
				NewConsumedSymbol("FOO"),
			},
			want: &GraphClause{
				OID:            "foo",
				OAnchorBinding: "?foo",
				OAlias:         "?bar",
				OIDAlias:       "?bar2",
				OAnchorAlias:   "?bar3",
				OTemporal:      true,
			},
		},
		{
			valid: true,
			id:    "valid bound with bindings",
			ces: []ConsumedElement{
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemPredicateBound,
					Text: `"foo"@[?fooLower,?fooUpper]`,
				}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemAs,
					Text: "as",
				}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemBinding,
					Text: "?bar",
				}),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemID,
					Text: "id",
				}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemBinding,
					Text: "?bar2",
				}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemAt,
					Text: "at",
				}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemBinding,
					Text: "?bar3",
				}),
				NewConsumedSymbol("FOO"),
			},
			want: &GraphClause{
				OID:              "foo",
				OLowerBoundAlias: "?fooLower",
				OUpperBoundAlias: "?fooUpper",
				OAlias:           "?bar",
				OIDAlias:         "?bar2",
				OAnchorAlias:     "?bar3",
				OTemporal:        true,
			},
		},
		{
			valid: true,
			id:    "valid bound with dates",
			ces: []ConsumedElement{
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemPredicateBound,
					Text: `"foo"@[2015-07-19T13:12:04.669618843-07:00,2016-07-19T13:12:04.669618843-07:00]`,
				}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemAs,
					Text: "as",
				}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemBinding,
					Text: "?bar",
				}),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemID,
					Text: "id",
				}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemBinding,
					Text: "?bar2",
				}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemAt,
					Text: "at",
				}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemBinding,
					Text: "?bar3",
				}),
				NewConsumedSymbol("FOO"),
			},
			want: &GraphClause{
				OID:          "foo",
				OLowerBound:  &tlb,
				OUpperBound:  &tub,
				OAlias:       "?bar",
				OIDAlias:     "?bar2",
				OAnchorAlias: "?bar3",
				OTemporal:    true,
			},
		},
		{
			valid: false,
			id:    "invalid bound with dates",
			ces: []ConsumedElement{
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemPredicateBound,
					Text: `"foo"@[2016-07-19T13:12:04.669618843-07:00,2015-07-19T13:12:04.669618843-07:00]`,
				}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemAs,
					Text: "as",
				}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemBinding,
					Text: "?bar",
				}),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemID,
					Text: "id",
				}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemBinding,
					Text: "?bar2",
				}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemAt,
					Text: "at",
				}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemBinding,
					Text: "?bar3",
				}),
				NewConsumedSymbol("FOO"),
			},
			want: &GraphClause{},
		},
		{
			valid: true,
			id:    "literal with alias",
			ces: []ConsumedElement{
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemLiteral,
					Text: `"1"^^type:int64`,
				}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemAs,
					Text: "as",
				}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemBinding,
					Text: "?bar",
				}),
				NewConsumedSymbol("FOO"),
			},
			want: &GraphClause{
				O:      l,
				OAlias: "?bar",
			},
		},
	})
}
func TestWhereSubjectClauseHook(t *testing.T) {
	st := &Statement{}
	f := whereSubjectClause()
	st.ResetWorkingGraphClause()
	n, err := node.Parse("/_<foo>")
	if err != nil {
		t.Fatalf("node.Parse failed with error %v", err)
	}
	runTabulatedClauseHookTest(t, "semantic.whereSubjectClause", f, []testClauseTable{
		{
			valid: true,
			id:    "node_example",
			ces: []ConsumedElement{
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemNode,
					Text: "/_<foo>",
				}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemAs,
					Text: "as",
				}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemBinding,
					Text: "?bar",
				}),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemType,
					Text: "type",
				}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemBinding,
					Text: "?bar2",
				}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemID,
					Text: "id",
				}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemBinding,
					Text: "?bar3",
				}),
				NewConsumedSymbol("FOO"),
			},
			want: &GraphClause{
				S:          n,
				SAlias:     "?bar",
				STypeAlias: "?bar2",
				SIDAlias:   "?bar3",
			},
		},
		{
			valid: true,
			id:    "binding_example",
			ces: []ConsumedElement{
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemBinding,
					Text: "?foo",
				}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemAs,
					Text: "as",
				}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemBinding,
					Text: "?bar",
				}),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemType,
					Text: "type",
				}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemBinding,
					Text: "?bar2",
				}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemID,
					Text: "id",
				}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{
					Type: lexer.ItemBinding,
					Text: "?bar3",
				}),
				NewConsumedSymbol("FOO"),
			},
			want: &GraphClause{
				SBinding:   "?foo",
				SAlias:     "?bar",
				STypeAlias: "?bar2",
				SIDAlias:   "?bar3",
			},
		},
	})
}