// processPredicate updates the working graph clause if there is an available
// predicate.
func processPredicate(c *GraphClause, ce ConsumedElement, lastNopToken *lexer.Token) (*predicate.Predicate, string, string, error) {
	var (
		nP             *predicate.Predicate
		pID            string
		pAnchorBinding string
	)
	raw := ce.Token().Text
	p, err := predicate.Parse(raw)
	if err == nil {
		// A fully specified predicate was provided.
		nP = p
		return nP, pID, pAnchorBinding, nil
	}
	// The predicate may have a binding on the anchor.
	cmps := predicateRegexp.FindAllStringSubmatch(raw, 2)
	if len(cmps) != 1 || (len(cmps) == 1 && len(cmps[0]) != 3) {
		return nil, "", "", fmt.Errorf("failed to extract partially defined predicate %q, got %v instead", raw, cmps)
	}
	id, ta := cmps[0][1], cmps[0][2]
	pID = id
	if ta != "" {
		pAnchorBinding = ta
	}
	return nil, pID, pAnchorBinding, nil
}
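For illustration, a minimal sketch (same package assumed) of the two outcomes; the `?when` binding name is hypothetical, while the token shapes mirror the fixtures used in the tests below:

	// Sketch only: a partially specified predicate whose anchor is a binding.
	ce := NewConsumedToken(&lexer.Token{Type: lexer.ItemPredicate, Text: `"foo"@[?when]`})
	nP, pID, pAnchorBinding, err := processPredicate(&GraphClause{}, ce, nil)
	if err != nil {
		// handle err
	}
	// A fully specified predicate (e.g. `"foo"@[2015-07-19T13:12:04.669618843-07:00]`)
	// yields nP != nil; the partially bound form above instead yields
	// nP == nil, pID == "foo", pAnchorBinding == "?when".
	fmt.Println(nP, pID, pAnchorBinding)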
// dataAccumulator creates an element hook that accumulates the subject,
// predicate, and object of a triple and adds the triple to the Statement once
// it is fully formed.
func dataAccumulator(b literal.Builder) ElementHook {
	var (
		hook ElementHook
		s    *node.Node
		p    *predicate.Predicate
		o    *triple.Object
	)
	hook = func(st *Statement, ce ConsumedElement) (ElementHook, error) {
		if ce.IsSymbol() {
			return hook, nil
		}
		tkn := ce.Token()
		if tkn.Type != lexer.ItemNode && tkn.Type != lexer.ItemPredicate && tkn.Type != lexer.ItemLiteral {
			return hook, nil
		}
		if s == nil {
			if tkn.Type != lexer.ItemNode {
				return nil, fmt.Errorf("hook.DataAccumulator requires a node to create a subject, got %v instead", tkn)
			}
			tmp, err := node.Parse(tkn.Text)
			if err != nil {
				return nil, err
			}
			s = tmp
			return hook, nil
		}
		if p == nil {
			if tkn.Type != lexer.ItemPredicate {
				return nil, fmt.Errorf("hook.DataAccumulator requires a predicate to create a predicate, got %v instead", tkn)
			}
			tmp, err := predicate.Parse(tkn.Text)
			if err != nil {
				return nil, err
			}
			p = tmp
			return hook, nil
		}
		if o == nil {
			tmp, err := triple.ParseObject(tkn.Text, b)
			if err != nil {
				return nil, err
			}
			o = tmp
			trpl, err := triple.NewTriple(s, p, o)
			if err != nil {
				return nil, err
			}
			st.AddData(trpl)
			s, p, o = nil, nil, nil
			return hook, nil
		}
		return nil, fmt.Errorf("hook.DataAccumulator has failed to flush the triple %s, %s, %s", s, p, o)
	}
	return hook
}
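A minimal usage sketch, assuming ElementHook is the function type the assignments above imply (func(*Statement, ConsumedElement) (ElementHook, error)); the hook consumes subject, predicate, and object tokens in order and flushes the completed triple into the statement:

	st := &Statement{}
	hook := dataAccumulator(literal.DefaultBuilder())
	var err error
	for _, tkn := range []*lexer.Token{
		{Type: lexer.ItemNode, Text: "/foo<bar>"},
		{Type: lexer.ItemPredicate, Text: `"foo"@[]`},
		{Type: lexer.ItemLiteral, Text: `"true"^^type:bool`},
	} {
		if hook, err = hook(st, NewConsumedToken(tkn)); err != nil {
			// handle err
		}
	}
	// st now holds the triple /foo<bar> "foo"@[] "true"^^type:bool.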
// ToPredicate converts the predicate token found by the lexer into a BadWolf
// predicate.
func ToPredicate(ce ConsumedElement) (*predicate.Predicate, error) {
	if ce.IsSymbol() {
		return nil, fmt.Errorf("semantic.ToPredicate cannot convert symbol %v to a predicate", ce)
	}
	tkn := ce.Token()
	if tkn.Type != lexer.ItemPredicate {
		return nil, fmt.Errorf("semantic.ToPredicate cannot convert token type %s to a predicate", tkn.Type)
	}
	return predicate.Parse(tkn.Text)
}
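A short usage sketch, reusing the token fixtures that appear elsewhere in this package:

	ce := NewConsumedToken(&lexer.Token{Type: lexer.ItemPredicate, Text: `"foo"@[]`})
	p, err := ToPredicate(ce)
	if err != nil {
		// handle err
	}
	fmt.Println(p) // the parsed "foo"@[] predicate.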
func getTestData(t *testing.T) (*node.Node, *predicate.Predicate, *Object) {
	s, err := node.Parse("/some/type<some id>")
	if err != nil {
		t.Fatalf("Failed to create test node with error %v", err)
	}
	p, err := predicate.Parse("\"foo\"@[]")
	if err != nil {
		t.Fatalf("Failed to create test predicate with error %v", err)
	}
	o := NewNodeObject(s)
	return s, p, o
}
// collectGlobalBounds collects the global time bounds that should be applied
// to all temporal predicates.
func collectGlobalBounds() ElementHook {
	var (
		f         func(st *Statement, ce ConsumedElement) (ElementHook, error)
		opToken   *lexer.Token
		lastToken *lexer.Token
	)
	f = func(st *Statement, ce ConsumedElement) (ElementHook, error) {
		if ce.IsSymbol() {
			return f, nil
		}
		tkn := ce.Token()
		switch tkn.Type {
		case lexer.ItemBefore, lexer.ItemAfter, lexer.ItemBetween:
			if lastToken != nil {
				return nil, fmt.Errorf("invalid token %v after already valid token %v", tkn, lastToken)
			}
			opToken, lastToken = tkn, tkn
		case lexer.ItemComma:
			if lastToken == nil || opToken.Type != lexer.ItemBetween {
				return nil, fmt.Errorf("token %v can only be used in a between clause; previous token %v instead", tkn, lastToken)
			}
			lastToken = tkn
		case lexer.ItemPredicate:
			if lastToken == nil {
				return nil, fmt.Errorf("invalid token %v without a global time modifier", tkn)
			}
			p, err := predicate.Parse(tkn.Text)
			if err != nil {
				return nil, err
			}
			if p.ID() != "" {
				return nil, fmt.Errorf("global time bounds do not accept individual predicate IDs; found %s instead", p)
			}
			ta, err := p.TimeAnchor()
			if err != nil {
				return nil, err
			}
			if lastToken.Type == lexer.ItemComma || lastToken.Type == lexer.ItemBefore {
				st.lookupOptions.UpperAnchor = ta
				opToken, lastToken = nil, nil
			} else {
				st.lookupOptions.LowerAnchor = ta
				if opToken.Type != lexer.ItemBetween {
					opToken, lastToken = nil, nil
				}
			}
		default:
			return nil, fmt.Errorf("global bound found unexpected token %v", tkn)
		}
		return f, nil
	}
	return f
}
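For illustration, a sketch (same package assumed) of how a BETWEEN clause flows through this hook. It assumes predicate.Parse accepts a predicate with an empty ID, which is what the p.ID() check above requires of global-bound predicates:

	st := &Statement{}
	f := collectGlobalBounds()
	var err error
	for _, tkn := range []*lexer.Token{
		{Type: lexer.ItemBetween, Text: "between"},
		{Type: lexer.ItemPredicate, Text: `""@[2015-07-19T13:12:04.669618843-07:00]`}, // lower anchor; empty ID is an assumption.
		{Type: lexer.ItemComma, Text: ","},
		{Type: lexer.ItemPredicate, Text: `""@[2016-07-19T13:12:04.669618843-07:00]`}, // upper anchor.
	} {
		if f, err = f(st, NewConsumedToken(tkn)); err != nil {
			// handle err
		}
	}
	// st.lookupOptions now carries both the lower and the upper global anchors.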
func TestTemporalBoundedLookupChecker(t *testing.T) {
	lpa, err := predicate.Parse("\"foo\"@[2013-07-19T13:12:04.669618843-07:00]")
	if err != nil {
		t.Fatalf("Failed to parse fixture predicate with error %v", err)
	}
	mpa, err := predicate.Parse("\"foo\"@[2014-07-19T13:12:04.669618843-07:00]")
	if err != nil {
		t.Fatalf("Failed to parse fixture predicate with error %v", err)
	}
	upa, err := predicate.Parse("\"foo\"@[2015-07-19T13:12:04.669618843-07:00]")
	if err != nil {
		t.Fatalf("Failed to parse fixture predicate with error %v", err)
	}
	// Check lower bound.
	lb, _ := lpa.TimeAnchor()
	blu := &storage.LookupOptions{LowerAnchor: lb}
	clu := newChecker(blu)
	if !clu.CheckAndUpdate(mpa) {
		t.Errorf("Failed to accept valid predicate %v by checker %v", mpa, clu)
	}
	lb, _ = mpa.TimeAnchor()
	blu = &storage.LookupOptions{LowerAnchor: lb}
	clu = newChecker(blu)
	if clu.CheckAndUpdate(lpa) {
		t.Errorf("Failed to reject invalid predicate %v by checker %v", lpa, clu)
	}
	// Check upper bound.
	ub, _ := upa.TimeAnchor()
	buu := &storage.LookupOptions{UpperAnchor: ub}
	cuu := newChecker(buu)
	if !cuu.CheckAndUpdate(mpa) {
		t.Errorf("Failed to accept valid predicate %v by checker %v", mpa, cuu)
	}
	ub, _ = mpa.TimeAnchor()
	buu = &storage.LookupOptions{UpperAnchor: ub}
	cuu = newChecker(buu)
	if cuu.CheckAndUpdate(upa) {
		t.Errorf("Failed to reject invalid predicate %v by checker %v", upa, cuu)
	}
}
// ParseObject attempts to parse an object. It tries, in order, to parse the
// provided string as a node, a literal, and finally a predicate.
func ParseObject(s string, b literal.Builder) (*Object, error) {
	n, err := node.Parse(s)
	if err != nil {
		l, err := b.Parse(s)
		if err != nil {
			o, err := predicate.Parse(s)
			if err != nil {
				return nil, err
			}
			return NewPredicateObject(o), nil
		}
		return NewLiteralObject(l), nil
	}
	return NewNodeObject(n), nil
}
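A usage sketch of the fallback order; the input strings mirror fixtures from the tests in this package:

	b := literal.DefaultBuilder()
	for _, s := range []string{
		"/foo<bar>",         // parses as a node.
		`"true"^^type:bool`, // fails as a node, parses as a literal.
		`"foo"@[]`,          // fails as node and literal, parses as a predicate.
	} {
		o, err := ParseObject(s, b)
		if err != nil {
			// handle err
		}
		fmt.Println(o)
	}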
func testNodeTemporalPredicateLiteral(t *testing.T) (*node.Node, *predicate.Predicate, *literal.Literal) {
	n, err := node.Parse(`/foo<bar>`)
	if err != nil {
		t.Fatal(err)
	}
	p, err := predicate.Parse(`"bar"@[1975-01-01T00:01:01.999999999Z]`)
	if err != nil {
		t.Fatal(err)
	}
	l, err := literal.DefaultBuilder().Parse(`"true"^^type:bool`)
	if err != nil {
		t.Fatal(err)
	}
	return n, p, l
}
func testNodePredicateLiteral(t *testing.T) (*node.Node, *predicate.Predicate, *literal.Literal) {
	n, err := node.Parse(`/foo<bar>`)
	if err != nil {
		t.Fatal(err)
	}
	p, err := predicate.Parse(`"foo"@[]`)
	if err != nil {
		t.Fatal(err)
	}
	l, err := literal.DefaultBuilder().Parse(`"true"^^type:bool`)
	if err != nil {
		t.Fatal(err)
	}
	return n, p, l
}
// inferCell builds a Cell out of the provided string.
func inferCell(s string) *table.Cell {
	if n, err := node.Parse(s); err == nil {
		return &table.Cell{N: n}
	}
	if p, err := predicate.Parse(s); err == nil {
		return &table.Cell{P: p}
	}
	if l, err := literal.DefaultBuilder().Parse(s); err == nil {
		return &table.Cell{L: l}
	}
	t, err := time.Parse(time.RFC3339Nano, s)
	if err == nil {
		return &table.Cell{T: &t}
	}
	return &table.Cell{S: table.CellString(s)}
}
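A usage sketch (same package assumed) of the inference order; the first parser that accepts the string decides the cell type:

	for _, s := range []string{
		"/foo<bar>",                           // node cell.
		`"foo"@[]`,                            // predicate cell.
		`"1"^^type:int64`,                     // literal cell.
		"2015-07-19T13:12:04.669618843-07:00", // time cell.
		"anything else",                       // falls through to a string cell.
	} {
		fmt.Println(inferCell(s))
	}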
// Parse processes the provided text and tries to create a triple. It assumes
// that the provided text contains only one triple.
func Parse(line string, b literal.Builder) (*Triple, error) {
	raw := strings.TrimSpace(line)
	idxp := pSplit.FindIndex([]byte(raw))
	idxo := oSplit.FindIndex([]byte(raw))
	if len(idxp) == 0 || len(idxo) == 0 {
		return nil, fmt.Errorf("triple.Parse could not split s p o out of %s", raw)
	}
	ss, sp, so := raw[0:idxp[0]+1], raw[idxp[1]-1:idxo[0]+1], raw[idxo[1]-1:]
	s, err := node.Parse(ss)
	if err != nil {
		return nil, fmt.Errorf("triple.Parse failed to parse subject %s with error %v", ss, err)
	}
	p, err := predicate.Parse(sp)
	if err != nil {
		return nil, fmt.Errorf("triple.Parse failed to parse predicate %s with error %v", sp, err)
	}
	o, err := ParseObject(so, b)
	if err != nil {
		return nil, fmt.Errorf("triple.Parse failed to parse object %s with error %v", so, err)
	}
	return New(s, p, o)
}
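A minimal usage sketch; that the subject, predicate, and object are whitespace-separated on a single line is an assumption based on the pSplit and oSplit regexps, which are not shown here:

	trpl, err := Parse(`/foo<bar> "foo"@[] "true"^^type:bool`, literal.DefaultBuilder())
	if err != nil {
		// handle err
	}
	fmt.Println(trpl)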
func TestWhereObjectClauseHook(t *testing.T) {
	st := &Statement{}
	f := whereObjectClause()
	st.ResetWorkingGraphClause()
	node, err := node.Parse("/_<foo>")
	if err != nil {
		t.Fatalf("node.Parse failed with error %v", err)
	}
	n := triple.NewNodeObject(node)
	pred, err := predicate.Parse(`"foo"@[2015-07-19T13:12:04.669618843-07:00]`)
	if err != nil {
		t.Fatalf("predicate.Parse failed with error %v", err)
	}
	p := triple.NewPredicateObject(pred)
	tlb, err := time.Parse(time.RFC3339Nano, `2015-07-19T13:12:04.669618843-07:00`)
	if err != nil {
		t.Fatalf("time.Parse failed to parse valid lower time bound with error %v", err)
	}
	tub, err := time.Parse(time.RFC3339Nano, `2016-07-19T13:12:04.669618843-07:00`)
	if err != nil {
		t.Fatalf("time.Parse failed to parse valid upper time bound with error %v", err)
	}
	l, err := triple.ParseObject(`"1"^^type:int64`, literal.DefaultBuilder())
	if err != nil {
		t.Fatalf("triple.ParseObject should never fail to parse %s with error %v", `"1"^^type:int64`, err)
	}
	runTabulatedClauseHookTest(t, "semantic.whereObjectClause", f, []testClauseTable{
		{
			valid: true,
			id:    "node_example",
			ces: []ConsumedElement{
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemNode, Text: "/_<foo>"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemAs, Text: "as"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemBinding, Text: "?bar"}),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemType, Text: "type"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemBinding, Text: "?bar2"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemID, Text: "id"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemBinding, Text: "?bar3"}),
				NewConsumedSymbol("FOO"),
			},
			want: &GraphClause{
				O:          n,
				OAlias:     "?bar",
				OTypeAlias: "?bar2",
				OIDAlias:   "?bar3",
			},
		},
		{
			valid: true,
			id:    "binding_example",
			ces: []ConsumedElement{
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemBinding, Text: "?foo"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemAs, Text: "as"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemBinding, Text: "?bar"}),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemType, Text: "type"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemBinding, Text: "?bar2"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemID, Text: "id"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemBinding, Text: "?bar3"}),
				NewConsumedSymbol("FOO"),
			},
			want: &GraphClause{
				OBinding:   "?foo",
				OAlias:     "?bar",
				OTypeAlias: "?bar2",
				OIDAlias:   "?bar3",
			},
		},
		{
			valid: true,
			id:    "valid predicate",
			ces: []ConsumedElement{
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemPredicate, Text: `"foo"@[2015-07-19T13:12:04.669618843-07:00]`}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemAs, Text: "as"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemBinding, Text: "?bar"}),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemID, Text: "id"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemBinding, Text: "?bar2"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemAt, Text: "at"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemBinding, Text: "?bar3"}),
				NewConsumedSymbol("FOO"),
			},
			want: &GraphClause{
				O:            p,
				OAlias:       "?bar",
				OIDAlias:     "?bar2",
				OAnchorAlias: "?bar3",
				OTemporal:    true,
			},
		},
		{
			valid: true,
			id:    "valid predicate with binding",
			ces: []ConsumedElement{
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemPredicate, Text: `"foo"@[?foo]`}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemAs, Text: "as"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemBinding, Text: "?bar"}),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemID, Text: "id"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemBinding, Text: "?bar2"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemAt, Text: "at"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemBinding, Text: "?bar3"}),
				NewConsumedSymbol("FOO"),
			},
			want: &GraphClause{
				OID:            "foo",
				OAnchorBinding: "?foo",
				OAlias:         "?bar",
				OIDAlias:       "?bar2",
				OAnchorAlias:   "?bar3",
				OTemporal:      true,
			},
		},
		{
			valid: true,
			id:    "valid bound with bindings",
			ces: []ConsumedElement{
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemPredicateBound, Text: `"foo"@[?fooLower,?fooUpper]`}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemAs, Text: "as"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemBinding, Text: "?bar"}),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemID, Text: "id"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemBinding, Text: "?bar2"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemAt, Text: "at"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemBinding, Text: "?bar3"}),
				NewConsumedSymbol("FOO"),
			},
			want: &GraphClause{
				OID:              "foo",
				OLowerBoundAlias: "?fooLower",
				OUpperBoundAlias: "?fooUpper",
				OAlias:           "?bar",
				OIDAlias:         "?bar2",
				OAnchorAlias:     "?bar3",
				OTemporal:        true,
			},
		},
		{
			valid: true,
			id:    "valid bound with dates",
			ces: []ConsumedElement{
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemPredicateBound, Text: `"foo"@[2015-07-19T13:12:04.669618843-07:00,2016-07-19T13:12:04.669618843-07:00]`}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemAs, Text: "as"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemBinding, Text: "?bar"}),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemID, Text: "id"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemBinding, Text: "?bar2"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemAt, Text: "at"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemBinding, Text: "?bar3"}),
				NewConsumedSymbol("FOO"),
			},
			want: &GraphClause{
				OID:          "foo",
				OLowerBound:  &tlb,
				OUpperBound:  &tub,
				OAlias:       "?bar",
				OIDAlias:     "?bar2",
				OAnchorAlias: "?bar3",
				OTemporal:    true,
			},
		},
		{
			valid: false,
			id:    "invalid bound with dates",
			ces: []ConsumedElement{
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemPredicateBound, Text: `"foo"@[2016-07-19T13:12:04.669618843-07:00,2015-07-19T13:12:04.669618843-07:00]`}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemAs, Text: "as"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemBinding, Text: "?bar"}),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemID, Text: "id"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemBinding, Text: "?bar2"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemAt, Text: "at"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemBinding, Text: "?bar3"}),
				NewConsumedSymbol("FOO"),
			},
			want: &GraphClause{},
		},
		{
			valid: true,
			id:    "literal with alias",
			ces: []ConsumedElement{
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemLiteral, Text: `"1"^^type:int64`}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemAs, Text: "as"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemBinding, Text: "?bar"}),
				NewConsumedSymbol("FOO"),
			},
			want: &GraphClause{O: l, OAlias: "?bar"},
		},
	})
}
func TestWherePredicateClauseHook(t *testing.T) {
	st := &Statement{}
	f := wherePredicateClause()
	st.ResetWorkingGraphClause()
	p, err := predicate.Parse(`"foo"@[2015-07-19T13:12:04.669618843-07:00]`)
	if err != nil {
		t.Fatalf("predicate.Parse failed with error %v", err)
	}
	tlb, err := time.Parse(time.RFC3339Nano, `2015-07-19T13:12:04.669618843-07:00`)
	if err != nil {
		t.Fatalf("time.Parse failed to parse valid lower time bound with error %v", err)
	}
	tub, err := time.Parse(time.RFC3339Nano, `2016-07-19T13:12:04.669618843-07:00`)
	if err != nil {
		t.Fatalf("time.Parse failed to parse valid upper time bound with error %v", err)
	}
	runTabulatedClauseHookTest(t, "semantic.wherePredicateClause", f, []testClauseTable{
		{
			valid: true,
			id:    "valid predicate",
			ces: []ConsumedElement{
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemPredicate, Text: `"foo"@[2015-07-19T13:12:04.669618843-07:00]`}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemAs, Text: "as"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemBinding, Text: "?bar"}),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemID, Text: "id"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemBinding, Text: "?bar2"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemAt, Text: "at"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemBinding, Text: "?bar3"}),
				NewConsumedSymbol("FOO"),
			},
			want: &GraphClause{
				P:            p,
				PAlias:       "?bar",
				PIDAlias:     "?bar2",
				PAnchorAlias: "?bar3",
				PTemporal:    true,
			},
		},
		{
			valid: true,
			id:    "valid predicate with binding",
			ces: []ConsumedElement{
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemPredicate, Text: `"foo"@[?foo]`}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemAs, Text: "as"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemBinding, Text: "?bar"}),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemID, Text: "id"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemBinding, Text: "?bar2"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemAt, Text: "at"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemBinding, Text: "?bar3"}),
				NewConsumedSymbol("FOO"),
			},
			want: &GraphClause{
				PID:            "foo",
				PAnchorBinding: "?foo",
				PAlias:         "?bar",
				PIDAlias:       "?bar2",
				PAnchorAlias:   "?bar3",
				PTemporal:      true,
			},
		},
		{
			valid: true,
			id:    "valid bound with bindings",
			ces: []ConsumedElement{
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemPredicateBound, Text: `"foo"@[?fooLower,?fooUpper]`}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemAs, Text: "as"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemBinding, Text: "?bar"}),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemID, Text: "id"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemBinding, Text: "?bar2"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemAt, Text: "at"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemBinding, Text: "?bar3"}),
				NewConsumedSymbol("FOO"),
			},
			want: &GraphClause{
				PID:              "foo",
				PLowerBoundAlias: "?fooLower",
				PUpperBoundAlias: "?fooUpper",
				PAlias:           "?bar",
				PIDAlias:         "?bar2",
				PAnchorAlias:     "?bar3",
				PTemporal:        true,
			},
		},
		{
			valid: true,
			id:    "valid bound with dates",
			ces: []ConsumedElement{
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemPredicateBound, Text: `"foo"@[2015-07-19T13:12:04.669618843-07:00,2016-07-19T13:12:04.669618843-07:00]`}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemAs, Text: "as"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemBinding, Text: "?bar"}),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemID, Text: "id"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemBinding, Text: "?bar2"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemAt, Text: "at"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemBinding, Text: "?bar3"}),
				NewConsumedSymbol("FOO"),
			},
			want: &GraphClause{
				PID:          "foo",
				PLowerBound:  &tlb,
				PUpperBound:  &tub,
				PAlias:       "?bar",
				PIDAlias:     "?bar2",
				PAnchorAlias: "?bar3",
				PTemporal:    true,
			},
		},
		{
			valid: false,
			id:    "invalid bound with dates",
			ces: []ConsumedElement{
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemPredicateBound, Text: `"foo"@[2016-07-19T13:12:04.669618843-07:00,2015-07-19T13:12:04.669618843-07:00]`}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemAs, Text: "as"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemBinding, Text: "?bar"}),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemID, Text: "id"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemBinding, Text: "?bar2"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemAt, Text: "at"}),
				NewConsumedSymbol("FOO"),
				NewConsumedToken(&lexer.Token{Type: lexer.ItemBinding, Text: "?bar3"}),
				NewConsumedSymbol("FOO"),
			},
			want: &GraphClause{},
		},
	})
}