// newViewPrimaryIndex creates a view-based primary index keyed on META().id,
// stores its design document, and waits for the index to come online.
func newViewPrimaryIndex(v *viewIndexer, name string) (*primaryIndex, error) {
	ddoc := newPrimaryDDoc(name)
	doc := expression.NewIdentifier(v.keyspace.Name())
	meta := expression.NewMeta(doc)
	mdid := expression.NewField(meta, expression.NewFieldName("id", false))

	inst := primaryIndex{
		viewIndex{
			name:      name,
			using:     datastore.VIEW,
			on:        datastore.IndexKey{mdid},
			ddoc:      ddoc,
			keyspace:  v.keyspace,
			view:      v,
			isPrimary: true,
		},
	}

	err := inst.putDesignDoc()
	if err != nil {
		return nil, err
	}

	err = inst.WaitForIndex()
	if err != nil {
		return nil, err
	}

	return &inst, nil
}
// buildScan chooses the access path for a keyspace term: it prefers a
// secondary index scan when the WHERE predicate is sargable against an
// available index, and otherwise falls back to a primary scan.
func (this *builder) buildScan(keyspace datastore.Keyspace, node *algebra.KeyspaceTerm, limit expression.Expression) (
	secondary plan.Operator, primary *plan.PrimaryScan, err error) {

	var indexes, hintIndexes, otherIndexes []datastore.Index
	hints := node.Indexes()
	if hints != nil {
		indexes, err = allHints(keyspace, hints)
		hintIndexes = indexes
	} else {
		indexes, err = allIndexes(keyspace)
		otherIndexes = indexes
	}

	if err != nil {
		return
	}

	pred := this.where
	if pred != nil {
		dnf := NewDNF()
		pred = pred.Copy()
		pred, err = dnf.Map(pred)
		if err != nil {
			return
		}

		formalizer := expression.NewFormalizer()
		formalizer.Keyspace = node.Alias()

		primaryKey := expression.Expressions{
			expression.NewField(
				expression.NewMeta(expression.NewConstant(node.Alias())),
				expression.NewFieldName("id", false)),
		}

		sargables, er := sargableIndexes(indexes, pred, primaryKey, dnf, formalizer)
		if er != nil {
			return nil, nil, er
		}

		minimals, er := minimalIndexes(sargables, pred)
		if er != nil {
			return nil, nil, er
		}

		if len(minimals) > 0 {
			secondary, err = this.buildSecondaryScan(minimals, node, limit)
			return secondary, nil, err
		}
	}

	primary, err = this.buildPrimaryScan(keyspace, node, limit, hintIndexes, otherIndexes)
	return nil, primary, err
}
// buildJoinScan finds an index that can drive an index join or nest on the
// term's ON KEYS expression, returning an error when no suitable index exists.
func (this *builder) buildJoinScan(keyspace datastore.Keyspace, node *algebra.KeyspaceTerm, op string) (
	datastore.Index, expression.Covers, error) {

	indexes, err := allIndexes(keyspace)
	if err != nil {
		return nil, nil, err
	}

	var pred expression.Expression
	pred = expression.NewIsNotNull(node.Keys().Copy())
	dnf := NewDNF()
	pred, err = dnf.Map(pred)
	if err != nil {
		return nil, nil, err
	}

	subset := pred
	if this.where != nil {
		subset = expression.NewAnd(subset, this.where.Copy())
		subset, err = dnf.Map(subset)
		if err != nil {
			return nil, nil, err
		}
	}

	formalizer := expression.NewFormalizer()
	formalizer.Keyspace = node.Alias()

	primaryKey := expression.Expressions{
		expression.NewField(
			expression.NewMeta(expression.NewConstant(node.Alias())),
			expression.NewFieldName("id", false)),
	}

	sargables, err := sargableIndexes(indexes, pred, subset, primaryKey, dnf, formalizer)
	if err != nil {
		return nil, nil, err
	}

	minimals, err := minimalIndexes(sargables, pred)
	if err != nil {
		return nil, nil, err
	}

	if len(minimals) == 0 {
		return nil, nil, errors.NewNoIndexJoinError(node.Alias(), op)
	}

	return this.buildCoveringJoinScan(minimals, node, op)
}
func NewIndexNest(keyspace datastore.Keyspace, nest *algebra.IndexNest,
	index datastore.Index, covers expression.Covers) *IndexNest {
	rv := &IndexNest{
		keyspace: keyspace,
		term:     nest.Right(),
		outer:    nest.Outer(),
		keyFor:   nest.For(),
		index:    index,
		covers:   covers,
	}

	rv.idExpr = expression.NewField(
		expression.NewMeta(expression.NewIdentifier(rv.keyFor)),
		expression.NewFieldName("id", false))
	return rv
}
// buildCoveringScan returns an index scan over the first secondary index whose
// keys cover every expression the query needs; when the scan has multiple
// spans it is wrapped in a UnionScan to de-duplicate results.
func (this *builder) buildCoveringScan(secondaries map[datastore.Index]*indexEntry,
	node *algebra.KeyspaceTerm, limit expression.Expression) (plan.Operator, error) {
	if this.cover == nil {
		return nil, nil
	}

	alias := node.Alias()
	exprs := this.cover.Expressions()

	id := expression.NewField(
		expression.NewMeta(expression.NewIdentifier(node.Alias())),
		expression.NewFieldName("id", false))

outer:
	for index, entry := range secondaries {
		keys := entry.keys
		if !index.IsPrimary() {
			// Matches execution.spanScan.RunOnce()
			keys = append(keys, id)
		}

		// Use the first available covering index
		for _, expr := range exprs {
			if !expr.CoveredBy(alias, keys) {
				continue outer
			}
		}

		covers := make(expression.Covers, 0, len(keys))
		for _, key := range keys {
			covers = append(covers, expression.NewCover(key))
		}

		scan := plan.NewIndexScan(index, node, entry.spans, false, limit, covers)
		this.coveringScan = scan

		if len(entry.spans) > 1 {
			// Use UnionScan to de-dup multiple spans
			return plan.NewUnionScan(scan), nil
		}

		return scan, nil
	}

	return nil, nil
}
// loadViewIndexes enumerates the bucket's design documents and reconstructs
// the usable view indexes, recording any that cannot be used (dev views,
// expression parse failures, checksum mismatches).
func loadViewIndexes(v *viewIndexer) ([]*datastore.Index, error) {
	b := v.keyspace
	rows, err := b.cbbucket.GetDDocsWithRetry()
	if err != nil {
		return nil, err
	}

	inames := make([]string, 0, len(rows.Rows))
	nonUsableIndexes := make([]string, 0)

	for _, row := range rows.Rows {
		cdoc := row.DDoc
		id := cdoc.Meta["id"].(string)
		if strings.HasPrefix(id, "_design/ddl_") {
			iname := strings.TrimPrefix(id, "_design/ddl_")
			inames = append(inames, iname)
		} else if strings.HasPrefix(id, "_design/dev_") {
			// append this to the list of non-usable indexes
			iname := strings.TrimPrefix(id, "_design/dev_")
			for _, name := range v.nonUsableIndexes {
				if iname == name {
					continue
				}
			}
			nonUsableIndexes = append(nonUsableIndexes, iname)
		} else if strings.HasPrefix(id, "_design/") {
			iname := strings.TrimPrefix(id, "_design/")
			for _, name := range v.nonUsableIndexes {
				if iname == name {
					continue
				}
			}
			nonUsableIndexes = append(nonUsableIndexes, iname)
		}
	}

	indexes := make([]*datastore.Index, 0, len(inames))
	for _, iname := range inames {
		ddname := "ddl_" + iname
		jdoc, err := getDesignDoc(b, ddname)
		if err != nil {
			return nil, err
		}

		jview, ok := jdoc.Views[iname]
		if !ok {
			nonUsableIndexes = append(nonUsableIndexes, iname)
			logging.Errorf("Missing view for index %v ", iname)
			continue
		}

		exprlist := make([]expression.Expression, 0, len(jdoc.IndexOn))

		for _, ser := range jdoc.IndexOn {
			if jdoc.PrimaryIndex == true {
				doc := expression.NewIdentifier(b.Name())
				meta := expression.NewMeta(doc)
				mdid := expression.NewField(meta, expression.NewFieldName("id", false))
				exprlist = append(exprlist, mdid)
			} else {
				expr, err := parser.Parse(ser)
				if err != nil {
					nonUsableIndexes = append(nonUsableIndexes, iname)
					logging.Errorf("Cannot unmarshal expression for index %v", iname)
					continue
				}
				exprlist = append(exprlist, expr)
			}
		}

		if len(exprlist) != len(jdoc.IndexOn) {
			continue
		}

		var conditionExpr expression.Expression
		if jdoc.Condition != "" {
			conditionExpr, err = parser.Parse(jdoc.Condition)
			if err != nil {
				logging.Errorf("Unable to parse condition expression. Err %v", err)
				continue
			}
		}

		ddoc := designdoc{
			name:     ddname,
			viewname: iname,
			mapfn:    jview.Map,
			reducefn: jview.Reduce,
		}
		if ddoc.checksum() != jdoc.IndexChecksum {
			nonUsableIndexes = append(nonUsableIndexes, iname)
			logging.Errorf("Warning - checksum failed on index %v", iname)
			continue
		}

		var index datastore.Index
		if jdoc.PrimaryIndex == true {
			index = &primaryIndex{
				viewIndex{
					name:      iname,
					keyspace:  b,
					view:      v,
					using:     datastore.VIEW,
					ddoc:      &ddoc,
					on:        exprlist,
					where:     conditionExpr,
					isPrimary: jdoc.PrimaryIndex,
				},
			}
		} else {
			index = &viewIndex{
				name:      iname,
				keyspace:  b,
				view:      v,
				using:     datastore.VIEW,
				ddoc:      &ddoc,
				on:        exprlist,
				where:     conditionExpr,
				isPrimary: jdoc.PrimaryIndex,
			}
		}

		indexes = append(indexes, &index)
	}

	v.nonUsableIndexes = nonUsableIndexes

	if len(indexes) == 0 {
		return nil, nil
	}

	return indexes, nil
}
func TestConverter(t *testing.T) {
	s1 := NewJSConverter().Visit(
		expression.NewLT(constant("a"), constant("b")))
	s2 := "(\"a\" < \"b\")"
	if s1 != s2 {
		t.Errorf(" mismatch s1 %s s2 %s", s1, s2)
	}

	s1 = NewJSConverter().Visit(
		expression.NewBetween(constant("a"), constant("b"), constant("c")))
	s2 = "(\"a\" > \"b\" && \"a\" < \"c\")"
	if s1 != s2 {
		t.Errorf(" mismatch s1 %s s2 %s", s1, s2)
	}

	s1 = NewJSConverter().Visit(expression.NewAdd(
		expression.NewSub(constant("a"), constant("b")),
		expression.NewDiv(constant("a"), constant("b"))))
	s2 = "((\"a\" - \"b\") + (\"a\" / \"b\"))"
	if s1 != s2 {
		t.Errorf(" mismatch s1 %s s2 %s", s1, s2)
	}

	s1 = NewJSConverter().Visit(expression.NewLength(constant("abc")))
	s2 = "\"abc\".length"
	if s1 != s2 {
		t.Errorf(" mismatch s1 %s s2 %s", s1, s2)
	}

	s1 = NewJSConverter().Visit(expression.NewUpper(constant("abc")))
	s2 = "\"abc\".toUpperCase()"
	if s1 != s2 {
		t.Errorf(" mismatch s1 %s s2 %s", s1, s2)
	}

	s1 = NewJSConverter().Visit(expression.NewStrToMillis(constant("Wed, 09 Aug 1995 00:00:00")))
	s2 = "Date.parse(\"Wed, 09 Aug 1995 00:00:00\")"
	if s1 != s2 {
		t.Errorf(" mismatch s1 %s s2 %s", s1, s2)
	}

	s1 = NewJSConverter().Visit(expression.NewContains(constant("dfgabc"), constant("abc")))
	s2 = "\"dfgabc\".indexOf(\"abc\")"
	if s1 != s2 {
		t.Errorf(" mismatch s1 %s s2 %s", s1, s2)
	}

	s1 = NewJSConverter().Visit(expression.NewSubstr(constant("dfgabc"), constant(1), constant(4)))
	s2 = "\"dfgabc\".substring(1,4)"
	if s1 != s2 {
		t.Errorf(" mismatch s1 %s s2 %s", s1, s2)
	}

	s1 = NewJSConverter().Visit(expression.NewAdd(
		expression.NewContains(constant("dfgabc"), constant("abc")),
		expression.NewSubstr(constant("dfgabc"), constant(1), constant(4))))
	s2 = "(\"dfgabc\".indexOf(\"abc\") + \"dfgabc\".substring(1,4))"
	if s1 != s2 {
		t.Errorf(" mismatch s1 %s s2 %s", s1, s2)
	}

	doc := expression.NewIdentifier("bucket")
	m1 := expression.NewField(doc, expression.NewFieldName("id", false))
	m2 := expression.NewField(doc, expression.NewFieldName("type", false))

	s1 = NewJSConverter().Visit(expression.NewOr(
		expression.NewUpper(m1), expression.NewLower(m2)))
	s2 = "(`bucket`.`id`.toUpperCase() || `bucket`.`type`.toLowerCase())"
	if s1 != s2 {
		t.Errorf(" mismatch s1 %s s2 %s", s1, s2)
	}

	doc = expression.NewIdentifier("bucket")
	m1 = expression.NewField(doc, expression.NewFieldName("geo", false))
	m2 = expression.NewField(m1, expression.NewFieldName("accuracy", false))

	s1 = NewJSConverter().Visit(m2)
	s2 = "`bucket`.`geo`.`accuracy`"
	if s1 != s2 {
		t.Errorf(" mismatch s1 %s s2 %s", s1, s2)
	}

	doc = expression.NewIdentifier("bucket")
	m1 = expression.NewField(doc,
		expression.NewElement(expression.NewFieldName("address", false), constant(0)))

	s1 = NewJSConverter().Visit(m1)
	s2 = "`bucket`.`address`[0]"
	if s1 != s2 {
		t.Errorf(" mismatch s1 %s s2 %s", s1, s2)
	}

	s1 = NewJSConverter().Visit(expression.NewLength(
		expression.NewElement(doc, expression.NewFieldName("type", false))))
	s2 = "`bucket`[`type`].length"
	if s1 != s2 {
		t.Errorf(" mismatch s1 %s s2 %s", s1, s2)
	}
}
// UnmarshalJSON rebuilds an IndexNest operator from its marshaled plan
// representation, resolving the keyspace, indexer, and index by name.
func (this *IndexNest) UnmarshalJSON(body []byte) error {
	var _unmarshalled struct {
		_     string `json:"#operator"`
		Names string `json:"namespace"`
		Keys  string `json:"keyspace"`
		On    string `json:"on_key"`
		Outer bool   `json:"outer"`
		As    string `json:"as"`
		For   string `json:"for"`
		Scan  struct {
			Index  string              `json:"index"`
			Using  datastore.IndexType `json:"using"`
			Covers []string            `json:"covers"`
		} `json:"scan"`
	}

	err := json.Unmarshal(body, &_unmarshalled)
	if err != nil {
		return err
	}

	var keys_expr expression.Expression
	if _unmarshalled.On != "" {
		keys_expr, err = parser.Parse(_unmarshalled.On)
		if err != nil {
			return err
		}
	}

	this.outer = _unmarshalled.Outer
	this.keyFor = _unmarshalled.For
	this.idExpr = expression.NewField(
		expression.NewMeta(expression.NewIdentifier(this.keyFor)),
		expression.NewFieldName("id", false))
	this.term = algebra.NewKeyspaceTerm(_unmarshalled.Names, _unmarshalled.Keys,
		nil, _unmarshalled.As, keys_expr, nil)

	this.keyspace, err = datastore.GetKeyspace(_unmarshalled.Names, _unmarshalled.Keys)
	if err != nil {
		return err
	}

	indexer, err := this.keyspace.Indexer(_unmarshalled.Scan.Using)
	if err != nil {
		return err
	}

	this.index, err = indexer.IndexByName(_unmarshalled.Scan.Index)
	if err != nil {
		return err
	}

	if _unmarshalled.Scan.Covers != nil {
		this.covers = make(expression.Covers, len(_unmarshalled.Scan.Covers))
		for i, c := range _unmarshalled.Scan.Covers {
			expr, err := parser.Parse(c)
			if err != nil {
				return err
			}

			this.covers[i] = expression.NewCover(expr)
		}
	}

	return nil
}