// A scanner to read from data source func NewSourceJoin(leftFrom, rightFrom *expr.SqlSource, conf *RuntimeConfig) (*SourceJoin, error) { m := &SourceJoin{ TaskBase: NewTaskBase("SourceJoin"), } m.TaskBase.TaskType = m.Type() m.leftStmt = leftFrom m.rightStmt = rightFrom source := conf.Conn(leftFrom.Name) u.Debugf("source: %T", source) // Must provider either Scanner, and or Seeker interfaces if scanner, ok := source.(datasource.Scanner); !ok { u.Errorf("Could not create scanner for %v %T %#v", leftFrom.Name, source, source) return nil, fmt.Errorf("Must Implement Scanner") } else { m.leftSource = scanner } source2 := conf.Conn(rightFrom.Name) u.Debugf("source right: %T", source2) // Must provider either Scanner, and or Seeker interfaces if scanner, ok := source2.(datasource.Scanner); !ok { u.Errorf("Could not create scanner for %v %T %#v", leftFrom.Name, source2, source2) return nil, fmt.Errorf("Must Implement Scanner") } else { m.rightSource = scanner } return m, nil }
// TestExecHaving runs a GROUP BY + HAVING query end-to-end through the
// exec job pipeline and verifies that only users having more than one
// order survive the HAVING filter.
func TestExecHaving(t *testing.T) {
	sqlText := `
		select
			user_id, count(user_id) AS order_ct
		FROM orders
		GROUP BY user_id
		HAVING order_ct > 1
	`
	ctx := td.TestContext(sqlText)
	job, err := exec.BuildSqlJob(ctx)
	assert.Tf(t, err == nil, "no error %v", err)

	// Collect all output messages into msgs via a result buffer sink.
	msgs := make([]schema.Message, 0)
	resultWriter := exec.NewResultBuffer(ctx, &msgs)
	job.RootTask.Add(resultWriter)

	err = job.Setup()
	assert.T(t, err == nil)
	err = job.Run()
	// NOTE(review): sleep presumably lets async tasks drain — flaky by design.
	time.Sleep(time.Millisecond * 10)
	assert.Tf(t, err == nil, "no error %v", err)

	// Only one user has > 1 order in the fixture data.
	assert.Tf(t, len(msgs) == 1, "should have filtered HAVING orders into 1 users %v", len(msgs))
	u.Debugf("msg: %#v", msgs[0])
	row := msgs[0].(*datasource.SqlDriverMessageMap).Values()
	u.Debugf("row: %#v", row)
	assert.Tf(t, len(row) == 2, "expects 2 cols but got %v", len(row))
	assert.T(t, row[0] == "9Ip1aKbeZe2njCDM")
	// I really don't like this float behavior?
	assert.Tf(t, int(row[1].(int64)) == 2, "expected 2 orders for %v", row)
}
func (m *Upsert) Run(ctx *expr.Context) error { defer ctx.Recover() defer close(m.msgOutCh) var err error var affectedCt int64 switch { case m.insert != nil: //u.Debugf("Insert.Run(): %v %#v", len(m.insert.Rows), m.insert) affectedCt, err = m.insertRows(ctx, m.insert.Rows) case m.upsert != nil && len(m.upsert.Rows) > 0: u.Debugf("Upsert.Run(): %v %#v", len(m.upsert.Rows), m.upsert) affectedCt, err = m.insertRows(ctx, m.upsert.Rows) case m.update != nil: u.Debugf("Update.Run() %s", m.update.String()) affectedCt, err = m.updateValues(ctx) default: u.Warnf("unknown mutation op? %v", m) } if err != nil { return err } vals := make([]driver.Value, 2) vals[0] = int64(0) // status? vals[1] = affectedCt m.msgOutCh <- &datasource.SqlDriverMessage{vals, 1} return nil }
// Get connection for given Database // // @db database name // func (m *RuntimeConfig) Conn(db string) SourceConn { if m.connInfo == "" { u.Debugf("RuntimeConfig.Conn(db='%v') // connInfo='%v'", db, m.connInfo) if source := m.Sources.Get(strings.ToLower(db)); source != nil { u.Debugf("found source: db=%s %T", db, source) conn, err := source.Open(db) if err != nil { u.Errorf("could not open data source: %v %v", db, err) return nil } //u.Infof("source: %T %#v", conn, conn) return conn } else { u.Errorf("DataSource(%s) was not found", db) } } else { u.Debugf("No Conn? RuntimeConfig.Conn(db='%v') // connInfo='%v'", db, m.connInfo) // We have connection info, likely sq/driver source := m.DataSource(m.connInfo) //u.Infof("source=%v about to call Conn() db='%v'", source, db) conn, err := source.Open(db) if err != nil { u.Errorf("could not open data source: %v %v", db, err) return nil } return conn } return nil }
func TestExecGroupBy(t *testing.T) { // TODO: this test is bad, it occasionally fails sqlText := ` select user_id, count(user_id), avg(price) FROM orders GROUP BY user_id ` ctx := td.TestContext(sqlText) job, err := exec.BuildSqlJob(ctx) assert.Tf(t, err == nil, "no error %v", err) msgs := make([]schema.Message, 0) resultWriter := exec.NewResultBuffer(ctx, &msgs) job.RootTask.Add(resultWriter) err = job.Setup() assert.T(t, err == nil) err = job.Run() time.Sleep(time.Millisecond * 10) assert.Tf(t, err == nil, "no error %v", err) assert.Tf(t, len(msgs) == 2, "should have grouped orders into 2 users %v", len(msgs)) u.Debugf("msg: %#v", msgs[0]) row := msgs[0].(*datasource.SqlDriverMessageMap).Values() u.Debugf("row: %#v", row) assert.Tf(t, len(row) == 3, "expects 3 cols but got %v", len(row)) assert.T(t, row[0] == "9Ip1aKbeZe2njCDM", "%#v", row) // I really don't like this float behavior? assert.Tf(t, int(row[1].(int64)) == 2, "expected 2 orders for %v", row) assert.Tf(t, int(row[2].(float64)) == 30, "expected avg=30 for price %v", row) sqlText = ` select avg(len(email)) FROM users GROUP BY "-" ` ctx = td.TestContext(sqlText) job, err = exec.BuildSqlJob(ctx) assert.Tf(t, err == nil, "no error %v", err) msgs = make([]schema.Message, 0) resultWriter = exec.NewResultBuffer(ctx, &msgs) job.RootTask.Add(resultWriter) err = job.Setup() assert.T(t, err == nil) err = job.Run() time.Sleep(time.Millisecond * 10) assert.Tf(t, err == nil, "no error %v", err) assert.Tf(t, len(msgs) == 1, "should have grouped orders into 1 record %v", len(msgs)) u.Debugf("msg: %#v", msgs[0]) row = msgs[0].(*datasource.SqlDriverMessageMap).Values() u.Debugf("row: %#v", row) assert.Tf(t, len(row) == 1, "expects 1 cols but got %v", len(row)) assert.Tf(t, int(row[0].(float64)) == 13, "expected avg(len(email))=15 for %v", int(row[0].(float64))) }
// MultiNode evaluator
//
//	A IN (b,c,d)
//
// Evaluates the left-hand value once, then checks membership either
// against a slice-valued identity (fast path) or by evaluating each
// right-hand argument in turn. Only IN is supported.
func walkMulti(ctx expr.EvalContext, node *expr.MultiArgNode) (value.Value, bool) {
	a, aok := Eval(ctx, node.Args[0])
	//u.Debugf("multi: %T:%v %v", a, a, node.Operator)
	if !aok || a == nil || a.Type() == value.NilType {
		// this is expected, most likely to missing data to operate on
		//u.Debugf("Could not evaluate args, %#v", node.Args[0])
		return value.BoolValueFalse, false
	}
	if node.Operator.T != lex.TokenIN {
		u.Warnf("walk multiarg not implemented for node type %#v", node)
		return value.NilValueVal, false
	}

	// Support `"literal" IN identity`
	if len(node.Args) == 2 && node.Args[1].NodeType() == expr.IdentityNodeType {
		ident := node.Args[1].(*expr.IdentityNode)
		mval, ok := walkIdentity(ctx, ident)
		if !ok {
			// Failed to lookup ident — treated as a definite non-match.
			return value.BoolValueFalse, true
		}
		sval, ok := mval.(value.Slice)
		if !ok {
			u.Debugf("expected slice but received %T", mval)
			return value.BoolValueFalse, false
		}
		for _, val := range sval.SliceValue() {
			match, err := value.Equal(val, a)
			if err != nil {
				// Couldn't compare values — skip this element, keep scanning.
				u.Debugf("IN: couldn't compare %s and %s", val, a)
				continue
			}
			if match {
				return value.BoolValueTrue, true
			}
		}
		// No match, return false
		return value.BoolValueFalse, true
	}

	// General case: evaluate each right-hand arg and compare for equality;
	// unevaluable args are skipped rather than failing the whole expression.
	for i := 1; i < len(node.Args); i++ {
		v, ok := Eval(ctx, node.Args[i])
		if ok && v != nil {
			//u.Debugf("in? %v %v", a, v)
			if eq, err := value.Equal(a, v); eq && err == nil {
				return value.NewBoolValue(true), true
			}
		} else {
			//u.Debugf("could not evaluate arg: %v", node.Args[i])
		}
	}
	return value.BoolValueFalse, true
}
// updateValues applies the UPDATE statement's SET values, preferring a
// source-side PatchWhere when the backend supports it; otherwise it
// falls back to a single Put keyed from the WHERE clause.
//
// Returns the number of affected rows.
func (m *Upsert) updateValues(ctx *expr.Context) (int64, error) {
	// Bail out immediately if a shutdown signal is already pending.
	select {
	case <-m.SigChan():
		return 0, nil
	default:
		// fall through
	}

	// Materialize SET values: expressions are evaluated here (with a nil
	// read context), literals are passed through.
	valmap := make(map[string]driver.Value, len(m.update.Values))
	for key, valcol := range m.update.Values {
		//u.Debugf("key:%v val:%v", key, valcol)
		// TODO: #13  Need a way of expressing which layer (here, db) this expr should run in?
		//  - ie, run in backend datasource?  or here?  translate the expr to native language
		if valcol.Expr != nil {
			exprVal, ok := vm.Eval(nil, valcol.Expr)
			if !ok {
				u.Errorf("Could not evaluate: %s", valcol.Expr)
				return 0, fmt.Errorf("Could not evaluate expression: %v", valcol.Expr)
			}
			valmap[key] = exprVal.Value()
		} else {
			u.Debugf("%T %v", valcol.Value.Value(), valcol.Value.Value())
			valmap[key] = valcol.Value.Value()
		}
		//u.Debugf("key:%v col: %v vals:%v", key, valcol, valmap[key])
	}

	// if our backend source supports Where-Patches, ie update multiple
	dbpatch, ok := m.db.(datasource.PatchWhere)
	if ok {
		updated, err := dbpatch.PatchWhere(ctx, m.update.Where, valmap)
		u.Infof("patch: %v %v", updated, err)
		if err != nil {
			return updated, err
		}
		return updated, nil
	}

	// TODO:  If it does not implement Where Patch then we need to do a poly fill
	//      Do we have to recognize if the Where is on a primary key?
	// - for sources/queries that can't do partial updates we need to do a read first

	//u.Infof("does not implement PatchWhere")

	// Create a key from Where — assumes the WHERE clause identifies a
	// single row by key; multi-row updates are not polyfilled here.
	key := datasource.KeyFromWhere(m.update.Where)
	//u.Infof("key: %v", key)
	if _, err := m.db.Put(ctx, key, valmap); err != nil {
		u.Errorf("Could not put values: %v", err)
		return 0, err
	}
	u.Debugf("returning 1")
	return 1, nil
}
func parseFilterQlTest(t *testing.T, ql string) { u.Debugf("before: %s", ql) req, err := ParseFilterQL(ql) //u.Debugf("parse filter %#v %s", req, ql) assert.Tf(t, err == nil && req != nil, "Must parse: %s \n\t%v", ql, err) req2, err := ParseFilterQL(req.String()) assert.Tf(t, err == nil, "must parse roundtrip %v", err) req.Raw = "" req2.Raw = "" u.Debugf("after: %s", req2.String()) assert.Equal(t, req, req2, "must roundtrip") }
func encodeExpected( t *testing.T, label string, val interface{}, wantStr string, wantErr error, ) { var buf bytes.Buffer enc := NewEncoder(&buf) err := enc.Encode(val) if err != wantErr { if wantErr != nil { if wantErr == errAnything && err != nil { return } t.Errorf("%s: want Encode error %v, got %v", label, wantErr, err) } else { t.Errorf("%s: Encode failed: %s", label, err) } } if err != nil { return } if got := buf.String(); wantStr != got { u.Debugf("\n\n%s wanted: \n%s\ngot: \n%s", label, wantStr, got) for pos, r := range wantStr { if len(got)-1 <= pos { u.Warnf("len mismatch? %v vs %v", len(got), len(wantStr)) } else if r != rune(got[pos]) { u.Warnf("mismatch at position: %v %s!=%s", pos, string(r), string(got[pos])) break } } t.Fatalf("%s: want\n-----\n%q\n-----\nbut got\n-----\n%q\n-----\n", label, wantStr, got) } }
func walkUnary(ctx expr.EvalContext, node *expr.UnaryNode) (value.Value, bool) { a, ok := Eval(ctx, node.Arg) if !ok { if node.Operator.T == lex.TokenExists { return value.NewBoolValue(false), true } u.Debugf("urnary could not evaluate %#v", node) return a, false } switch node.Operator.T { case lex.TokenNegate: switch argVal := a.(type) { case value.BoolValue: //u.Infof("found urnary bool: res=%v expr=%v", !argVal.v, node.StringAST()) return value.NewBoolValue(!argVal.Val()), true default: //u.Errorf("urnary type not implementedUnknonwn node type: %T", argVal) panic(ErrUnknownNodeType) } case lex.TokenMinus: if an, aok := a.(value.NumericValue); aok { return value.NewNumberValue(-an.Float()), true } case lex.TokenExists: return value.NewBoolValue(true), true default: u.Warnf("urnary not implemented for type %s %#v", node.Operator.T.String(), node) } return value.NewNilValue(), false }
// given connection info, get datasource // @connInfo = csv:///dev/stdin // mockcsv func (m *RuntimeConfig) DataSource(connInfo string) DataSource { // if mysql.tablename allow that convention //u.Debugf("get datasource: conn=%v ", connInfo) //parts := strings.SplitN(from, ".", 2) sourceType := "" if len(connInfo) > 0 { switch { // case strings.HasPrefix(name, "file://"): // name = name[len("file://"):] case strings.HasPrefix(connInfo, "csv://"): sourceType = "csv" m.db = connInfo[len("csv://"):] case strings.Contains(connInfo, "://"): strIdx := strings.Index(connInfo, "://") sourceType = connInfo[0:strIdx] m.db = connInfo[strIdx+3:] default: sourceType = connInfo } } sourceType = strings.ToLower(sourceType) //u.Debugf("source: %v", sourceType) if source := m.Sources.Get(sourceType); source != nil { u.Debugf("source: %T", source) return source } else { u.Errorf("DataSource(conn) was not found: '%v'", sourceType) } return nil }
func TestNumberParse(t *testing.T) { for _, test := range numberTests { n, err := expr.NewNumber(0, test.text) ok := test.isInt || test.isFloat if ok && err != nil { t.Errorf("unexpected error for %q: %s", test.text, err) continue } if !ok && err == nil { t.Errorf("expected error for %q", test.text) continue } if !ok { if *VerboseTests { u.Debugf("%s\n\t%s", test.text, err) } continue } if test.isInt && !n.IsInt { t.Errorf("did not expect unsigned integer for %q", test.text) } if test.isFloat { if !n.IsFloat { t.Errorf("expected float for %q", test.text) } if n.Float64 != test.float64 { t.Errorf("float64 for %q should be %g Is %g", test.text, test.float64, n.Float64) } } else if n.IsFloat { t.Errorf("did not expect float for %q", test.text) } } }
// Eval applies a sql statement to the specified context
//
//	@writeContext = Write out results of projection
//	@readContext  = Message to evaluate does it match where clause?  if so proceed to projection
//
// Returns (false, nil) when the message is filtered out by the WHERE
// clause; (true, nil) when the projection was written.
func EvalSql(sel *rel.SqlSelect, writeContext expr.ContextWriter, readContext expr.ContextReader) (bool, error) {

	// Check and see if we are where Guarded, which would discard the entire message
	if sel.Where != nil {
		whereValue, ok := Eval(readContext, sel.Where.Expr)
		if !ok {
			// TODO: seriously re-think this.   If the where clause is not able to evaluate
			//  such as  WHERE contains(ip,"10.120.") due to missing ip, does that mean it is
			//  logically true?   Would we not need to correctly evaluate and = true to filter?
			//  Marek made a good point, they would need to expand logical statement to include OR
			return false, nil
		}
		switch whereVal := whereValue.(type) {
		case value.BoolValue:
			if whereVal.Val() == false {
				return false, nil
			}
			// Non-false boolean: passes the filter, fall through to projection.
		default:
			// Any nil-valued non-boolean result also filters the message out.
			if whereVal.Nil() {
				return false, nil
			}
		}
	}

	//u.Infof("colct=%v  sql=%v", len(sel.Columns), sel.String())
	for _, col := range sel.Columns {
		//u.Debugf("Eval Col.As:%v mt:%v %#v Has IF Guard?%v ", col.As, col.MergeOp.String(), col, col.Guard != nil)
		// Per-column IF guard: a false/nil guard silently drops the column.
		if col.Guard != nil {
			ifColValue, ok := Eval(readContext, col.Guard)
			if !ok {
				u.Debugf("Could not evaluate if:   T:%T  v:%v", col.Guard, col.Guard.String())
				continue
			}
			switch ifVal := ifColValue.(type) {
			case value.BoolValue:
				if ifVal.Val() == false {
					continue // filter out this col
				}
			default:
				if ifColValue.Nil() {
					continue // filter out this col
				}
			}
		}

		v, ok := Eval(readContext, col.Expr)
		if !ok {
			// Column expression failed: logged but not fatal; other columns
			// are still projected.
			u.Warnf("Could not evaluate %s", col.Expr)
		} else {
			//u.Debugf(`writeContext.Put("%v",%v)  %s`, col.As, v.Value(), col.String())
			writeContext.Put(col, readContext, v)
		}
	}

	return true, nil
}
// VisitJoin builds the scan task for one side of a join.
//
// Preference order: if the source implements SourcePlanner, let it plan
// its own scanner; otherwise require a plain Scanner and build the
// column index for it.
func (m *JobBuilder) VisitJoin(from *expr.SqlSource) (expr.Task, error) {
	u.Debugf("VisitJoin %s", from.Source)
	//u.Debugf("from.Name:'%v' : %v", from.Name, from.Source.String())
	source := m.schema.Conn(from.SourceName())
	//u.Debugf("left source: %T", source)
	// Must provider either Scanner, SourcePlanner, Seeker interfaces
	if sourcePlan, ok := source.(datasource.SourcePlanner); ok {
		//  This is flawed, visitor pattern would have you pass in a object which implements interface
		//    but is one of many different objects that implement that interface so that the
		//    Accept() method calls the apppropriate method
		u.Warnf("SourcePlanner????")
		scanner, err := sourcePlan.Accept(NewSourcePlan(from))
		if err == nil {
			return NewSourceJoin(from, scanner), nil
		}
		// Planning failed: fall through and try the plain Scanner path.
		u.Errorf("Could not source plan for %v  %T %#v", from.Name, source, source)
	}

	scanner, ok := source.(datasource.Scanner)
	if !ok {
		u.Errorf("Could not create scanner for %v  %T %#v", from.Name, source, source)
		return nil, fmt.Errorf("Must Implement Scanner")
	}

	// Build the projection's column index before handing off the scanner.
	if err := buildColIndex(scanner, from); err != nil {
		return nil, err
	}
	return NewSourceJoin(from, scanner), nil
}
// Create a source schema from given named source // we will find Source for that name and introspect func createSchema(sourceName string) (*schema.Schema, bool) { sourceName = strings.ToLower(sourceName) ss := schema.NewSchemaSource(sourceName, sourceName) ds := registry.Get(sourceName) if ds == nil { parts := strings.SplitN(sourceName, "://", 2) //u.Infof("parts: %d %v", len(parts), parts) if len(parts) == 2 { ds = registry.Get(parts[0]) if ds == nil { //return &qlbConn{schema: s, connInfo: parts[1]}, nil u.Warnf("not able to find schema %q", sourceName) return nil, false } } else { //u.WarnT(7) u.Warnf("not able to find schema %q", sourceName) return nil, false } } u.Infof("reg p:%p source=%q ds %#v tables:%v", registry, sourceName, ds, ds.Tables()) ss.DS = ds schema := schema.NewSchema(sourceName) ss.Schema = schema u.Debugf("schema:%p ss:%p createSchema(%q) NEW ", schema, ss, sourceName) loadSchema(ss) return schema, true }
func (m *PlannerDefault) WalkDelete(p *Delete) error { u.Debugf("VisitDelete %+v", p.Stmt) conn, err := m.Ctx.Schema.Open(p.Stmt.Table) if err != nil { u.Warnf("%p no schema for %q err=%v", m.Ctx.Schema, p.Stmt.Table, err) return err } mutatorSource, hasMutator := conn.(schema.ConnMutation) if hasMutator { mutator, err := mutatorSource.CreateMutator(m.Ctx) if err != nil { u.Warnf("%p could not create mutator for %q err=%v", m.Ctx.Schema, p.Stmt.Table, err) //return nil, err } else { p.Source = mutator return nil } } deleteDs, isDelete := conn.(schema.ConnDeletion) if !isDelete { return fmt.Errorf("%T does not implement required schema.Deletion for deletions", conn) } p.Source = deleteDs return nil }
// LoadConn opens the underlying datasource connection for this source,
// if not already open.
//
// Sources without a DataSource are normally literal queries and are
// left connection-less — unless the statement is a schema query, in
// which case the context is switched to the info schema and the source
// is re-loaded before opening.
func (m *Source) LoadConn() error {
	//u.Debugf("LoadConn() nil?%v", m.Conn == nil)
	if m.Conn != nil {
		// Already connected; nothing to do.
		return nil
	}
	if m.DataSource == nil {
		// Not all sources require a source, ie literal queries
		// and some, information schema, or fully qualifyied schema queries
		// requires schema switching
		if m.IsSchemaQuery() && m.ctx != nil {
			m.ctx.Schema = m.ctx.Schema.InfoSchema
			u.Infof("switching to info schema")
			// Re-run load now that the context points at the info schema.
			if err := m.load(); err != nil {
				u.Errorf("could not load schema? %v", err)
				return err
			}
			if m.DataSource == nil {
				return u.LogErrorf("could not load info schema source %v", m.Stmt)
			}
		} else {
			// No datasource and not a schema query: nothing to open.
			u.Debugf("return bc no datasource ctx=nil?%v  schema?%v", m.ctx == nil, m.IsSchemaQuery())
			return nil
		}
	}
	source, err := m.DataSource.Open(m.Stmt.SourceName())
	if err != nil {
		return err
	}
	m.Conn = source
	return nil
}
// given connection info, get datasource // @connInfo = csv:///dev/stdin // mockcsv func (m *Registry) DataSource(connInfo string) schema.Source { // if mysql.tablename allow that convention u.Debugf("get datasource: conn=%q ", connInfo) //parts := strings.SplitN(from, ".", 2) // TODO: move this to a csv, or other source not in global registry sourceType := "" if len(connInfo) > 0 { switch { // case strings.HasPrefix(name, "file://"): // name = name[len("file://"):] case strings.HasPrefix(connInfo, "csv://"): sourceType = "csv" //m.db = connInfo[len("csv://"):] case strings.Contains(connInfo, "://"): strIdx := strings.Index(connInfo, "://") sourceType = connInfo[0:strIdx] //m.db = connInfo[strIdx+3:] default: sourceType = connInfo } } sourceType = strings.ToLower(sourceType) //u.Debugf("source: %v", sourceType) if source := m.Get(sourceType); source != nil { //u.Debugf("source: %T", source) return source } else { u.Errorf("DataSource(conn) was not found: '%v'", sourceType) } return nil }
func (m *MockCsvSource) loadTable(tableName string) error { csvRaw, ok := m.raw[tableName] if !ok { return schema.ErrNotFound } sr := strings.NewReader(csvRaw) u.Debugf("mockcsv:%p load mockcsv: %q data:%v", m, tableName, csvRaw) csvSource, _ := datasource.NewCsvSource(tableName, 0, sr, make(<-chan bool, 1)) tbl := membtree.NewStaticData(tableName) u.Infof("loaded columns %v", csvSource.Columns()) tbl.SetColumns(csvSource.Columns()) //u.Infof("set index col for %v: %v -- %v", tableName, 0, csvSource.Columns()[0]) m.tables[tableName] = tbl // Now we are going to page through the Csv rows and Put into // Static Data Source, ie copy into memory btree structure for { msg := csvSource.Next() if msg == nil { //u.Infof("table:%v len=%v", tableName, tbl.Length()) return nil } dm, ok := msg.Body().(*datasource.SqlDriverMessageMap) if !ok { return fmt.Errorf("Expected *datasource.SqlDriverMessageMap but got %T", msg.Body()) } // We don't know the Key tbl.Put(nil, nil, dm.Values()) } return nil }
// Exec executes a query that doesn't return rows, such // as an INSERT, UPDATE, DELETE func (m *qlbStmt) Exec(args []driver.Value) (driver.Result, error) { var err error if len(args) > 0 { m.query, err = queryArgsConvert(m.query, args) if err != nil { return nil, err } } //u.Infof("query: %v", m.query) // Create a Job, which is Dag of Tasks that Run() job, err := BuildSqlJob(m.conn.rtConf, m.conn.conn, m.query) if err != nil { return nil, err } m.job = job resultWriter := NewResultExecWriter() job.Tasks.Add(resultWriter) job.Setup() err = job.Run() u.Debugf("After job.Run()") if err != nil { u.Errorf("error on Query.Run(): %v", err) //resultWriter.ErrChan() <- err //job.Close() } return resultWriter.Result(), nil }
// parseJsonKeyValue consumes one `identity : value` pair from the token
// pager and stores the parsed value into jh under the identity key.
//
// The value may be a nested object, an array, or a scalar (string,
// bool, integer, float). Returns an error on any unexpected token.
func parseJsonKeyValue(pg TokenPager, jh u.JsonHelper) error {
	if pg.Cur().T != lex.TokenIdentity {
		return fmt.Errorf("Expected json key/identity but got: %v", pg.Cur().String())
	}
	key := pg.Cur().V
	pg.Next() // consume the key
	//u.Debug(key, " ", pg.Cur())
	switch pg.Cur().T {
	case lex.TokenColon:
		pg.Next() // consume the colon; cursor now at the value
		switch pg.Cur().T {
		case lex.TokenLeftBrace: // {  — nested object
			obj := make(u.JsonHelper)
			if err := parseJsonObject(pg, obj); err != nil {
				return err
			}
			jh[key] = obj
		case lex.TokenLeftBracket: // [  — array
			list, err := parseJsonArray(pg)
			if err != nil {
				return err
			}
			u.Debugf("list after: %#v", list)
			jh[key] = list
		case lex.TokenValue:
			jh[key] = pg.Cur().V
			pg.Next()
		case lex.TokenBool:
			bv, err := strconv.ParseBool(pg.Cur().V)
			if err != nil {
				return err
			}
			jh[key] = bv
			pg.Next()
		case lex.TokenInteger:
			iv, err := strconv.ParseInt(pg.Cur().V, 10, 64)
			if err != nil {
				return err
			}
			jh[key] = iv
			pg.Next()
		case lex.TokenFloat:
			fv, err := strconv.ParseFloat(pg.Cur().V, 64)
			if err != nil {
				return err
			}
			jh[key] = fv
			pg.Next()
		default:
			u.Warnf("got unexpected token: %s", pg.Cur())
			return fmt.Errorf("Expected json { or [ but got: %v", pg.Cur().T.String())
		}
		//u.Debug(key, " ", pg.Cur())
		return nil
	default:
		return fmt.Errorf("Expected json colon but got: %v", pg.Cur().String())
	}
	// Defensive: both switch branches above return.
	return fmt.Errorf("Unreachable json error: %v", pg.Cur().String())
}
// Delete using a Where Expression
//
// Iterates every row of the table inside a write transaction,
// evaluating the where expression against each message; matching rows
// are deleted. Returns the number of deleted rows; any error aborts
// the transaction.
func (m *dbConn) DeleteExpression(where expr.Node) (int, error) {
	//return 0, fmt.Errorf("not implemented")
	evaluator := vm.Evaluator(where)
	var deletedKeys []schema.Key
	txn := m.db.Txn(true)
	iter, err := txn.Get(m.md.tbl.Name, m.md.primaryIndex)
	if err != nil {
		txn.Abort()
		u.Errorf("could not get values %v", err)
		return 0, err
	}
deleteLoop:
	for {
		item := iter.Next()
		if item == nil {
			break
		}
		msg, ok := item.(datasource.SqlDriverMessage)
		if !ok {
			u.Warnf("wat?  %T   %#v", item, item)
			err = fmt.Errorf("unexpected message type %T", item)
			break
		}
		// Evaluate the where clause against this row; an unevaluable
		// where yields a nil value and falls into the nil case below.
		whereValue, ok := evaluator(msg.ToMsgMap(m.md.tbl.FieldPositions))
		if !ok {
			u.Debugf("could not evaluate where: %v", msg)
		}
		switch whereVal := whereValue.(type) {
		case value.BoolValue:
			if whereVal.Val() == false {
				//this means do NOT delete
			} else {
				// Delete!
				if err = txn.Delete(m.md.tbl.Name, msg); err != nil {
					u.Errorf("could not delete %v", err)
					break deleteLoop
				}
				indexVal := msg.Vals[0]
				deletedKeys = append(deletedKeys, schema.NewKeyUint(makeId(indexVal)))
			}
		case nil:
			// ??
			u.Warnf("this should be fine, couldn't evaluate so don't delete %v", msg)
		default:
			if whereVal.Nil() {
				// Doesn't match, so don't delete
			} else {
				u.Warnf("unknown where eval result? %T", whereVal)
			}
		}
	}
	if err != nil {
		txn.Abort()
		return 0, err
	}
	txn.Commit()
	return len(deletedKeys), nil
}
func (m *ResultBuffer) Close() error { u.Debugf("%p ResultBuffer.Close()???? already closed?%v", m, m.closed) if m.closed { return nil } m.closed = true return m.TaskBase.Close() }
func mergeUv(m1, m2 *datasource.ContextUrlValues) *datasource.ContextUrlValues { out := datasource.NewContextUrlValues(m1.Data) for k, val := range m2.Data { u.Debugf("k=%v v=%v", k, val) out.Data[k] = val } return out }
// VisitSelect assembles the task list (source scan / join, optional
// where filter, projection) for a SELECT statement.
//
// Supports a single FROM source or exactly one two-table join; three or
// more sources are not implemented.
func (m *JobBuilder) VisitSelect(stmt *expr.SqlSelect) (interface{}, error) {
	u.Debugf("VisitSelect %+v", stmt)

	tasks := make(Tasks, 0)

	if len(stmt.From) == 1 {
		// One From Source   This entire Source needs to be moved into
		//  a From().Accept(m) or m.visitSubselect()
		from := stmt.From[0]
		if from.Name != "" && from.Source == nil {
			sourceConn := m.conf.Conn(from.Name)
			//u.Debugf("sourceConn: %T", sourceConn)
			// Must provider either Scanner, and or Seeker interfaces
			if scanner, ok := sourceConn.(datasource.Scanner); !ok {
				return nil, fmt.Errorf("Must Implement Scanner")
			} else {
				in := NewSource(from.Name, scanner)
				tasks.Add(in)
			}
		}
	} else {
		// for now, only support 1 join
		if len(stmt.From) != 2 {
			return nil, fmt.Errorf("3 or more Table/Join not currently implemented")
		}
		// u.Debugf("we are going to do a join on two dbs: ")
		// for _, from := range stmt.From {
		// 	u.Infof("from:  %#v", from)
		// }
		in, err := NewSourceJoin(stmt.From[0], stmt.From[1], m.conf)
		if err != nil {
			return nil, err
		}
		tasks.Add(in)
	}

	//u.Debugf("has where? %v", stmt.Where != nil)
	if stmt.Where != nil {
		switch {
		case stmt.Where.Source != nil:
			// Subquery in WHERE is recognized but not implemented.
			u.Warnf("Found un-supported subquery: %#v", stmt.Where)
		case stmt.Where.Expr != nil:
			where := NewWhere(stmt.Where.Expr)
			tasks.Add(where)
		default:
			u.Warnf("Found un-supported where type: %#v", stmt.Where)
		}
	}

	// Add a Projection
	projection := NewProjection(stmt)
	tasks.Add(projection)

	return tasks, nil
}
func (m *PlannerDefault) WalkUpsert(p *Upsert) error { u.Debugf("VisitUpsert %+v", p.Stmt) src, err := upsertSource(m.Ctx, p.Stmt.Table) if err != nil { return err } p.Source = src return nil }
func compareAst(t *testing.T, in1, in2 SqlStatement) { switch s1 := in1.(type) { case *SqlSelect: s2, ok := in2.(*SqlSelect) assert.T(t, ok, "Must also be SqlSelect") u.Debugf("original:\n%s", s1.StringAST()) u.Debugf("after:\n%s", s2.StringAST()) //assert.T(t, s1.Alias == s2.Alias) //assert.T(t, len(s1.Columns) == len(s2.Columns)) for i, c := range s1.Columns { compareAstColumn(t, c, s2.Columns[i]) } //compareWhere(s1.Where) compareFroms(t, s1.From, s2.From) default: t.Fatalf("Must be SqlSelect") } }
func verifyTokens(t *testing.T, sql string, tokens []lex.Token) { l := lex.NewLexer(sql, InfluxQlDialect) for _, goodToken := range tokens { tok := l.NextToken() u.Debugf("%#v %#v", tok, goodToken) assert.Equalf(t, tok.V, goodToken.V, "has='%v' want='%v'", tok.V, goodToken.V) assert.Equalf(t, tok.T, goodToken.T, "has='%v' want='%v'", tok.V, goodToken.V) } }
func (m *MockCsvSource) Open(connInfo string) (datasource.SourceConn, error) { if data, ok := m.data[connInfo]; ok { sr := strings.NewReader(data) u.Debugf("open mockcsv: %v", connInfo) return datasource.NewCsvSource(sr, make(<-chan bool, 1)) } u.Errorf("not found? %v", connInfo) return nil, fmt.Errorf("not found") }
func valueArray(pg TokenPager) (value.Value, error) { u.Debugf("valueArray cur:%v peek:%v", pg.Cur().V, pg.Peek().V) vals := make([]value.Value, 0) arrayLoop: for { tok := pg.Next() // consume token u.Infof("valueArray() consumed token?: %v", tok) switch tok.T { case lex.TokenComma: // continue case lex.TokenRightParenthesis: u.Warnf("found right paren %v cur: %v", tok, pg.Cur()) break arrayLoop case lex.TokenEOF, lex.TokenEOS, lex.TokenFrom, lex.TokenAs: u.Debugf("return: %v", tok) break arrayLoop case lex.TokenValue: vals = append(vals, value.NewStringValue(tok.V)) case lex.TokenFloat, lex.TokenInteger: fv, err := strconv.ParseFloat(tok.V, 64) if err == nil { vals = append(vals, value.NewNumberValue(fv)) } return value.NilValueVal, err default: return value.NilValueVal, fmt.Errorf("Could not recognize token: %v", tok) } tok = pg.Next() switch tok.T { case lex.TokenComma: // fine, consume the comma case lex.TokenRightBracket: u.Warnf("right bracket: %v", tok) break arrayLoop default: u.Warnf("unrecognized token: %v", tok) return value.NilValueVal, fmt.Errorf("unrecognized token %v", tok) } } u.Infof("returning array: %v", vals) return value.NewSliceValues(vals), nil }