// parseHaving parses an optional HAVING clause into req.Having.
func (m *Sqlbridge) parseHaving(req *SqlSelect) (err error) {
	if m.Cur().T != lex.TokenHaving {
		return nil
	}
	defer func() {
		if r := recover(); r != nil {
			u.Errorf("having error? %v \n %v", r, m.Cur())
			if m.Cur().T == lex.TokenSelect {
				// TODO: this is deeply flawed, need to fix/use tokenpager
				// with rewind ability
				err = m.parseWhereSelect(req)
				return
			}
			err = fmt.Errorf("panic err: %v", r)
		}
	}()
	m.Next()
	//u.Infof("%v", m.Cur())
	tree := expr.NewTreeFuncs(m.SqlTokenPager, m.funcs)
	if err := m.parseNode(tree); err != nil {
		u.Warnf("could not parse: %v", err)
		return err
	}
	req.Having = tree.Root
	//u.Debugf("having: %v", m.Cur())
	return err
}

// parseWhere parses an optional WHERE clause, distinguishing between a
// sub-select source (WHERE x IN (SELECT ...)) and a plain expression.
func (m *Sqlbridge) parseWhere() (*SqlWhere, error) {

	var err error
	if m.Cur().T != lex.TokenWhere {
		return nil, nil
	}
	m.Next() // Consume the Where
	//u.Debugf("cur: %v peek=%v", m.Cur(), m.Peek())

	where := SqlWhere{}

	// We are going to Peek forward at the next 3 tokens used
	// to determine which type of where clause
	//t1 := m.Cur().T
	m.Next() // x
	t2 := m.Cur().T
	m.Next()
	t3 := m.Cur().T
	m.Next()
	t4 := m.Cur().T
	m.Backup()
	m.Backup()
	m.Backup()

	// Check for Types of Where
	//                                   t1       T2   T3  T4
	// SELECT x FROM user   WHERE user_id   IN   (   SELECT user_id FROM orders WHERE ...)
	// SELECT * FROM t1     WHERE column1   =    (   SELECT column1 FROM t2)
	// SELECT a FROM movies WHERE director  IN   ("Quentin", "Coppola", "Bay", "another")
	// SELECT b FROM movies WHERE director  =    "bob"
	// SELECT b FROM movies WHERE create BETWEEN "2010" AND "2015"
	// SELECT b FROM movies WHERE director LIKE "%bob"
	// TODO:
	// SELECT * FROM t3     WHERE ROW(5*t2.s1,77) = (SELECT 50,11*s1 FROM t4)
	switch {
	case (t2 == lex.TokenIN || t2 == lex.TokenEqual) && t3 == lex.TokenLeftParenthesis && t4 == lex.TokenSelect:
		//u.Infof("in parseWhere: %v", m.Cur())
		m.Next() // t1   ?? this might be udf?
		m.Next() // t2   (IN | =)
		m.Next() // t3 = (
		//m.Next() // t4 = SELECT
		where.Op = t2
		where.Source = &SqlSelect{}
		return &where, m.parseWhereSubSelect(where.Source)
	}
	//u.Debugf("doing Where: %v %v", m.Cur(), m.Peek())
	tree := expr.NewTreeFuncs(m.SqlTokenPager, m.funcs)
	if err := m.parseNode(tree); err != nil {
		u.Errorf("could not parse: %v", err)
		return nil, err
	}
	where.Expr = tree.Root
	//u.Debugf("where: %v", m.Cur())
	return &where, err
}
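
// exampleWhereSubSelect is an illustrative sketch, not part of the original
// parser: it shows the statement shape the three-token lookahead in parseWhere
// classifies as a sub-select source.  It assumes a package-level ParseSql
// entry point returning (SqlStatement, error); adjust if the actual entry
// point differs.
func exampleWhereSubSelect() {
	stmt, err := ParseSql(`SELECT x FROM user WHERE user_id IN (SELECT user_id FROM orders WHERE qty > 2)`)
	if err != nil {
		u.Errorf("parse failed: %v", err)
		return
	}
	if sel, ok := stmt.(*SqlSelect); ok && sel.Where != nil {
		// For this form Where.Op is the IN token and Where.Source holds the sub-select.
		u.Infof("op=%v hasSource=%v", sel.Where.Op, sel.Where.Source != nil)
	}
}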

// parseWhereDelete parses an optional WHERE clause for a DELETE statement.
func (m *Sqlbridge) parseWhereDelete(req *SqlDelete) error {
	if m.Cur().T != lex.TokenWhere {
		return nil
	}
	m.Next()
	tree := expr.NewTreeFuncs(m.SqlTokenPager, m.funcs)
	if err := m.parseNode(tree); err != nil {
		u.Warnf("could not parse: %v", err)
		return err
	}
	req.Where = &SqlWhere{Expr: tree.Root}
	return nil
}

// parseUpdateList parses the SET list of an UPDATE statement
// (col = value [, col = value ...]) into a map of column name to value.
func (m *Sqlbridge) parseUpdateList() (map[string]*ValueColumn, error) {

	cols := make(map[string]*ValueColumn)
	lastColName := ""
	for {
		//u.Debugf("col:%v  cur:%v", lastColName, m.Cur().String())
		switch m.Cur().T {
		case lex.TokenWhere, lex.TokenLimit, lex.TokenEOS, lex.TokenEOF:
			return cols, nil
		case lex.TokenValue:
			cols[lastColName] = &ValueColumn{Value: value.NewStringValue(m.Cur().V)}
		case lex.TokenInteger:
			iv, _ := strconv.ParseInt(m.Cur().V, 10, 64)
			cols[lastColName] = &ValueColumn{Value: value.NewIntValue(iv)}
		case lex.TokenComma, lex.TokenEqual:
			// don't need to do anything
		case lex.TokenIdentity:
			// TODO: this is a bug in lexer
			lv := m.Cur().V
			if bv, err := strconv.ParseBool(lv); err == nil {
				cols[lastColName] = &ValueColumn{Value: value.NewBoolValue(bv)}
			} else {
				lastColName = m.Cur().V
			}
		case lex.TokenUdfExpr:
			tree := expr.NewTreeFuncs(m.SqlTokenPager, m.funcs)
			if err := m.parseNode(tree); err != nil {
				u.Errorf("could not parse: %v", err)
				return nil, err
			}
			cols[lastColName] = &ValueColumn{Expr: tree.Root}
		default:
			u.Warnf("don't know how to handle: %v", m.Cur())
			return nil, fmt.Errorf("expected column but got: %v", m.Cur().String())
		}
		m.Next()
	}
	panic("unreachable")
}
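
// exampleUpdateSetList is an illustrative sketch, not part of the original
// parser: the SET list form consumed by parseUpdateList above, producing a
// map of column name to *ValueColumn.  ParseSql is an assumed package-level
// entry point.
func exampleUpdateSetList() {
	if _, err := ParseSql(`UPDATE users SET name = "bob", visits = 5, active = true WHERE user_id = "abc"`); err != nil {
		u.Errorf("parse failed: %v", err)
	}
}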

// parseCommandColumns parses the column expressions of a command statement
// (such as SET) into req.Columns.
func (m *Sqlbridge) parseCommandColumns(req *SqlCommand) (err error) {

	var col *CommandColumn
	for {
		//u.Debugf("command col? %v", m.Cur())
		switch m.Cur().T {
		case lex.TokenIdentity:
			//u.Warnf("?? %v", m.Cur())
			col = &CommandColumn{Name: m.Cur().V}

			tree := expr.NewTreeFuncs(m.SqlTokenPager, m.funcs)
			if err := m.parseNode(tree); err != nil {
				u.Warnf("could not parse: %v", err)
				return err
			}
			col.Expr = tree.Root
			convertIdentityToValue(col.Expr)
			//u.Infof("expr: %T  :: %s", col.Expr, col.Expr)
		default:
			return fmt.Errorf("expected identity but got: %v", m.Cur())
		}

		//u.Debugf("command after colstart?:   %v  ", m.Cur())

		// since we can have multiple columns
		switch m.Cur().T {
		case lex.TokenEOS, lex.TokenEOF:
			req.Columns = append(req.Columns, col)
			return nil
		case lex.TokenComma:
			req.Columns = append(req.Columns, col)
		default:
			u.Errorf("expected col? %v", m.Cur())
			return fmt.Errorf("expected command column but got: %v", m.Cur().String())
		}
		m.Next()
	}
}

// parseSources parses the FROM clause of a SELECT, including joins and
// sub-query sources, appending each source to req.From.
func (m *Sqlbridge) parseSources(req *SqlSelect) error {

	//u.Debugf("parseSources cur %v", m.Cur())
	if m.Cur().T != lex.TokenFrom {
		return fmt.Errorf("expected From but got: %v", m.Cur())
	}

	m.Next() // page forward off of From
	//u.Debugf("found from?  %v", m.Cur())

	if m.Cur().T == lex.TokenIdentity {
		if err := m.parseSourceTable(req); err != nil {
			return err
		}
	}

	for {
		src := &SqlSource{}
		//u.Debugf("parseSources %v", m.Cur())

		switch m.Cur().T {
		case lex.TokenRightParenthesis:
			return nil
		case lex.TokenLeftParenthesis:
			// SELECT [columns] FROM [table] AS t1
			//   INNER JOIN (select a,b,c from users WHERE d is not null) u ON u.user_id = t1.user_id
			if err := m.parseSourceSubQuery(src); err != nil {
				return err
			}
			//u.Infof("wat? %v", m.Cur())
			if m.Cur().T == lex.TokenRightParenthesis {
				m.Next()
			}
		case lex.TokenLeft, lex.TokenRight, lex.TokenInner, lex.TokenOuter, lex.TokenJoin:
			// JOIN
			if err := m.parseSourceJoin(src); err != nil {
				return err
			}
		case lex.TokenEOF, lex.TokenEOS, lex.TokenWhere, lex.TokenGroupBy, lex.TokenLimit,
			lex.TokenOffset, lex.TokenWith, lex.TokenAlias, lex.TokenOrderBy:
			return nil
		default:
			u.Warnf("unrecognized token? %v  clauseEnd?%v", m.Cur(), m.SqlTokenPager.ClauseEnd())
			return fmt.Errorf("unexpected token got: %v", m.Cur())
		}

		//u.Debugf("cur: %v", m.Cur())
		switch m.Cur().T {
		case lex.TokenAs:
			m.Next() // Skip over As, we don't need it
			src.Alias = m.Cur().V
			m.Next()
			//u.Debugf("found source alias: %v AS %v", src.Name, src.Alias)
			// select u.name, order.date FROM user AS u INNER JOIN ....
		case lex.TokenIdentity:
			//u.Warnf("found identity? %v", m.Cur())
			src.Alias = m.Cur().V
			m.Next()
		}

		//u.Debugf("cur: %v", m.Cur())
		if m.Cur().T == lex.TokenOn {
			src.Op = m.Cur().T
			m.Next()
			//u.Debugf("cur = %v", m.Cur())
			tree := expr.NewTreeFuncs(m.SqlTokenPager, m.funcs)
			if err := m.parseNode(tree); err != nil {
				u.Errorf("could not parse: %v", err)
				return err
			}
			src.JoinExpr = tree.Root
			//u.Debugf("join expression: %v", tree.Root.String())
			//u.Debugf("join:  %#v", src)
		}
		req.From = append(req.From, src)
	}

	return nil
}
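
// exampleJoinSources is an illustrative sketch, not part of the original
// parser: a join plus a sub-query source of the kind parseSources handles,
// with each FROM entry becoming one SqlSource in req.From.  ParseSql is an
// assumed package-level entry point.
func exampleJoinSources() {
	stmt, err := ParseSql(`SELECT u.name, o.total
		FROM users AS u
		INNER JOIN (SELECT user_id, sum(amount) AS total FROM orders GROUP BY user_id) o ON o.user_id = u.user_id`)
	if err != nil {
		u.Errorf("parse failed: %v", err)
		return
	}
	if sel, ok := stmt.(*SqlSelect); ok {
		u.Infof("sources: %d", len(sel.From))
	}
}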

// parseValueList parses the VALUES rows of an INSERT statement:
// (v1, v2, ...), (v1, v2, ...), ...
func (m *Sqlbridge) parseValueList() ([][]*ValueColumn, error) {

	if m.Cur().T != lex.TokenLeftParenthesis {
		return nil, fmt.Errorf("Expecting opening paren ( but got %v", m.Cur())
	}

	var row []*ValueColumn
	values := make([][]*ValueColumn, 0)

	for {
		//u.Debug(m.Cur().String())
		switch m.Cur().T {
		case lex.TokenLeftParenthesis:
			// start of row
			if len(row) > 0 {
				values = append(values, row)
			}
			row = make([]*ValueColumn, 0)
		case lex.TokenRightParenthesis:
			values = append(values, row)
		case lex.TokenFrom, lex.TokenInto, lex.TokenLimit, lex.TokenEOS, lex.TokenEOF:
			if len(row) > 0 {
				values = append(values, row)
			}
			return values, nil
		case lex.TokenValue:
			row = append(row, &ValueColumn{Value: value.NewStringValue(m.Cur().V)})
		case lex.TokenInteger:
			iv, err := strconv.ParseInt(m.Cur().V, 10, 64)
			if err != nil {
				return nil, err
			}
			row = append(row, &ValueColumn{Value: value.NewIntValue(iv)})
		case lex.TokenFloat:
			fv, err := strconv.ParseFloat(m.Cur().V, 64)
			if err != nil {
				return nil, err
			}
			row = append(row, &ValueColumn{Value: value.NewNumberValue(fv)})
		case lex.TokenBool:
			bv, err := strconv.ParseBool(m.Cur().V)
			if err != nil {
				return nil, err
			}
			row = append(row, &ValueColumn{Value: value.NewBoolValue(bv)})
		case lex.TokenIdentity:
			// TODO: this is a bug in lexer
			lv := m.Cur().V
			if bv, err := strconv.ParseBool(lv); err == nil {
				row = append(row, &ValueColumn{Value: value.NewBoolValue(bv)})
			} else {
				// error?
				u.Warnf("Could not figure out how to use: %v", m.Cur())
			}
		case lex.TokenLeftBracket:
			// an array of values?
			m.Next() // Consume the [
			arrayVal, err := expr.ValueArray(m.SqlTokenPager)
			if err != nil {
				return nil, err
			}
			//n := NewValueNode(arrayVal)
			row = append(row, &ValueColumn{Value: arrayVal})
			u.Infof("what is token? %v peek:%v", m.Cur(), m.Peek())
			//t.Next()
		case lex.TokenComma:
			// don't need to do anything
		case lex.TokenUdfExpr:
			tree := expr.NewTreeFuncs(m.SqlTokenPager, m.funcs)
			if err := m.parseNode(tree); err != nil {
				u.Errorf("could not parse: %v", err)
				return nil, err
			}
			//col.Expr = tree.Root
			row = append(row, &ValueColumn{Expr: tree.Root})
		default:
			u.Warnf("don't know how to handle: %v", m.Cur())
			return nil, fmt.Errorf("expected column but got: %v", m.Cur().String())
		}
		m.Next()
	}
	panic("unreachable")
}
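
// exampleInsertValueRows is an illustrative sketch, not part of the original
// parser: the multi-row VALUES form parseValueList collects into
// [][]*ValueColumn, one inner slice per parenthesized row.  ParseSql is an
// assumed package-level entry point.
func exampleInsertValueRows() {
	if _, err := ParseSql(`INSERT INTO events (url, visits, active) VALUES ("/home", 7, true), ("/cart", 2, false)`); err != nil {
		u.Errorf("parse failed: %v", err)
	}
}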

// parseColumns parses the projected columns of a statement (stars, function
// expressions, identities, and literal values), handling AS aliases, IF
// guards, and comments, and appends each column to stmt.
func parseColumns(m expr.TokenPager, fr expr.FuncResolver, buildVm bool, stmt ColumnsStatement) error {

	var col *Column

	for {

		//u.Debug(m.Cur())
		switch m.Cur().T {
		case lex.TokenStar, lex.TokenMultiply:
			col = &Column{Star: true}
			m.Next()
		case lex.TokenUdfExpr:
			// we have a udf/functional expression column
			col = NewColumnFromToken(m.Cur())
			funcName := strings.ToLower(m.Cur().V)
			tree := expr.NewTreeFuncs(m, fr)
			if err := tree.BuildTree(buildVm); err != nil {
				u.Errorf("could not parse: %v", err)
				return err
			}
			col.Expr = tree.Root
			col.SourceField = expr.FindIdentityField(col.Expr)
			if strings.Contains(col.SourceField, ".") {
				if _, right, hasLeft := expr.LeftRight(col.SourceField); hasLeft {
					col.SourceField = right
				}
			}
			col.Agg = expr.IsAgg(funcName)

			if m.Cur().T != lex.TokenAs {
				switch n := col.Expr.(type) {
				case *expr.FuncNode:
					// lets lowercase name
					n.Name = funcName
					col.As = expr.FindIdentityName(0, n, "")
					//u.Infof("col %#v", col)
					if col.As == "" {
						if strings.ToLower(n.Name) == "count" {
							//u.Warnf("count*")
							col.As = "count(*)"
						} else {
							col.As = n.Name
						}
					}
				case *expr.BinaryNode:
					//u.Debugf("udf? %T ", col.Expr)
					col.As = expr.FindIdentityName(0, n, "")
					if col.As == "" {
						u.Errorf("could not find as name: %#v", n)
					}
				}
			} else {
				switch n := col.Expr.(type) {
				case *expr.FuncNode:
					n.Name = funcName
				}
			}
			//u.Debugf("next? %v", m.Cur())
		case lex.TokenIdentity:
			col = NewColumnFromToken(m.Cur())
			tree := expr.NewTreeFuncs(m, fr)
			if err := tree.BuildTree(buildVm); err != nil {
				u.Errorf("could not parse: %v", err)
				return err
			}
			col.Expr = tree.Root
		case lex.TokenValue, lex.TokenInteger:
			// Value Literal
			col = NewColumnValue(m.Cur())
			tree := expr.NewTreeFuncs(m, fr)
			if err := tree.BuildTree(buildVm); err != nil {
				u.Errorf("could not parse: %v", err)
				return err
			}
			col.Expr = tree.Root
		}
		//u.Debugf("after colstart?:   %v  ", m.Cur())

		// since we can loop inside switch statement
		switch m.Cur().T {
		case lex.TokenAs:
			m.Next()
			switch m.Cur().T {
			case lex.TokenIdentity, lex.TokenValue:
				col.As = m.Cur().V
				col.originalAs = col.As
				col.asQuoteByte = m.Cur().Quote
				m.Next()
				continue
			}
			return fmt.Errorf("expected identity but got: %v", m.Cur().String())
		case lex.TokenFrom, lex.TokenInto, lex.TokenLimit, lex.TokenEOS, lex.TokenEOF:
			// This indicates we have come to the End of the columns
			stmt.AddColumn(*col)
			//u.Debugf("Ending column ")
			return nil
		case lex.TokenIf:
			// If guard
			m.Next()
			//u.Infof("if guard: %v", m.Cur())
			tree := expr.NewTreeFuncs(m, fr)
			if err := tree.BuildTree(buildVm); err != nil {
				u.Errorf("could not parse: %v", err)
				return err
			}
			col.Guard = tree.Root
			// Hmm, we need to backup here?  Parse Node went too deep?
			continue
			//u.Infof("if guard 2: %v", m.Cur())
			//u.Debugf("after if guard?:   %v  ", m.Cur())
		case lex.TokenCommentSingleLine:
			m.Next()
			col.Comment = m.Cur().V
		case lex.TokenRightParenthesis:
			// loop on my friend
		case lex.TokenComma:
			//u.Infof("? %#v", stmt)
			//u.Infof("col?%+v", col)
			stmt.AddColumn(*col)
			//u.Debugf("comma, added cols: %v", len(stmt.Columns))
		default:
			return fmt.Errorf("expected column but got: %v", m.Cur().String())
		}
		m.Next()
	}
	//u.Debugf("cols: %d", len(stmt.Columns))
	return nil
}
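
// exampleProjectedColumns is an illustrative sketch, not part of the original
// parser: a projection intended to exercise the paths in parseColumns above
// (star, udf with AS alias, identity with IF guard).  Both the IF-guard
// syntax shown and the ParseSql entry point are assumptions about this
// dialect; adjust to the actual grammar if it differs.
func exampleProjectedColumns() {
	stmt, err := ParseSql(`SELECT *, count(visits) AS visit_ct, name IF age > 21 FROM users`)
	if err != nil {
		u.Errorf("parse failed: %v", err)
		return
	}
	if sel, ok := stmt.(*SqlSelect); ok {
		u.Infof("columns parsed: %d", len(sel.Columns))
	}
}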

// parseShow parses a SHOW statement; the first keyword (SHOW) has already
// been seen by the caller.
func (m *Sqlbridge) parseShow() (*SqlShow, error) {

	/*
		don't currently support all of these
		http://dev.mysql.com/doc/refman/5.7/en/show.html

		SHOW [FULL] COLUMNS FROM tbl_name [FROM db_name] [like_or_where]
		SHOW CREATE DATABASE db_name
		SHOW CREATE TABLE tbl_name
		SHOW CREATE TRIGGER trigger_name
		SHOW CREATE VIEW view_name
		SHOW DATABASES [like_or_where]
		SHOW ENGINE engine_name {STATUS | MUTEX}
		SHOW [STORAGE] ENGINES
		SHOW INDEX FROM tbl_name [FROM db_name]
		SHOW [FULL] TABLES [FROM db_name] [like_or_where]
		SHOW TRIGGERS [FROM db_name] [like_or_where]
		SHOW [GLOBAL | SESSION] VARIABLES [like_or_where]
		SHOW WARNINGS [LIMIT [offset,] row_count]
	*/
	likeLhs := "Table"
	req := &SqlShow{}
	req.Raw = m.l.RawInput()
	m.Next() // Consume Show

	//u.Infof("cur: %v", m.Cur())
	switch strings.ToLower(m.Cur().V) {
	case "full":
		req.Full = true
		m.Next()
	case "global", "session":
		req.Scope = strings.ToLower(m.Next().V)
		//u.Infof("scope:%q   next:%v", req.Scope, m.Cur())
	case "create":
		// SHOW CREATE TABLE `temp_schema`.`users`
		req.ShowType = "create"
		m.Next() // consume create
		req.Create = true
		//u.Debugf("create what %v", m.Cur())
		req.CreateWhat = m.Next().V // {TABLE | DATABASE | EVENT ...}
		//u.Debugf("create which %v", m.Cur())
		if m.Cur().T == lex.TokenIdentity {
			req.Identity = m.Next().V
			return req, nil
		}
		return nil, fmt.Errorf("Expected IDENTITY for SHOW CREATE {TABLE | DATABASE | EVENT} IDENTITY but got %s", m.Cur())
	}

	//u.Debugf("show %v", m.Cur())
	objectType := strings.ToLower(m.Cur().V)
	switch objectType {
	case "databases":
		req.ShowType = "databases"
		m.Next()
	case "variables":
		req.ShowType = "variables"
		likeLhs = "Variable_name"
		m.Next()
	case "columns":
		m.Next() // consume columns
		likeLhs = "Field"
		req.ShowType = "columns"
		// SHOW [FULL] COLUMNS {FROM | IN} tbl_name [{FROM | IN} db_name] [LIKE 'pattern' | WHERE expr]
		//   | Field | Type | Null | Key | Default | Extra |
		if err := m.parseShowFromTable(req); err != nil {
			return nil, err
		}
		if err := m.parseShowFromDatabase(req); err != nil {
			return nil, err
		}
	case "tables":
		req.ShowType = objectType
		m.Next() // consume Tables
		// SHOW [FULL] TABLES [FROM db_name] [like_or_where]
		if err := m.parseShowFromDatabase(req); err != nil {
			return nil, err
		}
	}

	//u.Debugf("show %v", m.Cur())
	switch m.Cur().T {
	case lex.TokenEOF, lex.TokenEOS:
		return req, nil
	case lex.TokenLike:
		// SHOW TABLES LIKE '%'
		//u.Debugf("doing Like: %v %v", m.Cur(), m.Peek())
		m.Next() // Consume Like
		ex, err := expr.ParseExpression(fmt.Sprintf("%s LIKE %q", likeLhs, m.Cur().V))
		m.Next()
		if err != nil {
			u.Errorf("Error parsing fake expression: %v", err)
		} else {
			req.Like = ex.Root
		}
		//u.Debugf("doing Like: %v %v", m.Cur(), m.Peek())
	case lex.TokenWhere:
		m.Next() // consume where
		//u.Debugf("doing where: %v %v", m.Cur(), m.Peek())
		tree := expr.NewTreeFuncs(m.SqlTokenPager, m.funcs)
		if err := m.parseNode(tree); err != nil {
			u.Errorf("could not parse: %v", err)
			return nil, err
		}
		req.Where = tree.Root
	}
	return req, nil
}
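
// exampleShowStatements is an illustrative sketch, not part of the original
// parser: SHOW variants routed by parseShow above.  Which variants actually
// parse depends on the parseShowFromTable/parseShowFromDatabase helpers, and
// ParseSql is an assumed package-level entry point.
func exampleShowStatements() {
	for _, q := range []string{
		`SHOW DATABASES`,
		`SHOW FULL TABLES FROM mydb LIKE "user%"`,
		`SHOW CREATE TABLE users`,
		`SHOW GLOBAL VARIABLES WHERE Variable_name LIKE "max%"`,
	} {
		if _, err := ParseSql(q); err != nil {
			u.Errorf("parse failed for %q: %v", q, err)
		}
	}
}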

// parseOrderBy parses an optional ORDER BY clause into req.OrderBy.
func (m *Sqlbridge) parseOrderBy(req *SqlSelect) (err error) {

	if m.Cur().T != lex.TokenOrderBy {
		return nil
	}
	m.Next() // Consume Order By

	var col *Column

	for {
		//u.Debugf("Order By? %v", m.Cur())
		switch m.Cur().T {
		case lex.TokenUdfExpr:
			// we have a udf/functional expression column
			//u.Infof("udf: %v", m.Cur().V)
			col = NewColumnFromToken(m.Cur())
			tree := expr.NewTreeFuncs(m.SqlTokenPager, m.funcs)
			if err := m.parseNode(tree); err != nil {
				u.Warnf("could not parse: %v", err)
				return err
			}
			col.Expr = tree.Root
			switch n := col.Expr.(type) {
			case *expr.FuncNode:
				col.As = expr.FindIdentityName(0, n, "")
				if col.As == "" {
					col.As = n.Name
				}
			case *expr.BinaryNode:
				//u.Debugf("udf? %T ", n)
				col.As = expr.FindIdentityName(0, n, "")
				if col.As == "" {
					u.Errorf("could not find as name: %#v", n)
				}
			}
			//u.Debugf("next? %v", m.Cur())
		case lex.TokenIdentity:
			//u.Warnf("?? %v", m.Cur())
			col = NewColumnFromToken(m.Cur())
			tree := expr.NewTreeFuncs(m.SqlTokenPager, m.funcs)
			if err := m.parseNode(tree); err != nil {
				u.Warnf("could not parse: %v", err)
				return err
			}
			col.Expr = tree.Root
		}
		//u.Debugf("OrderBy after colstart?:   %v  ", m.Cur())

		// since we can loop inside switch statement
		switch m.Cur().T {
		case lex.TokenAsc, lex.TokenDesc:
			col.Order = strings.ToUpper(m.Cur().V)
		case lex.TokenInto, lex.TokenLimit, lex.TokenEOS, lex.TokenEOF:
			// This indicates we have come to the End of the columns
			req.OrderBy = append(req.OrderBy, col)
			//u.Debugf("Ending column ")
			return nil
		case lex.TokenCommentSingleLine:
			m.Next()
			col.Comment = m.Cur().V
		case lex.TokenRightParenthesis:
			// loop on my friend
		case lex.TokenComma:
			req.OrderBy = append(req.OrderBy, col)
			//u.Debugf("comma, added orderby: %v", len(req.OrderBy))
		default:
			return fmt.Errorf("expected column but got: %v", m.Cur().String())
		}
		m.Next()
	}
}

// parseGroupBy parses an optional GROUP BY clause into req.GroupBy.
func (m *Sqlbridge) parseGroupBy(req *SqlSelect) (err error) {

	if m.Cur().T != lex.TokenGroupBy {
		return nil
	}
	m.Next()

	var col *Column

	for {

		//u.Debugf("Group By? %v", m.Cur())
		switch m.Cur().T {
		case lex.TokenUdfExpr:
			// we have a udf/functional expression column
			//u.Infof("udf: %v", m.Cur().V)
			col = NewColumnFromToken(m.Cur())
			tree := expr.NewTreeFuncs(m.SqlTokenPager, m.funcs)
			if err := m.parseNode(tree); err != nil {
				return err
			}
			col.Expr = tree.Root
			if m.Cur().T != lex.TokenAs {
				switch n := col.Expr.(type) {
				case *expr.FuncNode:
					col.As = expr.FindIdentityName(0, n, "")
					if col.As == "" {
						col.As = n.Name
					}
				case *expr.BinaryNode:
					//u.Debugf("udf? %T ", n)
					col.As = expr.FindIdentityName(0, n, "")
					if col.As == "" {
						u.Errorf("could not find as name: %#v", n)
					}
				}
			}
			//u.Debugf("next? %v", m.Cur())
		case lex.TokenIdentity:
			//u.Warnf("?? %v", m.Cur())
			col = NewColumnFromToken(m.Cur())
			tree := expr.NewTreeFuncs(m.SqlTokenPager, m.funcs)
			if err := m.parseNode(tree); err != nil {
				return err
			}
			col.Expr = tree.Root
		case lex.TokenValue:
			// Value Literal
			col = NewColumnFromToken(m.Cur())
			tree := expr.NewTreeFuncs(m.SqlTokenPager, m.funcs)
			if err := m.parseNode(tree); err != nil {
				return err
			}
			col.Expr = tree.Root
		}
		//u.Debugf("GroupBy after colstart?:   %v  ", m.Cur())

		// since we can loop inside switch statement
		switch m.Cur().T {
		case lex.TokenAs:
			m.Next()
			//u.Debug(m.Cur())
			switch m.Cur().T {
			case lex.TokenIdentity, lex.TokenValue:
				col.As = m.Cur().V
				col.originalAs = col.As
				//u.Infof("set AS=%v", col.As)
				m.Next()
				continue
			}
			return fmt.Errorf("expected identity but got: %v", m.Cur().String())
		case lex.TokenFrom, lex.TokenOrderBy, lex.TokenInto, lex.TokenLimit, lex.TokenHaving,
			lex.TokenWith, lex.TokenEOS, lex.TokenEOF:
			// This indicates we have come to the End of the columns
			req.GroupBy = append(req.GroupBy, col)
			//u.Debugf("Ending column ")
			return nil
		case lex.TokenIf:
			// If guard
			m.Next()
			//u.Infof("if guard: %v", m.Cur())
			tree := expr.NewTreeFuncs(m.SqlTokenPager, m.funcs)
			if err := m.parseNode(tree); err != nil {
				return err
			}
			col.Guard = tree.Root
			//u.Debugf("after if guard?:   %v  ", m.Cur())
		case lex.TokenCommentSingleLine:
			m.Next()
			col.Comment = m.Cur().V
		case lex.TokenRightParenthesis:
			// loop on my friend
		case lex.TokenComma:
			req.GroupBy = append(req.GroupBy, col)
			//u.Debugf("comma, added groupby: %v", len(req.GroupBy))
		default:
			u.Errorf("expected col? %v", m.Cur())
			return fmt.Errorf("expected column but got: %v", m.Cur().String())
		}
		m.Next()
	}
	//u.Debugf("groupby: %d", len(req.GroupBy))
	return nil
}
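
// exampleAggregateClauses is an illustrative sketch, not part of the original
// parser: a statement exercising the GROUP BY, HAVING, and ORDER BY clauses
// handled by parseGroupBy, parseHaving, and parseOrderBy.  ParseSql is an
// assumed package-level entry point.
func exampleAggregateClauses() {
	stmt, err := ParseSql(`SELECT domain, count(*) AS ct
		FROM visits
		GROUP BY domain
		HAVING count(*) > 10
		ORDER BY ct DESC
		LIMIT 20`)
	if err != nil {
		u.Errorf("parse failed: %v", err)
		return
	}
	if sel, ok := stmt.(*SqlSelect); ok {
		u.Infof("groupby:%d orderby:%d having?%v", len(sel.GroupBy), len(sel.OrderBy), sel.Having != nil)
	}
}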