//This parses the stream comment. func parseStreamComment(dmlComment string, autoincId uint64) (EventNode *parser.Node) { EventNode = parser.NewSimpleParseNode(parser.NODE_LIST, "") tokenizer := parser.NewStringTokenizer(dmlComment) var node *parser.Node var pkTuple *parser.Node node = tokenizer.Scan() if node.Type == parser.ID { //Table Name EventNode.Push(node) } for node = tokenizer.Scan(); node.Type != ';'; node = tokenizer.Scan() { switch node.Type { case '(': //pkTuple is a list of pk value Nodes pkTuple = parsePkTuple(tokenizer, &autoincId) EventNode.Push(pkTuple) default: panic(NewBinlogParseError(EVENT_ERROR, fmt.Sprintf("Error in parsing stream comment: illegal node type %v %v", node.Type, string(node.Value)))) } } return EventNode }
//This parese a paricular pk tuple. func parsePkTuple(tokenizer *parser.Tokenizer, autoincIdPtr *uint64) (pkTuple *parser.Node) { //pkTuple is a list of pk value Nodes pkTuple = parser.NewSimpleParseNode(parser.NODE_LIST, "") autoincId := *autoincIdPtr //start scanning the list for tempNode := tokenizer.Scan(); tempNode.Type != ')'; tempNode = tokenizer.Scan() { switch tempNode.Type { case parser.NULL: pkTuple.Push(parser.NewParseNode(parser.NUMBER, []byte(strconv.FormatUint(autoincId, 10)))) autoincId++ case '-': //handle negative numbers t2 := tokenizer.Scan() if t2.Type != parser.NUMBER { panic(NewBinlogParseError(CODE_ERROR, "Illegal stream comment construct, - followed by a non-number")) } t2.Value = append(tempNode.Value, t2.Value...) pkTuple.Push(t2) case parser.ID, parser.NUMBER: pkTuple.Push(tempNode) case parser.STRING: b := tempNode.Value decoded := make([]byte, base64.StdEncoding.DecodedLen(len(b))) numDecoded, err := base64.StdEncoding.Decode(decoded, b) if err != nil { panic(NewBinlogParseError(CODE_ERROR, "Error in base64 decoding pkValue")) } tempNode.Value = decoded[:numDecoded] pkTuple.Push(tempNode) default: panic(NewBinlogParseError(EVENT_ERROR, fmt.Sprintf("Error in parsing stream comment: illegal node type %v %v", tempNode.Type, string(tempNode.Value)))) } } return pkTuple }
// parsePkTuple parese one pk tuple. func parsePkTuple(tokenizer *sqlparser.Tokenizer) (pkTuple *sqlparser.Node, err error) { // pkTuple is a list of pk value Nodes pkTuple = sqlparser.NewSimpleParseNode(sqlparser.NODE_LIST, "") // start scanning the list for tempNode := tokenizer.Scan(); tempNode.Type != ')'; tempNode = tokenizer.Scan() { switch tempNode.Type { case '-': // handle negative numbers t2 := tokenizer.Scan() if t2.Type != sqlparser.NUMBER { return nil, fmt.Errorf("expecing number after '-'") } t2.Value = append(tempNode.Value, t2.Value...) pkTuple.Push(t2) case sqlparser.ID, sqlparser.NUMBER, sqlparser.NULL: pkTuple.Push(tempNode) case sqlparser.STRING: b := tempNode.Value decoded := make([]byte, base64.StdEncoding.DecodedLen(len(b))) numDecoded, err := base64.StdEncoding.Decode(decoded, b) if err != nil { return nil, err } tempNode.Value = decoded[:numDecoded] pkTuple.Push(tempNode) default: return nil, fmt.Errorf("unexpected token: '%v'", string(tempNode.Value)) } } return pkTuple, nil }
// Example query: insert into vtocc_e(foo) values ('foo') /* _stream vtocc_e (eid id name ) (null 1 'bmFtZQ==' ); */ // the "null" value is used for auto-increment columns. func parseStreamComment(dmlComment string) (EventNode *sqlparser.Node, err error) { EventNode = sqlparser.NewSimpleParseNode(sqlparser.NODE_LIST, "") tokenizer := sqlparser.NewStringTokenizer(dmlComment) node := tokenizer.Scan() if node.Type != sqlparser.ID { return nil, fmt.Errorf("expecting table name in stream comment") } EventNode.Push(node) for node = tokenizer.Scan(); node.Type != ';'; node = tokenizer.Scan() { switch node.Type { case '(': // pkTuple is a list of pk value Nodes pkTuple, err := parsePkTuple(tokenizer) if err != nil { return nil, err } EventNode.Push(pkTuple) default: return nil, fmt.Errorf("expecting '('") } } return EventNode, nil }
// newArgumentNode builds a VALUE_ARG node whose name is derived from the
// current counter value, using the python-style "%(vN)s" bind-variable
// format. The counter itself is not modified here.
func newArgumentNode(counter *int) *sqlparser.Node {
	bindName := fmt.Sprintf("%%(v%d)s", *counter)
	return sqlparser.NewSimpleParseNode(sqlparser.VALUE_ARG, bindName)
}