Example #1
func applyCommand(table *Table, cmd string) error {
	s := toki.NewScanner(tokens)
	s.SetInput(strings.Replace(cmd, "  ", " ## ", -1)) // the token library skips whitespace, but for us a double space is significant
	t := s.Next()
	switch t.Token {
	case addAgg:
		return readAddAgg(s, table)
	case addBlack:
		return readAddBlack(s, table)
	case addDest:
		return errors.New("sorry, addDest is not implemented yet")
	case addRouteSendAllMatch:
		return readAddRoute(s, table, NewRouteSendAllMatch)
	case addRouteSendFirstMatch:
		return readAddRoute(s, table, NewRouteSendFirstMatch)
	case addRouteConsistentHashing:
		return readAddRouteConsistentHashing(s, table)
	case addRouteGrafanaNet:
		return readAddRouteGrafanaNet(s, table)
	case modDest:
		return readModDest(s, table)
	case modRoute:
		return readModRoute(s, table)
	default:
		return fmt.Errorf("unrecognized command %q", t.Value)
	}
	// note: unreachable as written, since every case above returns before this check runs
	if t = s.Next(); t.Token != toki.EOF {
		return fmt.Errorf("extraneous input %q", t.Value)
	}
	return nil
}
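The double-space trick in applyCommand is easier to see in isolation. Below is a minimal, self-contained sketch of it; the toki import path and the token names (addRouteSendAllMatchTok, sepTok, wordTok) are assumptions made for the sketch, standing in for the project's own tokens table, whose word and sep entries appear in the test at the bottom of this page.

package main

import (
	"fmt"
	"strings"

	"github.com/taylorchu/toki" // assumed import path for the toki library
)

// Illustrative token values; the real project declares its own table named tokens.
const (
	addRouteSendAllMatchTok toki.Token = iota + 10 // start above toki's predefined values such as EOF
	sepTok
	wordTok
)

var demoTokens = []toki.Def{
	{Token: addRouteSendAllMatchTok, Pattern: "addRoute sendAllMatch"},
	{Token: sepTok, Pattern: "##"}, // the injected separator
	{Token: wordTok, Pattern: "[^ ]+"},
}

func main() {
	s := toki.NewScanner(demoTokens)
	// toki skips whitespace, so a double space (significant in this command
	// language) would otherwise be invisible; rewriting "  " to " ## " turns
	// it into a token the sep pattern can match.
	cmd := "addRoute sendAllMatch carbon-default  127.0.0.1:2005"
	s.SetInput(strings.Replace(cmd, "  ", " ## ", -1))
	// should come out as: addRouteSendAllMatchTok wordTok sepTok wordTok
	for t := s.Next(); t.Token != toki.EOF; t = s.Next() {
		fmt.Printf("%v %q\n", t.Token, t.Value)
	}
}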
Example #2
func NewExprLexer() *lexer {
	l := &lexer{}
	l.scanner = toki.NewScanner(
		[]toki.Def{
			{Token: DOT, Pattern: "\\."},
			{Token: COLON, Pattern: ":"},
			{Token: LBRACKET, Pattern: "\\["},
			{Token: RBRACKET, Pattern: "\\]"},
			{Token: NUMBER, Pattern: "([+-]?([0-9]*\\.)?[0-9]+)"},
			{Token: KEY, Pattern: "[a-zA-Z\\~\\$\\_][a-zA-Z0-9\\/\\%_\\-]*"},
		})
	return l
}
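NewExprLexer only wires up the scanner; nothing in the excerpt shows it being driven. Here is a minimal standalone sketch of feeding the same definitions some input, using the SetInput/Next/EOF calls the other examples use; the constant values and the toki import path are assumptions made for the sketch.

package main

import (
	"fmt"

	"github.com/taylorchu/toki" // assumed import path for the toki library
)

// Stand-ins for the DOT/COLON/... constants the real package declares elsewhere.
const (
	DOT toki.Token = iota + 10 // start above toki's predefined values such as EOF
	COLON
	LBRACKET
	RBRACKET
	NUMBER
	KEY
)

func main() {
	s := toki.NewScanner([]toki.Def{
		{Token: DOT, Pattern: "\\."},
		{Token: COLON, Pattern: ":"},
		{Token: LBRACKET, Pattern: "\\["},
		{Token: RBRACKET, Pattern: "\\]"},
		{Token: NUMBER, Pattern: "([+-]?([0-9]*\\.)?[0-9]+)"},
		{Token: KEY, Pattern: "[a-zA-Z\\~\\$\\_][a-zA-Z0-9\\/\\%_\\-]*"},
	})
	s.SetInput("building/1.sensors[0]:temp")
	// should come out as: KEY DOT KEY LBRACKET NUMBER RBRACKET COLON KEY
	for t := s.Next(); t.Token != toki.EOF; t = s.Next() {
		fmt.Printf("%v %q\n", t.Token, t.Value)
	}
}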
Example #3
func NewSQLex(s string) *sqLex {
	scanner := toki.NewScanner(
		[]toki.Def{
			{Token: WHERE, Pattern: "where"},
			{Token: SELECT, Pattern: "select"},
			{Token: APPLY, Pattern: "apply"},
			{Token: DELETE, Pattern: "delete"},
			{Token: DISTINCT, Pattern: "distinct"},
			{Token: STATISTICAL, Pattern: "statistical"},
			{Token: STATISTICS, Pattern: "statistics"},
			{Token: WINDOW, Pattern: "window"},
			{Token: LIMIT, Pattern: "limit"},
			{Token: STREAMLIMIT, Pattern: "streamlimit"},
			{Token: ALL, Pattern: "\\*"},
			{Token: NOW, Pattern: "now"},
			{Token: SET, Pattern: "set"},
			{Token: BEFORE, Pattern: "before"},
			{Token: AFTER, Pattern: "after"},
			{Token: COMMA, Pattern: ","},
			{Token: AND, Pattern: "and"},
			{Token: AS, Pattern: "as"},
			{Token: TO, Pattern: "to"},
			{Token: DATA, Pattern: "data"},
			{Token: OR, Pattern: "or"},
			{Token: IN, Pattern: "in"},
			{Token: HAS, Pattern: "has"},
			{Token: NOT, Pattern: "not"},
			{Token: NEQ, Pattern: "!="},
			{Token: EQ, Pattern: "="},
			{Token: LEFTPIPE, Pattern: "<"},
			{Token: LPAREN, Pattern: "\\("},
			{Token: RPAREN, Pattern: "\\)"},
			{Token: LBRACK, Pattern: "\\["},
			{Token: RBRACK, Pattern: "\\]"},
			{Token: SEMICOLON, Pattern: ";"},
			{Token: NEWLINE, Pattern: "\n"},
			{Token: LIKE, Pattern: "(like)|~"},
			{Token: NUMBER, Pattern: "([+-]?([0-9]*\\.)?[0-9]+)"},
			{Token: LVALUE, Pattern: "[a-zA-Z\\~\\$\\_][a-zA-Z0-9\\/\\%_\\-]*"},
			{Token: QSTRING, Pattern: "(\"[^\"\\\\]*?(\\.[^\"\\\\]*?)*?\")|('[^'\\\\]*?(\\.[^'\\\\]*?)*?')"},
		})
	scanner.SetInput(s)
	q := &query{Contents: []string{}, distinct: false}
	return &sqLex{query: q, querystring: s, scanner: scanner, error: nil, lasttoken: "", _keys: map[string]struct{}{}, tokens: []string{}}
}
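Both query lexers follow the same layout: keyword definitions first, then the catch-all NUMBER/LVALUE/QSTRING patterns. Below is a trimmed-down, standalone sketch of how such a table tokenizes a small query; the constant values, the example query, and the toki import path are assumptions made for the sketch, and only a subset of the definitions above is kept.

package main

import (
	"fmt"

	"github.com/taylorchu/toki" // assumed import path for the toki library
)

// Stand-ins for the SELECT/WHERE/... constants the real package declares elsewhere.
const (
	SELECT toki.Token = iota + 10 // start above toki's predefined values such as EOF
	DISTINCT
	WHERE
	EQ
	SEMICOLON
	NUMBER
	LVALUE
	QSTRING
)

func main() {
	s := toki.NewScanner([]toki.Def{
		// keywords come before the catch-all LVALUE pattern, as in the lexers above
		{Token: SELECT, Pattern: "select"},
		{Token: DISTINCT, Pattern: "distinct"},
		{Token: WHERE, Pattern: "where"},
		{Token: EQ, Pattern: "="},
		{Token: SEMICOLON, Pattern: ";"},
		{Token: NUMBER, Pattern: "([+-]?([0-9]*\\.)?[0-9]+)"},
		{Token: LVALUE, Pattern: "[a-zA-Z\\~\\$\\_][a-zA-Z0-9\\/\\%_\\-]*"},
		{Token: QSTRING, Pattern: "(\"[^\"\\\\]*?(\\.[^\"\\\\]*?)*?\")|('[^'\\\\]*?(\\.[^'\\\\]*?)*?')"},
	})
	s.SetInput(`select distinct Metadata/Location where Metadata/Room = "410";`)
	// should come out as: SELECT DISTINCT LVALUE WHERE LVALUE EQ QSTRING SEMICOLON
	for t := s.Next(); t.Token != toki.EOF; t = s.Next() {
		fmt.Printf("%v %q\n", t.Token, t.Value)
	}
}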
Example #4
func NewQueryLexer(s string) *QueryLex {
	scanner := toki.NewScanner(
		[]toki.Def{
			{Token: WHERE, Pattern: "where"},
			{Token: SELECT, Pattern: "select"},
			{Token: DISTINCT, Pattern: "distinct"},
			{Token: ALL, Pattern: "\\*"},
			{Token: NOW, Pattern: "now"},
			{Token: SET, Pattern: "set"},
			{Token: BEFORE, Pattern: "before"},
			{Token: FIRST, Pattern: "first"},
			{Token: LAST, Pattern: "last"},
			{Token: IBEFORE, Pattern: "ibefore"},
			{Token: BETWEEN, Pattern: "between"},
			{Token: HAPPENS, Pattern: "happens"},
			{Token: AT, Pattern: "at"},
			{Token: AFTER, Pattern: "after"},
			{Token: IAFTER, Pattern: "iafter"},
			{Token: COMMA, Pattern: ","},
			{Token: AND, Pattern: "and"},
			{Token: AS, Pattern: "as"},
			{Token: TO, Pattern: "to"},
			{Token: FOR, Pattern: "for"},
			{Token: OR, Pattern: "or"},
			{Token: IN, Pattern: "in"},
			{Token: HAS, Pattern: "has"},
			{Token: NOT, Pattern: "not"},
			{Token: NEQ, Pattern: "!="},
			{Token: EQ, Pattern: "="},
			{Token: LPAREN, Pattern: "\\("},
			{Token: RPAREN, Pattern: "\\)"},
			{Token: SEMICOLON, Pattern: ";"},
			{Token: NEWLINE, Pattern: "\n"},
			{Token: LIKE, Pattern: "(like)|~"},
			{Token: NUMBER, Pattern: "([+-]?([0-9]*\\.)?[0-9]+)"},
			{Token: LVALUE, Pattern: "[a-zA-Z\\~\\$\\_][a-zA-Z0-9\\/\\%_\\-]*"},
			{Token: QSTRING, Pattern: "(\"[^\"\\\\]*?(\\.[^\"\\\\]*?)*?\")|('[^'\\\\]*?(\\.[^'\\\\]*?)*?')"},
		})
	scanner.SetInput(s)
	lex := &QueryLex{Query: &Query{}, Now: _time.Now(), querystring: s, scanner: scanner, Err: nil, lasttoken: "", tokens: []string{}}
	//lex.Rewrite(s)
	return lex
}
Example #5
func TestScanner(t *testing.T) {
	cases := []struct {
		cmd string
		exp []toki.Token
	}{
		{
			"addBlack prefix collectd.localhost",
			[]toki.Token{addBlack, word, word},
		},
		{
			`addBlack regex ^foo\..*\.cpu+`,
			[]toki.Token{addBlack, word, word},
		},
		{
			`addAgg sum ^stats\.timers\.(app|proxy|static)[0-9]+\.requests\.(.*) stats.timers._sum_$1.requests.$2 10 20`,
			[]toki.Token{addAgg, sumFn, word, word, num, num},
		},
		{
			`addAgg avg ^stats\.timers\.(app|proxy|static)[0-9]+\.requests\.(.*) stats.timers._avg_$1.requests.$2 5 10`,
			[]toki.Token{addAgg, avgFn, word, word, num, num},
		},
		{
			"addRoute sendAllMatch carbon-default  127.0.0.1:2005 spool=true pickle=false",
			[]toki.Token{addRouteSendAllMatch, word, sep, word, optSpool, optTrue, optPickle, optFalse},
		},
		{
			"addRoute sendAllMatch carbon-tagger sub==  127.0.0.1:2006",
			[]toki.Token{addRouteSendAllMatch, word, optSub, word, sep, word},
		},
		{
			"addRoute sendFirstMatch analytics regex=(Err/s|wait_time|logger)  graphite.prod:2003 prefix=prod. spool=true pickle=true  graphite.staging:2003 prefix=staging. spool=true pickle=true",
			[]toki.Token{addRouteSendFirstMatch, word, optRegex, word, sep, word, optPrefix, word, optSpool, optTrue, optPickle, optTrue, sep, word, optPrefix, word, optSpool, optTrue, optPickle, optTrue},
		},
		//{ disabled because it tries to read the schemas.conf file
		//	"addRoute grafanaNet grafanaNet  http://localhost:8081/metrics your-grafana.net-api-key /path/to/storage-schemas.conf",
		//	[]toki.Token{addRouteGrafanaNet, word, sep, word, word},
		//},
	}
	for i, c := range cases {
		s := toki.NewScanner(tokens)
		s.SetInput(strings.Replace(c.cmd, "  ", " ## ", -1))
		for j, e := range c.exp {
			r := s.Next()
			if e != r.Token {
				t.Fatalf("case %d pos %d - expected %v, got %v", i, j, e, r.Token)
			}
		}
	}

	table = NewTable("")
	for _, c := range cases {
		err := applyCommand(table, c.cmd)
		if err != nil {
			t.Fatalf("could not apply init cmd %q: %s", c.cmd, err)
		}
	}
	tablePrinted := table.Print()
	t.Log("===========================")
	t.Log("========== TABLE ==========")
	t.Log("===========================")
	for _, line := range strings.Split(tablePrinted, "\n") {
		t.Log(line)
	}

}