Example #1
func TestExpressionsToString(t *testing.T) {
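	// Verify that each expression in testExpressions renders to the matching
	// string in testExpressionStrings.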

	for i, expr := range testExpressions {
		exprToString := expr.String()
		if exprToString != testExpressionStrings[i] {
			t.Errorf("Expected expression to evalute to %v, got: %v", testExpressionStrings[i], exprToString)
		}
	}

	// manually test function call, couldn't be set up as a literal
	f := parser.NewFunction("sum")
	f.AddArguments(parser.ExpressionList{parser.NewProperty("bob"), parser.NewProperty("jay")})
	fStr := f.String()
	if fStr != "__func__.sum.bob,jay" {
		t.Errorf("Expected expression to evalute to __func__.sum.bob,jay, got: %v", fStr)
	}

	// manually test object literal separately, can be either of 2 strings (map iteration order not guaranteed)
	ol := parser.NewObjectLiteral(parser.Object{"field": parser.NewProperty("bob"), "field2": parser.NewProperty("jay")})
	olStr := ol.String()
	if olStr != "{\"field\": bob, \"field2\": jay}" && olStr != "{\"field2\": jay, \"field\": bob}" {
		t.Errorf("Expected expression to evalute to {\"field\": bob, \"field2\": jay} OR {\"field2\": jay, \"field\": bob}, got: %v", olStr)
	}

}
Example #2
func (no *NaiveOptimizer) TrySortMerge(plan planner.Plan) {
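	// Look for the next Joiner in the pipeline; if its condition is usable by a
	// sort-merge joiner and both of its inputs are plain, unordered datasources
	// that accept an ORDER BY on the join keys, swap it for a sort-merge joiner.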
	last, curr := planner.FindNextPipelineComponentOfType(plan.Root.GetSource(), planner.JoinerType)
	if curr != nil {
		currJoiner := curr.(planner.Joiner)
		sortMergeJoiner := planner.NewOttoSortMergeJoiner()
		err := sortMergeJoiner.SetCondition(currJoiner.GetCondition())
		if err == nil {
			// now we need to make sure that the upstream sources can be sorted properly
			leftSource := currJoiner.GetLeftSource()
			rightSource := currJoiner.GetRightSource()

			// FIXME for now this only works if the immediate sources are datasources
			// needs to be made to work through joins and other pipeline components

			leftDataSource, isLeftDataSource := leftSource.(planner.DataSource)
			rightDataSource, isRightDataSource := rightSource.(planner.DataSource)
			if isLeftDataSource && isRightDataSource {
				if leftDataSource.GetOrderBy() == nil && rightDataSource.GetOrderBy() == nil {
					leftSort := parser.NewSortItem(parser.NewProperty(sortMergeJoiner.LeftExpr.SymbolsReferenced()[0]), true)
					err = leftDataSource.SetOrderBy(parser.SortList{*leftSort})
					if err != nil {
						//log.Printf("merge join not possible left datasource rejected order by")
						return
					}

					rightSort := parser.NewSortItem(parser.NewProperty(sortMergeJoiner.RightExpr.SymbolsReferenced()[0]), true)
					err = rightDataSource.SetOrderBy(parser.SortList{*rightSort})
					if err != nil {
						//log.Printf("merge join not possible right datasource rejected order by")
						return
					}

					//if we made it this far, it should work
					//let's attach the left and right datasources to the new joiner
					sortMergeJoiner.SetLeftSource(leftDataSource)
					sortMergeJoiner.SetRightSource(rightDataSource)

					//now we just need to replace the existing joiner with the new one
					if last == nil {
						plan.Root.SetSource(sortMergeJoiner)
					} else {
						last.SetSource(sortMergeJoiner)
					}
				} else {
					//log.Printf("merge join optimization only supports direct datasources that are not already ordered")
				}
			} else {
				//log.Printf("merge join optimization only supports direct datasources before join")
			}
		} else {
			//log.Printf("merge join not going to work here %v", err)
		}
	}
}
Example #3
func TestOver(t *testing.T) {
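	// Run the OVER component against the stub datasource: every output document
	// should carry a string "role" field, and the stub data should yield 12 rows.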

	ds := NewStubDataSource("stub", "stub")

	o := NewOttoOver()
	o.SetSource(ds)
	o.SetPath(parser.NewProperty("stub.roles"))
	o.SetAs("role")

	go o.Run()

	output := o.GetDocumentChannel()

	rows := 0
	for doc := range output {
		rows += 1
		_, ok := doc["role"].(string)
		if !ok {
			t.Errorf("Row should have string role")
		}
	}

	if rows != 12 {
		t.Errorf("Expected 12 rows, got %v", rows)
	}
}
Example #4
func TestSymbolsReferenced(t *testing.T) {
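	// Table-driven check: each expression list should report the expected number
	// of referenced symbols, in order (bob, jay, cat).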

	for _, v := range zeroSymbolExpressions {
		symbols := v.SymbolsReferenced()
		if len(symbols) != 0 {
			t.Errorf("Expected 0 symbols and found: %v", symbols)
		}
	}

	for _, v := range oneSymbolExpressions {
		symbols := v.SymbolsReferenced()
		if len(symbols) != 1 {
			t.Errorf("Expected 1 symbols and found: %v", symbols)
		}
		if symbols[0] != "bob" {
			t.Errorf("Expected symbol to be bob, got: %v", symbols[0])
		}
	}

	for _, v := range twoSymbolExpressions {
		symbols := v.SymbolsReferenced()
		if len(symbols) != 2 {
			t.Errorf("Expected 2 symbols and found: %v", symbols)
		}
		if symbols[0] != "bob" {
			t.Errorf("Expected symbol 1 to be bob, got: %v", symbols[0])
		}
		if symbols[1] != "jay" {
			t.Errorf("Expected symbol 2 to be jay, got: %v", symbols[0])
		}
	}

	for _, v := range threeSymbolExpressions {
		symbols := v.SymbolsReferenced()
		if len(symbols) != 3 {
			t.Errorf("Expected 3 symbols and found: %v", symbols)
		}
		if symbols[0] != "bob" {
			t.Errorf("Expected symbol 1 to be bob, got: %v", symbols[0])
		}
		if symbols[1] != "jay" {
			t.Errorf("Expected symbol 2 to be jay, got: %v", symbols[0])
		}
		if symbols[2] != "cat" {
			t.Errorf("Expected symbol 3 to be cat, got: %v", symbols[0])
		}
	}

	// manually test function call, couldn't be set up as a literal
	f := parser.NewFunction("sum")
	f.AddArguments(parser.ExpressionList{parser.NewProperty("bob")})
	symbols := f.SymbolsReferenced()
	if len(symbols) != 1 {
		t.Errorf("Expected 1 symbols and found: %v", symbols)
	}
	if symbols[0] != "bob" {
		t.Errorf("Expected symbol to be bob, got: %v", symbols[0])
	}
}
Example #5
func TestOttoSelect(t *testing.T) {
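	// The selecter projects stub.age from each source document; every emitted
	// row should be a float64.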

	ds := NewStubDataSource("stub", "stub")

	s := NewOttoSelecter()
	s.SetSource(ds)
	s.SetSelect(parser.NewProperty("stub.age"))

	go s.Run()

	output := s.GetRowChannel()

	for row := range output {
		_, ok := row.(float64)
		if !ok {
			t.Errorf("Row should be float64")
		}
	}
}
Example #6
func TestFilter(t *testing.T) {
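	// Filter the stub documents on stub.age > 30 and verify that no document
	// with age <= 30 makes it through.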

	ds := NewStubDataSource("stub", "stub")

	f := NewOttoFilter()
	f.SetSource(ds)
	f.SetFilter(parser.NewGreaterThanExpression(parser.NewProperty("stub.age"), parser.NewIntegerLiteral(30)))

	go f.Run()

	output := f.GetDocumentChannel()

	for doc := range output {
		if doc["stub"].(Document)["age"].(int) <= 30 {
			t.Errorf("Row does not match expected filter output")
		}

	}
}
Example #7
func TestComplexProperties(t *testing.T) {
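	// A dotted property such as a.b.c should split into head "a" and tail "b.c",
	// report a sub property, and reference datasource "a".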
	cp := parser.NewProperty("a.b.c")
	head := cp.Head()
	if head != "a" {
		t.Errorf("Expected property head to be a, got %v", head)
	}

	tail := cp.Tail()
	if tail.String() != "b.c" {
		t.Errorf("Expected property tail to be b.c, got %v", tail)
	}

	if cp.HasSubProperty() != true {
		t.Errorf("Expected property a.b.c to have a sub property")
	}

	if cp.IsReferencingDataSource("a") != true {
		t.Errorf("Expected property a.b.c to refer to datasource a")
	}
}
Example #8
func TestSimpleProperties(t *testing.T) {
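	// A single-segment property has no tail, no sub property, and does not
	// reference an unrelated datasource.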
	sp := parser.NewProperty("a")
	head := sp.Head()
	if head != "a" {
		t.Errorf("Expected property head to be a, got %v", head)
	}

	tail := sp.Tail()
	if tail != nil {
		t.Errorf("Expected property tail to nil got %v", tail)
	}

	if sp.HasSubProperty() != false {
		t.Errorf("Expected property a.b.c to not have a sub property")
	}

	if sp.IsReferencingDataSource("b") != false {
		t.Errorf("Expected property a to not refer to datasource b")
	}
}
Example #9
func TestOrder(t *testing.T) {
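	// Order the stub documents by stub.age ascending and verify the ages arrive
	// in increasing order.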

	ds := NewStubDataSource("stub", "stub")

	o := NewOttoOrderer()
	o.SetSource(ds)
	o.SetOrderBy(parser.SortList{parser.SortItem{Sort: parser.NewProperty("stub.age"), Ascending: true}})

	go o.Run()

	output := o.GetDocumentChannel()

	lastAge := 0
	for doc := range output {
		age := doc["stub"].(Document)["age"].(int)
		if !(age > lastAge) {
			t.Errorf("Results not ordered correctly")
		}
		lastAge = age
	}
}
Example #10
func TestGrouper(t *testing.T) {
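	// Group the stub documents by stub.type and verify the per-group counts
	// reported under __func__.count.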
	ds := NewStubDataSource("stub", "stub")

	g := NewOttoGrouper()
	g.SetSource(ds)
	g.SetGroupByWithStatsFields(parser.ExpressionList{parser.NewProperty("stub.type")}, []string{})

	go g.Run()

	output := g.GetDocumentChannel()

	for doc := range output {
		group := doc["stub"].(Document)["type"].(string)
		count := doc["__func__"].(Document)["count"].(Document)["stub"].(Document)["type"].(int)
		if group == "contract" && count != 2 {
			t.Errorf("Expected 2 contract workers, got %v", count)
		}
		if group == "employee" && count != 4 {
			t.Errorf("Expected 4 contract workers, got %v", count)
		}
	}
}
Example #11
			t.Errorf("Invalid Query Parsed Successfully: %v - %v", v, err)
		}
	}

}

var zeroSymbolExpressions = []parser.Expression{
	parser.NewIntegerLiteral(7),
	parser.NewFloatLiteral(5.5),
	parser.NewNull(),
	parser.NewStringLiteral("name"),
	parser.NewBoolLiteral(true),
}

var oneSymbolExpressions = []parser.Expression{
	parser.NewProperty("bob"),
	parser.NewArrayLiteral(parser.ExpressionList{parser.NewProperty("bob")}),
	parser.NewObjectLiteral(parser.Object{"field": parser.NewProperty("bob")}),
	parser.NewNotExpression(parser.NewProperty("bob")),
	parser.NewBracketMemberExpression(parser.NewProperty("bob"), parser.NewIntegerLiteral(0)),
}

var twoSymbolExpressions = []parser.Expression{
	parser.NewPlusExpression(parser.NewProperty("bob"), parser.NewProperty("jay")),
	parser.NewMinusExpression(parser.NewProperty("bob"), parser.NewProperty("jay")),
	parser.NewMultiplyExpression(parser.NewProperty("bob"), parser.NewProperty("jay")),
	parser.NewDivideExpression(parser.NewProperty("bob"), parser.NewProperty("jay")),
	parser.NewOrExpression(parser.NewProperty("bob"), parser.NewProperty("jay")),
	parser.NewAndExpression(parser.NewProperty("bob"), parser.NewProperty("jay")),
	parser.NewLessThanExpression(parser.NewProperty("bob"), parser.NewProperty("jay")),
	parser.NewLessThanOrEqualExpression(parser.NewProperty("bob"), parser.NewProperty("jay")),
Example #12
File: y.go Project: mschoch/tuq
func yyParse(yylex yyLexer) int {
	var yyn int
	var yylval yySymType
	var yyVAL yySymType
	yyS := make([]yySymType, yyMaxDepth)

	Nerrs := 0   /* number of errors */
	Errflag := 0 /* error recovery flag */
	yystate := 0
	yychar := -1
	yyp := -1
	goto yystack

ret0:
	return 0

ret1:
	return 1

yystack:
	/* put a state and value onto the stack */
	if yyDebug >= 4 {
		fmt.Printf("char %v in %v\n", yyTokname(yychar), yyStatname(yystate))
	}

	yyp++
	if yyp >= len(yyS) {
		nyys := make([]yySymType, len(yyS)*2)
		copy(nyys, yyS)
		yyS = nyys
	}
	yyS[yyp] = yyVAL
	yyS[yyp].yys = yystate

yynewstate:
	yyn = yyPact[yystate]
	if yyn <= yyFlag {
		goto yydefault /* simple state */
	}
	if yychar < 0 {
		yychar = yylex1(yylex, &yylval)
	}
	yyn += yychar
	if yyn < 0 || yyn >= yyLast {
		goto yydefault
	}
	yyn = yyAct[yyn]
	if yyChk[yyn] == yychar { /* valid shift */
		yychar = -1
		yyVAL = yylval
		yystate = yyn
		if Errflag > 0 {
			Errflag--
		}
		goto yystack
	}

yydefault:
	/* default state action */
	yyn = yyDef[yystate]
	if yyn == -2 {
		if yychar < 0 {
			yychar = yylex1(yylex, &yylval)
		}

		/* look through exception table */
		xi := 0
		for {
			if yyExca[xi+0] == -1 && yyExca[xi+1] == yystate {
				break
			}
			xi += 2
		}
		for xi += 2; ; xi += 2 {
			yyn = yyExca[xi+0]
			if yyn < 0 || yyn == yychar {
				break
			}
		}
		yyn = yyExca[xi+1]
		if yyn < 0 {
			goto ret0
		}
	}
	if yyn == 0 {
		/* error ... attempt to resume parsing */
		switch Errflag {
		case 0: /* brand new error */
			yylex.Error("syntax error")
			Nerrs++
			if yyDebug >= 1 {
				fmt.Printf("%s", yyStatname(yystate))
				fmt.Printf("saw %s\n", yyTokname(yychar))
			}
			fallthrough

		case 1, 2: /* incompletely recovered error ... try again */
			Errflag = 3

			/* find a state where "error" is a legal shift action */
			for yyp >= 0 {
				yyn = yyPact[yyS[yyp].yys] + yyErrCode
				if yyn >= 0 && yyn < yyLast {
					yystate = yyAct[yyn] /* simulate a shift of "error" */
					if yyChk[yystate] == yyErrCode {
						goto yystack
					}
				}

				/* the current p has no shift on "error", pop stack */
				if yyDebug >= 2 {
					fmt.Printf("error recovery pops state %d\n", yyS[yyp].yys)
				}
				yyp--
			}
			/* there is no state on the stack with an error shift ... abort */
			goto ret1

		case 3: /* no shift yet; clobber input char */
			if yyDebug >= 2 {
				fmt.Printf("error recovery discards %s\n", yyTokname(yychar))
			}
			if yychar == yyEofCode {
				goto ret1
			}
			yychar = -1
			goto yynewstate /* try again in the same state */
		}
	}

	/* reduction by production yyn */
	if yyDebug >= 2 {
		fmt.Printf("reduce %v in:\n\t%v\n", yyn, yyStatname(yystate))
	}

	yynt := yyn
	yypt := yyp
	_ = yypt // guard against "declared and not used"

	yyp -= yyR2[yyn]
	yyVAL = yyS[yyp+1]

	/* consult goto table to find next state */
	yyn = yyR1[yyn]
	yyg := yyPgo[yyn]
	yyj := yyg + yyS[yyp].yys + 1

	if yyj >= yyLast {
		yystate = yyAct[yyg]
	} else {
		yystate = yyAct[yyj]
		if yyChk[yystate] != -yyn {
			yystate = yyAct[yyg]
		}
	}
	// dummy call; replaced with literal code
	switch yynt {

	case 1:
		//line tuql.y:36
		{
			logDebugGrammar("INPUT")
		}
	case 3:
		//line tuql.y:40
		{
			logDebugGrammar("PRAGMA: %v", yyS[yypt-3])
			right := parsingStack.Pop()
			left := parsingStack.Pop()
			parser.ProcessPragma(left.(parser.Expression), right.(parser.Expression))
		}
	case 4:
		//line tuql.y:47
		{
			logDebugGrammar("SELECT_STMT")
			parsingQuery.ParsedSuccessfully = true
		}
	case 6:
		//line tuql.y:52
		{
			parsingQuery.IsExplainOnly = true
		}
	case 12:
		//line tuql.y:64
		{
			thisExpression := parsingStack.Pop()
			parsingQuery.Limit = thisExpression.(parser.Expression)
		}
	case 13:
		//line tuql.y:69
		{
			thisExpression := parsingStack.Pop()
			parsingQuery.Offset = thisExpression.(parser.Expression)
		}
	case 16:
		//line tuql.y:78
		{
			thisExpression := parser.NewSortItem(parsingStack.Pop().(parser.Expression), true)
			parsingQuery.Orderby = append(parsingQuery.Orderby, *thisExpression)
		}
	case 17:
		//line tuql.y:81
		{
			thisExpression := parser.NewSortItem(parsingStack.Pop().(parser.Expression), true)
			parsingQuery.Orderby = append(parsingQuery.Orderby, *thisExpression)
		}
	case 18:
		//line tuql.y:84
		{
			thisExpression := parser.NewSortItem(parsingStack.Pop().(parser.Expression), false)
			parsingQuery.Orderby = append(parsingQuery.Orderby, *thisExpression)
		}
	case 19:
		//line tuql.y:89
		{
			logDebugGrammar("SELECT_COMPOUND")
		}
	case 25:
		//line tuql.y:99
		{
			logDebugGrammar("SELECT_CORE")
		}
	case 26:
		//line tuql.y:102
		{
			logDebugGrammar("SELECT GROUP")
			parsingQuery.IsAggregateQuery = true
			parsingQuery.Groupby = parsingStack.Pop().(parser.ExpressionList)
		}
	case 27:
		//line tuql.y:107
		{
			logDebugGrammar("SELECT GROUP HAVING - EMPTY")
		}
	case 28:
		//line tuql.y:108
		{
			logDebugGrammar("SELECT GROUP HAVING - SELECT GROUP")
		}
	case 29:
		//line tuql.y:109
		{
			logDebugGrammar("SELECT GROUP HAVING - SELECT GROUP SELECT HAVING")
		}
	case 30:
		//line tuql.y:114
		{
			parsingQuery.Having = parsingStack.Pop().(parser.Expression)
		}
	case 31:
		//line tuql.y:117
		{
			logDebugGrammar("SELECT WHERE - EMPTY")
		}
	case 32:
		//line tuql.y:118
		{
			logDebugGrammar("SELECT WHERE - EXPR")
			where_part := parsingStack.Pop()
			parsingQuery.Where = where_part.(parser.Expression)
		}
	case 34:
		//line tuql.y:124
		{
			logDebugGrammar("SELECT_FROM")
		}
	case 37:
		//line tuql.y:131
		{
			ds := parser.NewDataSource(yyS[yypt-0].s)
			parsingQuery.AddDataSource(ds)
		}
	case 38:
		//line tuql.y:134
		{
			ds := parser.NewDataSource(yyS[yypt-1].s)
			nextOver := parsingStack.Pop()
			for nextOver != nil {
				ds.AddOver(nextOver.(*parser.Over))
				nextOver = parsingStack.Pop()
			}
			parsingQuery.AddDataSource(ds)
		}
	case 39:
		//line tuql.y:142
		{
			ds := parser.NewDataSourceWithAs(yyS[yypt-2].s, yyS[yypt-0].s)
			parsingQuery.AddDataSource(ds)
		}
	case 40:
		//line tuql.y:145
		{
			ds := parser.NewDataSourceWithAs(yyS[yypt-3].s, yyS[yypt-1].s)
			nextOver := parsingStack.Pop()
			for nextOver != nil {
				ds.AddOver(nextOver.(*parser.Over))
				nextOver = parsingStack.Pop()
			}
			parsingQuery.AddDataSource(ds)
		}
	case 43:
		//line tuql.y:159
		{
			prop := parsingStack.Pop().(*parser.Property)
			over := parser.NewOver(prop, yyS[yypt-0].s)
			parsingStack.Push(over)
		}
	case 45:
		//line tuql.y:167
		{
			logDebugGrammar("SELECT_SELECT")
		}
	case 46:
		//line tuql.y:170
		{
			logDebugGrammar("SELECT_SELECT_HEAD")
		}
	case 49:
		//line tuql.y:175
		{
			logDebugGrammar("SELECT SELECT TAIL - EMPTY")
		}
	case 50:
		//line tuql.y:176
		{
			logDebugGrammar("SELECT SELECT TAIL - EXPR")
			thisExpression := parsingStack.Pop()
			parsingQuery.Sel = thisExpression.(parser.Expression)
		}
	case 51:
		//line tuql.y:182
		{
			logDebugGrammar("EXPRESSION")
		}
	case 52:
		//line tuql.y:183
		{
			logDebugGrammar("EXPRESSION - TERNARY")
			elsee := parsingStack.Pop().(parser.Expression)
			thenn := parsingStack.Pop().(parser.Expression)
			iff := parsingStack.Pop().(parser.Expression)
			thisExpr := parser.NewTernaryExpression(iff, thenn, elsee)
			parsingStack.Push(thisExpr)
		}
	case 53:
		//line tuql.y:192
		{
			logDebugGrammar("EXPR - PLUS")
			right := parsingStack.Pop()
			left := parsingStack.Pop()
			thisExpression := parser.NewPlusExpression(left.(parser.Expression), right.(parser.Expression))
			parsingStack.Push(thisExpression)
		}
	case 54:
		//line tuql.y:198
		{
			logDebugGrammar("EXPR - MINUS")
			right := parsingStack.Pop()
			left := parsingStack.Pop()
			thisExpression := parser.NewMinusExpression(left.(parser.Expression), right.(parser.Expression))
			parsingStack.Push(thisExpression)
		}
	case 55:
		//line tuql.y:204
		{
			logDebugGrammar("EXPR - MULT")
			right := parsingStack.Pop()
			left := parsingStack.Pop()
			thisExpression := parser.NewMultiplyExpression(left.(parser.Expression), right.(parser.Expression))
			parsingStack.Push(thisExpression)
		}
	case 56:
		//line tuql.y:210
		{
			logDebugGrammar("EXPR - DIV")
			right := parsingStack.Pop()
			left := parsingStack.Pop()
			thisExpression := parser.NewDivideExpression(left.(parser.Expression), right.(parser.Expression))
			parsingStack.Push(thisExpression)
		}
	case 57:
		//line tuql.y:216
		{
			logDebugGrammar("EXPR - AND")
			right := parsingStack.Pop()
			left := parsingStack.Pop()
			thisExpression := parser.NewAndExpression(left.(parser.Expression), right.(parser.Expression))
			parsingStack.Push(thisExpression)
		}
	case 58:
		//line tuql.y:222
		{
			logDebugGrammar("EXPR - OR")
			right := parsingStack.Pop()
			left := parsingStack.Pop()
			thisExpression := parser.NewOrExpression(left.(parser.Expression), right.(parser.Expression))
			parsingStack.Push(thisExpression)
		}
	case 59:
		//line tuql.y:228
		{
			logDebugGrammar("EXPR - EQ")
			right := parsingStack.Pop()
			left := parsingStack.Pop()
			thisExpression := parser.NewEqualsExpression(left.(parser.Expression), right.(parser.Expression))
			parsingStack.Push(thisExpression)
		}
	case 60:
		//line tuql.y:234
		{
			logDebugGrammar("EXPR - LT")
			right := parsingStack.Pop()
			left := parsingStack.Pop()
			thisExpression := parser.NewLessThanExpression(left.(parser.Expression), right.(parser.Expression))
			parsingStack.Push(thisExpression)
		}
	case 61:
		//line tuql.y:240
		{
			logDebugGrammar("EXPR - LTE")
			right := parsingStack.Pop()
			left := parsingStack.Pop()
			thisExpression := parser.NewLessThanOrEqualExpression(left.(parser.Expression), right.(parser.Expression))
			parsingStack.Push(thisExpression)
		}
	case 62:
		//line tuql.y:246
		{
			logDebugGrammar("EXPR - GT")
			right := parsingStack.Pop()
			left := parsingStack.Pop()
			thisExpression := parser.NewGreaterThanExpression(left.(parser.Expression), right.(parser.Expression))
			parsingStack.Push(thisExpression)
		}
	case 63:
		//line tuql.y:252
		{
			logDebugGrammar("EXPR - GTE")
			right := parsingStack.Pop()
			left := parsingStack.Pop()
			thisExpression := parser.NewGreaterThanOrEqualExpression(left.(parser.Expression), right.(parser.Expression))
			parsingStack.Push(thisExpression)
		}
	case 64:
		//line tuql.y:258
		{
			logDebugGrammar("EXPR - NE")
			right := parsingStack.Pop()
			left := parsingStack.Pop()
			thisExpression := parser.NewNotEqualsExpression(left.(parser.Expression), right.(parser.Expression))
			parsingStack.Push(thisExpression)
		}
	case 67:
		//line tuql.y:269
		{
			logDebugGrammar("EXPR - NOT")
			curr := parsingStack.Pop().(parser.Expression)
			thisExpression := parser.NewNotExpression(curr)
			parsingStack.Push(thisExpression)
		}
	case 69:
		//line tuql.y:277
		{
			logDebugGrammar("SUFFIX_EXPR")
		}
	case 70:
		//line tuql.y:280
		{
			logDebugGrammar("NULL")
			thisExpression := parser.NewNull()
			parsingStack.Push(thisExpression)
		}
	case 71:
		//line tuql.y:284
		{
		}
	case 72:
		//line tuql.y:285
		{
			logDebugGrammar("ATOM - prop[]")
			rightExpr := parsingStack.Pop().(parser.Expression)
			leftProp := parsingStack.Pop().(*parser.Property)
			thisExpression := parser.NewBracketMemberExpression(leftProp, rightExpr)
			parsingStack.Push(thisExpression)
		}
	case 73:
		//line tuql.y:291
		{
			thisExpression := parser.NewIntegerLiteral(yyS[yypt-0].n)
			parsingStack.Push(thisExpression)
		}
	case 74:
		//line tuql.y:293
		{
			thisExpression := parser.NewIntegerLiteral(-yyS[yypt-1].n)
			parsingStack.Push(thisExpression)
		}
	case 75:
		//line tuql.y:295
		{
			thisExpression := parser.NewFloatLiteral(yyS[yypt-0].f)
			parsingStack.Push(thisExpression)
		}
	case 76:
		//line tuql.y:297
		{
			thisExpression := parser.NewFloatLiteral(-yyS[yypt-1].f)
			parsingStack.Push(thisExpression)
		}
	case 77:
		//line tuql.y:299
		{
			thisExpression := parser.NewStringLiteral(yyS[yypt-0].s)
			parsingStack.Push(thisExpression)
		}
	case 78:
		//line tuql.y:301
		{
			thisExpression := parser.NewBoolLiteral(true)
			parsingStack.Push(thisExpression)
		}
	case 79:
		//line tuql.y:303
		{
			thisExpression := parser.NewBoolLiteral(false)
			parsingStack.Push(thisExpression)
		}
	case 80:
		//line tuql.y:305
		{
			logDebugGrammar("ATOM - {}")
		}
	case 81:
		//line tuql.y:307
		{
			logDebugGrammar("ATOM - []")
			exp_list := parsingStack.Pop().(parser.ExpressionList)
			thisExpression := parser.NewArrayLiteral(exp_list)
			parsingStack.Push(thisExpression)
		}
	case 82:
		//line tuql.y:312
		{
			logDebugGrammar("FUNCTION - $1.s")
			exp_list := parsingStack.Pop().(parser.ExpressionList)
			function := parsingStack.Pop().(*parser.Function)
			function.AddArguments(exp_list)
			parsingStack.Push(function)
		}
	case 85:
		//line tuql.y:322
		{
			logDebugGrammar("EXPRESSION_LIST - EXPRESSION")
			exp_list := make(parser.ExpressionList, 0)
			exp_list = append(exp_list, parsingStack.Pop().(parser.Expression))
			parsingStack.Push(exp_list)
		}
	case 86:
		//line tuql.y:327
		{
			logDebugGrammar("EXPRESSION_LIST - EXPRESSION COMMA EXPRESSION_LIST")
			rest := parsingStack.Pop().(parser.ExpressionList)
			last := parsingStack.Pop()
			new_list := make(parser.ExpressionList, 0)
			new_list = append(new_list, last.(parser.Expression))
			for _, v := range rest {
				new_list = append(new_list, v)
			}
			parsingStack.Push(new_list)
		}
	case 88:
		//line tuql.y:340
		{
			last := parsingStack.Pop().(*parser.ObjectLiteral)
			rest := parsingStack.Pop().(*parser.ObjectLiteral)
			rest.AddAll(last)
			parsingStack.Push(rest)
		}
	case 89:
		//line tuql.y:347
		{
			thisKey := yyS[yypt-2].s
			thisValue := parsingStack.Pop().(parser.Expression)
			thisExpression := parser.NewObjectLiteral(parser.Object{thisKey: thisValue})
			parsingStack.Push(thisExpression)
		}
	case 90:
		//line tuql.y:354
		{
			thisExpression := parser.NewProperty(yyS[yypt-0].s)
			parsingStack.Push(thisExpression)
		}
	case 91:
		//line tuql.y:358
		{
			thisValue := parsingStack.Pop().(*parser.Property)
			thisExpression := parser.NewProperty(yyS[yypt-2].s + "." + thisValue.Symbol)
			parsingStack.Push(thisExpression)
		}
	case 92:
		//line tuql.y:365
		{
			parsingQuery.IsAggregateQuery = true
			thisExpression := parser.NewFunction("min")
			parsingStack.Push(thisExpression)
		}
	case 93:
		//line tuql.y:370
		{
			parsingQuery.IsAggregateQuery = true
			thisExpression := parser.NewFunction("max")
			parsingStack.Push(thisExpression)
		}
	case 94:
		//line tuql.y:375
		{
			parsingQuery.IsAggregateQuery = true
			thisExpression := parser.NewFunction("avg")
			parsingStack.Push(thisExpression)
		}
	case 95:
		//line tuql.y:380
		{
			parsingQuery.IsAggregateQuery = true
			thisExpression := parser.NewFunction("count")
			parsingStack.Push(thisExpression)
		}
	case 96:
		//line tuql.y:385
		{
			parsingQuery.IsAggregateQuery = true
			thisExpression := parser.NewFunction("sum")
			parsingStack.Push(thisExpression)
		}
	}
	goto yystack /* stack new state and value */
}
Example #13
func RowsFromFacetSearchResults(sr core.SearchResult, as string) ([]planner.Document, error) {
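	// Convert a facet-based search result into planner documents: unmarshal the
	// raw facet JSON, then either build a single stats row (no group by) or one
	// row per term in the group-by facet, copying count/min/max/avg/sum into
	// __func__.* properties.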
	result := make([]planner.Document, 0)

	var facetResults map[string]interface{}

	// unmarshal from JSON
	jsonErr := json.Unmarshal(sr.Facets, &facetResults)
	if jsonErr != nil {
		return nil, jsonErr
	}

	// look for the group by field (if there was one)
	group_by := ""
	for stat_facet_name := range facetResults {
		if !strings.HasPrefix(stat_facet_name, StatsPrefix) {
			group_by = stat_facet_name
			break
		}
	}

	if group_by == "" {
		// if there was no group by, then there is only 1 row

		row := make(map[string]interface{})

		for stat_facet_name, stat_facet_details := range facetResults {

			if strings.HasPrefix(stat_facet_name, StatsPrefix) {
				stat_field := stat_facet_name[len(StatsPrefix):]
				stat_facet_details_map := stat_facet_details.(map[string]interface{})
				planner.SetDocumentProperty(row, parser.NewProperty(fmt.Sprintf("__func__.count.%v.%v", as, stat_field)), stat_facet_details_map["count"])
				planner.SetDocumentProperty(row, parser.NewProperty(fmt.Sprintf("__func__.min.%v.%v", as, stat_field)), stat_facet_details_map["min"])
				planner.SetDocumentProperty(row, parser.NewProperty(fmt.Sprintf("__func__.max.%v.%v", as, stat_field)), stat_facet_details_map["max"])
				planner.SetDocumentProperty(row, parser.NewProperty(fmt.Sprintf("__func__.avg.%v.%v", as, stat_field)), stat_facet_details_map["mean"])
				planner.SetDocumentProperty(row, parser.NewProperty(fmt.Sprintf("__func__.sum.%v.%v", as, stat_field)), stat_facet_details_map["total"])
			}

		}

		// add this row to the result set
		result = append(result, row)
	} else {

		facet_name := group_by
		facet_details := facetResults[facet_name]

		facet_details_map := facet_details.(map[string]interface{})

		other := facet_details_map["other"]
		if other.(float64) != 0 {
			return nil, fmt.Errorf("Facet results reported %#v \"other\" rows, increate your esMaxAggregate value and try again", other)
		}

		facet_type := facet_details_map["_type"]

		if facet_type == "terms" {
			terms := facet_details_map["terms"]

			for _, term := range terms.([]interface{}) {
				term_map := term.(map[string]interface{})
				row := make(map[string]interface{})
				planner.SetDocumentProperty(row, parser.NewProperty(fmt.Sprintf("%v.%v", as, facet_name)), term_map["term"])
				planner.SetDocumentProperty(row, parser.NewProperty(fmt.Sprintf("__func__.count.%v.%v", as, facet_name)), term_map["count"])

				// now look for any other stats facets with the same term
				// and add those results to this row

				for stat_facet_name, stat_facet_details := range facetResults {

					if strings.HasPrefix(stat_facet_name, StatsPrefix) {
						stat_field := stat_facet_name[len(StatsPrefix):]
						stat_facet_details_map := stat_facet_details.(map[string]interface{})
						stat_terms := stat_facet_details_map["terms"]
						for _, stat_term := range stat_terms.([]interface{}) {
							stat_term_map := stat_term.(map[string]interface{})
							if term_map["term"] == stat_term_map["term"] {
								//this is the term we're looking for
								planner.SetDocumentProperty(row, parser.NewProperty(fmt.Sprintf("__func__.count.%v.%v", as, stat_field)), stat_term_map["count"])
								planner.SetDocumentProperty(row, parser.NewProperty(fmt.Sprintf("__func__.min.%v.%v", as, stat_field)), stat_term_map["min"])
								planner.SetDocumentProperty(row, parser.NewProperty(fmt.Sprintf("__func__.max.%v.%v", as, stat_field)), stat_term_map["max"])
								planner.SetDocumentProperty(row, parser.NewProperty(fmt.Sprintf("__func__.avg.%v.%v", as, stat_field)), stat_term_map["mean"])
								planner.SetDocumentProperty(row, parser.NewProperty(fmt.Sprintf("__func__.sum.%v.%v", as, stat_field)), stat_term_map["total"])
								//MergeContext(row, stat_term_sum)
								// once we've found what we're looking for
								// break out of the inner loop
								break
							}
						}

					}

				}

				// add this row to the result set
				result = append(result, row)
			}
		}
	}

	return result, nil
}
Example #14
func (og *OttoGrouper) Run() {
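	// Consume documents from the source, accumulate per-group statistics keyed
	// by the stringified group-by values, then emit one document per group.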

	// tell our source to start
	go og.Source.Run()

	// get our sources channel
	sourceChannel := og.Source.GetDocumentChannel()

	// read from the channel until it's closed
	for doc := range sourceChannel {
		putDocumentIntoEnvironment(og.Otto, doc)

		groupDocument := Document{}
		groupKey := ""
		for _, expr := range og.GroupBy {
			value := evaluateExpressionInEnvironment(og.Otto, expr)

			val := convertToPrimitive(value)
			//groupDocument[expr.String()] = val
			SetDocumentProperty(groupDocument, expr, val)

			key, err := value.ToString() // this probably doesn't do what we want for Objects or Arrays
			if err != nil {
				log.Printf("Error converting to string %v", err)
			} // FIXME handle error

			groupKey += key
		}

		// now walk through the group by expressions again,
		// this time to compute any necessary stats
		// we need a complete list of fields to compute stats on
		// this should be og.Stats and og.GroupBy, preferably without duplicates
		// we don't expect huge volume here, let's just build a map
		// and then walk the keys
		statsFieldMap := make(map[string]interface{})
		for _, expr := range og.GroupBy {
			statsFieldMap[expr.String()] = nil
		}
		for _, field := range og.Stats {
			statsFieldMap[field] = nil
		}

		for expr := range statsFieldMap {
			// find the entry in the group by map (if it exists)
			statsMap, ok := og.groupValues[groupKey]

			value := evaluateExpressionInEnvironment(og.Otto, parser.NewProperty(expr))
			if ok {
				expr_stats, ok := statsMap[expr]
				if ok {
					expr_stats.ConsiderValue(value)
					statsMap[expr] = expr_stats
				} else {
					stats := NewExpressionStats()
					stats.ConsiderValue(value)
					statsMap[expr] = *stats
				}
			} else {
				// first time we've seen this value
				statsMap = make(ExpressionStatsMap)
				stats := NewExpressionStats()
				stats.ConsiderValue(value)
				statsMap[expr] = *stats
				og.groupValues[groupKey] = statsMap
			}

			for stat_field, stats := range statsMap {
				SetDocumentProperty(groupDocument, parser.NewProperty(fmt.Sprintf("__func__.sum.%v", stat_field)), stats.Sum)
				SetDocumentProperty(groupDocument, parser.NewProperty(fmt.Sprintf("__func__.avg.%v", stat_field)), stats.Avg)
				SetDocumentProperty(groupDocument, parser.NewProperty(fmt.Sprintf("__func__.count.%v", stat_field)), stats.Count)

				if math.IsInf(stats.Min, 1) {
					SetDocumentProperty(groupDocument, parser.NewProperty(fmt.Sprintf("__func__.min.%v", stat_field)), "Infinity")
				} else if math.IsInf(stats.Min, -1) {
					SetDocumentProperty(groupDocument, parser.NewProperty(fmt.Sprintf("__func__.min.%v", stat_field)), "-Infinity")
				} else {
					SetDocumentProperty(groupDocument, parser.NewProperty(fmt.Sprintf("__func__.min.%v", stat_field)), stats.Min)
				}
				if math.IsInf(stats.Max, 1) {
					SetDocumentProperty(groupDocument, parser.NewProperty(fmt.Sprintf("__func__.max.%v", stat_field)), "Infinity")
				} else if math.IsInf(stats.Max, -1) {
					SetDocumentProperty(groupDocument, parser.NewProperty(fmt.Sprintf("__func__.max.%v", stat_field)), "-Infinity")
				} else {
					SetDocumentProperty(groupDocument, parser.NewProperty(fmt.Sprintf("__func__.max.%v", stat_field)), stats.Max)
				}
			}
		}

		og.groupDocs[groupKey] = groupDocument
		cleanupDocumentFromEnvironment(og.Otto, doc)
	}

	for _, v := range og.groupDocs {
		og.OutputChannel <- v
	}

	close(og.OutputChannel)

}