Example #1
// Run is part of the processor interface.
func (ev *evaluator) Run(wg *sync.WaitGroup) {
	if wg != nil {
		defer wg.Done()
	}

	ctx, span := tracing.ChildSpan(ev.ctx, "evaluator")
	defer tracing.FinishSpan(span)

	if log.V(2) {
		log.Infof(ctx, "starting evaluator process")
		defer log.Infof(ctx, "exiting evaluator")
	}

	first := true
	for {
		row, err := ev.input.NextRow()
		if err != nil || row == nil {
			ev.output.Close(err)
			return
		}

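		// Expression initialization is deferred until the first row arrives,
		// since the input column types are only known at that point.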
		if first {
			first = false

			types := make([]sqlbase.ColumnType_Kind, len(row))
			for i := range types {
				types[i] = row[i].Type
			}
			for i, expr := range ev.specExprs {
				err := ev.exprs[i].init(expr, types, ev.flowCtx.evalCtx)
				if err != nil {
					ev.output.Close(err)
					return
				}
				ev.exprTypes[i] = sqlbase.DatumTypeToColumnKind(ev.exprs[i].expr.ResolvedType())
			}
		}

		outRow, err := ev.eval(row)
		if err != nil {
			ev.output.Close(err)
			return
		}

		if log.V(3) {
			log.Infof(ctx, "pushing %s\n", outRow)
		}
		// Push the row to the output RowReceiver; stop if they don't need more
		// rows.
		if !ev.output.PushRow(outRow) {
			if log.V(2) {
				log.Infof(ctx, "no more rows required")
			}
			ev.output.Close(nil)
			return
		}
	}
}
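The overall shape of Run — pull a row from the input RowSource, transform it, push the result to the output RowReceiver, and close the output on error or exhaustion — can be reduced to the self-contained sketch below. The rowSource/rowReceiver interfaces and the helper types here are hypothetical simplifications for illustration, not the distsqlrun definitions.

// A minimal sketch, assuming hypothetical rowSource/rowReceiver interfaces in
// place of the real distsqlrun ones.
package main

import "fmt"

type row []int

// rowSource yields rows one at a time; a nil row signals exhaustion.
type rowSource interface {
	NextRow() (row, error)
}

// rowReceiver consumes rows; PushRow returns false once no more rows are
// needed, and Close reports the producer's final status.
type rowReceiver interface {
	PushRow(row) bool
	Close(err error)
}

// runProcessor pulls rows from in, transforms them, and pushes the results to
// out, closing the output on error or exhaustion -- the same loop structure as
// evaluator.Run above.
func runProcessor(in rowSource, out rowReceiver, transform func(row) (row, error)) {
	for {
		r, err := in.NextRow()
		if err != nil || r == nil {
			out.Close(err)
			return
		}
		outRow, err := transform(r)
		if err != nil {
			out.Close(err)
			return
		}
		if !out.PushRow(outRow) {
			out.Close(nil)
			return
		}
	}
}

// sliceSource and printReceiver are toy implementations used only to exercise
// runProcessor.
type sliceSource struct{ rows []row }

func (s *sliceSource) NextRow() (row, error) {
	if len(s.rows) == 0 {
		return nil, nil
	}
	r := s.rows[0]
	s.rows = s.rows[1:]
	return r, nil
}

type printReceiver struct{}

func (printReceiver) PushRow(r row) bool { fmt.Println(r); return true }
func (printReceiver) Close(err error)    { fmt.Println("closed:", err) }

func main() {
	src := &sliceSource{rows: []row{{1, 2}, {3, 4}}}
	double := func(r row) (row, error) {
		out := make(row, len(r))
		for i, v := range r {
			out[i] = 2 * v
		}
		return out, nil
	}
	runProcessor(src, printReceiver{}, double)
}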
Example #2
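// newAggregator creates an aggregator processor that reads rows from input,
// groups them by the columns in spec.GroupCols, applies the aggregation
// functions described by spec.Exprs, and emits the results to output.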
func newAggregator(
	ctx *FlowCtx, spec *AggregatorSpec, input RowSource, output RowReceiver,
) (*aggregator, error) {
	ag := &aggregator{
		input:       input,
		output:      output,
		ctx:         log.WithLogTag(ctx.Context, "Agg", nil),
		rows:        &RowBuffer{},
		buckets:     make(map[string]struct{}),
		inputCols:   make(columns, len(spec.Exprs)),
		outputTypes: make([]sqlbase.ColumnType_Kind, len(spec.Exprs)),
		groupCols:   make(columns, len(spec.GroupCols)),
	}

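	// Record, for each aggregation expression, which input column it reads and
	// that column's type.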
	inputTypes := make([]sqlbase.ColumnType_Kind, len(spec.Exprs))
	for i, expr := range spec.Exprs {
		ag.inputCols[i] = expr.ColIdx
		inputTypes[i] = spec.Types[expr.ColIdx]
	}
	copy(ag.groupCols, spec.GroupCols)

	// Loop over the select expressions and extract any aggregate functions;
	// non-aggregation expressions are replaced with parser.NewIdentAggregate
	// (which simply returns the last value added to it for a bucket) to provide
	// the grouped-by values for each bucket. ag.funcs is updated to contain all
	// the functions that need to be fed values.
	eh := &exprHelper{types: inputTypes}
	eh.vars = parser.MakeIndexedVarHelper(eh, len(eh.types))
	for i, expr := range spec.Exprs {
		fn, retType, err := ag.extractFunc(expr, eh)
		if err != nil {
			return nil, err
		}
		ag.funcs = append(ag.funcs, fn)

		// A nil return type means the extracted function is an identity
		// aggregate, so the output type is simply the i-th input type.
		if retType == nil {
			ag.outputTypes[i] = inputTypes[i]
		} else {
			ag.outputTypes[i] = sqlbase.DatumTypeToColumnKind(retType)
		}
	}

	return ag, nil
}
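The identity-aggregate trick described in the comment above — feeding grouped-by columns through an aggregate that simply remembers the last value added for its bucket — is sketched below with hypothetical types (aggregateFunc, identAggregate, sumAggregate). This is an illustrative simplification, not parser.NewIdentAggregate or the aggregator's actual bucketing code.

// A minimal sketch, assuming hand-rolled stand-in types rather than the
// CockroachDB APIs.
package main

import "fmt"

// aggregateFunc mirrors the add-values-then-read-result shape of ag.funcs.
type aggregateFunc interface {
	Add(v int)
	Result() int
}

// identAggregate stands in for an identity aggregate: it just remembers the
// last value added, which carries the grouped-by value for the bucket.
type identAggregate struct{ last int }

func (a *identAggregate) Add(v int)   { a.last = v }
func (a *identAggregate) Result() int { return a.last }

// sumAggregate is a genuine aggregation, for contrast.
type sumAggregate struct{ sum int }

func (a *sumAggregate) Add(v int)   { a.sum += v }
func (a *sumAggregate) Result() int { return a.sum }

func main() {
	// Input rows of (groupCol, valueCol); the query shape is
	// SELECT groupCol, sum(valueCol) ... GROUP BY groupCol.
	rows := [][2]int{{1, 10}, {2, 5}, {1, 7}}

	// One slice of aggregate functions per bucket, keyed by the group value.
	buckets := map[int][]aggregateFunc{}
	for _, r := range rows {
		fns, ok := buckets[r[0]]
		if !ok {
			fns = []aggregateFunc{&identAggregate{}, &sumAggregate{}}
			buckets[r[0]] = fns
		}
		fns[0].Add(r[0]) // grouped-by column goes through the identity aggregate
		fns[1].Add(r[1]) // aggregated column goes through sum
	}

	// Every output column is read back uniformly via Result().
	for _, fns := range buckets {
		fmt.Println(fns[0].Result(), fns[1].Result()) // e.g. "1 17" and "2 5"
	}
}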