Example 1
func query(q string, timestamp clientmodel.Timestamp, storage local.Storage) (queryResult, error) {
	exprNode, err := rules.LoadExprFromString(q)
	if err != nil {
		return nil, err
	}
	queryStats := stats.NewTimerGroup()
	vector, err := ast.EvalToVector(exprNode, timestamp, storage, queryStats)
	if err != nil {
		return nil, err
	}

	// ast.Vector is hard to work with in templates, so convert to
	// base data types.
	var result = make(queryResult, len(vector))
	for n, v := range vector {
		s := sample{
			Value:  float64(v.Value),
			Labels: make(map[string]string),
		}
		for label, value := range v.Metric.Metric {
			s.Labels[string(label)] = string(value)
		}
		result[n] = &s
	}
	return result, nil
}
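The function above builds and returns a queryResult of sample values, but neither type is defined in this excerpt. A minimal sketch of declarations consistent with how they are used (the names come from the code above; the exact definitions in the surrounding package may differ):

// Hypothetical declarations inferred from the query function above:
// queryResult is sized and indexed like a slice of *sample, and sample
// carries a plain float value plus a flat string label map so that
// templates can consume it easily.
type sample struct {
	Value  float64
	Labels map[string]string
}

type queryResult []*sample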
Example 2
func (serv MetricsService) Query(w http.ResponseWriter, r *http.Request) {
	setAccessControlHeaders(w)

	params := http_utils.GetQueryParams(r)
	expr := params.Get("expr")
	asText := params.Get("asText")

	var format ast.OutputFormat
	// BUG(julius): Use Content-Type negotiation.
	if asText == "" {
		format = ast.JSON
		w.Header().Set("Content-Type", "application/json")
	} else {
		format = ast.TEXT
		w.Header().Set("Content-Type", "text/plain")
	}

	exprNode, err := rules.LoadExprFromString(expr)
	if err != nil {
		fmt.Fprint(w, ast.ErrorToJSON(err))
		return
	}

	timestamp := clientmodel.TimestampFromTime(serv.time.Now())

	queryStats := stats.NewTimerGroup()
	result := ast.EvalToString(exprNode, timestamp, format, serv.Storage, queryStats)
	glog.Infof("Instant query: %s\nQuery stats:\n%s\n", expr, queryStats)
	fmt.Fprint(w, result)
}
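The BUG(julius) note above points at Content-Type negotiation as the intended replacement for the asText parameter. A minimal sketch of what that could look like, using only the standard library plus the ast.JSON and ast.TEXT constants already used in the handler (an illustration, not the project's actual fix; it would also need the strings package imported):

// Sketch only: derive the output format from the request's Accept header
// instead of the asText query parameter.
func negotiateFormat(r *http.Request) (ast.OutputFormat, string) {
	if strings.Contains(r.Header.Get("Accept"), "text/plain") {
		return ast.TEXT, "text/plain"
	}
	// Default to JSON, matching the handler's current behaviour.
	return ast.JSON, "application/json"
}

The handler body would then call format, contentType := negotiateFormat(r) and set the Content-Type header from the second return value.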
Example 3
func testMakeView(t test.Tester, flushToDisk bool) {
	type in struct {
		atTime     []getValuesAtTimeOp
		atInterval []getValuesAtIntervalOp
		alongRange []getValuesAlongRangeOp
	}

	type out struct {
		atTime     []metric.Values
		atInterval []metric.Values
		alongRange []metric.Values
	}
	m := clientmodel.Metric{clientmodel.MetricNameLabel: "request_count"}
	fingerprint := &clientmodel.Fingerprint{}
	fingerprint.LoadFromMetric(m)
	var (
		instant   = clientmodel.TimestampFromTime(time.Date(1984, 3, 30, 0, 0, 0, 0, time.Local))
		scenarios = []struct {
			data     clientmodel.Samples
			in       in
			out      out
			diskOnly bool
		}{
			// No sample, but query asks for one.
			{
				in: in{
					atTime: []getValuesAtTimeOp{
						{
							baseOp: baseOp{current: instant},
						},
					},
				},
				out: out{
					atTime: []metric.Values{{}},
				},
			},
			// Single sample, query asks for exact sample time.
			{
				data: clientmodel.Samples{
					{
						Metric:    m,
						Value:     0,
						Timestamp: instant,
					},
				},
				in: in{
					atTime: []getValuesAtTimeOp{
						{
							baseOp: baseOp{current: instant},
						},
					},
				},
				out: out{
					atTime: []metric.Values{
						{
							{
								Timestamp: instant,
								Value:     0,
							},
						},
					},
				},
			},
			// Single sample, query time before the sample.
			{
				data: clientmodel.Samples{
					{
						Metric:    m,
						Value:     0,
						Timestamp: instant.Add(time.Second),
					},
					{
						Metric:    m,
						Value:     1,
						Timestamp: instant.Add(time.Second * 2),
					},
				},
				in: in{
					atTime: []getValuesAtTimeOp{
						{
							baseOp: baseOp{current: instant},
						},
					},
				},
				out: out{
					atTime: []metric.Values{
						{
							{
								Timestamp: instant.Add(time.Second),
								Value:     0,
							},
						},
					},
				},
			},
			// Single sample, query time after the sample.
			{
				data: clientmodel.Samples{
					{
						Metric:    m,
						Value:     0,
						Timestamp: instant,
					},
				},
				in: in{
					atTime: []getValuesAtTimeOp{
						{
							baseOp: baseOp{current: instant.Add(time.Second)},
						},
					},
				},
				out: out{
					atTime: []metric.Values{
						{
							{
								Timestamp: instant,
								Value:     0,
							},
						},
					},
				},
			},
			// Two samples, query asks for first sample time.
			{
				data: clientmodel.Samples{
					{
						Metric:    m,
						Value:     0,
						Timestamp: instant,
					},
					{
						Metric:    m,
						Value:     1,
						Timestamp: instant.Add(time.Second),
					},
				},
				in: in{
					atTime: []getValuesAtTimeOp{
						{
							baseOp: baseOp{current: instant},
						},
					},
				},
				out: out{
					atTime: []metric.Values{
						{
							{
								Timestamp: instant,
								Value:     0,
							},
						},
					},
				},
			},
			// Three samples, query asks for second sample time.
			{
				data: clientmodel.Samples{
					{
						Metric:    m,
						Value:     0,
						Timestamp: instant,
					},
					{
						Metric:    m,
						Value:     1,
						Timestamp: instant.Add(time.Second),
					},
					{
						Metric:    m,
						Value:     2,
						Timestamp: instant.Add(time.Second * 2),
					},
				},
				in: in{
					atTime: []getValuesAtTimeOp{
						{
							baseOp: baseOp{current: instant.Add(time.Second)},
						},
					},
				},
				out: out{
					atTime: []metric.Values{
						{
							{
								Timestamp: instant.Add(time.Second),
								Value:     1,
							},
						},
					},
				},
			},
			// Three samples, query asks for time between first and second samples.
			{
				data: clientmodel.Samples{
					{
						Metric:    m,
						Value:     0,
						Timestamp: instant,
					},
					{
						Metric:    m,
						Value:     1,
						Timestamp: instant.Add(time.Second * 2),
					},
					{
						Metric:    m,
						Value:     2,
						Timestamp: instant.Add(time.Second * 4),
					},
				},
				in: in{
					atTime: []getValuesAtTimeOp{
						{
							baseOp: baseOp{current: instant.Add(time.Second)},
						},
					},
				},
				out: out{
					atTime: []metric.Values{
						{
							{
								Timestamp: instant,
								Value:     0,
							},
							{
								Timestamp: instant.Add(time.Second * 2),
								Value:     1,
							},
						},
					},
				},
			},
			// Three samples, query asks for time between second and third samples.
			{
				data: clientmodel.Samples{
					{
						Metric:    m,
						Value:     0,
						Timestamp: instant,
					},
					{
						Metric:    m,
						Value:     1,
						Timestamp: instant.Add(time.Second * 2),
					},
					{
						Metric:    m,
						Value:     2,
						Timestamp: instant.Add(time.Second * 4),
					},
				},
				in: in{
					atTime: []getValuesAtTimeOp{
						{
							baseOp: baseOp{current: instant.Add(time.Second * 3)},
						},
					},
				},
				out: out{
					atTime: []metric.Values{
						{
							{
								Timestamp: instant.Add(time.Second * 2),
								Value:     1,
							},
							{
								Timestamp: instant.Add(time.Second * 4),
								Value:     2,
							},
						},
					},
				},
			},
			// Two chunks of samples, query asks for values from second chunk.
			{
				data: buildSamples(
					instant,
					instant.Add(time.Duration(*leveldbChunkSize*4)*time.Second),
					2*time.Second,
					m,
				),
				in: in{
					atTime: []getValuesAtTimeOp{
						{
							baseOp: baseOp{current: instant.Add(time.Second*time.Duration(*leveldbChunkSize*2) + clientmodel.MinimumTick)},
						},
					},
				},
				out: out{
					atTime: []metric.Values{
						{
							{
								Timestamp: instant.Add(time.Second * time.Duration(*leveldbChunkSize*2)),
								Value:     200,
							},
							{
								Timestamp: instant.Add(time.Second * (time.Duration(*leveldbChunkSize*2) + 2)),
								Value:     201,
							},
						},
					},
				},
			},
			// Two chunks of samples, query asks for values between both chunks.
			{
				data: buildSamples(
					instant,
					instant.Add(time.Duration(*leveldbChunkSize*4)*time.Second),
					2*time.Second,
					m,
				),
				in: in{
					atTime: []getValuesAtTimeOp{
						{
							baseOp: baseOp{current: instant.Add(time.Second*time.Duration(*leveldbChunkSize*2) - clientmodel.MinimumTick)},
						},
					},
				},
				out: out{
					atTime: []metric.Values{
						{
							{
								Timestamp: instant.Add(time.Second * (time.Duration(*leveldbChunkSize*2) - 2)),
								Value:     199,
							},
							{
								Timestamp: instant.Add(time.Second * time.Duration(*leveldbChunkSize*2)),
								Value:     200,
							},
						},
					},
				},
			},
			// Two chunks of samples, getValuesAtIntervalOp spanning both.
			{
				data: buildSamples(
					instant,
					instant.Add(time.Duration(*leveldbChunkSize*6)*time.Second),
					2*time.Second,
					m,
				),
				in: in{
					atInterval: []getValuesAtIntervalOp{
						{
							getValuesAlongRangeOp: getValuesAlongRangeOp{
								baseOp:  baseOp{current: instant.Add(time.Second*time.Duration(*leveldbChunkSize*2-4) - clientmodel.MinimumTick)},
								through: instant.Add(time.Second*time.Duration(*leveldbChunkSize*2+4) + clientmodel.MinimumTick),
							},
							interval: time.Second * 6,
						},
					},
				},
				out: out{
					atInterval: []metric.Values{
						{
							{
								Timestamp: instant.Add(time.Second * time.Duration(*leveldbChunkSize*2-6)),
								Value:     197,
							},
							{
								Timestamp: instant.Add(time.Second * time.Duration(*leveldbChunkSize*2-4)),
								Value:     198,
							},
							{
								Timestamp: instant.Add(time.Second * time.Duration(*leveldbChunkSize*2)),
								Value:     200,
							},
							{
								Timestamp: instant.Add(time.Second * time.Duration(*leveldbChunkSize*2+2)),
								Value:     201,
							},
						},
					},
				},
			},
			// Three chunks of samples, getValuesAlongRangeOp spanning all of them.
			{
				data: buildSamples(
					instant,
					instant.Add(time.Duration(*leveldbChunkSize*6)*time.Second),
					2*time.Second,
					m,
				),
				in: in{
					alongRange: []getValuesAlongRangeOp{
						{
							baseOp:  baseOp{current: instant.Add(time.Second*time.Duration(*leveldbChunkSize*2-4) - clientmodel.MinimumTick)},
							through: instant.Add(time.Second*time.Duration(*leveldbChunkSize*4+2) + clientmodel.MinimumTick),
						},
					},
				},
				out: out{
					alongRange: []metric.Values{buildValues(
						clientmodel.SampleValue(198),
						instant.Add(time.Second*time.Duration(*leveldbChunkSize*2-4)),
						instant.Add(time.Second*time.Duration(*leveldbChunkSize*4+2)+clientmodel.MinimumTick),
						2*time.Second,
					)},
				},
			},
			// Three chunks of samples and a getValuesAlongIntervalOp with an
			// interval larger than the natural sample interval, spanning the gap
			// between the second and third chunks. To test two consecutive
			// ExtractSamples() calls for the same op, we need three on-disk chunks,
			// because the first two chunks are loaded from disk together and passed
			// as one unit into ExtractSamples(). In particular, we want to test that
			// the first sample of the last chunk is included in the result.
			//
			// This is a regression test for an interval operator advancing too far
			// past the end of the currently available chunk, effectively skipping
			// over a value which is only available in the next chunk passed to
			// ExtractSamples().
			//
			// Chunk and operator layout, assuming 200 samples per chunk:
			//
			//         Chunk 1      Chunk 2        Chunk 3
			// Values: 0......199   200......399   400......599
			// Times:  0......398   400......798   800......1198
			//              |                          |
			//              |_________ Operator _______|
			//             395 399 ......  795  799  803
			{
				data: buildSamples(
					instant,
					instant.Add(time.Duration(*leveldbChunkSize*6)*time.Second),
					2*time.Second,
					m,
				),
				in: in{
					atInterval: []getValuesAtIntervalOp{
						{
							getValuesAlongRangeOp: getValuesAlongRangeOp{
								baseOp:  baseOp{current: instant.Add(time.Second * time.Duration(*leveldbChunkSize*2-5))},
								through: instant.Add(time.Second * time.Duration(*leveldbChunkSize*4+3)),
							},
							interval: time.Second * 4,
						},
					},
				},
				out: out{
					atInterval: []metric.Values{
						// We need two overlapping buildValues() calls here since the last
						// value of the second chunk is extracted twice (value 399, time
						// offset 798s).
						append(
							// Values 197...399.
							// Times  394...798.
							buildValues(
								clientmodel.SampleValue(197),
								instant.Add(time.Second*time.Duration(*leveldbChunkSize*2-6)),
								instant.Add(time.Second*time.Duration(*leveldbChunkSize*4)),
								2*time.Second,
							),
							// Values 399...402.
							// Times  798...804.
							buildValues(
								clientmodel.SampleValue(399),
								instant.Add(time.Second*time.Duration(*leveldbChunkSize*4-2)),
								instant.Add(time.Second*time.Duration(*leveldbChunkSize*4+6)),
								2*time.Second,
							)...,
						),
					},
				},
				// This example only works with on-disk chunks due to the repeatedly
				// extracted value at the end of the second chunk.
				diskOnly: true,
			},
			// Single sample, getValuesAtIntervalOp starting after the sample.
			{
				data: clientmodel.Samples{
					{
						Metric:    m,
						Value:     0,
						Timestamp: instant,
					},
				},
				in: in{
					atInterval: []getValuesAtIntervalOp{
						{
							getValuesAlongRangeOp: getValuesAlongRangeOp{
								baseOp:  baseOp{current: instant.Add(time.Second)},
								through: instant.Add(time.Second * 2),
							},
							interval: time.Second,
						},
					},
				},
				out: out{
					atInterval: []metric.Values{
						{
							{
								Timestamp: instant,
								Value:     0,
							},
						},
					},
				},
			},
			// Single sample, getValuesAtIntervalOp starting before the sample.
			{
				data: clientmodel.Samples{
					{
						Metric:    m,
						Value:     0,
						Timestamp: instant.Add(time.Second),
					},
				},
				in: in{
					atInterval: []getValuesAtIntervalOp{
						{
							getValuesAlongRangeOp: getValuesAlongRangeOp{
								baseOp:  baseOp{current: instant},
								through: instant.Add(time.Second * 2),
							},
							interval: time.Second,
						},
					},
				},
				out: out{
					atInterval: []metric.Values{
						{
							{
								Timestamp: instant.Add(time.Second),
								Value:     0,
							},
							{
								Timestamp: instant.Add(time.Second),
								Value:     0,
							},
						},
					},
				},
			},
		}
	)

	for i, scenario := range scenarios {
		if scenario.diskOnly && !flushToDisk {
			continue
		}

		tiered, closer := NewTestTieredStorage(t)

		err := tiered.AppendSamples(scenario.data)
		if err != nil {
			t.Fatalf("%d. failed to add fixture data: %s", i, err)
		}

		if flushToDisk {
			tiered.Flush()
		}

		requestBuilder := tiered.NewViewRequestBuilder()

		for _, atTime := range scenario.in.atTime {
			requestBuilder.GetMetricAtTime(fingerprint, atTime.current)
		}

		for _, atInterval := range scenario.in.atInterval {
			requestBuilder.GetMetricAtInterval(fingerprint, atInterval.current, atInterval.through, atInterval.interval)
		}

		for _, alongRange := range scenario.in.alongRange {
			requestBuilder.GetMetricRange(fingerprint, alongRange.current, alongRange.through)
		}

		v, err := requestBuilder.Execute(time.Second*5, stats.NewTimerGroup())

		if err != nil {
			t.Fatalf("%d. failed due to %s", i, err)
		}

		// To get all values in the View, ask for the 'forever' interval.
		interval := metric.Interval{OldestInclusive: math.MinInt64, NewestInclusive: math.MaxInt64}

		for j, atTime := range scenario.out.atTime {
			actual := v.GetRangeValues(fingerprint, interval)

			if len(actual) != len(atTime) {
				t.Fatalf("%d.%d. expected %d output, got %d", i, j, len(atTime), len(actual))
			}

			for k, value := range atTime {
				if value.Value != actual[k].Value {
					t.Errorf("%d.%d.%d expected %v value, got %v", i, j, k, value.Value, actual[k].Value)
				}
				if !value.Timestamp.Equal(actual[k].Timestamp) {
					t.Errorf("%d.%d.%d expected %s (offset %ss) timestamp, got %s (offset %ss)", i, j, k, value.Timestamp, value.Timestamp.Sub(instant), actual[k].Timestamp, actual[k].Timestamp.Sub(instant))
				}
			}
		}

		for j, atInterval := range scenario.out.atInterval {
			actual := v.GetRangeValues(fingerprint, interval)

			if len(actual) != len(atInterval) {
				t.Fatalf("%d.%d. expected %d output, got %d", i, j, len(atInterval), len(actual))
			}

			for k, value := range atInterval {
				if value.Value != actual[k].Value {
					t.Errorf("%d.%d.%d expected %v value, got %v", i, j, k, value.Value, actual[k].Value)
				}
				if !value.Timestamp.Equal(actual[k].Timestamp) {
					t.Errorf("%d.%d.%d expected %s (offset %ds) timestamp, got %s (offset %ds, value %s)", i, j, k, value.Timestamp, int(value.Timestamp.Sub(instant)/time.Second), actual[k].Timestamp, int(actual[k].Timestamp.Sub(instant)/time.Second), actual[k].Value)
				}
			}
		}

		for j, alongRange := range scenario.out.alongRange {
			actual := v.GetRangeValues(fingerprint, interval)

			if len(actual) != len(alongRange) {
				t.Fatalf("%d.%d. expected %d output, got %d", i, j, len(alongRange), len(actual))
			}

			for k, value := range alongRange {
				if value.Value != actual[k].Value {
					t.Fatalf("%d.%d.%d expected %v value, got %v", i, j, k, value.Value, actual[k].Value)
				}
				if !value.Timestamp.Equal(actual[k].Timestamp) {
					t.Fatalf("%d.%d.%d expected %s (offset %ss) timestamp, got %s (offset %ss)", i, j, k, value.Timestamp, value.Timestamp.Sub(instant), actual[k].Timestamp, actual[k].Timestamp.Sub(instant))
				}
			}
		}

		closer.Close()
	}
}
Example 4
func (rule *AlertingRule) EvalRaw(timestamp clientmodel.Timestamp, storage metric.PreloadingPersistence) (ast.Vector, error) {
	return ast.EvalVectorInstant(rule.vector, timestamp, storage, stats.NewTimerGroup())
}
Example 5
func (serv MetricsService) QueryRange(w http.ResponseWriter, r *http.Request) {
	setAccessControlHeaders(w)
	w.Header().Set("Content-Type", "application/json")

	params := http_utils.GetQueryParams(r)
	expr := params.Get("expr")
	end, _ := strconv.ParseInt(params.Get("end"), 0, 64)
	duration, _ := strconv.ParseInt(params.Get("range"), 0, 64)
	step, _ := strconv.ParseInt(params.Get("step"), 0, 64)

	exprNode, err := rules.LoadExprFromString(expr)
	if err != nil {
		fmt.Fprint(w, ast.ErrorToJSON(err))
		return
	}
	if exprNode.Type() != ast.VECTOR {
		fmt.Fprint(w, ast.ErrorToJSON(errors.New("Expression does not evaluate to vector type")))
		return
	}

	if end == 0 {
		end = clientmodel.Now().Unix()
	}

	if step < 1 {
		step = 1
	}

	if end-duration < 0 {
		duration = end
	}

	// Align the query end to a step "tick" boundary.
	end -= end % step

	queryStats := stats.NewTimerGroup()

	evalTimer := queryStats.GetTimer(stats.TotalEvalTime).Start()
	matrix, err := ast.EvalVectorRange(
		exprNode.(ast.VectorNode),
		clientmodel.TimestampFromUnix(end-duration),
		clientmodel.TimestampFromUnix(end),
		time.Duration(step)*time.Second,
		serv.Storage,
		queryStats)
	if err != nil {
		fmt.Fprint(w, ast.ErrorToJSON(err))
		return
	}
	evalTimer.Stop()

	sortTimer := queryStats.GetTimer(stats.ResultSortTime).Start()
	sort.Sort(matrix)
	sortTimer.Stop()

	jsonTimer := queryStats.GetTimer(stats.JsonEncodeTime).Start()
	result := ast.TypedValueToJSON(matrix, "matrix")
	jsonTimer.Stop()

	glog.Infof("Range query: %s\nQuery stats:\n%s\n", expr, queryStats)
	fmt.Fprint(w, result)
}
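The end -= end % step line aligns the query end to a step "tick" boundary so evaluation timestamps land on whole steps. A tiny illustration of that arithmetic, with a hypothetical helper name and arbitrary example values:

// alignToStep returns the largest multiple of step that is <= end.
// For example, alignToStep(1005, 60) == 960 and alignToStep(960, 60) == 960.
func alignToStep(end, step int64) int64 {
	return end - end%step
}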
Example 6
// QueryRange handles the /api/query_range endpoint.
func (serv MetricsService) QueryRange(w http.ResponseWriter, r *http.Request) {
	setAccessControlHeaders(w)
	w.Header().Set("Content-Type", "application/json")

	params := httputils.GetQueryParams(r)
	expr := params.Get("expr")

	// Input times and durations are in seconds and get converted to nanoseconds.
	endFloat, _ := strconv.ParseFloat(params.Get("end"), 64)
	durationFloat, _ := strconv.ParseFloat(params.Get("range"), 64)
	stepFloat, _ := strconv.ParseFloat(params.Get("step"), 64)
	nanosPerSecond := int64(time.Second / time.Nanosecond)
	end := int64(endFloat) * nanosPerSecond
	duration := int64(durationFloat) * nanosPerSecond
	step := int64(stepFloat) * nanosPerSecond

	exprNode, err := rules.LoadExprFromString(expr)
	if err != nil {
		fmt.Fprint(w, ast.ErrorToJSON(err))
		return
	}
	if exprNode.Type() != ast.VectorType {
		fmt.Fprint(w, ast.ErrorToJSON(errors.New("expression does not evaluate to vector type")))
		return
	}

	if end == 0 {
		end = clientmodel.Now().UnixNano()
	}

	if step <= 0 {
		step = nanosPerSecond
	}

	if end-duration < 0 {
		duration = end
	}

	// For safety, limit the number of returned points per timeseries.
	// This is sufficient for 60s resolution for a week or 1h resolution for a year.
	if duration/step > 11000 {
		fmt.Fprint(w, ast.ErrorToJSON(errors.New("exceeded maximum resolution of 11,000 points per timeseries. Try decreasing the query resolution (?step=XX)")))
		return
	}

	// Align the query end to a step "tick" boundary.
	end -= end % step

	queryStats := stats.NewTimerGroup()

	evalTimer := queryStats.GetTimer(stats.TotalEvalTime).Start()
	matrix, err := ast.EvalVectorRange(
		exprNode.(ast.VectorNode),
		clientmodel.TimestampFromUnixNano(end-duration),
		clientmodel.TimestampFromUnixNano(end),
		time.Duration(step),
		serv.Storage,
		queryStats)
	if err != nil {
		fmt.Fprint(w, ast.ErrorToJSON(err))
		return
	}
	evalTimer.Stop()

	sortTimer := queryStats.GetTimer(stats.ResultSortTime).Start()
	sort.Sort(matrix)
	sortTimer.Stop()

	jsonTimer := queryStats.GetTimer(stats.JSONEncodeTime).Start()
	result := ast.TypedValueToJSON(matrix, "matrix")
	jsonTimer.Stop()

	glog.V(1).Infof("Range query: %s\nQuery stats:\n%s\n", expr, queryStats)
	fmt.Fprint(w, result)
}
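Both QueryRange variants discard the strconv errors when parsing end, range and step. A hedged sketch of stricter parsing, reusing ast.ErrorToJSON the way the handlers already do for expression errors; parseSeconds is a hypothetical helper, and it assumes httputils.GetQueryParams returns url.Values, which the params.Get calls above suggest:

// Sketch only: parse a query parameter given in seconds into nanoseconds,
// reporting malformed values to the client instead of silently treating
// them as zero.
func parseSeconds(w http.ResponseWriter, params url.Values, name string) (int64, bool) {
	raw := params.Get(name)
	if raw == "" {
		return 0, true // absent parameters keep their zero default
	}
	f, err := strconv.ParseFloat(raw, 64)
	if err != nil {
		fmt.Fprint(w, ast.ErrorToJSON(fmt.Errorf("invalid %q parameter: %v", name, err)))
		return 0, false
	}
	return int64(f) * int64(time.Second/time.Nanosecond), true
}

A call site would then read end, ok := parseSeconds(w, params, "end") and return early when ok is false.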
Example 7
func TestExpressions(t *testing.T) {
	// Labels in expected output need to be alphabetically sorted.
	var expressionTests = []struct {
		expr           string
		output         []string
		shouldFail     bool
		checkOrder     bool
		fullRanges     int
		intervalRanges int
	}{
		{
			expr:           `SUM(http_requests)`,
			output:         []string{`http_requests => 3600 @[%v]`},
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			expr: `SUM(http_requests{instance="0"}) BY(job)`,
			output: []string{
				`http_requests{job="api-server"} => 400 @[%v]`,
				`http_requests{job="app-server"} => 1200 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 4,
		}, {
			expr: `SUM(http_requests{instance="0"}) BY(job) KEEPING_EXTRA`,
			output: []string{
				`http_requests{instance="0", job="api-server"} => 400 @[%v]`,
				`http_requests{instance="0", job="app-server"} => 1200 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 4,
		}, {
			expr: `SUM(http_requests) BY (job)`,
			output: []string{
				`http_requests{job="api-server"} => 1000 @[%v]`,
				`http_requests{job="app-server"} => 2600 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			// Non-existent labels mentioned in BY-clauses shouldn't propagate to output.
			expr: `SUM(http_requests) BY (job, nonexistent)`,
			output: []string{
				`http_requests{job="api-server"} => 1000 @[%v]`,
				`http_requests{job="app-server"} => 2600 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			expr: `
				// Test comment.
				SUM(http_requests) BY /* comments shouldn't
				have any effect */ (job) // another comment`,
			output: []string{
				`http_requests{job="api-server"} => 1000 @[%v]`,
				`http_requests{job="app-server"} => 2600 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			expr: `COUNT(http_requests) BY (job)`,
			output: []string{
				`http_requests{job="api-server"} => 4 @[%v]`,
				`http_requests{job="app-server"} => 4 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			expr: `SUM(http_requests) BY (job, group)`,
			output: []string{
				`http_requests{group="canary", job="api-server"} => 700 @[%v]`,
				`http_requests{group="canary", job="app-server"} => 1500 @[%v]`,
				`http_requests{group="production", job="api-server"} => 300 @[%v]`,
				`http_requests{group="production", job="app-server"} => 1100 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			expr: `AVG(http_requests) BY (job)`,
			output: []string{
				`http_requests{job="api-server"} => 250 @[%v]`,
				`http_requests{job="app-server"} => 650 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			expr: `MIN(http_requests) BY (job)`,
			output: []string{
				`http_requests{job="api-server"} => 100 @[%v]`,
				`http_requests{job="app-server"} => 500 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			expr: `MAX(http_requests) BY (job)`,
			output: []string{
				`http_requests{job="api-server"} => 400 @[%v]`,
				`http_requests{job="app-server"} => 800 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			expr: `SUM(http_requests) BY (job) - COUNT(http_requests) BY (job)`,
			output: []string{
				`http_requests{job="api-server"} => 996 @[%v]`,
				`http_requests{job="app-server"} => 2596 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			expr: `2 - SUM(http_requests) BY (job)`,
			output: []string{
				`http_requests{job="api-server"} => -998 @[%v]`,
				`http_requests{job="app-server"} => -2598 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			expr: `1000 / SUM(http_requests) BY (job)`,
			output: []string{
				`http_requests{job="api-server"} => 1 @[%v]`,
				`http_requests{job="app-server"} => 0.38461538461538464 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			expr: `SUM(http_requests) BY (job) - 2`,
			output: []string{
				`http_requests{job="api-server"} => 998 @[%v]`,
				`http_requests{job="app-server"} => 2598 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			expr: `SUM(http_requests) BY (job) % 3`,
			output: []string{
				`http_requests{job="api-server"} => 1 @[%v]`,
				`http_requests{job="app-server"} => 2 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			expr: `SUM(http_requests) BY (job) / 0`,
			output: []string{
				`http_requests{job="api-server"} => +Inf @[%v]`,
				`http_requests{job="app-server"} => +Inf @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			expr: `SUM(http_requests) BY (job) > 1000`,
			output: []string{
				`http_requests{job="app-server"} => 2600 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			expr: `1000 < SUM(http_requests) BY (job)`,
			output: []string{
				`http_requests{job="app-server"} => 1000 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			expr: `SUM(http_requests) BY (job) <= 1000`,
			output: []string{
				`http_requests{job="api-server"} => 1000 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			expr: `SUM(http_requests) BY (job) != 1000`,
			output: []string{
				`http_requests{job="app-server"} => 2600 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			expr: `SUM(http_requests) BY (job) == 1000`,
			output: []string{
				`http_requests{job="api-server"} => 1000 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			expr: `SUM(http_requests) BY (job) + SUM(http_requests) BY (job)`,
			output: []string{
				`http_requests{job="api-server"} => 2000 @[%v]`,
				`http_requests{job="app-server"} => 5200 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			expr: `http_requests{job="api-server", group="canary"}`,
			output: []string{
				`http_requests{group="canary", instance="0", job="api-server"} => 300 @[%v]`,
				`http_requests{group="canary", instance="1", job="api-server"} => 400 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 2,
		}, {
			expr: `http_requests{job="api-server", group="canary"} + delta(http_requests{job="api-server"}[5m], 1)`,
			output: []string{
				`http_requests{group="canary", instance="0", job="api-server"} => 330 @[%v]`,
				`http_requests{group="canary", instance="1", job="api-server"} => 440 @[%v]`,
			},
			fullRanges:     4,
			intervalRanges: 0,
		}, {
			expr: `delta(http_requests[25m], 1)`,
			output: []string{
				`http_requests{group="canary", instance="0", job="api-server"} => 150 @[%v]`,
				`http_requests{group="canary", instance="0", job="app-server"} => 350 @[%v]`,
				`http_requests{group="canary", instance="1", job="api-server"} => 200 @[%v]`,
				`http_requests{group="canary", instance="1", job="app-server"} => 400 @[%v]`,
				`http_requests{group="production", instance="0", job="api-server"} => 50 @[%v]`,
				`http_requests{group="production", instance="0", job="app-server"} => 250 @[%v]`,
				`http_requests{group="production", instance="1", job="api-server"} => 100 @[%v]`,
				`http_requests{group="production", instance="1", job="app-server"} => 300 @[%v]`,
			},
			fullRanges:     8,
			intervalRanges: 0,
		}, {
			expr: `sort(http_requests)`,
			output: []string{
				`http_requests{group="production", instance="0", job="api-server"} => 100 @[%v]`,
				`http_requests{group="production", instance="1", job="api-server"} => 200 @[%v]`,
				`http_requests{group="canary", instance="0", job="api-server"} => 300 @[%v]`,
				`http_requests{group="canary", instance="1", job="api-server"} => 400 @[%v]`,
				`http_requests{group="production", instance="0", job="app-server"} => 500 @[%v]`,
				`http_requests{group="production", instance="1", job="app-server"} => 600 @[%v]`,
				`http_requests{group="canary", instance="0", job="app-server"} => 700 @[%v]`,
				`http_requests{group="canary", instance="1", job="app-server"} => 800 @[%v]`,
			},
			checkOrder:     true,
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			expr: `sort_desc(http_requests)`,
			output: []string{
				`http_requests{group="canary", instance="1", job="app-server"} => 800 @[%v]`,
				`http_requests{group="canary", instance="0", job="app-server"} => 700 @[%v]`,
				`http_requests{group="production", instance="1", job="app-server"} => 600 @[%v]`,
				`http_requests{group="production", instance="0", job="app-server"} => 500 @[%v]`,
				`http_requests{group="canary", instance="1", job="api-server"} => 400 @[%v]`,
				`http_requests{group="canary", instance="0", job="api-server"} => 300 @[%v]`,
				`http_requests{group="production", instance="1", job="api-server"} => 200 @[%v]`,
				`http_requests{group="production", instance="0", job="api-server"} => 100 @[%v]`,
			},
			checkOrder:     true,
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			// Single-letter label names and values.
			expr: `x{y="testvalue"}`,
			output: []string{
				`x{y="testvalue"} => 100 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 1,
		}, {
			// Lower-cased aggregation operators should work too.
			expr: `sum(http_requests) by (job) + min(http_requests) by (job) + max(http_requests) by (job) + avg(http_requests) by (job)`,
			output: []string{
				`http_requests{job="app-server"} => 4550 @[%v]`,
				`http_requests{job="api-server"} => 1750 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			// Deltas should be adjusted for target interval vs. samples under target interval.
			expr:           `delta(http_requests{group="canary", instance="1", job="app-server"}[18m], 1)`,
			output:         []string{`http_requests{group="canary", instance="1", job="app-server"} => 288 @[%v]`},
			fullRanges:     1,
			intervalRanges: 0,
		}, {
			// Rates should transform per-interval deltas to per-second rates.
			expr:           `rate(http_requests{group="canary", instance="1", job="app-server"}[10m])`,
			output:         []string{`http_requests{group="canary", instance="1", job="app-server"} => 0.26666666666666666 @[%v]`},
			fullRanges:     1,
			intervalRanges: 0,
		}, {
			// Counter resets in middle of range are ignored by delta() if counter == 1.
			expr:           `delta(testcounter_reset_middle[50m], 1)`,
			output:         []string{`testcounter_reset_middle => 90 @[%v]`},
			fullRanges:     1,
			intervalRanges: 0,
		}, {
			// Counter resets in middle of range are not ignored by delta() if counter == 0.
			expr:           `delta(testcounter_reset_middle[50m], 0)`,
			output:         []string{`testcounter_reset_middle => 50 @[%v]`},
			fullRanges:     1,
			intervalRanges: 0,
		}, {
			// Counter resets at end of range are ignored by delta() if counter == 1.
			expr:           `delta(testcounter_reset_end[5m], 1)`,
			output:         []string{`testcounter_reset_end => 0 @[%v]`},
			fullRanges:     1,
			intervalRanges: 0,
		}, {
			// Counter resets at end of range are not ignored by delta() if counter == 0.
			expr:           `delta(testcounter_reset_end[5m], 0)`,
			output:         []string{`testcounter_reset_end => -90 @[%v]`},
			fullRanges:     1,
			intervalRanges: 0,
		}, {
			// count_scalar for a non-empty vector should return scalar element count.
			expr:           `count_scalar(http_requests)`,
			output:         []string{`scalar: 8 @[%v]`},
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			// count_scalar for an empty vector should return scalar 0.
			expr:           `count_scalar(nonexistent)`,
			output:         []string{`scalar: 0 @[%v]`},
			fullRanges:     0,
			intervalRanges: 0,
		}, {
			// Empty expressions shouldn"t parse.
			expr:       ``,
			shouldFail: true,
		}, {
			// Interval durations can"t be in quotes.
			expr:       `http_requests["1m"]`,
			shouldFail: true,
		}, {
			// Binop arguments need to be scalar or vector.
			expr:       `http_requests - http_requests[1m]`,
			shouldFail: true,
		}, {
			expr: `http_requests{group!="canary"}`,
			output: []string{
				`http_requests{group="production", instance="1", job="app-server"} => 600 @[%v]`,
				`http_requests{group="production", instance="0", job="app-server"} => 500 @[%v]`,
				`http_requests{group="production", instance="1", job="api-server"} => 200 @[%v]`,
				`http_requests{group="production", instance="0", job="api-server"} => 100 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 4,
		}, {
			expr: `http_requests{job=~"server",group!="canary"}`,
			output: []string{
				`http_requests{group="production", instance="1", job="app-server"} => 600 @[%v]`,
				`http_requests{group="production", instance="0", job="app-server"} => 500 @[%v]`,
				`http_requests{group="production", instance="1", job="api-server"} => 200 @[%v]`,
				`http_requests{group="production", instance="0", job="api-server"} => 100 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 4,
		}, {
			expr: `http_requests{job!~"api",group!="canary"}`,
			output: []string{
				`http_requests{group="production", instance="1", job="app-server"} => 600 @[%v]`,
				`http_requests{group="production", instance="0", job="app-server"} => 500 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 2,
		}, {
			expr:           `count_scalar(http_requests{job=~"^server$"})`,
			output:         []string{`scalar: 0 @[%v]`},
			fullRanges:     0,
			intervalRanges: 0,
		}, {
			expr: `http_requests{group="production",job=~"^api"}`,
			output: []string{
				`http_requests{group="production", instance="1", job="api-server"} => 200 @[%v]`,
				`http_requests{group="production", instance="0", job="api-server"} => 100 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 2,
		},
	}

	tieredStorage, closer := newTestStorage(t)
	defer closer.Close()
	tieredStorage.Flush()

	for i, exprTest := range expressionTests {
		expectedLines := annotateWithTime(exprTest.output, testEvalTime)

		testExpr, err := LoadExprFromString(exprTest.expr)

		if err != nil {
			if exprTest.shouldFail {
				continue
			}
			t.Errorf("%d. Error during parsing: %v", i, err)
			t.Errorf("%d. Expression: %v", i, exprTest.expr)
		} else {
			if exprTest.shouldFail {
				t.Errorf("%d. Test should fail, but didn't", i)
			}
			failed := false
			resultStr := ast.EvalToString(testExpr, testEvalTime, ast.TEXT, tieredStorage, stats.NewTimerGroup())
			resultLines := strings.Split(resultStr, "\n")

			if len(exprTest.output) != len(resultLines) {
				t.Errorf("%d. Number of samples in expected and actual output don't match", i)
				failed = true
			}

			if exprTest.checkOrder {
				for j, expectedSample := range expectedLines {
					if resultLines[j] != expectedSample {
						t.Errorf("%d.%d. Expected sample '%v', got '%v'", i, j, resultLines[j], expectedSample)
						failed = true
					}
				}
			} else {
				for j, expectedSample := range expectedLines {
					found := false
					for _, actualSample := range resultLines {
						if actualSample == expectedSample {
							found = true
						}
					}
					if !found {
						t.Errorf("%d.%d. Couldn't find expected sample in output: '%v'", i, j, expectedSample)
						failed = true
					}
				}
			}

			analyzer := ast.NewQueryAnalyzer(tieredStorage)
			analyzer.AnalyzeQueries(testExpr)
			if exprTest.fullRanges != len(analyzer.FullRanges) {
				t.Errorf("%d. Count of full ranges didn't match: %v vs %v", i, exprTest.fullRanges, len(analyzer.FullRanges))
				failed = true
			}
			if exprTest.intervalRanges != len(analyzer.IntervalRanges) {
				t.Errorf("%d. Count of interval ranges didn't match: %v vs %v", i, exprTest.intervalRanges, len(analyzer.IntervalRanges))
				failed = true
			}

			if failed {
				t.Errorf("%d. Expression: %v\n%v", i, exprTest.expr, vectorComparisonString(expectedLines, resultLines))
			}
		}
	}
}
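The test relies on an annotateWithTime helper that is not part of this excerpt; the expected output strings carry @[%v] placeholders for the evaluation time. A hypothetical sketch of what such a helper could look like, consistent with how it is called above:

// Hypothetical sketch: substitute the evaluation timestamp into the @[%v]
// placeholder of every expected output line.
func annotateWithTime(lines []string, timestamp clientmodel.Timestamp) []string {
	annotated := make([]string, 0, len(lines))
	for _, line := range lines {
		annotated = append(annotated, fmt.Sprintf(line, timestamp))
	}
	return annotated
}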
Example 8
// EvalRaw returns the raw value of the rule expression, without creating alerts.
func (rule *AlertingRule) EvalRaw(timestamp clientmodel.Timestamp, storage local.Storage) (ast.Vector, error) {
	return ast.EvalVectorInstant(rule.Vector, timestamp, storage, stats.NewTimerGroup())
}
Example 9
func TestRangedEvaluationRegressions(t *testing.T) {
	scenarios := []struct {
		in   ast.Matrix
		out  ast.Matrix
		expr string
	}{
		{
			// Testing COWMetric behavior in drop_common_labels.
			in: ast.Matrix{
				{
					Metric: clientmodel.COWMetric{
						Metric: clientmodel.Metric{
							clientmodel.MetricNameLabel: "testmetric",
							"testlabel":                 "1",
						},
					},
					Values: metric.Values{
						{
							Timestamp: testStartTime,
							Value:     1,
						},
						{
							Timestamp: testStartTime.Add(time.Hour),
							Value:     1,
						},
					},
				},
				{
					Metric: clientmodel.COWMetric{
						Metric: clientmodel.Metric{
							clientmodel.MetricNameLabel: "testmetric",
							"testlabel":                 "2",
						},
					},
					Values: metric.Values{
						{
							Timestamp: testStartTime.Add(time.Hour),
							Value:     2,
						},
					},
				},
			},
			out: ast.Matrix{
				{
					Metric: clientmodel.COWMetric{
						Metric: clientmodel.Metric{
							clientmodel.MetricNameLabel: "testmetric",
						},
					},
					Values: metric.Values{
						{
							Timestamp: testStartTime,
							Value:     1,
						},
					},
				},
				{
					Metric: clientmodel.COWMetric{
						Metric: clientmodel.Metric{
							clientmodel.MetricNameLabel: "testmetric",
							"testlabel":                 "1",
						},
					},
					Values: metric.Values{
						{
							Timestamp: testStartTime.Add(time.Hour),
							Value:     1,
						},
					},
				},
				{
					Metric: clientmodel.COWMetric{
						Metric: clientmodel.Metric{
							clientmodel.MetricNameLabel: "testmetric",
							"testlabel":                 "2",
						},
					},
					Values: metric.Values{
						{
							Timestamp: testStartTime.Add(time.Hour),
							Value:     2,
						},
					},
				},
			},
			expr: "drop_common_labels(testmetric)",
		},
		{
			// Testing COWMetric behavior in vector aggregation.
			in: ast.Matrix{
				{
					Metric: clientmodel.COWMetric{
						Metric: clientmodel.Metric{
							clientmodel.MetricNameLabel: "testmetric",
							"testlabel":                 "1",
						},
					},
					Values: metric.Values{
						{
							Timestamp: testStartTime,
							Value:     1,
						},
						{
							Timestamp: testStartTime.Add(time.Hour),
							Value:     1,
						},
					},
				},
				{
					Metric: clientmodel.COWMetric{
						Metric: clientmodel.Metric{
							clientmodel.MetricNameLabel: "testmetric",
							"testlabel":                 "2",
						},
					},
					Values: metric.Values{
						{
							Timestamp: testStartTime,
							Value:     2,
						},
					},
				},
			},
			out: ast.Matrix{
				{
					Metric: clientmodel.COWMetric{
						Metric: clientmodel.Metric{},
					},
					Values: metric.Values{
						{
							Timestamp: testStartTime,
							Value:     3,
						},
					},
				},
				{
					Metric: clientmodel.COWMetric{
						Metric: clientmodel.Metric{
							"testlabel": "1",
						},
					},
					Values: metric.Values{
						{
							Timestamp: testStartTime.Add(time.Hour),
							Value:     1,
						},
					},
				},
			},
			expr: "sum(testmetric) keeping_extra",
		},
	}

	for i, s := range scenarios {
		storage, closer := local.NewTestStorage(t)
		storeMatrix(storage, s.in)

		expr, err := LoadExprFromString(s.expr)
		if err != nil {
			t.Fatalf("%d. Error parsing expression: %v", i, err)
		}

		got, err := ast.EvalVectorRange(
			expr.(ast.VectorNode),
			testStartTime,
			testStartTime.Add(time.Hour),
			time.Hour,
			storage,
			stats.NewTimerGroup(),
		)
		if err != nil {
			t.Fatalf("%d. Error evaluating expression: %v", i, err)
		}

		if got.String() != s.out.String() {
			t.Fatalf("%d. Expression: %s\n\ngot:\n=====\n%v\n====\n\nwant:\n=====\n%v\n=====\n", i, s.expr, got.String(), s.out.String())
		}

		closer.Close()
	}
}
Example 10
func TestExpressions(t *testing.T) {
	// Labels in expected output need to be alphabetically sorted.
	expressionTests := []struct {
		expr           string
		output         []string
		shouldFail     bool
		checkOrder     bool
		fullRanges     int
		intervalRanges int
	}{
		{
			expr:           `SUM(http_requests)`,
			output:         []string{`{} => 3600 @[%v]`},
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			expr: `SUM(http_requests{instance="0"}) BY(job)`,
			output: []string{
				`{job="api-server"} => 400 @[%v]`,
				`{job="app-server"} => 1200 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 4,
		}, {
			expr: `SUM(http_requests{instance="0"}) BY(job) KEEPING_EXTRA`,
			output: []string{
				`{instance="0", job="api-server"} => 400 @[%v]`,
				`{instance="0", job="app-server"} => 1200 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 4,
		}, {
			expr: `SUM(http_requests) BY (job)`,
			output: []string{
				`{job="api-server"} => 1000 @[%v]`,
				`{job="app-server"} => 2600 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			// Non-existent labels mentioned in BY-clauses shouldn't propagate to output.
			expr: `SUM(http_requests) BY (job, nonexistent)`,
			output: []string{
				`{job="api-server"} => 1000 @[%v]`,
				`{job="app-server"} => 2600 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			expr: `
				// Test comment.
				SUM(http_requests) BY /* comments shouldn't
				have any effect */ (job) // another comment`,
			output: []string{
				`{job="api-server"} => 1000 @[%v]`,
				`{job="app-server"} => 2600 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			expr: `COUNT(http_requests) BY (job)`,
			output: []string{
				`{job="api-server"} => 4 @[%v]`,
				`{job="app-server"} => 4 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			expr: `SUM(http_requests) BY (job, group)`,
			output: []string{
				`{group="canary", job="api-server"} => 700 @[%v]`,
				`{group="canary", job="app-server"} => 1500 @[%v]`,
				`{group="production", job="api-server"} => 300 @[%v]`,
				`{group="production", job="app-server"} => 1100 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			expr: `AVG(http_requests) BY (job)`,
			output: []string{
				`{job="api-server"} => 250 @[%v]`,
				`{job="app-server"} => 650 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			expr: `MIN(http_requests) BY (job)`,
			output: []string{
				`{job="api-server"} => 100 @[%v]`,
				`{job="app-server"} => 500 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			expr: `MAX(http_requests) BY (job)`,
			output: []string{
				`{job="api-server"} => 400 @[%v]`,
				`{job="app-server"} => 800 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			expr: `SUM(http_requests) BY (job) - COUNT(http_requests) BY (job)`,
			output: []string{
				`{job="api-server"} => 996 @[%v]`,
				`{job="app-server"} => 2596 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			expr: `2 - SUM(http_requests) BY (job)`,
			output: []string{
				`{job="api-server"} => -998 @[%v]`,
				`{job="app-server"} => -2598 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			expr: `1000 / SUM(http_requests) BY (job)`,
			output: []string{
				`{job="api-server"} => 1 @[%v]`,
				`{job="app-server"} => 0.38461538461538464 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			expr: `SUM(http_requests) BY (job) - 2`,
			output: []string{
				`{job="api-server"} => 998 @[%v]`,
				`{job="app-server"} => 2598 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			expr: `SUM(http_requests) BY (job) % 3`,
			output: []string{
				`{job="api-server"} => 1 @[%v]`,
				`{job="app-server"} => 2 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			expr: `SUM(http_requests) BY (job) / 0`,
			output: []string{
				`{job="api-server"} => +Inf @[%v]`,
				`{job="app-server"} => +Inf @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			expr: `SUM(http_requests) BY (job) > 1000`,
			output: []string{
				`{job="app-server"} => 2600 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			expr: `1000 < SUM(http_requests) BY (job)`,
			output: []string{
				`{job="app-server"} => 1000 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			expr: `SUM(http_requests) BY (job) <= 1000`,
			output: []string{
				`{job="api-server"} => 1000 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			expr: `SUM(http_requests) BY (job) != 1000`,
			output: []string{
				`{job="app-server"} => 2600 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			expr: `SUM(http_requests) BY (job) == 1000`,
			output: []string{
				`{job="api-server"} => 1000 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			expr: `SUM(http_requests) BY (job) + SUM(http_requests) BY (job)`,
			output: []string{
				`{job="api-server"} => 2000 @[%v]`,
				`{job="app-server"} => 5200 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			expr: `http_requests{job="api-server", group="canary"}`,
			output: []string{
				`http_requests{group="canary", instance="0", job="api-server"} => 300 @[%v]`,
				`http_requests{group="canary", instance="1", job="api-server"} => 400 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 2,
		}, {
			expr: `http_requests{job="api-server", group="canary"} + rate(http_requests{job="api-server"}[5m]) * 5 * 60`,
			output: []string{
				`{group="canary", instance="0", job="api-server"} => 330 @[%v]`,
				`{group="canary", instance="1", job="api-server"} => 440 @[%v]`,
			},
			fullRanges:     4,
			intervalRanges: 0,
		}, {
			expr: `rate(http_requests[25m]) * 25 * 60`,
			output: []string{
				`{group="canary", instance="0", job="api-server"} => 150 @[%v]`,
				`{group="canary", instance="0", job="app-server"} => 350 @[%v]`,
				`{group="canary", instance="1", job="api-server"} => 200 @[%v]`,
				`{group="canary", instance="1", job="app-server"} => 400 @[%v]`,
				`{group="production", instance="0", job="api-server"} => 50 @[%v]`,
				`{group="production", instance="0", job="app-server"} => 249.99999999999997 @[%v]`,
				`{group="production", instance="1", job="api-server"} => 100 @[%v]`,
				`{group="production", instance="1", job="app-server"} => 300 @[%v]`,
			},
			fullRanges:     8,
			intervalRanges: 0,
		}, {
			expr: `delta(http_requests[25m], 1)`,
			output: []string{
				`{group="canary", instance="0", job="api-server"} => 150 @[%v]`,
				`{group="canary", instance="0", job="app-server"} => 350 @[%v]`,
				`{group="canary", instance="1", job="api-server"} => 200 @[%v]`,
				`{group="canary", instance="1", job="app-server"} => 400 @[%v]`,
				`{group="production", instance="0", job="api-server"} => 50 @[%v]`,
				`{group="production", instance="0", job="app-server"} => 250 @[%v]`,
				`{group="production", instance="1", job="api-server"} => 100 @[%v]`,
				`{group="production", instance="1", job="app-server"} => 300 @[%v]`,
			},
			fullRanges:     8,
			intervalRanges: 0,
		}, {
			expr: `sort(http_requests)`,
			output: []string{
				`http_requests{group="production", instance="0", job="api-server"} => 100 @[%v]`,
				`http_requests{group="production", instance="1", job="api-server"} => 200 @[%v]`,
				`http_requests{group="canary", instance="0", job="api-server"} => 300 @[%v]`,
				`http_requests{group="canary", instance="1", job="api-server"} => 400 @[%v]`,
				`http_requests{group="production", instance="0", job="app-server"} => 500 @[%v]`,
				`http_requests{group="production", instance="1", job="app-server"} => 600 @[%v]`,
				`http_requests{group="canary", instance="0", job="app-server"} => 700 @[%v]`,
				`http_requests{group="canary", instance="1", job="app-server"} => 800 @[%v]`,
			},
			checkOrder:     true,
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			expr: `sort_desc(http_requests)`,
			output: []string{
				`http_requests{group="canary", instance="1", job="app-server"} => 800 @[%v]`,
				`http_requests{group="canary", instance="0", job="app-server"} => 700 @[%v]`,
				`http_requests{group="production", instance="1", job="app-server"} => 600 @[%v]`,
				`http_requests{group="production", instance="0", job="app-server"} => 500 @[%v]`,
				`http_requests{group="canary", instance="1", job="api-server"} => 400 @[%v]`,
				`http_requests{group="canary", instance="0", job="api-server"} => 300 @[%v]`,
				`http_requests{group="production", instance="1", job="api-server"} => 200 @[%v]`,
				`http_requests{group="production", instance="0", job="api-server"} => 100 @[%v]`,
			},
			checkOrder:     true,
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			expr: `topk(3, http_requests)`,
			output: []string{
				`http_requests{group="canary", instance="1", job="app-server"} => 800 @[%v]`,
				`http_requests{group="canary", instance="0", job="app-server"} => 700 @[%v]`,
				`http_requests{group="production", instance="1", job="app-server"} => 600 @[%v]`,
			},
			checkOrder:     true,
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			expr: `topk(5, http_requests{group="canary",job="app-server"})`,
			output: []string{
				`http_requests{group="canary", instance="1", job="app-server"} => 800 @[%v]`,
				`http_requests{group="canary", instance="0", job="app-server"} => 700 @[%v]`,
			},
			checkOrder:     true,
			fullRanges:     0,
			intervalRanges: 2,
		}, {
			expr: `bottomk(3, http_requests)`,
			output: []string{
				`http_requests{group="production", instance="0", job="api-server"} => 100 @[%v]`,
				`http_requests{group="production", instance="1", job="api-server"} => 200 @[%v]`,
				`http_requests{group="canary", instance="0", job="api-server"} => 300 @[%v]`,
			},
			checkOrder:     true,
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			expr: `bottomk(5, http_requests{group="canary",job="app-server"})`,
			output: []string{
				`http_requests{group="canary", instance="0", job="app-server"} => 700 @[%v]`,
				`http_requests{group="canary", instance="1", job="app-server"} => 800 @[%v]`,
			},
			checkOrder:     true,
			fullRanges:     0,
			intervalRanges: 2,
		}, {
			// Single-letter label names and values.
			expr: `x{y="testvalue"}`,
			output: []string{
				`x{y="testvalue"} => 100 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 1,
		}, {
			// Lower-cased aggregation operators should work too.
			expr: `sum(http_requests) by (job) + min(http_requests) by (job) + max(http_requests) by (job) + avg(http_requests) by (job)`,
			output: []string{
				`{job="app-server"} => 4550 @[%v]`,
				`{job="api-server"} => 1750 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			// Deltas should be adjusted for target interval vs. samples under target interval.
			expr:           `delta(http_requests{group="canary", instance="1", job="app-server"}[18m])`,
			output:         []string{`{group="canary", instance="1", job="app-server"} => 288 @[%v]`},
			fullRanges:     1,
			intervalRanges: 0,
		}, {
			// Deltas should perform the same operation when 2nd argument is 0.
			expr:           `delta(http_requests{group="canary", instance="1", job="app-server"}[18m], 0)`,
			output:         []string{`{group="canary", instance="1", job="app-server"} => 288 @[%v]`},
			fullRanges:     1,
			intervalRanges: 0,
		}, {
			// Rates should calculate per-second rates.
			expr:           `rate(http_requests{group="canary", instance="1", job="app-server"}[60m])`,
			output:         []string{`{group="canary", instance="1", job="app-server"} => 0.26666666666666666 @[%v]`},
			fullRanges:     1,
			intervalRanges: 0,
		}, {
			// Deriv should return the same as rate in simple cases.
			expr:           `deriv(http_requests{group="canary", instance="1", job="app-server"}[60m])`,
			output:         []string{`{group="canary", instance="1", job="app-server"} => 0.26666666666666666 @[%v]`},
			fullRanges:     1,
			intervalRanges: 0,
		}, {
			// Counter resets in the middle of range are handled correctly by rate().
			expr:           `rate(testcounter_reset_middle[60m])`,
			output:         []string{`{} => 0.03 @[%v]`},
			fullRanges:     1,
			intervalRanges: 0,
		}, {
			// Counter resets at end of range are ignored by rate().
			expr:           `rate(testcounter_reset_end[5m])`,
			output:         []string{`{} => 0 @[%v]`},
			fullRanges:     1,
			intervalRanges: 0,
		}, {
			// Deriv should return correct result.
			expr:           `deriv(testcounter_reset_middle[100m])`,
			output:         []string{`{} => 0.010606060606060607 @[%v]`},
			fullRanges:     1,
			intervalRanges: 0,
		}, {
			// count_scalar for a non-empty vector should return scalar element count.
			expr:           `count_scalar(http_requests)`,
			output:         []string{`scalar: 8 @[%v]`},
			fullRanges:     0,
			intervalRanges: 8,
		}, {
			// count_scalar for an empty vector should return scalar 0.
			expr:           `count_scalar(nonexistent)`,
			output:         []string{`scalar: 0 @[%v]`},
			fullRanges:     0,
			intervalRanges: 0,
		}, {
			// Empty expressions shouldn't parse.
			expr:       ``,
			shouldFail: true,
		}, {
			// Interval durations can't be in quotes.
			expr:       `http_requests["1m"]`,
			shouldFail: true,
		}, {
			// Binop arguments need to be scalar or vector.
			expr:       `http_requests - http_requests[1m]`,
			shouldFail: true,
		}, {
			expr: `http_requests{group!="canary"}`,
			output: []string{
				`http_requests{group="production", instance="1", job="app-server"} => 600 @[%v]`,
				`http_requests{group="production", instance="0", job="app-server"} => 500 @[%v]`,
				`http_requests{group="production", instance="1", job="api-server"} => 200 @[%v]`,
				`http_requests{group="production", instance="0", job="api-server"} => 100 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 4,
		}, {
			expr: `http_requests{job=~"server",group!="canary"}`,
			output: []string{
				`http_requests{group="production", instance="1", job="app-server"} => 600 @[%v]`,
				`http_requests{group="production", instance="0", job="app-server"} => 500 @[%v]`,
				`http_requests{group="production", instance="1", job="api-server"} => 200 @[%v]`,
				`http_requests{group="production", instance="0", job="api-server"} => 100 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 4,
		}, {
			expr: `http_requests{job!~"api",group!="canary"}`,
			output: []string{
				`http_requests{group="production", instance="1", job="app-server"} => 600 @[%v]`,
				`http_requests{group="production", instance="0", job="app-server"} => 500 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 2,
		}, {
			expr:           `count_scalar(http_requests{job=~"^server$"})`,
			output:         []string{`scalar: 0 @[%v]`},
			fullRanges:     0,
			intervalRanges: 0,
		}, {
			expr: `http_requests{group="production",job=~"^api"}`,
			output: []string{
				`http_requests{group="production", instance="0", job="api-server"} => 100 @[%v]`,
				`http_requests{group="production", instance="1", job="api-server"} => 200 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 2,
		},
		{
			expr: `abs(-1 * http_requests{group="production",job="api-server"})`,
			output: []string{
				`{group="production", instance="0", job="api-server"} => 100 @[%v]`,
				`{group="production", instance="1", job="api-server"} => 200 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 2,
		},
		{
			expr: `avg_over_time(http_requests{group="production",job="api-server"}[1h])`,
			output: []string{
				`{group="production", instance="0", job="api-server"} => 50 @[%v]`,
				`{group="production", instance="1", job="api-server"} => 100 @[%v]`,
			},
			fullRanges:     2,
			intervalRanges: 0,
		},
		{
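			// count_over_time() counts the samples in the 1h range.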
			expr: `count_over_time(http_requests{group="production",job="api-server"}[1h])`,
			output: []string{
				`{group="production", instance="0", job="api-server"} => 11 @[%v]`,
				`{group="production", instance="1", job="api-server"} => 11 @[%v]`,
			},
			fullRanges:     2,
			intervalRanges: 0,
		},
		{
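			// max_over_time() returns the highest sample in the 1h range.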
			expr: `max_over_time(http_requests{group="production",job="api-server"}[1h])`,
			output: []string{
				`{group="production", instance="0", job="api-server"} => 100 @[%v]`,
				`{group="production", instance="1", job="api-server"} => 200 @[%v]`,
			},
			fullRanges:     2,
			intervalRanges: 0,
		},
		{
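			// min_over_time() returns the lowest sample in the 1h range.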
			expr: `min_over_time(http_requests{group="production",job="api-server"}[1h])`,
			output: []string{
				`{group="production", instance="0", job="api-server"} => 0 @[%v]`,
				`{group="production", instance="1", job="api-server"} => 0 @[%v]`,
			},
			fullRanges:     2,
			intervalRanges: 0,
		},
		{
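			// sum_over_time() sums all samples in the 1h range.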
			expr: `sum_over_time(http_requests{group="production",job="api-server"}[1h])`,
			output: []string{
				`{group="production", instance="0", job="api-server"} => 550 @[%v]`,
				`{group="production", instance="1", job="api-server"} => 1100 @[%v]`,
			},
			fullRanges:     2,
			intervalRanges: 0,
		},
		{
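			// time() returns the evaluation timestamp (in seconds) as a scalar.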
			expr:           `time()`,
			output:         []string{`scalar: 3000 @[%v]`},
			fullRanges:     0,
			intervalRanges: 0,
		},
		{
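			// drop_common_labels() removes the labels shared by every element of the vector.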
			expr: `drop_common_labels(http_requests{group="production",job="api-server"})`,
			output: []string{
				`http_requests{instance="0"} => 100 @[%v]`,
				`http_requests{instance="1"} => 200 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 2,
		},
		{
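			// A metric-name regex matcher against ".*" selects every stored series.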
			expr: `{` + string(clientmodel.MetricNameLabel) + `=~".*"}`,
			output: []string{
				`http_requests{group="canary", instance="0", job="api-server"} => 300 @[%v]`,
				`http_requests{group="canary", instance="0", job="app-server"} => 700 @[%v]`,
				`http_requests{group="canary", instance="1", job="api-server"} => 400 @[%v]`,
				`http_requests{group="canary", instance="1", job="app-server"} => 800 @[%v]`,
				`http_requests{group="production", instance="0", job="api-server"} => 100 @[%v]`,
				`http_requests{group="production", instance="0", job="app-server"} => 500 @[%v]`,
				`http_requests{group="production", instance="1", job="api-server"} => 200 @[%v]`,
				`http_requests{group="production", instance="1", job="app-server"} => 600 @[%v]`,
				`testcounter_reset_end => 0 @[%v]`,
				`testcounter_reset_middle => 50 @[%v]`,
				`x{y="testvalue"} => 100 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 11,
		},
		{
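			// Multiple matchers on the same label are ANDed: job must match "server" but not "api".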
			expr: `{job=~"server", job!~"api"}`,
			output: []string{
				`http_requests{group="canary", instance="0", job="app-server"} => 700 @[%v]`,
				`http_requests{group="canary", instance="1", job="app-server"} => 800 @[%v]`,
				`http_requests{group="production", instance="0", job="app-server"} => 500 @[%v]`,
				`http_requests{group="production", instance="1", job="app-server"} => 600 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 4,
		},
		{
			// Test alternative "by"-clause order.
			expr: `sum by (group) (http_requests{job="api-server"})`,
			output: []string{
				`{group="canary"} => 700 @[%v]`,
				`{group="production"} => 300 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 4,
		},
		{
			// Test alternative "by"-clause order with "keeping_extra".
			expr: `sum by (group) keeping_extra (http_requests{job="api-server"})`,
			output: []string{
				`{group="canary", job="api-server"} => 700 @[%v]`,
				`{group="production", job="api-server"} => 300 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 4,
		},
		{
			// Test both alternative "by"-clause orders in one expression.
			// Public health warning: stick to one form within an expression (or even
			// in an organization), or risk serious user confusion.
			expr: `sum(sum by (group) keeping_extra (http_requests{job="api-server"})) by (job)`,
			output: []string{
				`{job="api-server"} => 1000 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 4,
		},
		{
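			// absent() returns 1 for a selector that matches no series.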
			expr: `absent(nonexistent)`,
			output: []string{
				`{} => 1 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 0,
		},
		{
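			// absent() carries the equality matchers (but not the regex matcher) over into the result labels.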
			expr: `absent(nonexistent{job="testjob", instance="testinstance", method=~".*"})`,
			output: []string{
				`{instance="testinstance", job="testjob"} => 1 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 0,
		},
		{
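			// absent() of an existing metric is an empty vector, so count_scalar() returns 0.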
			expr: `count_scalar(absent(http_requests))`,
			output: []string{
				`scalar: 0 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 8,
		},
		{
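			// Likewise with an aggregation inside absent(): the inner sum() is non-empty, so the count is 0.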
			expr: `count_scalar(absent(sum(http_requests)))`,
			output: []string{
				`scalar: 0 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 8,
		},
		{
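			// absent() around an aggregation still yields 1, but the inner matchers' labels are not carried over.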
			expr: `absent(sum(nonexistent{job="testjob", instance="testinstance"}))`,
			output: []string{
				`{} => 1 @[%v]`,
			},
			fullRanges:     0,
			intervalRanges: 0,
		},
	}

	storage, closer := newTestStorage(t)
	defer closer.Close()

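	// Evaluate each test expression at testEvalTime and compare its rendered
	// output and query analyzer range counts against the expectations.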
	for i, exprTest := range expressionTests {
		expectedLines := annotateWithTime(exprTest.output, testEvalTime)

		testExpr, err := LoadExprFromString(exprTest.expr)

		if err != nil {
			if exprTest.shouldFail {
				continue
			}
			t.Errorf("%d. Error during parsing: %v", i, err)
			t.Errorf("%d. Expression: %v", i, exprTest.expr)
		} else {
			if exprTest.shouldFail {
				t.Errorf("%d. Expression should fail to parse, but didn't", i)
			}
			failed := false
			resultStr := ast.EvalToString(testExpr, testEvalTime, ast.Text, storage, stats.NewTimerGroup())
			resultLines := strings.Split(resultStr, "\n")

			if len(exprTest.output) != len(resultLines) {
				t.Errorf("%d. Number of samples in expected and actual output doesn't match", i)
				failed = true
			}

			if exprTest.checkOrder {
				for j, expectedSample := range expectedLines {
					if resultLines[j] != expectedSample {
						t.Errorf("%d.%d. Expected sample '%v', got '%v'", i, j, expectedSample, resultLines[j])
						failed = true
					}
				}
			} else {
				for j, expectedSample := range expectedLines {
					found := false
					for _, actualSample := range resultLines {
						if actualSample == expectedSample {
							found = true
							break
						}
					}
					if !found {
						t.Errorf("%d.%d. Couldn't find expected sample in output: '%v'", i, j, expectedSample)
						failed = true
					}
				}
			}

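			// Verify that the query analyzer plans the expected number of
			// full-range and interval-range scans for the expression.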
			analyzer := ast.NewQueryAnalyzer(storage)
			ast.Walk(analyzer, testExpr)
			if exprTest.fullRanges != len(analyzer.FullRanges) {
				t.Errorf("%d. Count of full ranges didn't match: %v vs %v", i, exprTest.fullRanges, len(analyzer.FullRanges))
				failed = true
			}
			if exprTest.intervalRanges != len(analyzer.IntervalRanges) {
				t.Errorf("%d. Count of interval ranges didn't match: %v vs %v", i, exprTest.intervalRanges, len(analyzer.IntervalRanges))
				failed = true
			}

			if failed {
				t.Errorf("%d. Expression: %v\n%v", i, exprTest.expr, vectorComparisonString(expectedLines, resultLines))
			}
		}
	}
}