Code example #1
File: engine.go Project: prometheus/prometheus
// resultMetric returns the metric for the given sample(s) based on the vector
// binary operation and the matching options.
func resultMetric(lhs, rhs metric.Metric, op itemType, matching *VectorMatching) metric.Metric {
	if shouldDropMetricName(op) {
		lhs.Del(model.MetricNameLabel)
	}
	if !matching.On {
		if matching.Card == CardOneToOne {
			for _, l := range matching.MatchingLabels {
				lhs.Del(l)
			}
		}
		for _, ln := range matching.Include {
			// Included labels from the `group_x` modifier are taken from the "one"-side.
			value := rhs.Metric[ln]
			if value != "" {
				lhs.Set(ln, value)
			} else {
				lhs.Del(ln)
			}
		}
		return lhs
	}
	// As we definitely write, creating a new metric is the easiest solution.
	m := model.Metric{}
	if matching.Card == CardOneToOne {
		for _, ln := range matching.MatchingLabels {
			if v, ok := lhs.Metric[ln]; ok {
				m[ln] = v
			}
		}
	} else {
		for k, v := range lhs.Metric {
			m[k] = v
		}
	}
	for _, ln := range matching.Include {
		// Included labels from the `group_x` modifier are taken from the "one"-side.
		if v, ok := rhs.Metric[ln]; ok {
			m[ln] = v
		} else {
			delete(m, ln)
		}
	}
	return metric.Metric{Metric: m, Copied: false}
}
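
The include-label handling above is easiest to see in isolation. The sketch below is a minimal, self-contained approximation that models a metric as a plain map[string]string; the helper mergeIncludeLabels and its types are illustrative stand-ins, not part of the Prometheus code base.

package main

import "fmt"

// mergeIncludeLabels is a simplified stand-in for the include-label handling in
// resultMetric: labels listed in `include` are taken from the "one" side (rhs)
// and either overwrite or remove the corresponding label on a copy of lhs.
func mergeIncludeLabels(lhs, rhs map[string]string, include []string) map[string]string {
	out := make(map[string]string, len(lhs))
	for k, v := range lhs {
		out[k] = v
	}
	for _, ln := range include {
		if v, ok := rhs[ln]; ok && v != "" {
			out[ln] = v
		} else {
			delete(out, ln)
		}
	}
	return out
}

func main() {
	lhs := map[string]string{"instance": "a:9090", "job": "api", "owner": "old"}
	rhs := map[string]string{"instance": "a:9090", "owner": "team-x"}
	fmt.Println(mergeIncludeLabels(lhs, rhs, []string{"owner"}))
	// map[instance:a:9090 job:api owner:team-x]
}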
Code example #2
File: engine.go Project: prometheus/prometheus
// aggregation evaluates an aggregation operation on a vector.
func (ev *evaluator) aggregation(op itemType, grouping model.LabelNames, without bool, keepCommon bool, param Expr, vec vector) vector {

	result := map[uint64]*groupedAggregation{}
	var k int64
	if op == itemTopK || op == itemBottomK {
		k = ev.evalInt(param)
		if k < 1 {
			return vector{}
		}
	}
	var q float64
	if op == itemQuantile {
		q = ev.evalFloat(param)
	}
	var valueLabel model.LabelName
	if op == itemCountValues {
		valueLabel = model.LabelName(ev.evalString(param).Value)
		if !without {
			grouping = append(grouping, valueLabel)
		}
	}

	for _, s := range vec {
		withoutMetric := s.Metric
		if without {
			for _, l := range grouping {
				withoutMetric.Del(l)
			}
			withoutMetric.Del(model.MetricNameLabel)
			if op == itemCountValues {
				withoutMetric.Set(valueLabel, model.LabelValue(s.Value.String()))
			}
		} else {
			if op == itemCountValues {
				s.Metric.Set(valueLabel, model.LabelValue(s.Value.String()))
			}
		}

		var groupingKey uint64
		if without {
			groupingKey = uint64(withoutMetric.Metric.Fingerprint())
		} else {
			groupingKey = model.SignatureForLabels(s.Metric.Metric, grouping...)
		}

		groupedResult, ok := result[groupingKey]
		// Add a new group if it doesn't exist.
		if !ok {
			var m metric.Metric
			if keepCommon {
				m = s.Metric
				m.Del(model.MetricNameLabel)
			} else if without {
				m = withoutMetric
			} else {
				m = metric.Metric{
					Metric: model.Metric{},
					Copied: true,
				}
				for _, l := range grouping {
					if v, ok := s.Metric.Metric[l]; ok {
						m.Set(l, v)
					}
				}
			}
			result[groupingKey] = &groupedAggregation{
				labels:           m,
				value:            s.Value,
				valuesSquaredSum: s.Value * s.Value,
				groupCount:       1,
			}
			if op == itemTopK || op == itemQuantile {
				result[groupingKey].heap = make(vectorByValueHeap, 0, k)
				heap.Push(&result[groupingKey].heap, &sample{Value: s.Value, Metric: s.Metric})
			} else if op == itemBottomK {
				result[groupingKey].reverseHeap = make(vectorByReverseValueHeap, 0, k)
				heap.Push(&result[groupingKey].reverseHeap, &sample{Value: s.Value, Metric: s.Metric})
			}
			continue
		}
		// Add the sample to the existing group.
		if keepCommon {
			groupedResult.labels = labelIntersection(groupedResult.labels, s.Metric)
		}

		switch op {
		case itemSum:
			groupedResult.value += s.Value
		case itemAvg:
			groupedResult.value += s.Value
			groupedResult.groupCount++
		case itemMax:
			if groupedResult.value < s.Value || math.IsNaN(float64(groupedResult.value)) {
				groupedResult.value = s.Value
			}
		case itemMin:
			if groupedResult.value > s.Value || math.IsNaN(float64(groupedResult.value)) {
				groupedResult.value = s.Value
			}
		case itemCount, itemCountValues:
			groupedResult.groupCount++
		case itemStdvar, itemStddev:
			groupedResult.value += s.Value
			groupedResult.valuesSquaredSum += s.Value * s.Value
			groupedResult.groupCount++
		case itemTopK:
			if int64(len(groupedResult.heap)) < k || groupedResult.heap[0].Value < s.Value || math.IsNaN(float64(groupedResult.heap[0].Value)) {
				if int64(len(groupedResult.heap)) == k {
					heap.Pop(&groupedResult.heap)
				}
				heap.Push(&groupedResult.heap, &sample{Value: s.Value, Metric: s.Metric})
			}
		case itemBottomK:
			if int64(len(groupedResult.reverseHeap)) < k || groupedResult.reverseHeap[0].Value > s.Value || math.IsNaN(float64(groupedResult.reverseHeap[0].Value)) {
				if int64(len(groupedResult.reverseHeap)) == k {
					heap.Pop(&groupedResult.reverseHeap)
				}
				heap.Push(&groupedResult.reverseHeap, &sample{Value: s.Value, Metric: s.Metric})
			}
		case itemQuantile:
			groupedResult.heap = append(groupedResult.heap, s)
		default:
			panic(fmt.Errorf("expected aggregation operator but got %q", op))
		}
	}

	// Construct the result vector from the aggregated groups.
	resultVector := make(vector, 0, len(result))

	for _, aggr := range result {
		switch op {
		case itemAvg:
			aggr.value = aggr.value / model.SampleValue(aggr.groupCount)
		case itemCount, itemCountValues:
			aggr.value = model.SampleValue(aggr.groupCount)
		case itemStdvar:
			avg := float64(aggr.value) / float64(aggr.groupCount)
			aggr.value = model.SampleValue(float64(aggr.valuesSquaredSum)/float64(aggr.groupCount) - avg*avg)
		case itemStddev:
			avg := float64(aggr.value) / float64(aggr.groupCount)
			aggr.value = model.SampleValue(math.Sqrt(float64(aggr.valuesSquaredSum)/float64(aggr.groupCount) - avg*avg))
		case itemTopK:
			// The heap keeps the lowest value on top, so reverse it.
			sort.Sort(sort.Reverse(aggr.heap))
			for _, v := range aggr.heap {
				resultVector = append(resultVector, &sample{
					Metric:    v.Metric,
					Value:     v.Value,
					Timestamp: ev.Timestamp,
				})
			}
			continue // Bypass default append.
		case itemBottomK:
			// The heap keeps the lowest value on top, so reverse it.
			sort.Sort(sort.Reverse(aggr.reverseHeap))
			for _, v := range aggr.reverseHeap {
				resultVector = append(resultVector, &sample{
					Metric:    v.Metric,
					Value:     v.Value,
					Timestamp: ev.Timestamp,
				})
			}
			continue // Bypass default append.
		case itemQuantile:
			aggr.value = model.SampleValue(quantile(q, aggr.heap))
		default:
			// For other aggregations, we already have the right value.
		}
		sample := &sample{
			Metric:    aggr.labels,
			Value:     aggr.value,
			Timestamp: ev.Timestamp,
		}
		resultVector = append(resultVector, sample)
	}
	return resultVector
}
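
For stddev and stdvar the loop above only accumulates a running sum, a sum of squares and a count per group; the final values then fall out of the identity Var(x) = E[x²] − (E[x])². A minimal sketch of that finalization arithmetic, using illustrative names that are not part of the engine:

package main

import (
	"fmt"
	"math"
)

// groupStats mirrors the running sums kept per group by the aggregation above.
type groupStats struct {
	sum        float64 // corresponds to groupedResult.value
	sumSquares float64 // corresponds to groupedResult.valuesSquaredSum
	count      float64 // corresponds to groupedResult.groupCount
}

// stdvar reproduces the finalization step: E[x^2] - (E[x])^2.
func (g groupStats) stdvar() float64 {
	avg := g.sum / g.count
	return g.sumSquares/g.count - avg*avg
}

func (g groupStats) stddev() float64 { return math.Sqrt(g.stdvar()) }

func main() {
	var g groupStats
	for _, v := range []float64{2, 4, 4, 4, 5, 5, 7, 9} {
		g.sum += v
		g.sumSquares += v * v
		g.count++
	}
	fmt.Println(g.stdvar(), g.stddev()) // 4 2
}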
Code example #3
File: engine.go Project: izogain/prometheus
// aggregation evaluates an aggregation operation on a vector.
func (ev *evaluator) aggregation(op itemType, grouping model.LabelNames, without bool, keepExtra bool, vec vector) vector {

	result := map[uint64]*groupedAggregation{}

	for _, sample := range vec {
		withoutMetric := sample.Metric
		if without {
			for _, l := range grouping {
				withoutMetric.Del(l)
			}
			withoutMetric.Del(model.MetricNameLabel)
		}

		var groupingKey uint64
		if without {
			groupingKey = uint64(withoutMetric.Metric.Fingerprint())
		} else {
			groupingKey = model.SignatureForLabels(sample.Metric.Metric, grouping...)
		}

		groupedResult, ok := result[groupingKey]
		// Add a new group if it doesn't exist.
		if !ok {
			var m metric.Metric
			if keepExtra {
				m = sample.Metric
				m.Del(model.MetricNameLabel)
			} else if without {
				m = withoutMetric
			} else {
				m = metric.Metric{
					Metric: model.Metric{},
					Copied: true,
				}
				for _, l := range grouping {
					if v, ok := sample.Metric.Metric[l]; ok {
						m.Set(l, v)
					}
				}
			}
			result[groupingKey] = &groupedAggregation{
				labels:           m,
				value:            sample.Value,
				valuesSquaredSum: sample.Value * sample.Value,
				groupCount:       1,
			}
			continue
		}
		// Add the sample to the existing group.
		if keepExtra {
			groupedResult.labels = labelIntersection(groupedResult.labels, sample.Metric)
		}

		switch op {
		case itemSum:
			groupedResult.value += sample.Value
		case itemAvg:
			groupedResult.value += sample.Value
			groupedResult.groupCount++
		case itemMax:
			if groupedResult.value < sample.Value || math.IsNaN(float64(groupedResult.value)) {
				groupedResult.value = sample.Value
			}
		case itemMin:
			if groupedResult.value > sample.Value || math.IsNaN(float64(groupedResult.value)) {
				groupedResult.value = sample.Value
			}
		case itemCount:
			groupedResult.groupCount++
		case itemStdvar, itemStddev:
			groupedResult.value += sample.Value
			groupedResult.valuesSquaredSum += sample.Value * sample.Value
			groupedResult.groupCount++
		default:
			panic(fmt.Errorf("expected aggregation operator but got %q", op))
		}
	}

	// Construct the result vector from the aggregated groups.
	resultVector := make(vector, 0, len(result))

	for _, aggr := range result {
		switch op {
		case itemAvg:
			aggr.value = aggr.value / model.SampleValue(aggr.groupCount)
		case itemCount:
			aggr.value = model.SampleValue(aggr.groupCount)
		case itemStdvar:
			avg := float64(aggr.value) / float64(aggr.groupCount)
			aggr.value = model.SampleValue(float64(aggr.valuesSquaredSum)/float64(aggr.groupCount) - avg*avg)
		case itemStddev:
			avg := float64(aggr.value) / float64(aggr.groupCount)
			aggr.value = model.SampleValue(math.Sqrt(float64(aggr.valuesSquaredSum)/float64(aggr.groupCount) - avg*avg))
		default:
			// For other aggregations, we already have the right value.
		}
		sample := &sample{
			Metric:    aggr.labels,
			Value:     aggr.value,
			Timestamp: ev.Timestamp,
		}
		resultVector = append(resultVector, sample)
	}
	return resultVector
}
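
Both versions bucket samples by a grouping key: in the `by (...)` case the key is model.SignatureForLabels computed over the grouping labels only, so series that agree on those labels collapse into one group. The sketch below reproduces that idea with plain maps and an FNV hash; groupingKey is an illustrative stand-in, not the actual signature function.

package main

import (
	"fmt"
	"hash/fnv"
	"sort"
)

// groupingKey is a simplified analogue of model.SignatureForLabels: it hashes
// only the names and values of the grouping labels, so samples that share
// those values land in the same bucket regardless of their other labels.
func groupingKey(labels map[string]string, grouping []string) uint64 {
	names := append([]string(nil), grouping...)
	sort.Strings(names) // make the key independent of grouping-label order
	h := fnv.New64a()
	for _, ln := range names {
		h.Write([]byte(ln))
		h.Write([]byte{0xff}) // separator to avoid ambiguous concatenations
		h.Write([]byte(labels[ln]))
		h.Write([]byte{0xff})
	}
	return h.Sum64()
}

func main() {
	a := map[string]string{"job": "api", "instance": "a:9090"}
	b := map[string]string{"job": "api", "instance": "b:9090"}
	// Grouping by "job" ignores the differing "instance" label.
	fmt.Println(groupingKey(a, []string{"job"}) == groupingKey(b, []string{"job"})) // true
}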