// labelIntersection returns the metric consisting of the label/value pairs
// that the two input metrics have in common.
func labelIntersection(metric1, metric2 clientmodel.COWMetric) clientmodel.COWMetric {
	for label, value := range metric1.Metric {
		if metric2.Metric[label] != value {
			metric1.Delete(label)
		}
	}
	return metric1
}
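// Illustrative sketch (hypothetical label values, not from the original
// code): given
//	metric1 = {job="api", instance="a", path="/x"}
//	metric2 = {job="api", instance="b", path="/x"}
// labelIntersection deletes "instance" from metric1, because its value
// differs between the two metrics, and returns {job="api", path="/x"}.
// A label present only in metric1 is dropped as well, since a missing
// label in metric2 reads as the empty value and fails the comparison.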
// Eval implements the VectorNode interface and returns the aggregated
// Vector.
func (node *VectorAggregation) Eval(timestamp clientmodel.Timestamp) Vector {
	vector := node.vector.Eval(timestamp)
	result := map[uint64]*groupedAggregation{}
	for _, sample := range vector {
		groupingKey := node.labelsToGroupingKey(sample.Metric.Metric)
		if groupedResult, ok := result[groupingKey]; ok {
			if node.keepExtraLabels {
				groupedResult.labels = labelIntersection(groupedResult.labels, sample.Metric)
			}

			switch node.aggrType {
			case Sum:
				groupedResult.value += sample.Value
			case Avg:
				groupedResult.value += sample.Value
				groupedResult.groupCount++
			case Max:
				if groupedResult.value < sample.Value {
					groupedResult.value = sample.Value
				}
			case Min:
				if groupedResult.value > sample.Value {
					groupedResult.value = sample.Value
				}
			case Count:
				groupedResult.groupCount++
			default:
				panic("Unknown aggregation type")
			}
		} else {
			var m clientmodel.COWMetric
			if node.keepExtraLabels {
				m = sample.Metric
				m.Delete(clientmodel.MetricNameLabel)
			} else {
				m = clientmodel.COWMetric{
					Metric: clientmodel.Metric{},
					Copied: true,
				}
				for _, l := range node.groupBy {
					if v, ok := sample.Metric.Metric[l]; ok {
						m.Set(l, v)
					}
				}
			}
			result[groupingKey] = &groupedAggregation{
				labels:     m,
				value:      sample.Value,
				groupCount: 1,
			}
		}
	}
	return node.groupedAggregationsToVector(result, timestamp)
}
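// Illustrative sketch (hypothetical samples, not from the original code):
// for an aggregation equivalent to `sum by (job)` over
//	{job="api", instance="a"} => 1
//	{job="api", instance="b"} => 2
//	{job="db",  instance="a"} => 5
// the two "api" samples share a grouping key and are summed into
// {job="api"} => 3, while the "db" sample forms its own group
// {job="db"} => 5. With keepExtraLabels set, the output metric instead
// keeps every label whose value is identical across all samples of a
// group (here only "job", since "instance" differs within the "api" group).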
// resultMetric returns the metric for the given sample(s) based on the vector
// binary operation and the matching options.
func resultMetric(met clientmodel.COWMetric, op itemType, labels ...clientmodel.LabelName) clientmodel.COWMetric {
	if len(labels) == 0 {
		if shouldDropMetricName(op) {
			met.Delete(clientmodel.MetricNameLabel)
		}
		return met
	}
	// As we definitely write, creating a new metric is the easiest solution.
	m := clientmodel.Metric{}
	for _, ln := range labels {
		// Included labels from the `group_x` modifier are taken from the "many"-side.
		if v, ok := met.Metric[ln]; ok {
			m[ln] = v
		}
	}
	return clientmodel.COWMetric{Metric: m, Copied: false}
}
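// Illustrative sketch (hypothetical metric and operator, not from the
// original code): for a metric {__name__="errors", job="api", instance="a"}
// and an operator for which shouldDropMetricName returns true, calling
// resultMetric without extra labels drops only the metric name and returns
// {job="api", instance="a"}; calling it with labels ("job") ignores the
// other labels entirely and returns the freshly built metric {job="api"}.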
// aggregation evaluates an aggregation operation on a vector.
func (ev *evaluator) aggregation(op itemType, grouping clientmodel.LabelNames, keepExtra bool, vector Vector) Vector {
	result := map[uint64]*groupedAggregation{}

	for _, sample := range vector {
		groupingKey := clientmodel.SignatureForLabels(sample.Metric.Metric, grouping)

		groupedResult, ok := result[groupingKey]
		// Add a new group if it doesn't exist.
		if !ok {
			var m clientmodel.COWMetric
			if keepExtra {
				m = sample.Metric
				m.Delete(clientmodel.MetricNameLabel)
			} else {
				m = clientmodel.COWMetric{
					Metric: clientmodel.Metric{},
					Copied: true,
				}
				for _, l := range grouping {
					if v, ok := sample.Metric.Metric[l]; ok {
						m.Set(l, v)
					}
				}
			}
			result[groupingKey] = &groupedAggregation{
				labels:           m,
				value:            sample.Value,
				valuesSquaredSum: sample.Value * sample.Value,
				groupCount:       1,
			}
			continue
		}

		// Add the sample to the existing group.
		if keepExtra {
			groupedResult.labels = labelIntersection(groupedResult.labels, sample.Metric)
		}

		switch op {
		case itemSum:
			groupedResult.value += sample.Value
		case itemAvg:
			groupedResult.value += sample.Value
			groupedResult.groupCount++
		case itemMax:
			if groupedResult.value < sample.Value {
				groupedResult.value = sample.Value
			}
		case itemMin:
			if groupedResult.value > sample.Value {
				groupedResult.value = sample.Value
			}
		case itemCount:
			groupedResult.groupCount++
		case itemStdvar, itemStddev:
			groupedResult.value += sample.Value
			groupedResult.valuesSquaredSum += sample.Value * sample.Value
			groupedResult.groupCount++
		default:
			panic(fmt.Errorf("expected aggregation operator but got %q", op))
		}
	}

	// Construct the result vector from the aggregated groups.
	resultVector := make(Vector, 0, len(result))

	for _, aggr := range result {
		switch op {
		case itemAvg:
			aggr.value = aggr.value / clientmodel.SampleValue(aggr.groupCount)
		case itemCount:
			aggr.value = clientmodel.SampleValue(aggr.groupCount)
		case itemStdvar:
			avg := float64(aggr.value) / float64(aggr.groupCount)
			aggr.value = clientmodel.SampleValue(float64(aggr.valuesSquaredSum)/float64(aggr.groupCount) - avg*avg)
		case itemStddev:
			avg := float64(aggr.value) / float64(aggr.groupCount)
			aggr.value = clientmodel.SampleValue(math.Sqrt(float64(aggr.valuesSquaredSum)/float64(aggr.groupCount) - avg*avg))
		default:
			// For other aggregations, we already have the right value.
		}
		sample := &Sample{
			Metric:    aggr.labels,
			Value:     aggr.value,
			Timestamp: ev.Timestamp,
		}
		resultVector = append(resultVector, sample)
	}
	return resultVector
}
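// Note on the stddev/stdvar cases above (worked example, not from the
// original code): both rely on the identity Var(X) = E[X^2] - E[X]^2.
// For the sample values 1, 2, 3 in one group: value = 6,
// valuesSquaredSum = 14, groupCount = 3, so avg = 2,
// stdvar = 14/3 - 2*2 = 2/3 and stddev = sqrt(2/3), roughly 0.816.
// This is the population variance, not the sample variance.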