// signatureFunc returns a function that calculates the signature for a metric // based on the provided labels. func signatureFunc(labels ...model.LabelName) func(m metric.Metric) uint64 { if len(labels) == 0 { return func(m metric.Metric) uint64 { m.Del(model.MetricNameLabel) return uint64(m.Metric.Fingerprint()) } } return func(m metric.Metric) uint64 { return model.SignatureForLabels(m.Metric, labels...) } }
// signatureFunc returns a function that calculates the signature for a metric // ignoring the provided labels. If on, then the given labels are only used instead. func signatureFunc(on bool, labels ...model.LabelName) func(m metric.Metric) uint64 { if !on { return func(m metric.Metric) uint64 { tmp := m.Metric.Clone() for _, l := range labels { delete(tmp, l) } delete(tmp, model.MetricNameLabel) return uint64(tmp.Fingerprint()) } } return func(m metric.Metric) uint64 { return model.SignatureForLabels(m.Metric, labels...) } }
// aggregation evaluates an aggregation operation on a vector.
//
// grouping lists the labels to group by (or, if without is true, the labels to
// drop before grouping). keepCommon keeps the labels common to all samples of a
// group in the output. param carries the operator's extra argument: k for
// topk/bottomk, the quantile for quantile, and the value-label name for
// count_values; it is ignored by the other operators.
func (ev *evaluator) aggregation(op itemType, grouping model.LabelNames, without bool, keepCommon bool, param Expr, vec vector) vector {
	result := map[uint64]*groupedAggregation{}

	// Evaluate the operator-specific parameter up front.
	var k int64
	if op == itemTopK || op == itemBottomK {
		k = ev.evalInt(param)
		if k < 1 {
			// topk/bottomk with k < 1 selects nothing.
			return vector{}
		}
	}
	var q float64
	if op == itemQuantile {
		q = ev.evalFloat(param)
	}
	var valueLabel model.LabelName
	if op == itemCountValues {
		valueLabel = model.LabelName(ev.evalString(param).Value)
		if !without {
			// count_values groups by the value label in addition to the
			// explicit grouping labels.
			grouping = append(grouping, valueLabel)
		}
	}

	for _, s := range vec {
		withoutMetric := s.Metric
		if without {
			// Strip the grouping labels and the metric name; what remains
			// determines the group.
			for _, l := range grouping {
				withoutMetric.Del(l)
			}
			withoutMetric.Del(model.MetricNameLabel)
			if op == itemCountValues {
				withoutMetric.Set(valueLabel, model.LabelValue(s.Value.String()))
			}
		} else {
			if op == itemCountValues {
				// Attach the sample value as a label so it participates in
				// the grouping signature below.
				s.Metric.Set(valueLabel, model.LabelValue(s.Value.String()))
			}
		}

		// Compute the grouping key for this sample.
		var groupingKey uint64
		if without {
			groupingKey = uint64(withoutMetric.Metric.Fingerprint())
		} else {
			groupingKey = model.SignatureForLabels(s.Metric.Metric, grouping...)
		}

		groupedResult, ok := result[groupingKey]
		// Add a new group if it doesn't exist.
		if !ok {
			// Pick the label set that represents the group in the output.
			var m metric.Metric
			if keepCommon {
				m = s.Metric
				m.Del(model.MetricNameLabel)
			} else if without {
				m = withoutMetric
			} else {
				m = metric.Metric{
					Metric: model.Metric{},
					Copied: true,
				}
				for _, l := range grouping {
					if v, ok := s.Metric.Metric[l]; ok {
						m.Set(l, v)
					}
				}
			}
			result[groupingKey] = &groupedAggregation{
				labels:           m,
				value:            s.Value,
				valuesSquaredSum: s.Value * s.Value,
				groupCount:       1,
			}
			// topk and quantile share the min-heap; bottomk uses the
			// reverse (max-on-top) heap.
			if op == itemTopK || op == itemQuantile {
				result[groupingKey].heap = make(vectorByValueHeap, 0, k)
				heap.Push(&result[groupingKey].heap, &sample{Value: s.Value, Metric: s.Metric})
			} else if op == itemBottomK {
				result[groupingKey].reverseHeap = make(vectorByReverseValueHeap, 0, k)
				heap.Push(&result[groupingKey].reverseHeap, &sample{Value: s.Value, Metric: s.Metric})
			}
			continue
		}
		// Add the sample to the existing group.
		if keepCommon {
			groupedResult.labels = labelIntersection(groupedResult.labels, s.Metric)
		}
		switch op {
		case itemSum:
			groupedResult.value += s.Value
		case itemAvg:
			groupedResult.value += s.Value
			groupedResult.groupCount++
		case itemMax:
			// The NaN check lets any real value replace an initial NaN.
			if groupedResult.value < s.Value || math.IsNaN(float64(groupedResult.value)) {
				groupedResult.value = s.Value
			}
		case itemMin:
			if groupedResult.value > s.Value || math.IsNaN(float64(groupedResult.value)) {
				groupedResult.value = s.Value
			}
		case itemCount, itemCountValues:
			groupedResult.groupCount++
		case itemStdvar, itemStddev:
			groupedResult.value += s.Value
			groupedResult.valuesSquaredSum += s.Value * s.Value
			groupedResult.groupCount++
		case itemTopK:
			// Keep the k largest values; the heap's smallest element sits
			// on top and is evicted first.
			if int64(len(groupedResult.heap)) < k || groupedResult.heap[0].Value < s.Value || math.IsNaN(float64(groupedResult.heap[0].Value)) {
				if int64(len(groupedResult.heap)) == k {
					heap.Pop(&groupedResult.heap)
				}
				heap.Push(&groupedResult.heap, &sample{Value: s.Value, Metric: s.Metric})
			}
		case itemBottomK:
			// Mirror of topk using the reverse heap (largest on top).
			if int64(len(groupedResult.reverseHeap)) < k || groupedResult.reverseHeap[0].Value > s.Value || math.IsNaN(float64(groupedResult.reverseHeap[0].Value)) {
				if int64(len(groupedResult.reverseHeap)) == k {
					heap.Pop(&groupedResult.reverseHeap)
				}
				heap.Push(&groupedResult.reverseHeap, &sample{Value: s.Value, Metric: s.Metric})
			}
		case itemQuantile:
			// Quantile needs all samples of the group; collect them here
			// and compute once the group is complete.
			groupedResult.heap = append(groupedResult.heap, s)
		default:
			panic(fmt.Errorf("expected aggregation operator but got %q", op))
		}
	}

	// Construct the result vector from the aggregated groups.
	resultVector := make(vector, 0, len(result))
	for _, aggr := range result {
		switch op {
		case itemAvg:
			aggr.value = aggr.value / model.SampleValue(aggr.groupCount)
		case itemCount, itemCountValues:
			aggr.value = model.SampleValue(aggr.groupCount)
		case itemStdvar:
			avg := float64(aggr.value) / float64(aggr.groupCount)
			aggr.value = model.SampleValue(float64(aggr.valuesSquaredSum)/float64(aggr.groupCount) - avg*avg)
		case itemStddev:
			avg := float64(aggr.value) / float64(aggr.groupCount)
			aggr.value = model.SampleValue(math.Sqrt(float64(aggr.valuesSquaredSum)/float64(aggr.groupCount) - avg*avg))
		case itemTopK:
			// The heap keeps the lowest value on top, so reverse it.
			sort.Sort(sort.Reverse(aggr.heap))
			for _, v := range aggr.heap {
				resultVector = append(resultVector, &sample{
					Metric:    v.Metric,
					Value:     v.Value,
					Timestamp: ev.Timestamp,
				})
			}
			continue // Bypass default append.
		case itemBottomK:
			// The heap keeps the lowest value on top, so reverse it.
			sort.Sort(sort.Reverse(aggr.reverseHeap))
			for _, v := range aggr.reverseHeap {
				resultVector = append(resultVector, &sample{
					Metric:    v.Metric,
					Value:     v.Value,
					Timestamp: ev.Timestamp,
				})
			}
			continue // Bypass default append.
		case itemQuantile:
			aggr.value = model.SampleValue(quantile(q, aggr.heap))
		default:
			// For other aggregations, we already have the right value.
		}
		sample := &sample{
			Metric:    aggr.labels,
			Value:     aggr.value,
			Timestamp: ev.Timestamp,
		}
		resultVector = append(resultVector, sample)
	}
	return resultVector
}
// vectorBinop evaluates a binary operation between two vector, excluding AND and OR. func (ev *evaluator) vectorBinop(op itemType, lhs, rhs vector, matching *VectorMatching, returnBool bool) vector { if matching.Card == CardManyToMany { panic("many-to-many only allowed for AND and OR") } var ( result = vector{} sigf = signatureFunc(matching.On...) resultLabels = append(matching.On, matching.Include...) ) // The control flow below handles one-to-one or many-to-one matching. // For one-to-many, swap sidedness and account for the swap when calculating // values. if matching.Card == CardOneToMany { lhs, rhs = rhs, lhs } // All samples from the rhs hashed by the matching label/values. rightSigs := map[uint64]*sample{} // Add all rhs samples to a map so we can easily find matches later. for _, rs := range rhs { sig := sigf(rs.Metric) // The rhs is guaranteed to be the 'one' side. Having multiple samples // with the same signature means that the matching is many-to-many. if _, found := rightSigs[sig]; found { // Many-to-many matching not allowed. ev.errorf("many-to-many matching not allowed: matching labels must be unique on one side") } rightSigs[sig] = rs } // Tracks the match-signature. For one-to-one operations the value is nil. For many-to-one // the value is a set of signatures to detect duplicated result elements. matchedSigs := map[uint64]map[uint64]struct{}{} // For all lhs samples find a respective rhs sample and perform // the binary operation. for _, ls := range lhs { sig := sigf(ls.Metric) rs, found := rightSigs[sig] // Look for a match in the rhs vector. if !found { continue } // Account for potentially swapped sidedness. vl, vr := ls.Value, rs.Value if matching.Card == CardOneToMany { vl, vr = vr, vl } value, keep := vectorElemBinop(op, vl, vr) if returnBool { if keep { value = 1.0 } else { value = 0.0 } } else if !keep { continue } metric := resultMetric(ls.Metric, op, resultLabels...) 
insertedSigs, exists := matchedSigs[sig] if matching.Card == CardOneToOne { if exists { ev.errorf("multiple matches for labels: many-to-one matching must be explicit (group_left/group_right)") } matchedSigs[sig] = nil // Set existence to true. } else { // In many-to-one matching the grouping labels have to ensure a unique metric // for the result vector. Check whether those labels have already been added for // the same matching labels. insertSig := model.SignatureForLabels(metric.Metric, matching.Include...) if !exists { insertedSigs = map[uint64]struct{}{} matchedSigs[sig] = insertedSigs } else if _, duplicate := insertedSigs[insertSig]; duplicate { ev.errorf("multiple matches for labels: grouping labels must ensure unique matches") } insertedSigs[insertSig] = struct{}{} } result = append(result, &sample{ Metric: metric, Value: value, Timestamp: ev.Timestamp, }) } return result }
// aggregation evaluates an aggregation operation on a vector. func (ev *evaluator) aggregation(op itemType, grouping model.LabelNames, without bool, keepExtra bool, vec vector) vector { result := map[uint64]*groupedAggregation{} for _, sample := range vec { withoutMetric := sample.Metric if without { for _, l := range grouping { withoutMetric.Del(l) } withoutMetric.Del(model.MetricNameLabel) } var groupingKey uint64 if without { groupingKey = uint64(withoutMetric.Metric.Fingerprint()) } else { groupingKey = model.SignatureForLabels(sample.Metric.Metric, grouping...) } groupedResult, ok := result[groupingKey] // Add a new group if it doesn't exist. if !ok { var m metric.Metric if keepExtra { m = sample.Metric m.Del(model.MetricNameLabel) } else if without { m = withoutMetric } else { m = metric.Metric{ Metric: model.Metric{}, Copied: true, } for _, l := range grouping { if v, ok := sample.Metric.Metric[l]; ok { m.Set(l, v) } } } result[groupingKey] = &groupedAggregation{ labels: m, value: sample.Value, valuesSquaredSum: sample.Value * sample.Value, groupCount: 1, } continue } // Add the sample to the existing group. if keepExtra { groupedResult.labels = labelIntersection(groupedResult.labels, sample.Metric) } switch op { case itemSum: groupedResult.value += sample.Value case itemAvg: groupedResult.value += sample.Value groupedResult.groupCount++ case itemMax: if groupedResult.value < sample.Value || math.IsNaN(float64(groupedResult.value)) { groupedResult.value = sample.Value } case itemMin: if groupedResult.value > sample.Value || math.IsNaN(float64(groupedResult.value)) { groupedResult.value = sample.Value } case itemCount: groupedResult.groupCount++ case itemStdvar, itemStddev: groupedResult.value += sample.Value groupedResult.valuesSquaredSum += sample.Value * sample.Value groupedResult.groupCount++ default: panic(fmt.Errorf("expected aggregation operator but got %q", op)) } } // Construct the result vector from the aggregated groups. 
resultVector := make(vector, 0, len(result)) for _, aggr := range result { switch op { case itemAvg: aggr.value = aggr.value / model.SampleValue(aggr.groupCount) case itemCount: aggr.value = model.SampleValue(aggr.groupCount) case itemStdvar: avg := float64(aggr.value) / float64(aggr.groupCount) aggr.value = model.SampleValue(float64(aggr.valuesSquaredSum)/float64(aggr.groupCount) - avg*avg) case itemStddev: avg := float64(aggr.value) / float64(aggr.groupCount) aggr.value = model.SampleValue(math.Sqrt(float64(aggr.valuesSquaredSum)/float64(aggr.groupCount) - avg*avg)) default: // For other aggregations, we already have the right value. } sample := &sample{ Metric: aggr.labels, Value: aggr.value, Timestamp: ev.Timestamp, } resultVector = append(resultVector, sample) } return resultVector }