// === delta(matrix MatrixNode, isCounter ScalarNode) Vector === func deltaImpl(timestamp time.Time, view *viewAdapter, args []Node) interface{} { matrixNode := args[0].(MatrixNode) isCounter := int(args[1].(ScalarNode).Eval(timestamp, view)) resultVector := Vector{} // If we treat these metrics as counters, we need to fetch all values // in the interval to find breaks in the timeseries' monotonicity. // I.e. if a counter resets, we want to ignore that reset. var matrixValue Matrix if isCounter > 0 { matrixValue = matrixNode.Eval(timestamp, view) } else { matrixValue = matrixNode.EvalBoundaries(timestamp, view) } for _, samples := range matrixValue { counterCorrection := model.SampleValue(0) lastValue := model.SampleValue(0) for _, sample := range samples.Values { currentValue := sample.Value if currentValue < lastValue { counterCorrection += lastValue - currentValue } lastValue = currentValue } resultValue := lastValue - samples.Values[0].Value + counterCorrection resultSample := model.Sample{ Metric: samples.Metric, Value: resultValue, Timestamp: timestamp, } resultVector = append(resultVector, resultSample) } return resultVector }
// AppendRepeatingValuesTests appends increments*repetitions samples for a
// single fixed metric (the value repeats within each hour-sized increment)
// and — in the currently disabled read-back phase below the early return —
// would verify every stored value via GetValueAtTime.
func AppendRepeatingValuesTests(p MetricPersistence, t test.Tester) {
	metric := model.Metric{
		model.MetricNameLabel: "errors_total",
		"controller":          "foo",
		"operation":           "bar",
	}

	increments := 10
	repetitions := 500

	for i := 0; i < increments; i++ {
		for j := 0; j < repetitions; j++ {
			// NOTE: `time` shadows the time package within this statement's
			// scope; the timestamp is offset i hours + j seconds from the
			// zero time.
			time := time.Time{}.Add(time.Duration(i) * time.Hour).Add(time.Duration(j) * time.Second)
			testAppendSample(p, model.Sample{
				Value:     model.SampleValue(i),
				Timestamp: time,
				Metric:    metric,
			}, t)
		}
	}

	if true {
		// XXX: Purely a benchmark.
		// The verification phase below is intentionally unreachable.
		return
	}

	labelSet := model.LabelSet{
		model.MetricNameLabel: "errors_total",
		"controller":          "foo",
		"operation":           "bar",
	}

	for i := 0; i < increments; i++ {
		for j := 0; j < repetitions; j++ {
			fingerprints, err := p.GetFingerprintsForLabelSet(labelSet)
			if err != nil {
				t.Fatal(err)
			}
			if len(fingerprints) != 1 {
				t.Fatalf("expected %d fingerprints, got %d", 1, len(fingerprints))
			}

			time := time.Time{}.Add(time.Duration(i) * time.Hour).Add(time.Duration(j) * time.Second)
			sample, err := p.GetValueAtTime(fingerprints[0], time, StalenessPolicy{})
			if err != nil {
				t.Fatal(err)
			}
			if sample == nil {
				t.Fatal("expected non-nil sample.")
			}

			expected := model.SampleValue(i)
			if sample.Value != expected {
				t.Fatalf("expected %d value, got %d", expected, sample.Value)
			}
		}
	}
}
// interpolateSamples interpolates a value at a target time between two // provided sample pairs. func interpolateSamples(first, second *model.SamplePair, timestamp time.Time) *model.SamplePair { dv := second.Value - first.Value dt := second.Timestamp.Sub(first.Timestamp) dDt := dv / model.SampleValue(dt) offset := model.SampleValue(timestamp.Sub(first.Timestamp)) return &model.SamplePair{ Value: first.Value + (offset * dDt), Timestamp: timestamp, } }
func AppendSampleAsPureSparseAppendTests(p MetricPersistence, t test.Tester) { appendSample := func(x int) (success bool) { v := model.SampleValue(x) ts := time.Unix(int64(x), int64(x)) labelName := model.LabelName(x) labelValue := model.LabelValue(x) l := model.Metric{labelName: labelValue} sample := model.Sample{ Value: v, Timestamp: ts, Metric: l, } err := p.AppendSample(sample) success = err == nil if !success { t.Error(err) } return } if err := quick.Check(appendSample, nil); err != nil { t.Error(err) } }
// XXX: Terrible wart. func interpolateSample(x1, x2 time.Time, y1, y2 float32, e time.Time) model.SampleValue { yDelta := y2 - y1 xDelta := x2.Sub(x1) dDt := yDelta / float32(xDelta) offset := float32(e.Sub(x1)) return model.SampleValue(y1 + (offset * dDt)) }
// === rate(node *MatrixNode) Vector === func rateImpl(timestamp time.Time, view *viewAdapter, args []Node) interface{} { args = append(args, &ScalarLiteral{value: 1}) vector := deltaImpl(timestamp, view, args).(Vector) // TODO: could be other type of MatrixNode in the future (right now, only // MatrixLiteral exists). Find a better way of getting the duration of a // matrix, such as looking at the samples themselves. interval := args[0].(*MatrixLiteral).interval for _, sample := range vector { sample.Value /= model.SampleValue(interval / time.Second) } return vector }
// AppendSampleAsSparseAppendWithReadsTests quick-checks that each appended
// sample — every generated input yields a unique label name/value pair — is
// immediately findable through both GetFingerprintsForLabelName and
// GetFingerprintsForLabelSet, each returning exactly one fingerprint.
func AppendSampleAsSparseAppendWithReadsTests(p MetricPersistence, t test.Tester) {
	appendSample := func(x int) (success bool) {
		v := model.SampleValue(x)
		ts := time.Unix(int64(x), int64(x))
		labelName := model.LabelName(x)
		labelValue := model.LabelValue(x)
		l := model.Metric{labelName: labelValue}

		sample := model.Sample{
			Value:     v,
			Timestamp: ts,
			Metric:    l,
		}

		err := p.AppendSample(sample)
		if err != nil {
			t.Error(err)
			// Naked return: `success` is still false here.
			return
		}

		fingerprints, err := p.GetFingerprintsForLabelName(labelName)
		if err != nil {
			t.Error(err)
			return
		}
		if len(fingerprints) != 1 {
			t.Errorf("expected fingerprint count of %d, got %d", 1, len(fingerprints))
			return
		}

		fingerprints, err = p.GetFingerprintsForLabelSet(model.LabelSet{
			labelName: labelValue,
		})
		if err != nil {
			t.Error(err)
			return
		}
		if len(fingerprints) != 1 {
			t.Errorf("expected fingerprint count of %d, got %d", 1, len(fingerprints))
			return
		}

		return true
	}

	if err := quick.Check(appendSample, nil); err != nil {
		t.Error(err)
	}
}
func (node *VectorAggregation) groupedAggregationsToVector(aggregations map[string]*groupedAggregation, timestamp time.Time) Vector { vector := Vector{} for _, aggregation := range aggregations { if node.aggrType == AVG { aggregation.value = aggregation.value / model.SampleValue(aggregation.groupCount) } sample := model.Sample{ Metric: aggregation.labels, Value: aggregation.value, Timestamp: timestamp, } vector = append(vector, sample) } return vector }
func buildSamples(from, to time.Time, interval time.Duration, m model.Metric) (v []model.Sample) { i := model.SampleValue(0) for from.Before(to) { v = append(v, model.Sample{ Metric: m, Value: i, Timestamp: from, }) from = from.Add(interval) i++ } return }
func AppendSampleAsPureSingleEntityAppendTests(p MetricPersistence, t test.Tester) { appendSample := func(x int) bool { sample := model.Sample{ Value: model.SampleValue(x), Timestamp: time.Unix(int64(x), 0), Metric: model.Metric{model.MetricNameLabel: "my_metric"}, } err := p.AppendSample(sample) return err == nil } if err := quick.Check(appendSample, nil); err != nil { t.Error(err) } }
// GetFingerprintsForLabelSetUsesAndForLabelMatchingTests stores one sample
// for each of several similarly-labeled metrics and then verifies that a
// label-set query (metric name AND percentile) matches exactly one of them.
func GetFingerprintsForLabelSetUsesAndForLabelMatchingTests(p MetricPersistence, t test.Tester) {
	metrics := []model.LabelSet{
		{model.MetricNameLabel: "request_metrics_latency_equal_tallying_microseconds", "instance": "http://localhost:9090/metrics.json", "percentile": "0.010000"},
		{model.MetricNameLabel: "requests_metrics_latency_equal_accumulating_microseconds", "instance": "http://localhost:9090/metrics.json", "percentile": "0.010000"},
		{model.MetricNameLabel: "requests_metrics_latency_logarithmic_accumulating_microseconds", "instance": "http://localhost:9090/metrics.json", "percentile": "0.010000"},
		{model.MetricNameLabel: "requests_metrics_latency_logarithmic_tallying_microseconds", "instance": "http://localhost:9090/metrics.json", "percentile": "0.010000"},
		{model.MetricNameLabel: "targets_healthy_scrape_latency_ms", "instance": "http://localhost:9090/metrics.json", "percentile": "0.010000"},
	}

	for _, metric := range metrics {
		m := model.Metric{}

		// Copy the label set into a Metric (the two map types differ).
		for k, v := range metric {
			m[model.LabelName(k)] = model.LabelValue(v)
		}

		testAppendSample(p, model.Sample{
			Value:     model.SampleValue(0.0),
			Timestamp: time.Now(),
			Metric:    m,
		}, t)
	}

	labelSet := model.LabelSet{
		model.MetricNameLabel: "targets_healthy_scrape_latency_ms",
		"percentile":          "0.010000",
	}

	fingerprints, err := p.GetFingerprintsForLabelSet(labelSet)
	if err != nil {
		t.Errorf("could not get labels: %s", err)
	}

	if len(fingerprints) != 1 {
		t.Errorf("did not get a single metric as is expected, got %s", fingerprints)
	}
}
// GetValueAtTimeTests exercises MetricPersistence.GetValueAtTime with
// table-driven scenarios over zero, one, two, and three stored samples. Each
// behavior queries a point in time under a given staleness allowance; a nil
// expected output means no value may be returned (e.g. the target lies
// outside the staleness window), and midpoint queries expect interpolation
// between neighboring samples.
func GetValueAtTimeTests(persistenceMaker func() (MetricPersistence, test.Closer), t test.Tester) {
	// value is a sample to store, positioned by calendar components.
	type value struct {
		year  int
		month time.Month
		day   int
		hour  int
		value float32
	}

	// input is a query: the target instant plus the allowed staleness.
	type input struct {
		year      int
		month     time.Month
		day       int
		hour      int
		staleness time.Duration
	}

	type output struct {
		value model.SampleValue
	}

	type behavior struct {
		name   string
		input  input
		output *output
	}

	var contexts = []struct {
		name      string
		values    []value
		behaviors []behavior
	}{
		{
			name:   "no values",
			values: []value{},
			behaviors: []behavior{
				{name: "random target", input: input{year: 1984, month: 3, day: 30, hour: 0, staleness: time.Duration(0)}},
			},
		},
		{
			name: "singleton",
			values: []value{
				{year: 1984, month: 3, day: 30, hour: 0, value: 0},
			},
			behaviors: []behavior{
				{name: "exact without staleness policy", input: input{year: 1984, month: 3, day: 30, hour: 0, staleness: time.Duration(0)}, output: &output{value: 0}},
				{name: "exact with staleness policy", input: input{year: 1984, month: 3, day: 30, hour: 0, staleness: time.Duration(365*24) * time.Hour}, output: &output{value: 0}},
				{name: "before without staleness policy", input: input{year: 1984, month: 3, day: 29, hour: 0, staleness: time.Duration(0)}},
				{name: "before within staleness policy", input: input{year: 1984, month: 3, day: 29, hour: 0, staleness: time.Duration(365*24) * time.Hour}},
				{name: "before outside staleness policy", input: input{year: 1984, month: 3, day: 29, hour: 0, staleness: time.Duration(1) * time.Hour}},
				{name: "after without staleness policy", input: input{year: 1984, month: 3, day: 31, hour: 0, staleness: time.Duration(0)}},
				{name: "after within staleness policy", input: input{year: 1984, month: 3, day: 31, hour: 0, staleness: time.Duration(365*24) * time.Hour}, output: &output{value: 0}},
				{name: "after outside staleness policy", input: input{year: 1984, month: 4, day: 7, hour: 0, staleness: time.Duration(7*24) * time.Hour}},
			},
		},
		{
			name: "double",
			values: []value{
				{year: 1984, month: 3, day: 30, hour: 0, value: 0},
				{year: 1985, month: 3, day: 30, hour: 0, value: 1},
			},
			behaviors: []behavior{
				{name: "exact first without staleness policy", input: input{year: 1984, month: 3, day: 30, hour: 0, staleness: time.Duration(0)}, output: &output{value: 0}},
				{name: "exact first with staleness policy", input: input{year: 1984, month: 3, day: 30, hour: 0, staleness: time.Duration(365*24) * time.Hour}, output: &output{value: 0}},
				{name: "exact second without staleness policy", input: input{year: 1985, month: 3, day: 30, hour: 0, staleness: time.Duration(0)}, output: &output{value: 1}},
				{name: "exact second with staleness policy", input: input{year: 1985, month: 3, day: 30, hour: 0, staleness: time.Duration(365*24) * time.Hour}, output: &output{value: 1}},
				{name: "before first without staleness policy", input: input{year: 1983, month: 9, day: 29, hour: 12, staleness: time.Duration(0)}},
				{name: "before first with staleness policy", input: input{year: 1983, month: 9, day: 29, hour: 12, staleness: time.Duration(365*24) * time.Hour}},
				{name: "after second with staleness policy", input: input{year: 1985, month: 9, day: 28, hour: 12, staleness: time.Duration(365*24) * time.Hour}, output: &output{value: 1}},
				{name: "after second without staleness policy", input: input{year: 1985, month: 9, day: 28, hour: 12, staleness: time.Duration(0)}},
				{name: "middle without staleness policy", input: input{year: 1984, month: 9, day: 28, hour: 12, staleness: time.Duration(0)}},
				{name: "middle with insufficient staleness policy", input: input{year: 1984, month: 9, day: 28, hour: 12, staleness: time.Duration(364*24) * time.Hour}},
				{name: "middle with sufficient staleness policy", input: input{year: 1984, month: 9, day: 28, hour: 12, staleness: time.Duration(365*24) * time.Hour}, output: &output{value: 0.5}},
			},
		},
		{
			name: "triple",
			values: []value{
				{year: 1984, month: 3, day: 30, hour: 0, value: 0},
				{year: 1985, month: 3, day: 30, hour: 0, value: 1},
				{year: 1986, month: 3, day: 30, hour: 0, value: 2},
			},
			behaviors: []behavior{
				{name: "exact first without staleness policy", input: input{year: 1984, month: 3, day: 30, hour: 0, staleness: time.Duration(0)}, output: &output{value: 0}},
				{name: "exact first with staleness policy", input: input{year: 1984, month: 3, day: 30, hour: 0, staleness: time.Duration(365*24) * time.Hour}, output: &output{value: 0}},
				{name: "exact second without staleness policy", input: input{year: 1985, month: 3, day: 30, hour: 0, staleness: time.Duration(0)}, output: &output{value: 1}},
				{name: "exact second with staleness policy", input: input{year: 1985, month: 3, day: 30, hour: 0, staleness: time.Duration(365*24) * time.Hour}, output: &output{value: 1}},
				{name: "exact third without staleness policy", input: input{year: 1986, month: 3, day: 30, hour: 0, staleness: time.Duration(0)}, output: &output{value: 2}},
				{name: "exact third with staleness policy", input: input{year: 1986, month: 3, day: 30, hour: 0, staleness: time.Duration(365*24) * time.Hour}, output: &output{value: 2}},
				{name: "before first without staleness policy", input: input{year: 1983, month: 9, day: 29, hour: 12, staleness: time.Duration(0)}},
				{name: "before first with staleness policy", input: input{year: 1983, month: 9, day: 29, hour: 12, staleness: time.Duration(365*24) * time.Hour}},
				{name: "after third within staleness policy", input: input{year: 1986, month: 9, day: 28, hour: 12, staleness: time.Duration(365*24) * time.Hour}, output: &output{value: 2}},
				{name: "after third outside staleness policy", input: input{year: 1986, month: 9, day: 28, hour: 12, staleness: time.Duration(1*24) * time.Hour}},
				{name: "after third without staleness policy", input: input{year: 1986, month: 9, day: 28, hour: 12, staleness: time.Duration(0)}},
				{name: "first middle without staleness policy", input: input{year: 1984, month: 9, day: 28, hour: 12, staleness: time.Duration(0)}},
				{name: "first middle with insufficient staleness policy", input: input{year: 1984, month: 9, day: 28, hour: 12, staleness: time.Duration(364*24) * time.Hour}},
				{name: "first middle with sufficient staleness policy", input: input{year: 1984, month: 9, day: 28, hour: 12, staleness: time.Duration(365*24) * time.Hour}, output: &output{value: 0.5}},
				{name: "second middle without staleness policy", input: input{year: 1985, month: 9, day: 28, hour: 12, staleness: time.Duration(0)}},
				{name: "second middle with insufficient staleness policy", input: input{year: 1985, month: 9, day: 28, hour: 12, staleness: time.Duration(364*24) * time.Hour}},
				{name: "second middle with sufficient staleness policy", input: input{year: 1985, month: 9, day: 28, hour: 12, staleness: time.Duration(365*24) * time.Hour}, output: &output{value: 1.5}},
			},
		},
	}

	for i, context := range contexts {
		// Wrapping in function to enable garbage collection of resources.
		func() {
			p, closer := persistenceMaker()
			defer closer.Close()
			defer p.Close()

			m := model.Metric{
				model.MetricNameLabel: "age_in_years",
			}

			for _, value := range context.values {
				testAppendSample(p, model.Sample{
					Value:     model.SampleValue(value.value),
					Timestamp: time.Date(value.year, value.month, value.day, value.hour, 0, 0, 0, time.UTC),
					Metric:    m,
				}, t)
			}

			for j, behavior := range context.behaviors {
				input := behavior.input
				// NOTE: `time` shadows the time package for the remainder of
				// this iteration.
				time := time.Date(input.year, input.month, input.day, input.hour, 0, 0, 0, time.UTC)
				sp := StalenessPolicy{
					DeltaAllowance: input.staleness,
				}

				actual, err := p.GetValueAtTime(model.NewFingerprintFromMetric(m), time, sp)
				if err != nil {
					t.Fatalf("%d.%d(%s). Could not query for value: %q\n", i, j, behavior.name, err)
				}

				if behavior.output == nil {
					if actual != nil {
						t.Fatalf("%d.%d(%s). Expected nil but got: %q\n", i, j, behavior.name, actual)
					}
				} else {
					if actual == nil {
						t.Fatalf("%d.%d(%s). Expected %s but got nil\n", i, j, behavior.name, behavior.output)
					} else {
						if actual.Value != behavior.output.value {
							t.Fatalf("%d.%d(%s). Expected %s but got %s\n", i, j, behavior.name, behavior.output, actual)
						}
					}
				}
			}
		}()
	}
}
func evalVectorBinop(opType BinOpType, lhs model.SampleValue, rhs model.SampleValue) (model.SampleValue, bool) { switch opType { case ADD: return lhs + rhs, true case SUB: return lhs - rhs, true case MUL: return lhs * rhs, true case DIV: if rhs != 0 { return lhs / rhs, true } else { return model.SampleValue(math.Inf(int(rhs))), true } case MOD: if rhs != 0 { return model.SampleValue(int(lhs) % int(rhs)), true } else { return model.SampleValue(math.Inf(int(rhs))), true } case EQ: if lhs == rhs { return lhs, true } else { return 0, false } case NE: if lhs != rhs { return lhs, true } else { return 0, false } case GT: if lhs > rhs { return lhs, true } else { return 0, false } case LT: if lhs < rhs { return lhs, true } else { return 0, false } case GE: if lhs >= rhs { return lhs, true } else { return 0, false } case LE: if lhs <= rhs { return lhs, true } else { return 0, false } case AND: return lhs, true case OR: return lhs, true // TODO: implement OR } panic("Not all enum values enumerated in switch") }
func evalScalarBinop(opType BinOpType, lhs model.SampleValue, rhs model.SampleValue) model.SampleValue { switch opType { case ADD: return lhs + rhs case SUB: return lhs - rhs case MUL: return lhs * rhs case DIV: if rhs != 0 { return lhs / rhs } else { return model.SampleValue(math.Inf(int(rhs))) } case MOD: if rhs != 0 { return model.SampleValue(int(lhs) % int(rhs)) } else { return model.SampleValue(math.Inf(int(rhs))) } case EQ: if lhs == rhs { return 1 } else { return 0 } case NE: if lhs != rhs { return 1 } else { return 0 } case GT: if lhs > rhs { return 1 } else { return 0 } case LT: if lhs < rhs { return 1 } else { return 0 } case GE: if lhs >= rhs { return 1 } else { return 0 } case LE: if lhs <= rhs { return 1 } else { return 0 } } panic("Not all enum values enumerated in switch") }
// get returns the receiver converted to a model.SampleValue.
func (v singletonValue) get() model.SampleValue {
	return model.SampleValue(v)
}
// loadChunkAroundTime reads the block of stored samples surrounding time ts
// for the given fingerprint. The seek target is clamped to the series'
// frontier so the lookup stays within its keyspace. The returned chunk holds
// all samples of the block the iterator ends up on, possibly prefixed by the
// last sample of the preceding block when a rewind was required.
//
// NOTE(review): extraction failures panic rather than returning an error —
// presumably treated as data corruption; confirm with callers.
func (t *tieredStorage) loadChunkAroundTime(iterator leveldb.Iterator, frontier *seriesFrontier, fingerprint model.Fingerprint, ts time.Time) (chunk []model.SamplePair) {
	var (
		targetKey = &dto.SampleKey{
			Fingerprint: fingerprint.ToDTO(),
		}
		foundKey   = &dto.SampleKey{}
		foundValue *dto.SampleValueSeries
	)

	// Limit the target key to be within the series' keyspace.
	if ts.After(frontier.lastSupertime) {
		targetKey.Timestamp = indexable.EncodeTime(frontier.lastSupertime)
	} else {
		targetKey.Timestamp = indexable.EncodeTime(ts)
	}

	// Try seeking to target key.
	rawKey, _ := coding.NewProtocolBuffer(targetKey).Encode()
	iterator.Seek(rawKey)

	foundKey, err := extractSampleKey(iterator)
	if err != nil {
		panic(err)
	}

	// Figure out if we need to rewind by one block.
	// Imagine the following supertime blocks with time ranges:
	//
	// Block 1: ft 1000 - lt 1009 <data>
	// Block 2: ft 1010 - lt 1019 <data>
	//
	// If we are aiming to find time 1005, we would first seek to the block with
	// supertime 1010, then need to rewind by one block by virtue of LevelDB
	// iterator seek behavior.
	//
	// Only do the rewind if there is another chunk before this one.
	rewound := false
	firstTime := indexable.DecodeTime(foundKey.Timestamp)
	if ts.Before(firstTime) && !frontier.firstSupertime.After(ts) {
		iterator.Previous()
		rewound = true
	}

	foundValue, err = extractSampleValues(iterator)
	if err != nil {
		panic(err)
	}

	// If we rewound, but the target time is still past the current block, return
	// the last value of the current (rewound) block and the entire next block.
	if rewound {
		foundKey, err = extractSampleKey(iterator)
		if err != nil {
			panic(err)
		}
		currentChunkLastTime := time.Unix(*foundKey.LastTimestamp, 0)

		if ts.After(currentChunkLastTime) {
			sampleCount := len(foundValue.Value)
			chunk = append(chunk, model.SamplePair{
				Timestamp: time.Unix(*foundValue.Value[sampleCount-1].Timestamp, 0),
				Value:     model.SampleValue(*foundValue.Value[sampleCount-1].Value),
			})
			// We know there's a next block since we have rewound from it.
			iterator.Next()

			foundValue, err = extractSampleValues(iterator)
			if err != nil {
				panic(err)
			}
		}
	}

	// Now append all the samples of the currently seeked block to the output.
	for _, sample := range foundValue.Value {
		chunk = append(chunk, model.SamplePair{
			Timestamp: time.Unix(*sample.Timestamp, 0),
			Value:     model.SampleValue(*sample.Value),
		})
	}

	return
}
return yyactionreturn{0, yyRT_FALLTHROUGH} }}, {regexp.MustCompile("\\-?([0-9])+(\\.([0-9])*)?"), nil, []yystartcondition{}, false, func() (yyar yyactionreturn) { defer func() { if r := recover(); r != nil { if r != "yyREJECT" { panic(r) } yyar.returnType = yyRT_REJECT } }() { num, err := strconv.ParseFloat(yytext, 32) if err != nil && err.(*strconv.NumError).Err == strconv.ErrSyntax { panic("Invalid float") } yylval.num = model.SampleValue(num) return yyactionreturn{NUMBER, yyRT_USER_RETURN} } return yyactionreturn{0, yyRT_FALLTHROUGH} }}, {regexp.MustCompile("\\\"(\\\\[^\\n]|[^\\\\\"])*\\\""), nil, []yystartcondition{}, false, func() (yyar yyactionreturn) { defer func() { if r := recover(); r != nil { if r != "yyREJECT" { panic(r) } yyar.returnType = yyRT_REJECT } }() { yylval.str = yytext[1 : len(yytext)-1]
// === time() model.SampleValue === func timeImpl(timestamp time.Time, view *viewAdapter, args []Node) interface{} { return model.SampleValue(time.Now().Unix()) }
// GetRangeValuesTests exercises MetricPersistence.GetRangeValues with
// table-driven scenarios over zero, one, and two stored samples, checking
// which samples fall inside each queried inclusive interval.
func GetRangeValuesTests(persistenceMaker func() (MetricPersistence, test.Closer), t test.Tester) {
	// value is a sample to store, positioned by calendar components.
	type value struct {
		year  int
		month time.Month
		day   int
		hour  int
		value float32
	}

	// input is a query interval given by its open (oldest) and end (newest)
	// calendar positions.
	type input struct {
		openYear  int
		openMonth time.Month
		openDay   int
		openHour  int
		endYear   int
		endMonth  time.Month
		endDay    int
		endHour   int
	}

	// output is one expected sample in the query result.
	type output struct {
		year  int
		month time.Month
		day   int
		hour  int
		value float32
	}

	type behavior struct {
		name   string
		input  input
		output []output
	}

	var contexts = []struct {
		name      string
		values    []value
		behaviors []behavior
	}{
		{
			name:   "no values",
			values: []value{},
			behaviors: []behavior{
				{name: "non-existent interval", input: input{openYear: 1984, openMonth: 3, openDay: 30, openHour: 0, endYear: 1985, endMonth: 3, endDay: 30, endHour: 0}},
			},
		},
		{
			name: "singleton value",
			values: []value{
				{year: 1984, month: 3, day: 30, hour: 0, value: 0},
			},
			behaviors: []behavior{
				{name: "start on first value", input: input{openYear: 1984, openMonth: 3, openDay: 30, openHour: 0, endYear: 1985, endMonth: 3, endDay: 30, endHour: 0}, output: []output{{year: 1984, month: 3, day: 30, hour: 0, value: 0}}},
				{name: "end on first value", input: input{openYear: 1983, openMonth: 3, openDay: 30, openHour: 0, endYear: 1984, endMonth: 3, endDay: 30, endHour: 0}, output: []output{{year: 1984, month: 3, day: 30, hour: 0, value: 0}}},
				{name: "overlap on first value", input: input{openYear: 1983, openMonth: 3, openDay: 30, openHour: 0, endYear: 1985, endMonth: 3, endDay: 30, endHour: 0}, output: []output{{year: 1984, month: 3, day: 30, hour: 0, value: 0}}},
			},
		},
		{
			name: "two values",
			values: []value{
				{year: 1984, month: 3, day: 30, hour: 0, value: 0},
				{year: 1985, month: 3, day: 30, hour: 0, value: 1},
			},
			behaviors: []behavior{
				{name: "start on first value", input: input{openYear: 1984, openMonth: 3, openDay: 30, openHour: 0, endYear: 1985, endMonth: 3, endDay: 30, endHour: 0}, output: []output{{year: 1984, month: 3, day: 30, hour: 0, value: 0}, {year: 1985, month: 3, day: 30, hour: 0, value: 1}}},
				{name: "start on second value", input: input{openYear: 1985, openMonth: 3, openDay: 30, openHour: 0, endYear: 1986, endMonth: 3, endDay: 30, endHour: 0}, output: []output{{year: 1985, month: 3, day: 30, hour: 0, value: 1}}},
				{name: "end on first value", input: input{openYear: 1983, openMonth: 3, openDay: 30, openHour: 0, endYear: 1984, endMonth: 3, endDay: 30, endHour: 0}, output: []output{{year: 1984, month: 3, day: 30, hour: 0, value: 0}}},
				{name: "end on second value", input: input{openYear: 1985, openMonth: 1, openDay: 1, openHour: 0, endYear: 1985, endMonth: 3, endDay: 30, endHour: 0}, output: []output{{year: 1985, month: 3, day: 30, hour: 0, value: 1}}},
				{name: "overlap on values", input: input{openYear: 1983, openMonth: 3, openDay: 30, openHour: 0, endYear: 1986, endMonth: 3, endDay: 30, endHour: 0}, output: []output{{year: 1984, month: 3, day: 30, hour: 0, value: 0}, {year: 1985, month: 3, day: 30, hour: 0, value: 1}}},
			},
		},
	}

	for i, context := range contexts {
		// Wrapping in function to enable garbage collection of resources.
		func() {
			p, closer := persistenceMaker()
			defer closer.Close()
			defer p.Close()

			m := model.Metric{
				model.MetricNameLabel: "age_in_years",
			}

			for _, value := range context.values {
				testAppendSample(p, model.Sample{
					Value:     model.SampleValue(value.value),
					Timestamp: time.Date(value.year, value.month, value.day, value.hour, 0, 0, 0, time.UTC),
					Metric:    m,
				}, t)
			}

			for j, behavior := range context.behaviors {
				input := behavior.input
				open := time.Date(input.openYear, input.openMonth, input.openDay, input.openHour, 0, 0, 0, time.UTC)
				end := time.Date(input.endYear, input.endMonth, input.endDay, input.endHour, 0, 0, 0, time.UTC)
				in := model.Interval{
					OldestInclusive: open,
					NewestInclusive: end,
				}

				values, err := p.GetRangeValues(model.NewFingerprintFromMetric(m), in)
				if err != nil {
					t.Fatalf("%d.%d(%s). Could not query for value: %q\n", i, j, behavior.name, err)
				}

				if values == nil && len(behavior.output) != 0 {
					t.Fatalf("%d.%d(%s). Expected %s but got: %s\n", i, j, behavior.name, behavior.output, values)
				}

				if behavior.output == nil {
					if values != nil {
						t.Fatalf("%d.%d(%s). Expected nil values but got: %s\n", i, j, behavior.name, values)
					}
				} else {
					if len(behavior.output) != len(values.Values) {
						t.Fatalf("%d.%d(%s). Expected length %d but got: %d\n", i, j, behavior.name, len(behavior.output), len(values.Values))
					}

					for k, actual := range values.Values {
						expected := behavior.output[k]

						if actual.Value != model.SampleValue(expected.value) {
							t.Fatalf("%d.%d.%d(%s). Expected %d but got: %d\n", i, j, k, behavior.name, expected.value, actual.Value)
						}

						if actual.Timestamp.Year() != expected.year {
							t.Fatalf("%d.%d.%d(%s). Expected %d but got: %d\n", i, j, k, behavior.name, expected.year, actual.Timestamp.Year())
						}

						if actual.Timestamp.Month() != expected.month {
							t.Fatalf("%d.%d.%d(%s). Expected %d but got: %d\n", i, j, k, behavior.name, expected.month, actual.Timestamp.Month())
						}

						// XXX: Find problem here.
						// Mismatches occur in this and have for a long time in the LevelDB
						// case, however not im-memory.
						//
						// if actual.Timestamp.Day() != expected.day {
						// 	t.Fatalf("%d.%d.%d(%s). Expected %d but got: %d\n", i, j, k, behavior.name, expected.day, actual.Timestamp.Day())
						// }
						// if actual.Timestamp.Hour() != expected.hour {
						// 	t.Fatalf("%d.%d.%d(%s). Expected %d but got: %d\n", i, j, k, behavior.name, expected.hour, actual.Timestamp.Hour())
						// }
					}
				}
			}
		}()
	}
}
// StochasticTests quick-checks a MetricPersistence with randomized metrics,
// label counts, and sample counts derived from the generated seed: it appends
// samples at unique random timestamps, then verifies fingerprint lookups by
// label name and label set, and finally performs random range scans over the
// appended timestamps.
func StochasticTests(persistenceMaker func() (MetricPersistence, test.Closer), t test.Tester) {
	stochastic := func(x int) (success bool) {
		p, closer := persistenceMaker()
		defer closer.Close()
		defer p.Close()

		seed := rand.NewSource(int64(x))
		random := rand.New(seed)

		// All fixture dimensions are bounded by stochasticMaximumVariance;
		// at least one metric and at least two samples are always generated.
		numberOfMetrics := random.Intn(stochasticMaximumVariance) + 1
		numberOfSharedLabels := random.Intn(stochasticMaximumVariance)
		numberOfUnsharedLabels := random.Intn(stochasticMaximumVariance)
		numberOfSamples := random.Intn(stochasticMaximumVariance) + 2
		numberOfRangeScans := random.Intn(stochasticMaximumVariance)

		metricTimestamps := map[int]map[int64]bool{}
		// NOTE(review): these two maps are populated below but never read in
		// this chunk.
		metricEarliestSample := map[int]int64{}
		metricNewestSample := map[int]int64{}

		for metricIndex := 0; metricIndex < numberOfMetrics; metricIndex++ {
			sample := model.Sample{
				Metric: model.Metric{},
			}

			v := model.LabelValue(fmt.Sprintf("metric_index_%d", metricIndex))
			sample.Metric[model.MetricNameLabel] = v

			// Shared labels are identical across all generated metrics.
			for sharedLabelIndex := 0; sharedLabelIndex < numberOfSharedLabels; sharedLabelIndex++ {
				l := model.LabelName(fmt.Sprintf("shared_label_%d", sharedLabelIndex))
				v := model.LabelValue(fmt.Sprintf("label_%d", sharedLabelIndex))

				sample.Metric[l] = v
			}

			// Unshared labels embed the metric index, so each belongs to
			// exactly one metric.
			for unsharedLabelIndex := 0; unsharedLabelIndex < numberOfUnsharedLabels; unsharedLabelIndex++ {
				l := model.LabelName(fmt.Sprintf("metric_index_%d_private_label_%d", metricIndex, unsharedLabelIndex))
				v := model.LabelValue(fmt.Sprintf("private_label_%d", unsharedLabelIndex))

				sample.Metric[l] = v
			}

			timestamps := map[int64]bool{}
			metricTimestamps[metricIndex] = timestamps
			var (
				newestSample  int64 = math.MinInt64
				oldestSample  int64 = math.MaxInt64
				nextTimestamp func() int64
			)

			// nextTimestamp draws a random timestamp not used before for this
			// metric and tracks the oldest/newest values seen so far.
			nextTimestamp = func() int64 {
				var candidate int64
				candidate = random.Int63n(math.MaxInt32 - 1)
				if _, has := timestamps[candidate]; has {
					// WART
					candidate = nextTimestamp()
				}
				timestamps[candidate] = true
				if candidate < oldestSample {
					oldestSample = candidate
				}
				if candidate > newestSample {
					newestSample = candidate
				}
				return candidate
			}

			for sampleIndex := 0; sampleIndex < numberOfSamples; sampleIndex++ {
				sample.Timestamp = time.Unix(nextTimestamp(), 0)
				sample.Value = model.SampleValue(sampleIndex)

				err := p.AppendSample(sample)
				if err != nil {
					t.Error(err)
					return
				}
			}

			metricEarliestSample[metricIndex] = oldestSample
			metricNewestSample[metricIndex] = newestSample

			for sharedLabelIndex := 0; sharedLabelIndex < numberOfSharedLabels; sharedLabelIndex++ {
				labelPair := model.LabelSet{
					model.LabelName(fmt.Sprintf("shared_label_%d", sharedLabelIndex)): model.LabelValue(fmt.Sprintf("label_%d", sharedLabelIndex)),
				}
				fingerprints, err := p.GetFingerprintsForLabelSet(labelPair)
				if err != nil {
					t.Error(err)
					return
				}
				if len(fingerprints) == 0 {
					t.Errorf("expected fingerprint count of %d, got %d", 0, len(fingerprints))
					return
				}

				labelName := model.LabelName(fmt.Sprintf("shared_label_%d", sharedLabelIndex))
				fingerprints, err = p.GetFingerprintsForLabelName(labelName)
				if err != nil {
					t.Error(err)
					return
				}
				if len(fingerprints) == 0 {
					t.Errorf("expected fingerprint count of %d, got %d", 0, len(fingerprints))
					return
				}
			}
		}

		// Every shared label must now resolve to all generated metrics.
		for sharedIndex := 0; sharedIndex < numberOfSharedLabels; sharedIndex++ {
			labelName := model.LabelName(fmt.Sprintf("shared_label_%d", sharedIndex))
			fingerprints, err := p.GetFingerprintsForLabelName(labelName)
			if err != nil {
				t.Error(err)
				return
			}

			if len(fingerprints) != numberOfMetrics {
				t.Errorf("expected fingerprint count of %d, got %d", numberOfMetrics, len(fingerprints))
				return
			}
		}

		for metricIndex := 0; metricIndex < numberOfMetrics; metricIndex++ {
			// Every unshared label must resolve to exactly one metric.
			for unsharedLabelIndex := 0; unsharedLabelIndex < numberOfUnsharedLabels; unsharedLabelIndex++ {
				labelName := model.LabelName(fmt.Sprintf("metric_index_%d_private_label_%d", metricIndex, unsharedLabelIndex))
				labelValue := model.LabelValue(fmt.Sprintf("private_label_%d", unsharedLabelIndex))
				labelSet := model.LabelSet{
					labelName: labelValue,
				}

				fingerprints, err := p.GetFingerprintsForLabelSet(labelSet)
				if err != nil {
					t.Error(err)
					return
				}
				if len(fingerprints) != 1 {
					t.Errorf("expected fingerprint count of %d, got %d", 1, len(fingerprints))
					return
				}

				fingerprints, err = p.GetFingerprintsForLabelName(labelName)
				if err != nil {
					t.Error(err)
					return
				}
				if len(fingerprints) != 1 {
					t.Errorf("expected fingerprint count of %d, got %d", 1, len(fingerprints))
					return
				}
			}

			// Rebuild the full metric to compute its fingerprint for scans.
			metric := model.Metric{}
			metric[model.MetricNameLabel] = model.LabelValue(fmt.Sprintf("metric_index_%d", metricIndex))

			for i := 0; i < numberOfSharedLabels; i++ {
				l := model.LabelName(fmt.Sprintf("shared_label_%d", i))
				v := model.LabelValue(fmt.Sprintf("label_%d", i))

				metric[l] = v
			}

			for i := 0; i < numberOfUnsharedLabels; i++ {
				l := model.LabelName(fmt.Sprintf("metric_index_%d_private_label_%d", metricIndex, i))
				v := model.LabelValue(fmt.Sprintf("private_label_%d", i))

				metric[l] = v
			}

			for i := 0; i < numberOfRangeScans; i++ {
				timestamps := metricTimestamps[metricIndex]

				var first int64 = 0
				var second int64 = 0

				// Pick two distinct stored timestamps by drawing two distinct
				// positions and walking the (randomly ordered) map.
				for {
					firstCandidate := random.Int63n(int64(len(timestamps)))
					secondCandidate := random.Int63n(int64(len(timestamps)))

					smallest := int64(-1)
					largest := int64(-1)

					if firstCandidate == secondCandidate {
						continue
					} else if firstCandidate > secondCandidate {
						largest = firstCandidate
						smallest = secondCandidate
					} else {
						largest = secondCandidate
						smallest = firstCandidate
					}

					j := int64(0)
					for i := range timestamps {
						if j == smallest {
							first = i
						} else if j == largest {
							second = i
							break
						}
						j++
					}

					break
				}

				begin := first
				end := second

				if second < first {
					begin, end = second, first
				}

				interval := model.Interval{
					OldestInclusive: time.Unix(begin, 0),
					NewestInclusive: time.Unix(end, 0),
				}

				samples, err := p.GetRangeValues(model.NewFingerprintFromMetric(metric), interval)
				if err != nil {
					t.Error(err)
					return
				}

				// NOTE(review): the message wording is inverted — this check
				// demands at least 2 samples in the scanned range.
				if len(samples.Values) < 2 {
					t.Errorf("expected sample count less than %d, got %d", 2, len(samples.Values))
					return
				}
			}
		}

		return true
	}

	if err := quick.Check(stochastic, nil); err != nil {
		t.Error(err)
	}
}
// === count(vector VectorNode) model.SampleValue === func countImpl(timestamp time.Time, view *viewAdapter, args []Node) interface{} { return model.SampleValue(len(args[0].(VectorNode).Eval(timestamp, view))) }
func GetBoundaryValuesTests(persistenceMaker func() (MetricPersistence, test.Closer), t test.Tester) { type value struct { year int month time.Month day int hour int value float32 } type input struct { openYear int openMonth time.Month openDay int openHour int endYear int endMonth time.Month endDay int endHour int staleness time.Duration } type output struct { open model.SampleValue end model.SampleValue } type behavior struct { name string input input output *output } var contexts = []struct { name string values []value behaviors []behavior }{ { name: "no values", values: []value{}, behaviors: []behavior{ { name: "non-existent interval without staleness policy", input: input{ openYear: 1984, openMonth: 3, openDay: 30, openHour: 0, endYear: 1985, endMonth: 3, endDay: 30, endHour: 0, staleness: time.Duration(0), }, }, { name: "non-existent interval with staleness policy", input: input{ openYear: 1984, openMonth: 3, openDay: 30, openHour: 0, endYear: 1985, endMonth: 3, endDay: 30, endHour: 0, staleness: time.Duration(365*24) * time.Hour, }, }, }, }, { name: "single value", values: []value{ { year: 1984, month: 3, day: 30, hour: 0, value: 0, }, }, behaviors: []behavior{ { name: "on start but missing end without staleness policy", input: input{ openYear: 1984, openMonth: 3, openDay: 30, openHour: 0, endYear: 1985, endMonth: 3, endDay: 30, endHour: 0, staleness: time.Duration(0), }, }, { name: "non-existent interval after within staleness policy", input: input{ openYear: 1984, openMonth: 3, openDay: 31, openHour: 0, endYear: 1985, endMonth: 3, endDay: 30, endHour: 0, staleness: time.Duration(4380) * time.Hour, }, }, { name: "non-existent interval after without staleness policy", input: input{ openYear: 1984, openMonth: 3, openDay: 31, openHour: 0, endYear: 1985, endMonth: 3, endDay: 30, endHour: 0, staleness: time.Duration(0), }, }, { name: "non-existent interval before with staleness policy", input: input{ openYear: 1983, openMonth: 3, openDay: 30, openHour: 0, 
endYear: 1984, endMonth: 3, endDay: 29, endHour: 0, staleness: time.Duration(365*24) * time.Hour, }, }, { name: "non-existent interval before without staleness policy", input: input{ openYear: 1983, openMonth: 3, openDay: 30, openHour: 0, endYear: 1984, endMonth: 3, endDay: 29, endHour: 0, staleness: time.Duration(0), }, }, { name: "on end but not start without staleness policy", input: input{ openYear: 1983, openMonth: 3, openDay: 30, openHour: 0, endYear: 1984, endMonth: 3, endDay: 30, endHour: 0, staleness: time.Duration(0), }, }, { name: "on end but not start without staleness policy", input: input{ openYear: 1983, openMonth: 3, openDay: 30, openHour: 0, endYear: 1984, endMonth: 3, endDay: 30, endHour: 0, staleness: time.Duration(365*24) * time.Hour, }, }, { name: "before point without staleness policy", input: input{ openYear: 1982, openMonth: 3, openDay: 30, openHour: 0, endYear: 1983, endMonth: 3, endDay: 30, endHour: 0, staleness: time.Duration(0), }, }, { name: "before point with staleness policy", input: input{ openYear: 1982, openMonth: 3, openDay: 30, openHour: 0, endYear: 1983, endMonth: 3, endDay: 30, endHour: 0, staleness: time.Duration(365*24) * time.Hour, }, }, { name: "after point without staleness policy", input: input{ openYear: 1985, openMonth: 3, openDay: 30, openHour: 0, endYear: 1986, endMonth: 3, endDay: 30, endHour: 0, staleness: time.Duration(0), }, }, { name: "after point with staleness policy", input: input{ openYear: 1985, openMonth: 3, openDay: 30, openHour: 0, endYear: 1986, endMonth: 3, endDay: 30, endHour: 0, staleness: time.Duration(365*24) * time.Hour, }, }, { name: "spanning point without staleness policy", input: input{ openYear: 1983, openMonth: 9, openDay: 29, openHour: 12, endYear: 1984, endMonth: 9, endDay: 28, endHour: 12, staleness: time.Duration(0), }, }, { name: "spanning point with staleness policy", input: input{ openYear: 1983, openMonth: 9, openDay: 29, openHour: 12, endYear: 1984, endMonth: 9, endDay: 28, endHour: 
12, staleness: time.Duration(365*24) * time.Hour, }, }, }, }, { name: "double values", values: []value{ { year: 1984, month: 3, day: 30, hour: 0, value: 0, }, { year: 1985, month: 3, day: 30, hour: 0, value: 1, }, }, behaviors: []behavior{ { name: "on points without staleness policy", input: input{ openYear: 1984, openMonth: 3, openDay: 30, openHour: 0, endYear: 1985, endMonth: 3, endDay: 30, endHour: 0, staleness: time.Duration(0), }, output: &output{ open: 0, end: 1, }, }, { name: "on points with staleness policy", input: input{ openYear: 1984, openMonth: 3, openDay: 30, openHour: 0, endYear: 1985, endMonth: 3, endDay: 30, endHour: 0, staleness: time.Duration(365*24) * time.Hour, }, output: &output{ open: 0, end: 1, }, }, { name: "on first before second outside of staleness", input: input{ openYear: 1984, openMonth: 3, openDay: 30, openHour: 0, endYear: 1984, endMonth: 6, endDay: 29, endHour: 6, staleness: time.Duration(2190) * time.Hour, }, }, { name: "on first before second within staleness", input: input{ openYear: 1984, openMonth: 3, openDay: 30, openHour: 0, endYear: 1984, endMonth: 6, endDay: 29, endHour: 6, staleness: time.Duration(356*24) * time.Hour, }, }, { name: "on first after second outside of staleness", input: input{ openYear: 1984, openMonth: 3, openDay: 30, openHour: 0, endYear: 1985, endMonth: 6, endDay: 29, endHour: 6, staleness: time.Duration(1) * time.Hour, }, }, { name: "on first after second within staleness", input: input{ openYear: 1984, openMonth: 3, openDay: 30, openHour: 0, endYear: 1985, endMonth: 6, endDay: 29, endHour: 6, staleness: time.Duration(356*24) * time.Hour, }, output: &output{ open: 0, end: 1, }, }, }, }, } for i, context := range contexts { // Wrapping in function to enable garbage collection of resources. 
func() { p, closer := persistenceMaker() defer closer.Close() defer p.Close() m := model.Metric{ model.MetricNameLabel: "age_in_years", } for _, value := range context.values { testAppendSample(p, model.Sample{ Value: model.SampleValue(value.value), Timestamp: time.Date(value.year, value.month, value.day, value.hour, 0, 0, 0, time.UTC), Metric: m, }, t) } for j, behavior := range context.behaviors { input := behavior.input open := time.Date(input.openYear, input.openMonth, input.openDay, input.openHour, 0, 0, 0, time.UTC) end := time.Date(input.endYear, input.endMonth, input.endDay, input.endHour, 0, 0, 0, time.UTC) interval := model.Interval{ OldestInclusive: open, NewestInclusive: end, } po := StalenessPolicy{ DeltaAllowance: input.staleness, } openValue, endValue, err := p.GetBoundaryValues(model.NewFingerprintFromMetric(m), interval, po) if err != nil { t.Fatalf("%d.%d(%s). Could not query for value: %q\n", i, j, behavior.name, err) } if behavior.output == nil { if openValue != nil { t.Fatalf("%d.%d(%s). Expected open to be nil but got: %q\n", i, j, behavior.name, openValue) } if endValue != nil { t.Fatalf("%d.%d(%s). Expected end to be nil but got: %q\n", i, j, behavior.name, endValue) } } else { if openValue == nil { t.Fatalf("%d.%d(%s). Expected open to be %s but got nil\n", i, j, behavior.name, behavior.output) } if endValue == nil { t.Fatalf("%d.%d(%s). Expected end to be %s but got nil\n", i, j, behavior.name, behavior.output) } if openValue.Value != behavior.output.open { t.Fatalf("%d.%d(%s). Expected open to be %s but got %s\n", i, j, behavior.name, behavior.output.open, openValue.Value) } if endValue.Value != behavior.output.end { t.Fatalf("%d.%d(%s). Expected end to be %s but got %s\n", i, j, behavior.name, behavior.output.end, endValue.Value) } } } }() } }
func (p *processor001) Process(stream io.ReadCloser, baseLabels model.LabelSet, results chan Result) (err error) { // TODO(matt): Replace with plain-jane JSON unmarshalling. defer stream.Close() buffer, err := ioutil.ReadAll(stream) if err != nil { return } entities := entity001{} err = json.Unmarshal(buffer, &entities) if err != nil { return } now := p.time.Now() // TODO(matt): This outer loop is a great basis for parallelization. for _, entity := range entities { for _, value := range entity.Metric.Value { metric := model.Metric{} for label, labelValue := range baseLabels { metric[label] = labelValue } for label, labelValue := range entity.BaseLabels { metric[model.LabelName(label)] = model.LabelValue(labelValue) } for label, labelValue := range value.Labels { metric[model.LabelName(label)] = model.LabelValue(labelValue) } switch entity.Metric.MetricType { case gauge001, counter001: sampleValue, ok := value.Value.(float64) if !ok { err = fmt.Errorf("Could not convert value from %s %s to float64.", entity, value) continue } sample := model.Sample{ Metric: metric, Timestamp: now, Value: model.SampleValue(sampleValue), } results <- Result{ Err: err, Sample: sample, } break case histogram001: sampleValue, ok := value.Value.(map[string]interface{}) if !ok { err = fmt.Errorf("Could not convert value from %q to a map[string]interface{}.", value.Value) continue } for percentile, percentileValue := range sampleValue { individualValue, ok := percentileValue.(float64) if !ok { err = fmt.Errorf("Could not convert value from %q to a float64.", percentileValue) continue } childMetric := make(map[model.LabelName]model.LabelValue, len(metric)+1) for k, v := range metric { childMetric[k] = v } childMetric[model.LabelName(percentile001)] = model.LabelValue(percentile) sample := model.Sample{ Metric: childMetric, Timestamp: now, Value: model.SampleValue(individualValue), } results <- Result{ Err: err, Sample: sample, } } break default: } } } return }
// GetRangeValues returns all stored samples for the given fingerprint whose
// timestamps fall within the inclusive interval i, sorted by time.
// It returns a nil SampleSet (and a nil error) when no samples lie in the
// interval.
func (l *LevelDBMetricPersistence) GetRangeValues(fp model.Fingerprint, i model.Interval) (v *model.SampleSet, err error) {
	begin := time.Now()

	// Record call duration and success/failure for instrumentation.
	defer func() {
		duration := time.Since(begin)

		recordOutcome(duration, err, map[string]string{operation: getRangeValues, result: success}, map[string]string{operation: getRangeValues, result: failure})
	}()

	// Seek key: the position of the oldest admissible sample for this
	// fingerprint.
	k := &dto.SampleKey{
		Fingerprint: fp.ToDTO(),
		Timestamp:   indexable.EncodeTime(i.OldestInclusive),
	}

	e, err := coding.NewProtocolBuffer(k).Encode()
	if err != nil {
		return
	}

	iterator := l.metricSamples.NewIterator(true)
	defer iterator.Close()

	// predicate reports whether a key lies beyond the newest admissible
	// timestamp, i.e. past the end of the requested interval.
	predicate := keyIsOlderThan(i.NewestInclusive)

	for valid := iterator.Seek(e); valid; valid = iterator.Next() {
		// NOTE: retrievedKey is pre-declared with := so that plain `=` can be
		// used on the next line, which avoids shadowing the named return err
		// inside this loop scope.
		retrievedKey := &dto.SampleKey{}

		retrievedKey, err = extractSampleKey(iterator)
		if err != nil {
			return
		}

		// Stop once we have iterated past the end of the interval.
		if predicate(retrievedKey) {
			break
		}

		// Stop once the iterator crosses into another time series.
		if !fingerprintsEqual(retrievedKey.Fingerprint, k.Fingerprint) {
			break
		}

		retrievedValue, err := extractSampleValues(iterator)
		if err != nil {
			// err is shadowed here, so return it explicitly.
			return nil, err
		}

		// Lazily initialize the result set with the metric on the first hit.
		if v == nil {
			// TODO: memoize/cache this or change the return type to metric.SamplePair.
			m, err := l.GetMetricForFingerprint(fp)
			if err != nil {
				// err is shadowed here, so return it explicitly.
				return v, err
			}

			v = &model.SampleSet{
				Metric: *m,
			}
		}

		v.Values = append(v.Values, model.SamplePair{
			Value:     model.SampleValue(*retrievedValue.Value[0].Value),
			Timestamp: indexable.DecodeTime(retrievedKey.Timestamp),
		})
	}

	// XXX: We should not explicitly sort here but rather rely on the datastore.
	// This adds appreciable overhead.
	if v != nil {
		sort.Sort(v.Values)
	}

	return
}