// Get implements CurationRemarker. func (w *LevelDBCurationRemarker) Get(c *curationKey) (t clientmodel.Timestamp, ok bool, err error) { k := &dto.CurationKey{} c.dump(k) v := &dto.CurationValue{} ok, err = w.LevelDBPersistence.Get(k, v) if err != nil || !ok { return clientmodel.TimestampFromUnix(0), ok, err } return clientmodel.TimestampFromUnix(v.GetLastCompletionTimestamp()), true, nil }
// Get implements HighWatermarker. func (w *LevelDBHighWatermarker) Get(f *clientmodel.Fingerprint) (t clientmodel.Timestamp, ok bool, err error) { k := &dto.Fingerprint{} dumpFingerprint(k, f) v := &dto.MetricHighWatermark{} ok, err = w.LevelDBPersistence.Get(k, v) if err != nil { return t, ok, err } if !ok { return clientmodel.TimestampFromUnix(0), ok, nil } t = clientmodel.TimestampFromUnix(v.GetTimestamp()) return t, true, nil }
func extractCounter(out Ingester, o *ProcessOptions, f *dto.MetricFamily) error { samples := make(model.Samples, 0, len(f.Metric)) for _, m := range f.Metric { if m.Counter == nil { continue } sample := new(model.Sample) samples = append(samples, sample) if m.TimestampMs != nil { sample.Timestamp = model.TimestampFromUnix(*m.TimestampMs / 1000) } else { sample.Timestamp = o.Timestamp } sample.Metric = model.Metric{} metric := sample.Metric for _, p := range m.Label { metric[model.LabelName(p.GetName())] = model.LabelValue(p.GetValue()) } metric[model.MetricNameLabel] = model.LabelValue(f.GetName()) sample.Value = model.SampleValue(m.Counter.GetValue()) } return out.Ingest(&Result{Samples: samples}) }
func AppendSampleAsPureSparseAppendTests(p metric.Persistence, t test.Tester) { appendSample := func(x int) (success bool) { v := clientmodel.SampleValue(x) ts := clientmodel.TimestampFromUnix(int64(x)) labelName := clientmodel.LabelName(x) labelValue := clientmodel.LabelValue(x) l := clientmodel.Metric{labelName: labelValue} sample := &clientmodel.Sample{ Value: v, Timestamp: ts, Metric: l, } err := p.AppendSamples(clientmodel.Samples{sample}) success = err == nil if !success { t.Error(err) } return } if err := quick.Check(appendSample, nil); err != nil { t.Error(err) } }
// Load deserializes this SampleKey from a DTO. func (s *SampleKey) Load(d *dto.SampleKey) { f := &clientmodel.Fingerprint{} loadFingerprint(f, d.GetFingerprint()) s.Fingerprint = f s.FirstTimestamp = indexable.DecodeTime(d.Timestamp) s.LastTimestamp = clientmodel.TimestampFromUnix(d.GetLastTimestamp()) s.SampleCount = d.GetSampleCount() }
func AppendSampleAsSparseAppendWithReadsTests(p metric.Persistence, t test.Tester) { appendSample := func(x int) (success bool) { v := clientmodel.SampleValue(x) ts := clientmodel.TimestampFromUnix(int64(x)) labelName := clientmodel.LabelName(x) labelValue := clientmodel.LabelValue(x) l := clientmodel.Metric{labelName: labelValue} sample := &clientmodel.Sample{ Value: v, Timestamp: ts, Metric: l, } err := p.AppendSamples(clientmodel.Samples{sample}) if err != nil { t.Error(err) return } values, err := p.GetLabelValuesForLabelName(labelName) if err != nil { t.Error(err) return } if len(values) != 1 { t.Errorf("expected label values count of %d, got %d", 1, len(values)) return } fingerprints, err := p.GetFingerprintsForLabelMatchers(metric.LabelMatchers{{ Type: metric.Equal, Name: labelName, Value: labelValue, }}) if err != nil { t.Error(err) return } if len(fingerprints) != 1 { t.Errorf("expected fingerprint count of %d, got %d", 1, len(fingerprints)) return } return true } if err := quick.Check(appendSample, nil); err != nil { t.Error(err) } }
func TestTimeEndToEnd(t *testing.T) { tester := func(x int) bool { random := rand.New(rand.NewSource(int64(x))) buffer := make([]byte, 8) incoming := clientmodel.TimestampFromUnix(random.Int63()) EncodeTimeInto(buffer, incoming) outgoing := DecodeTime(buffer) return incoming.Equal(outgoing) && incoming.Unix() == outgoing.Unix() } if err := quick.Check(tester, nil); err != nil { t.Error(err) } }
func AppendSampleAsPureSingleEntityAppendTests(p metric.Persistence, t test.Tester) { appendSample := func(x int) bool { sample := &clientmodel.Sample{ Value: clientmodel.SampleValue(x), Timestamp: clientmodel.TimestampFromUnix(int64(x)), Metric: clientmodel.Metric{clientmodel.MetricNameLabel: "my_metric"}, } err := p.AppendSamples(clientmodel.Samples{sample}) return err == nil } if err := quick.Check(appendSample, nil); err != nil { t.Error(err) } }
// unmarshalValues decodes marshalled samples into dest and returns either dest // or a new slice containing those values if dest has insufficient capacity. func unmarshalValues(buf []byte, dest metric.Values) metric.Values { if buf[0] != formatVersion { panic("unsupported format version") } n := (len(buf) - formatVersionSize) / sampleSize if cap(dest) < n { dest = make(metric.Values, n) } else { dest = dest[0:n] } for i := 0; i < n; i++ { offset := formatVersionSize + i*sampleSize dest[i].Timestamp = clientmodel.TimestampFromUnix(int64(binary.LittleEndian.Uint64(buf[offset:]))) dest[i].Value = clientmodel.SampleValue(math.Float64frombits(binary.LittleEndian.Uint64(buf[offset+8:]))) } return dest }
// QueryRange evaluates a vector expression over a time range and writes the
// resulting matrix as JSON to the response. Query parameters: "expr" (the
// expression), "end" (range end, Unix seconds; 0/absent means now), "range"
// (duration in seconds), and "step" (resolution in seconds, minimum 1).
func (serv MetricsService) QueryRange(w http.ResponseWriter, r *http.Request) {
	setAccessControlHeaders(w)
	w.Header().Set("Content-Type", "application/json")

	params := http_utils.GetQueryParams(r)
	expr := params.Get("expr")
	// Parse errors are deliberately ignored: a missing or malformed
	// parameter yields 0, which the defaulting logic below handles.
	end, _ := strconv.ParseInt(params.Get("end"), 0, 64)
	duration, _ := strconv.ParseInt(params.Get("range"), 0, 64)
	step, _ := strconv.ParseInt(params.Get("step"), 0, 64)

	exprNode, err := rules.LoadExprFromString(expr)
	if err != nil {
		fmt.Fprint(w, ast.ErrorToJSON(err))
		return
	}
	// Only vector-typed expressions can be evaluated over a range.
	if exprNode.Type() != ast.VECTOR {
		fmt.Fprint(w, ast.ErrorToJSON(errors.New("Expression does not evaluate to vector type")))
		return
	}

	if end == 0 {
		// Default the range end to the current time.
		end = clientmodel.Now().Unix()
	}

	if step < 1 {
		step = 1
	}

	if end-duration < 0 {
		// Clamp so the range does not start before the Unix epoch.
		duration = end
	}

	// Align the start to step "tick" boundary.
	end -= end % step

	queryStats := stats.NewTimerGroup()

	evalTimer := queryStats.GetTimer(stats.TotalEvalTime).Start()
	matrix, err := ast.EvalVectorRange(
		exprNode.(ast.VectorNode),
		clientmodel.TimestampFromUnix(end-duration),
		clientmodel.TimestampFromUnix(end),
		time.Duration(step)*time.Second,
		serv.Storage,
		queryStats)
	if err != nil {
		fmt.Fprint(w, ast.ErrorToJSON(err))
		return
	}
	evalTimer.Stop()

	sortTimer := queryStats.GetTimer(stats.ResultSortTime).Start()
	sort.Sort(matrix)
	sortTimer.Stop()

	jsonTimer := queryStats.GetTimer(stats.JsonEncodeTime).Start()
	result := ast.TypedValueToJSON(matrix, "matrix")
	jsonTimer.Stop()

	glog.Infof("Range query: %s\nQuery stats:\n%s\n", expr, queryStats)
	fmt.Fprint(w, result)
}
// testBuilder exercises viewRequestBuilder: operations queued via
// GetMetricAtTime, GetMetricAtInterval, and GetMetricRange must pop back out
// (via PopOp) ordered by fingerprint, and the builder must be drained after
// the expected number of pops.
func testBuilder(t test.Tester) {
	// Per-operation request shapes used by the scenario table.
	type atTime struct {
		fingerprint string
		time        clientmodel.Timestamp
	}

	type atInterval struct {
		fingerprint string
		from        clientmodel.Timestamp
		through     clientmodel.Timestamp
		interval    time.Duration
	}

	type atRange struct {
		fingerprint string
		from        clientmodel.Timestamp
		through     clientmodel.Timestamp
	}

	type in struct {
		atTimes     []atTime
		atIntervals []atInterval
		atRanges    []atRange
	}

	// Expected pop order; only the fingerprint is asserted below.
	type out []struct {
		fingerprint string
		operations  ops
	}

	var scenarios = []struct {
		in  in
		out out
	}{
		// Ensure that the fingerprint is sorted in proper order.
		{
			in: in{
				atTimes: []atTime{
					{
						fingerprint: "0000000000000001111-a-4-a",
						time:        clientmodel.TimestampFromUnix(100),
					},
					{
						fingerprint: "0000000000000000000-a-4-a",
						time:        clientmodel.TimestampFromUnix(100),
					},
				},
			},
			out: out{
				{
					fingerprint: "00000000000000000000-a-4-a",
				},
				{
					fingerprint: "00000000000000001111-a-4-a",
				},
			},
		},
		// Ensure that the fingerprint-timestamp pairs are sorted in proper order.
		{
			in: in{
				atTimes: []atTime{
					{
						fingerprint: "1111-a-4-a",
						time:        clientmodel.TimestampFromUnix(100),
					},
					{
						fingerprint: "1111-a-4-a",
						time:        clientmodel.TimestampFromUnix(200),
					},
					{
						fingerprint: "0-a-4-a",
						time:        clientmodel.TimestampFromUnix(100),
					},
					{
						fingerprint: "0-a-4-a",
						time:        clientmodel.TimestampFromUnix(0),
					},
				},
			},
			out: out{
				{
					fingerprint: "00000000000000000000-a-4-a",
				},
				{
					fingerprint: "00000000000000000000-a-4-a",
				},
				{
					fingerprint: "00000000000000001111-a-4-a",
				},
				{
					fingerprint: "00000000000000001111-a-4-a",
				},
			},
		},
		// Ensure grouping of operations
		{
			in: in{
				atTimes: []atTime{
					{
						fingerprint: "1111-a-4-a",
						time:        clientmodel.TimestampFromUnix(100),
					},
				},
				atRanges: []atRange{
					{
						fingerprint: "1111-a-4-a",
						from:        clientmodel.TimestampFromUnix(100),
						through:     clientmodel.TimestampFromUnix(1000),
					},
					{
						fingerprint: "1111-a-4-a",
						from:        clientmodel.TimestampFromUnix(100),
						through:     clientmodel.TimestampFromUnix(9000),
					},
				},
			},
			out: out{
				{
					fingerprint: "00000000000000001111-a-4-a",
				},
				{
					fingerprint: "00000000000000001111-a-4-a",
				},
				{
					fingerprint: "00000000000000001111-a-4-a",
				},
			},
		},
	}

	for i, scenario := range scenarios {
		builder := &viewRequestBuilder{}

		// Queue every requested operation on a fresh builder.
		for _, atTime := range scenario.in.atTimes {
			fingerprint := &clientmodel.Fingerprint{}
			fingerprint.LoadFromString(atTime.fingerprint)
			builder.GetMetricAtTime(fingerprint, atTime.time)
		}

		for _, atInterval := range scenario.in.atIntervals {
			fingerprint := &clientmodel.Fingerprint{}
			fingerprint.LoadFromString(atInterval.fingerprint)
			builder.GetMetricAtInterval(fingerprint, atInterval.from, atInterval.through, atInterval.interval)
		}

		for _, atRange := range scenario.in.atRanges {
			fingerprint := &clientmodel.Fingerprint{}
			fingerprint.LoadFromString(atRange.fingerprint)
			builder.GetMetricRange(fingerprint, atRange.from, atRange.through)
		}

		// Pop operations and compare their fingerprints to the expected
		// sorted order.
		for j, job := range scenario.out {
			got := builder.PopOp()
			if got.Fingerprint().String() != job.fingerprint {
				t.Errorf("%d.%d. expected fingerprint %s, got %s", i, j, job.fingerprint, got.Fingerprint())
			}
		}

		// All queued operations must have been consumed.
		if builder.HasOp() {
			t.Error("Expected builder to have no scan jobs left.")
		}
	}
}
// DecodeTime deserializes a big endian byte array into a Unix time in UTC, // omitting granularity precision less than a second. func DecodeTime(src []byte) clientmodel.Timestamp { return clientmodel.TimestampFromUnix(int64(binary.BigEndian.Uint64(src))) }
func extractSummary(out Ingester, o *ProcessOptions, f *dto.MetricFamily) error { samples := make(model.Samples, 0, len(f.Metric)) for _, m := range f.Metric { if m.Summary == nil { continue } timestamp := o.Timestamp if m.TimestampMs != nil { timestamp = model.TimestampFromUnix(*m.TimestampMs / 1000) } for _, q := range m.Summary.Quantile { sample := new(model.Sample) samples = append(samples, sample) sample.Timestamp = timestamp sample.Metric = model.Metric{} metric := sample.Metric for _, p := range m.Label { metric[model.LabelName(p.GetName())] = model.LabelValue(p.GetValue()) } // BUG(matt): Update other names to "quantile". metric[model.LabelName("quantile")] = model.LabelValue(fmt.Sprint(q.GetQuantile())) metric[model.MetricNameLabel] = model.LabelValue(f.GetName()) sample.Value = model.SampleValue(q.GetValue()) } if m.Summary.SampleSum != nil { sum := new(model.Sample) sum.Timestamp = timestamp metric := model.Metric{} for _, p := range m.Label { metric[model.LabelName(p.GetName())] = model.LabelValue(p.GetValue()) } metric[model.MetricNameLabel] = model.LabelValue(f.GetName() + "_sum") sum.Metric = metric sum.Value = model.SampleValue(m.Summary.GetSampleSum()) samples = append(samples, sum) } if m.Summary.SampleCount != nil { count := new(model.Sample) count.Timestamp = timestamp metric := model.Metric{} for _, p := range m.Label { metric[model.LabelName(p.GetName())] = model.LabelValue(p.GetValue()) } metric[model.MetricNameLabel] = model.LabelValue(f.GetName() + "_count") count.Metric = metric count.Value = model.SampleValue(m.Summary.GetSampleCount()) samples = append(samples, count) } } return out.Ingest(&Result{Samples: samples}) }
// load restores the high watermark from its protobuf representation,
// interpreting the stored value as Unix seconds.
func (w *watermarks) load(d *dto.MetricHighWatermark) {
	seconds := d.GetTimestamp()
	w.High = clientmodel.TimestampFromUnix(seconds)
}
// StochasticTests runs a seeded randomized end-to-end check against the
// persistence produced by persistenceMaker: for a random mix of metrics,
// shared/unshared labels, and samples, it appends data and then verifies
// label-based fingerprint lookups and range scans return the expected
// cardinalities.
func StochasticTests(persistenceMaker func() (metric.Persistence, test.Closer), t test.Tester) {
	stochastic := func(x int) (success bool) {
		p, closer := persistenceMaker()
		defer closer.Close()
		defer p.Close()

		seed := rand.NewSource(int64(x))
		random := rand.New(seed)

		// Size every dimension of the workload from the seed.
		numberOfMetrics := random.Intn(stochasticMaximumVariance) + 1
		numberOfSharedLabels := random.Intn(stochasticMaximumVariance)
		numberOfUnsharedLabels := random.Intn(stochasticMaximumVariance)
		numberOfSamples := random.Intn(stochasticMaximumVariance) + 2
		numberOfRangeScans := random.Intn(stochasticMaximumVariance)

		// Bookkeeping per metric index: the set of generated timestamps
		// and the observed extremes.
		metricTimestamps := map[int]map[int64]bool{}
		metricEarliestSample := map[int]int64{}
		metricNewestSample := map[int]int64{}

		for metricIndex := 0; metricIndex < numberOfMetrics; metricIndex++ {
			sample := &clientmodel.Sample{
				Metric: clientmodel.Metric{},
			}
			v := clientmodel.LabelValue(fmt.Sprintf("metric_index_%d", metricIndex))
			sample.Metric[clientmodel.MetricNameLabel] = v

			// Labels shared by every metric.
			for sharedLabelIndex := 0; sharedLabelIndex < numberOfSharedLabels; sharedLabelIndex++ {
				l := clientmodel.LabelName(fmt.Sprintf("shared_label_%d", sharedLabelIndex))
				v := clientmodel.LabelValue(fmt.Sprintf("label_%d", sharedLabelIndex))

				sample.Metric[l] = v
			}

			// Labels unique to this metric index.
			for unsharedLabelIndex := 0; unsharedLabelIndex < numberOfUnsharedLabels; unsharedLabelIndex++ {
				l := clientmodel.LabelName(fmt.Sprintf("metric_index_%d_private_label_%d", metricIndex, unsharedLabelIndex))
				v := clientmodel.LabelValue(fmt.Sprintf("private_label_%d", unsharedLabelIndex))

				sample.Metric[l] = v
			}

			timestamps := map[int64]bool{}
			metricTimestamps[metricIndex] = timestamps
			var newestSample int64 = math.MinInt64
			var oldestSample int64 = math.MaxInt64

			// nextTimestamp generates a unique timestamp, retrying
			// (recursively) on collision, and tracks the min/max seen.
			var nextTimestamp func() int64

			nextTimestamp = func() int64 {
				var candidate int64
				candidate = random.Int63n(math.MaxInt32 - 1)

				if _, has := timestamps[candidate]; has {
					// WART
					candidate = nextTimestamp()
				}

				timestamps[candidate] = true

				if candidate < oldestSample {
					oldestSample = candidate
				}

				if candidate > newestSample {
					newestSample = candidate
				}

				return candidate
			}

			// BUG(matt): Invariant of the in-memory database assumes this.
			sortedTimestamps := timeslice{}
			for sampleIndex := 0; sampleIndex < numberOfSamples; sampleIndex++ {
				sortedTimestamps = append(sortedTimestamps, clientmodel.TimestampFromUnix(nextTimestamp()))
			}

			sort.Sort(sortedTimestamps)

			// Append the samples in ascending timestamp order.
			for sampleIndex := 0; sampleIndex < numberOfSamples; sampleIndex++ {
				sample.Timestamp = sortedTimestamps[sampleIndex]
				sample.Value = clientmodel.SampleValue(sampleIndex)

				err := p.AppendSamples(clientmodel.Samples{sample})

				if err != nil {
					t.Error(err)
					return
				}
			}

			metricEarliestSample[metricIndex] = oldestSample
			metricNewestSample[metricIndex] = newestSample

			// Each shared label must match at least one fingerprint.
			for sharedLabelIndex := 0; sharedLabelIndex < numberOfSharedLabels; sharedLabelIndex++ {
				matchers := metric.LabelMatchers{{
					Type:  metric.Equal,
					Name:  clientmodel.LabelName(fmt.Sprintf("shared_label_%d", sharedLabelIndex)),
					Value: clientmodel.LabelValue(fmt.Sprintf("label_%d", sharedLabelIndex)),
				}}

				fingerprints, err := p.GetFingerprintsForLabelMatchers(matchers)
				if err != nil {
					t.Error(err)
					return
				}
				if len(fingerprints) == 0 {
					t.Errorf("expected fingerprint count of %d, got %d", 0, len(fingerprints))
					return
				}
			}
		}

		for metricIndex := 0; metricIndex < numberOfMetrics; metricIndex++ {
			// Each unshared label must match exactly one fingerprint.
			for unsharedLabelIndex := 0; unsharedLabelIndex < numberOfUnsharedLabels; unsharedLabelIndex++ {
				labelName := clientmodel.LabelName(fmt.Sprintf("metric_index_%d_private_label_%d", metricIndex, unsharedLabelIndex))
				labelValue := clientmodel.LabelValue(fmt.Sprintf("private_label_%d", unsharedLabelIndex))
				matchers := metric.LabelMatchers{{
					Type:  metric.Equal,
					Name:  labelName,
					Value: labelValue,
				}}

				fingerprints, err := p.GetFingerprintsForLabelMatchers(matchers)
				if err != nil {
					t.Error(err)
					return
				}
				if len(fingerprints) != 1 {
					t.Errorf("expected fingerprint count of %d, got %d", 1, len(fingerprints))
					return
				}
			}

			// Rebuild the full metric so its fingerprint can be derived
			// for range queries.
			m := clientmodel.Metric{}
			m[clientmodel.MetricNameLabel] = clientmodel.LabelValue(fmt.Sprintf("metric_index_%d", metricIndex))

			for i := 0; i < numberOfSharedLabels; i++ {
				l := clientmodel.LabelName(fmt.Sprintf("shared_label_%d", i))
				v := clientmodel.LabelValue(fmt.Sprintf("label_%d", i))

				m[l] = v
			}

			for i := 0; i < numberOfUnsharedLabels; i++ {
				l := clientmodel.LabelName(fmt.Sprintf("metric_index_%d_private_label_%d", metricIndex, i))
				v := clientmodel.LabelValue(fmt.Sprintf("private_label_%d", i))

				m[l] = v
			}

			for i := 0; i < numberOfRangeScans; i++ {
				timestamps := metricTimestamps[metricIndex]

				// Pick two distinct generated timestamps by ordinal
				// position within the (randomly ordered) map iteration.
				var first int64
				var second int64
				for {
					firstCandidate := random.Int63n(int64(len(timestamps)))
					secondCandidate := random.Int63n(int64(len(timestamps)))

					smallest := int64(-1)
					largest := int64(-1)

					if firstCandidate == secondCandidate {
						continue
					} else if firstCandidate > secondCandidate {
						largest = firstCandidate
						smallest = secondCandidate
					} else {
						largest = secondCandidate
						smallest = firstCandidate
					}

					j := int64(0)
					for i := range timestamps {
						if j == smallest {
							first = i
						} else if j == largest {
							second = i
							break
						}
						j++
					}

					break
				}

				begin := first
				end := second

				if second < first {
					begin, end = second, first
				}

				interval := metric.Interval{
					OldestInclusive: clientmodel.TimestampFromUnix(begin),
					NewestInclusive: clientmodel.TimestampFromUnix(end),
				}

				samples := metric.Values{}
				fp := &clientmodel.Fingerprint{}
				fp.LoadFromMetric(m)

				// A range spanning two distinct sample timestamps must
				// return at least two values, whichever backend is used.
				switch persistence := p.(type) {
				case metric.View:
					samples = persistence.GetRangeValues(fp, interval)
					if len(samples) < 2 {
						t.Fatalf("expected sample count greater than %d, got %d", 2, len(samples))
					}
				case *LevelDBPersistence:
					var err error
					samples, err = levelDBGetRangeValues(persistence, fp, interval)
					if err != nil {
						t.Fatal(err)
					}
					if len(samples) < 2 {
						t.Fatalf("expected sample count greater than %d, got %d", 2, len(samples))
					}
				default:
					t.Error("Unexpected type of metric.Persistence.")
				}
			}
		}

		return true
	}

	if err := quick.Check(stochastic, nil); err != nil {
		t.Error(err)
	}
}