Example #1
// === changes(matrix model.ValMatrix) Vector ===
func funcChanges(ev *evaluator, args Expressions) model.Value {
	in := ev.evalMatrix(args[0])
	out := make(vector, 0, len(in))

	for _, samples := range in {
		changes := 0
		prev := model.SampleValue(samples.Values[0].Value)
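		// Compare each sample to its predecessor and count every change in value.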
		for _, sample := range samples.Values[1:] {
			current := sample.Value
			if current != prev {
				changes++
			}
			prev = current
		}

		rs := &sample{
			Metric:    samples.Metric,
			Value:     model.SampleValue(changes),
			Timestamp: ev.Timestamp,
		}
		rs.Metric.Del(model.MetricNameLabel)
		out = append(out, rs)
	}
	return out
}
Example #2
// === delta(matrix model.ValMatrix, isCounter=0 model.ValScalar) Vector ===
func funcDelta(ev *evaluator, args Expressions) model.Value {
	isCounter := len(args) >= 2 && ev.evalInt(args[1]) > 0
	resultVector := vector{}

	// If we treat these metrics as counters, we need to fetch all values
	// in the interval to find breaks in the timeseries' monotonicity.
	// I.e. if a counter resets, we want to ignore that reset.
	var matrixValue matrix
	if isCounter {
		matrixValue = ev.evalMatrix(args[0])
	} else {
		matrixValue = ev.evalMatrixBounds(args[0])
	}
	for _, samples := range matrixValue {
		// No sense in trying to compute a delta without at least two points. Drop
		// this vector element.
		if len(samples.Values) < 2 {
			continue
		}

		var (
			counterCorrection model.SampleValue
			lastValue         model.SampleValue
		)
		for _, sample := range samples.Values {
			currentValue := sample.Value
			if isCounter && currentValue < lastValue {
				counterCorrection += lastValue - currentValue
			}
			lastValue = currentValue
		}
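		// Raw delta: last value minus first value, plus the accumulated counter-reset correction.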
		resultValue := lastValue - samples.Values[0].Value + counterCorrection

		targetInterval := args[0].(*MatrixSelector).Range
		sampledInterval := samples.Values[len(samples.Values)-1].Timestamp.Sub(samples.Values[0].Timestamp)
		if sampledInterval == 0 {
			// All samples share the same timestamp; no delta can be computed over a zero time range.
			continue
		}
		// Correct for differences in target vs. actual delta interval.
		//
		// Above, we didn't actually calculate the delta for the specified target
		// interval, but for an interval between the first and last found samples
		// under the target interval, which will usually have less time between
		// them. Depending on how many samples are found under a target interval,
		// the delta results are distorted and temporal aliasing occurs (ugly
		// bumps). This effect is corrected for below.
		intervalCorrection := model.SampleValue(targetInterval) / model.SampleValue(sampledInterval)
		resultValue *= intervalCorrection

		resultSample := &sample{
			Metric:    samples.Metric,
			Value:     resultValue,
			Timestamp: ev.Timestamp,
		}
		resultSample.Metric.Del(model.MetricNameLabel)
		resultVector = append(resultVector, resultSample)
	}
	return resultVector
}
Example #3
// === scalar(node model.ValVector) Scalar ===
func funcScalar(ev *evaluator, args Expressions) model.Value {
	v := ev.evalVector(args[0])
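	// scalar() is only defined for single-element vectors; anything else yields NaN.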
	if len(v) != 1 {
		return &model.Scalar{Value: model.SampleValue(math.NaN()), Timestamp: ev.Timestamp}
	}
	return &model.Scalar{Value: model.SampleValue(v[0].Value), Timestamp: ev.Timestamp}
}
Example #4
// sampleValueAtIndex implements chunkIterator.
func (it *deltaEncodedChunkIterator) sampleValueAtIndex(idx int) model.SampleValue {
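	// Each sample occupies tBytes+vBytes after the chunk header; skip the timestamp bytes to reach the value.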
	offset := deltaHeaderBytes + idx*int(it.tBytes+it.vBytes) + int(it.tBytes)

	if it.isInt {
		switch it.vBytes {
		case d0:
			return it.baseV
		case d1:
			return it.baseV + model.SampleValue(int8(it.c[offset]))
		case d2:
			return it.baseV + model.SampleValue(int16(binary.LittleEndian.Uint16(it.c[offset:])))
		case d4:
			return it.baseV + model.SampleValue(int32(binary.LittleEndian.Uint32(it.c[offset:])))
		// No d8 for ints.
		default:
			panic("invalid number of bytes for integer delta")
		}
	} else {
		switch it.vBytes {
		case d4:
			return it.baseV + model.SampleValue(math.Float32frombits(binary.LittleEndian.Uint32(it.c[offset:])))
		case d8:
			// For d8, the value is stored as an absolute value, not as a delta.
			return model.SampleValue(math.Float64frombits(binary.LittleEndian.Uint64(it.c[offset:])))
		default:
			panic("invalid number of bytes for floating point delta")
		}
	}
}
Example #5
// === predict_linear(node model.ValMatrix, k model.ValScalar) Vector ===
func funcPredictLinear(ev *evaluator, args Expressions) model.Value {
	vec := funcDeriv(ev, args[0:1]).(vector)
	duration := model.SampleValue(ev.evalFloat(args[1]))

	excludedLabels := map[model.LabelName]struct{}{
		model.MetricNameLabel: {},
	}

	// Calculate predicted delta over the duration.
	signatureToDelta := map[uint64]model.SampleValue{}
	for _, el := range vec {
		signature := model.SignatureWithoutLabels(el.Metric.Metric, excludedLabels)
		signatureToDelta[signature] = el.Value * duration
	}

	// Add the predicted delta to the last value of each series.
	matrixBounds := ev.evalMatrixBounds(args[0])
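	// evalMatrixBounds yields only the boundary samples (first and last) of each series,
	// so Values[1] below is the most recent value in the range.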
	outVec := make(vector, 0, len(signatureToDelta))
	for _, samples := range matrixBounds {
		if len(samples.Values) < 2 {
			continue
		}
		signature := model.SignatureWithoutLabels(samples.Metric.Metric, excludedLabels)
		delta, ok := signatureToDelta[signature]
		if ok {
			samples.Metric.Del(model.MetricNameLabel)
			outVec = append(outVec, &sample{
				Metric:    samples.Metric,
				Value:     delta + samples.Values[1].Value,
				Timestamp: ev.Timestamp,
			})
		}
	}
	return outVec
}
Example #6
// scalarBinop evaluates a binary operation between two scalars.
func scalarBinop(op itemType, lhs, rhs model.SampleValue) model.SampleValue {
	switch op {
	case itemADD:
		return lhs + rhs
	case itemSUB:
		return lhs - rhs
	case itemMUL:
		return lhs * rhs
	case itemDIV:
		return lhs / rhs
	case itemPOW:
		return model.SampleValue(math.Pow(float64(lhs), float64(rhs)))
	case itemMOD:
		return model.SampleValue(math.Mod(float64(lhs), float64(rhs)))
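	// Comparison operators: btos maps a boolean result to 1 (true) or 0 (false).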
	case itemEQL:
		return btos(lhs == rhs)
	case itemNEQ:
		return btos(lhs != rhs)
	case itemGTR:
		return btos(lhs > rhs)
	case itemLSS:
		return btos(lhs < rhs)
	case itemGTE:
		return btos(lhs >= rhs)
	case itemLTE:
		return btos(lhs <= rhs)
	}
	panic(fmt.Errorf("operator %q not allowed for scalar operations", op))
}
Example #7
// vectorElemBinop evaluates a binary operation between two vector elements.
func vectorElemBinop(op itemType, lhs, rhs model.SampleValue) (model.SampleValue, bool) {
	switch op {
	case itemADD:
		return lhs + rhs, true
	case itemSUB:
		return lhs - rhs, true
	case itemMUL:
		return lhs * rhs, true
	case itemDIV:
		return lhs / rhs, true
	case itemPOW:
		return model.SampleValue(math.Pow(float64(lhs), float64(rhs))), true
	case itemMOD:
		return model.SampleValue(math.Mod(float64(lhs), float64(rhs))), true
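	// For comparison operators, the returned bool indicates whether the element is kept in the result vector.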
	case itemEQL:
		return lhs, lhs == rhs
	case itemNEQ:
		return lhs, lhs != rhs
	case itemGTR:
		return lhs, lhs > rhs
	case itemLSS:
		return lhs, lhs < rhs
	case itemGTE:
		return lhs, lhs >= rhs
	case itemLTE:
		return lhs, lhs <= rhs
	}
	panic(fmt.Errorf("operator %q not allowed for operations between vectors", op))
}
Example #8
func extractSummary(o *DecodeOptions, f *dto.MetricFamily) model.Vector {
	samples := make(model.Vector, 0, len(f.Metric))

	for _, m := range f.Metric {
		if m.Summary == nil {
			continue
		}

		timestamp := o.Timestamp
		if m.TimestampMs != nil {
			timestamp = model.TimeFromUnixNano(*m.TimestampMs * 1000000)
		}

		for _, q := range m.Summary.Quantile {
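			// One sample per quantile; the _sum and _count samples are appended below.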
			lset := make(model.LabelSet, len(m.Label)+2)
			for _, p := range m.Label {
				lset[model.LabelName(p.GetName())] = model.LabelValue(p.GetValue())
			}
			// BUG(matt): Update other names to "quantile".
			lset[model.LabelName(model.QuantileLabel)] = model.LabelValue(fmt.Sprint(q.GetQuantile()))
			lset[model.MetricNameLabel] = model.LabelValue(f.GetName())

			samples = append(samples, &model.Sample{
				Metric:    model.Metric(lset),
				Value:     model.SampleValue(q.GetValue()),
				Timestamp: timestamp,
			})
		}

		if m.Summary.SampleSum != nil {
			lset := make(model.LabelSet, len(m.Label)+1)
			for _, p := range m.Label {
				lset[model.LabelName(p.GetName())] = model.LabelValue(p.GetValue())
			}
			lset[model.MetricNameLabel] = model.LabelValue(f.GetName() + "_sum")

			samples = append(samples, &model.Sample{
				Metric:    model.Metric(lset),
				Value:     model.SampleValue(m.Summary.GetSampleSum()),
				Timestamp: timestamp,
			})
		}

		if m.Summary.SampleCount != nil {
			lset := make(model.LabelSet, len(m.Label)+1)
			for _, p := range m.Label {
				lset[model.LabelName(p.GetName())] = model.LabelValue(p.GetValue())
			}
			lset[model.MetricNameLabel] = model.LabelValue(f.GetName() + "_count")

			samples = append(samples, &model.Sample{
				Metric:    model.Metric(lset),
				Value:     model.SampleValue(m.Summary.GetSampleCount()),
				Timestamp: timestamp,
			})
		}
	}

	return samples
}
Example #9
func (acc *deltaEncodedIndexAccessor) sampleValueAtIndex(idx int) model.SampleValue {
	offset := deltaHeaderBytes + idx*int(acc.tBytes+acc.vBytes) + int(acc.tBytes)

	if acc.isInt {
		switch acc.vBytes {
		case d0:
			return acc.baseV
		case d1:
			return acc.baseV + model.SampleValue(int8(acc.c[offset]))
		case d2:
			return acc.baseV + model.SampleValue(int16(binary.LittleEndian.Uint16(acc.c[offset:])))
		case d4:
			return acc.baseV + model.SampleValue(int32(binary.LittleEndian.Uint32(acc.c[offset:])))
		// No d8 for ints.
		default:
			acc.lastErr = fmt.Errorf("invalid number of bytes for integer delta: %d", acc.vBytes)
			return 0
		}
	} else {
		switch acc.vBytes {
		case d4:
			return acc.baseV + model.SampleValue(math.Float32frombits(binary.LittleEndian.Uint32(acc.c[offset:])))
		case d8:
			// For d8, the value is stored as an absolute value, not as a delta.
			return model.SampleValue(math.Float64frombits(binary.LittleEndian.Uint64(acc.c[offset:])))
		default:
			acc.lastErr = fmt.Errorf("invalid number of bytes for floating point delta: %d", acc.vBytes)
			return 0
		}
	}
}
Example #10
// interpolateSamples interpolates a value at a target time between two
// provided sample pairs.
func interpolateSamples(first, second *model.SamplePair, timestamp model.Time) *model.SamplePair {
	dv := second.Value - first.Value
	dt := second.Timestamp.Sub(first.Timestamp)

	dDt := dv / model.SampleValue(dt)
	offset := model.SampleValue(timestamp.Sub(first.Timestamp))
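	// Linear interpolation: the result is first.Value + offset*dDt.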

	return &model.SamplePair{
		Value:     first.Value + (offset * dDt),
		Timestamp: timestamp,
	}
}
Example #11
func (acc *doubleDeltaEncodedIndexAccessor) sampleValueAtIndex(idx int) model.SampleValue {
	if idx == 0 {
		return acc.baseV
	}
	if idx == 1 {
		// If value bytes are at d8, the value is saved directly rather
		// than as a difference.
		if acc.vBytes == d8 {
			return acc.baseΔV
		}
		return acc.baseV + acc.baseΔV
	}

	offset := doubleDeltaHeaderBytes + (idx-2)*int(acc.tBytes+acc.vBytes) + int(acc.tBytes)
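	// Samples 0 and 1 are reconstructed from the header fields above; samples at idx >= 2
	// are decoded relative to the linear extrapolation baseV + idx*baseΔV at this offset.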

	if acc.isInt {
		switch acc.vBytes {
		case d0:
			return acc.baseV +
				model.SampleValue(idx)*acc.baseΔV
		case d1:
			return acc.baseV +
				model.SampleValue(idx)*acc.baseΔV +
				model.SampleValue(int8(acc.c[offset]))
		case d2:
			return acc.baseV +
				model.SampleValue(idx)*acc.baseΔV +
				model.SampleValue(int16(binary.LittleEndian.Uint16(acc.c[offset:])))
		case d4:
			return acc.baseV +
				model.SampleValue(idx)*acc.baseΔV +
				model.SampleValue(int32(binary.LittleEndian.Uint32(acc.c[offset:])))
		// No d8 for ints.
		default:
			acc.lastErr = fmt.Errorf("invalid number of bytes for integer delta: %d", acc.vBytes)
			return 0
		}
	} else {
		switch acc.vBytes {
		case d4:
			return acc.baseV +
				model.SampleValue(idx)*acc.baseΔV +
				model.SampleValue(math.Float32frombits(binary.LittleEndian.Uint32(acc.c[offset:])))
		case d8:
			// For d8, the value is stored as an absolute value, not as a double delta.
			return model.SampleValue(math.Float64frombits(binary.LittleEndian.Uint64(acc.c[offset:])))
		default:
			acc.lastErr = fmt.Errorf("invalid number of bytes for floating point delta: %d", acc.vBytes)
			return 0
		}
	}
}
Example #12
// sampleValueAtIndex implements chunkIterator.
func (it *doubleDeltaEncodedChunkIterator) sampleValueAtIndex(idx int) model.SampleValue {
	if idx == 0 {
		return it.baseV
	}
	if idx == 1 {
		// If value bytes are at d8, the value is saved directly rather
		// than as a difference.
		if it.vBytes == d8 {
			return it.baseΔV
		}
		return it.baseV + it.baseΔV
	}

	offset := doubleDeltaHeaderBytes + (idx-2)*int(it.tBytes+it.vBytes) + int(it.tBytes)

	if it.isInt {
		switch it.vBytes {
		case d0:
			return it.baseV +
				model.SampleValue(idx)*it.baseΔV
		case d1:
			return it.baseV +
				model.SampleValue(idx)*it.baseΔV +
				model.SampleValue(int8(it.c[offset]))
		case d2:
			return it.baseV +
				model.SampleValue(idx)*it.baseΔV +
				model.SampleValue(int16(binary.LittleEndian.Uint16(it.c[offset:])))
		case d4:
			return it.baseV +
				model.SampleValue(idx)*it.baseΔV +
				model.SampleValue(int32(binary.LittleEndian.Uint32(it.c[offset:])))
		// No d8 for ints.
		default:
			panic("invalid number of bytes for integer delta")
		}
	} else {
		switch it.vBytes {
		case d4:
			return it.baseV +
				model.SampleValue(idx)*it.baseΔV +
				model.SampleValue(math.Float32frombits(binary.LittleEndian.Uint32(it.c[offset:])))
		case d8:
			// For d8, the value is stored as an absolute value, not as a double delta.
			return model.SampleValue(math.Float64frombits(binary.LittleEndian.Uint64(it.c[offset:])))
		default:
			panic("invalid number of bytes for floating point delta")
		}
	}
}
Example #13
func TestSampleDelivery(t *testing.T) {
	// Let's create an even number of send batches so we don't run into the
	// batch timeout case.
	n := maxSamplesPerSend * 2

	samples := make(model.Samples, 0, n)
	for i := 0; i < n; i++ {
		samples = append(samples, &model.Sample{
			Metric: model.Metric{
				model.MetricNameLabel: "test_metric",
			},
			Value: model.SampleValue(i),
		})
	}

	c := &TestStorageClient{}
	c.expectSamples(samples[:len(samples)/2])
	m := NewStorageQueueManager(c, len(samples)/2)

	// These should be received by the client.
	for _, s := range samples[:len(samples)/2] {
		m.Append(s)
	}
	// These will be dropped because the queue is full.
	for _, s := range samples[len(samples)/2:] {
		m.Append(s)
	}
	go m.Run()
	defer m.Stop()

	c.waitForExpectedSamples(t)
}
Example #14
func (sl *scrapeLoop) report(start time.Time, duration time.Duration, err error) {
	sl.scraper.report(start, duration, err)

	ts := model.TimeFromUnixNano(start.UnixNano())

	var health model.SampleValue
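	// health stays 0 (the zero value) on a failed scrape and is set to 1 on success.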
	if err == nil {
		health = 1
	}

	healthSample := &model.Sample{
		Metric: model.Metric{
			model.MetricNameLabel: scrapeHealthMetricName,
		},
		Timestamp: ts,
		Value:     health,
	}
	durationSample := &model.Sample{
		Metric: model.Metric{
			model.MetricNameLabel: scrapeDurationMetricName,
		},
		Timestamp: ts,
		Value:     model.SampleValue(float64(duration) / float64(time.Second)),
	}

	sl.reportAppender.Append(healthSample)
	sl.reportAppender.Append(durationSample)
}
Example #15
// primaryExpr parses a primary expression.
//
//		<metric_name> | <function_call> | <vector_aggregation> | <literal>
//
func (p *parser) primaryExpr() Expr {
	switch t := p.next(); {
	case t.typ == itemNumber:
		f := p.number(t.val)
		return &NumberLiteral{model.SampleValue(f)}

	case t.typ == itemString:
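		// Strip the surrounding quote characters from the raw token value.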
		s := t.val[1 : len(t.val)-1]
		return &StringLiteral{s}

	case t.typ == itemLeftBrace:
		// Metric selector without metric name.
		p.backup()
		return p.vectorSelector("")

	case t.typ == itemIdentifier:
		// Check for function call.
		if p.peek().typ == itemLeftParen {
			return p.call(t.val)
		}
		fallthrough // Else metric selector.

	case t.typ == itemMetricIdentifier:
		return p.vectorSelector(t.val)

	case t.typ.isAggregator():
		p.backup()
		return p.aggrExpr()

	default:
		p.errorf("no valid expression found")
	}
	return nil
}
Example #16
func (t *Test) parseEval(lines []string, i int) (int, *evalCmd, error) {
	if !patEvalInstant.MatchString(lines[i]) {
		return i, nil, raise(i, "invalid evaluation command. (eval[_fail|_ordered] instant [at <offset:duration>] <query>)")
	}
	parts := patEvalInstant.FindStringSubmatch(lines[i])
	var (
		mod = parts[1]
		at  = parts[2]
		qry = parts[3]
	)
	expr, err := ParseExpr(qry)
	if err != nil {
		if perr, ok := err.(*ParseErr); ok {
			perr.Line = i + 1
			perr.Pos += strings.Index(lines[i], qry)
		}
		return i, nil, err
	}

	offset, err := model.ParseDuration(at)
	if err != nil {
		return i, nil, raise(i, "invalid step definition %q: %s", at, err)
	}
	ts := testStartTime.Add(time.Duration(offset))

	cmd := newEvalCmd(expr, ts, ts, 0)
	switch mod {
	case "ordered":
		cmd.ordered = true
	case "fail":
		cmd.fail = true
	}

	for j := 1; i+1 < len(lines); j++ {
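		// Consume expected-result lines until a blank line or the end of input.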
		i++
		defLine := lines[i]
		if len(defLine) == 0 {
			i--
			break
		}
		if f, err := parseNumber(defLine); err == nil {
			cmd.expect(0, nil, sequenceValue{value: model.SampleValue(f)})
			break
		}
		metric, vals, err := parseSeriesDesc(defLine)
		if err != nil {
			if perr, ok := err.(*ParseErr); ok {
				perr.Line = i + 1
			}
			return i, nil, err
		}

		// Currently, we are not expecting any matrices.
		if len(vals) > 1 {
			return i, nil, raise(i, "expecting multiple values in instant evaluation is not allowed")
		}
		cmd.expect(j, metric, vals...)
	}
	return i, cmd, nil
}
Example #17
func TestSampleDeliveryOrder(t *testing.T) {
	cfg := defaultConfig
	ts := 10
	n := cfg.MaxSamplesPerSend * ts
	// Ensure we don't drop samples in this test.
	cfg.QueueCapacity = n

	samples := make(model.Samples, 0, n)
	for i := 0; i < n; i++ {
		name := model.LabelValue(fmt.Sprintf("test_metric_%d", i%ts))
		samples = append(samples, &model.Sample{
			Metric: model.Metric{
				model.MetricNameLabel: name,
			},
			Value:     model.SampleValue(i),
			Timestamp: model.Time(i),
		})
	}

	c := NewTestStorageClient()
	c.expectSamples(samples)
	m := NewStorageQueueManager(c, &cfg)

	// These should be received by the client.
	for _, s := range samples {
		m.Append(s)
	}
	go m.Run()
	defer m.Stop()

	c.waitForExpectedSamples(t)
}
Example #18
func TestSampleDelivery(t *testing.T) {
	// Let's create an even number of send batches so we don't run into the
	// batch timeout case.
	cfg := defaultConfig
	n := cfg.QueueCapacity * 2
	cfg.Shards = 1

	samples := make(model.Samples, 0, n)
	for i := 0; i < n; i++ {
		name := model.LabelValue(fmt.Sprintf("test_metric_%d", i))
		samples = append(samples, &model.Sample{
			Metric: model.Metric{
				model.MetricNameLabel: name,
			},
			Value: model.SampleValue(i),
		})
	}

	c := NewTestStorageClient()
	c.expectSamples(samples[:len(samples)/2])
	m := NewStorageQueueManager(c, &cfg)

	// These should be received by the client.
	for _, s := range samples[:len(samples)/2] {
		m.Append(s)
	}
	// These will be dropped because the queue is full.
	for _, s := range samples[len(samples)/2:] {
		m.Append(s)
	}
	go m.Run()
	defer m.Stop()

	c.waitForExpectedSamples(t)
}
Example #19
func (sl *scrapeLoop) report(start time.Time, duration time.Duration, err error) {
	sl.scraper.report(start, duration, err)

	ts := model.TimeFromUnixNano(start.UnixNano())

	var health model.SampleValue
	if err == nil {
		health = 1
	}

	healthSample := &model.Sample{
		Metric: model.Metric{
			model.MetricNameLabel: scrapeHealthMetricName,
		},
		Timestamp: ts,
		Value:     health,
	}
	durationSample := &model.Sample{
		Metric: model.Metric{
			model.MetricNameLabel: scrapeDurationMetricName,
		},
		Timestamp: ts,
		Value:     model.SampleValue(float64(duration) / float64(time.Second)),
	}

	if err := sl.reportAppender.Append(healthSample); err != nil {
		log.With("sample", healthSample).With("error", err).Warn("Scrape health sample discarded")
	}
	if err := sl.reportAppender.Append(durationSample); err != nil {
		log.With("sample", durationSample).With("error", err).Warn("Scrape duration sample discarded")
	}
}
Example #20
func (t *Target) report(app storage.SampleAppender, start time.Time, duration time.Duration, err error) {
	t.status.setLastScrape(start)
	t.status.setLastError(err)

	ts := model.TimeFromUnixNano(start.UnixNano())

	var health model.SampleValue
	if err == nil {
		health = 1
	}

	healthSample := &model.Sample{
		Metric: model.Metric{
			model.MetricNameLabel: scrapeHealthMetricName,
		},
		Timestamp: ts,
		Value:     health,
	}
	durationSample := &model.Sample{
		Metric: model.Metric{
			model.MetricNameLabel: scrapeDurationMetricName,
		},
		Timestamp: ts,
		Value:     model.SampleValue(float64(duration) / float64(time.Second)),
	}

	app = t.wrapReportingAppender(app)

	app.Append(healthSample)
	app.Append(durationSample)
}
Example #21
func (s *testNotificationScenario) test(i int, t *testing.T) {
	h := NewNotificationHandler(&NotificationHandlerOptions{
		AlertmanagerURL: "alertmanager_url",
		QueueCapacity:   0,
		Deadline:        10 * time.Second,
	})
	defer h.Stop()

	receivedPost := make(chan bool, 1)
	poster := testHTTPPoster{receivedPost: receivedPost}
	h.httpClient = &poster

	go h.Run()

	h.SubmitReqs(NotificationReqs{
		{
			Summary:     s.summary,
			Description: s.description,
			Runbook:     s.runbook,
			Labels: model.LabelSet{
				model.LabelName("instance"): model.LabelValue("testinstance"),
			},
			Value:        model.SampleValue(1.0 / 3.0),
			ActiveSince:  time.Time{},
			RuleString:   "Test rule string",
			GeneratorURL: "prometheus_url",
		},
	})

	<-receivedPost
	if poster.message != s.message {
		t.Fatalf("%d. Expected '%s', received '%s'", i, s.message, poster.message)
	}
}
Example #22
func benchmarkRangeValues(b *testing.B, encoding chunkEncoding) {
	samples := make(model.Samples, 10000)
	for i := range samples {
		samples[i] = &model.Sample{
			Timestamp: model.Time(2 * i),
			Value:     model.SampleValue(float64(i) * 0.2),
		}
	}
	s, closer := NewTestStorage(b, encoding)
	defer closer.Close()

	for _, sample := range samples {
		s.Append(sample)
	}
	s.WaitForIndexing()

	fp := model.Metric{}.FastFingerprint()
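	// The samples above carry no labels, so they all belong to the series of the empty metric.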

	_, it := s.preloadChunksForRange(fp, model.Earliest, model.Latest)

	b.ResetTimer()

	for i := 0; i < b.N; i++ {
		for _, sample := range samples {
			actual := it.RangeValues(metric.Interval{
				OldestInclusive: sample.Timestamp - 20,
				NewestInclusive: sample.Timestamp + 20,
			})

			if len(actual) < 10 {
				b.Fatalf("not enough samples found")
			}
		}
	}
}
Example #23
func extractUntyped(o *DecodeOptions, f *dto.MetricFamily) model.Vector {
	samples := make(model.Vector, 0, len(f.Metric))

	for _, m := range f.Metric {
		if m.Untyped == nil {
			continue
		}

		lset := make(model.LabelSet, len(m.Label)+1)
		for _, p := range m.Label {
			lset[model.LabelName(p.GetName())] = model.LabelValue(p.GetValue())
		}
		lset[model.MetricNameLabel] = model.LabelValue(f.GetName())

		smpl := &model.Sample{
			Metric: model.Metric(lset),
			Value:  model.SampleValue(m.Untyped.GetValue()),
		}

		if m.TimestampMs != nil {
			smpl.Timestamp = model.TimeFromUnixNano(*m.TimestampMs * 1000000)
		} else {
			smpl.Timestamp = o.Timestamp
		}

		samples = append(samples, smpl)
	}

	return samples
}
Example #24
func buildTestChunks(t *testing.T, encoding chunk.Encoding) map[model.Fingerprint][]chunk.Chunk {
	fps := model.Fingerprints{
		m1.FastFingerprint(),
		m2.FastFingerprint(),
		m3.FastFingerprint(),
	}
	fpToChunks := map[model.Fingerprint][]chunk.Chunk{}

	for _, fp := range fps {
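		// Build ten single-sample chunks per fingerprint, using the fingerprint itself as the sample value.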
		fpToChunks[fp] = make([]chunk.Chunk, 0, 10)
		for i := 0; i < 10; i++ {
			ch, err := chunk.NewForEncoding(encoding)
			if err != nil {
				t.Fatal(err)
			}
			chs, err := ch.Add(model.SamplePair{
				Timestamp: model.Time(i),
				Value:     model.SampleValue(fp),
			})
			if err != nil {
				t.Fatal(err)
			}
			fpToChunks[fp] = append(fpToChunks[fp], chs[0])
		}
	}
	return fpToChunks
}
Example #25
func recordScrapeHealth(
	sampleAppender storage.SampleAppender,
	timestamp time.Time,
	baseLabels model.LabelSet,
	health TargetHealth,
	scrapeDuration time.Duration,
) {
	healthMetric := make(model.Metric, len(baseLabels)+1)
	durationMetric := make(model.Metric, len(baseLabels)+1)

	healthMetric[model.MetricNameLabel] = scrapeHealthMetricName
	durationMetric[model.MetricNameLabel] = scrapeDurationMetricName

	for ln, lv := range baseLabels {
		healthMetric[ln] = lv
		durationMetric[ln] = lv
	}

	ts := model.TimeFromUnixNano(timestamp.UnixNano())

	healthSample := &model.Sample{
		Metric:    healthMetric,
		Timestamp: ts,
		Value:     health.value(),
	}
	durationSample := &model.Sample{
		Metric:    durationMetric,
		Timestamp: ts,
		Value:     model.SampleValue(float64(scrapeDuration) / float64(time.Second)),
	}

	sampleAppender.Append(healthSample)
	sampleAppender.Append(durationSample)
}
Example #26
// === log10(vector model.ValVector) Vector ===
func funcLog10(ev *evaluator, args Expressions) model.Value {
	vector := ev.evalVector(args[0])
	for _, el := range vector {
		el.Metric.Del(model.MetricNameLabel)
		el.Value = model.SampleValue(math.Log10(float64(el.Value)))
	}
	return vector
}
Example #27
// === max_over_time(matrix model.ValMatrix) Vector ===
func funcMaxOverTime(ev *evaluator, args Expressions) model.Value {
	return aggrOverTime(ev, args, func(values []model.SamplePair) model.SampleValue {
		max := math.Inf(-1)
		for _, v := range values {
			max = math.Max(max, float64(v.Value))
		}
		return model.SampleValue(max)
	})
}
Example #28
func (c doubleDeltaEncodedChunk) baseValue() model.SampleValue {
	return model.SampleValue(
		math.Float64frombits(
			binary.LittleEndian.Uint64(
				c[doubleDeltaHeaderBaseValueOffset:],
			),
		),
	)
}
Example #29
// === vector(s scalar) Vector ===
func funcVector(ev *evaluator, args Expressions) model.Value {
	return vector{
		&sample{
			Metric:    metric.Metric{},
			Value:     model.SampleValue(ev.evalFloat(args[0])),
			Timestamp: ev.Timestamp,
		},
	}
}
Example #30
// === min_over_time(matrix model.ValMatrix) Vector ===
func funcMinOverTime(ev *evaluator, args Expressions) model.Value {
	return aggrOverTime(ev, args, func(values []model.SamplePair) model.SampleValue {
		min := math.Inf(1)
		for _, v := range values {
			min = math.Min(min, float64(v.Value))
		}
		return model.SampleValue(min)
	})
}