Example #1
// === delta(matrix MatrixNode, isCounter ScalarNode) ===
func deltaImpl(timestamp *time.Time, args []Node) interface{} {
	matrixNode := args[0].(MatrixNode)
	isCounter := int(args[1].(ScalarNode).Eval(timestamp))
	resultVector := Vector{}

	// If we treat these metrics as counters, we need to fetch all values
	// in the interval to find breaks in the timeseries' monotonicity.
	// I.e. if a counter resets, we want to ignore that reset.
	var matrixValue Matrix
	if isCounter > 0 {
		matrixValue = matrixNode.Eval(timestamp)
	} else {
		matrixValue = matrixNode.EvalBoundaries(timestamp)
	}
	for _, samples := range matrixValue {
		counterCorrection := model.SampleValue(0)
		lastValue := model.SampleValue(0)
		for _, sample := range samples.Values {
			currentValue := sample.Value
			if currentValue < lastValue {
				counterCorrection += lastValue - currentValue
			}
			lastValue = currentValue
		}
		resultValue := lastValue - samples.Values[0].Value + counterCorrection
		resultSample := &model.Sample{
			Metric:    samples.Metric,
			Value:     resultValue,
			Timestamp: *timestamp,
		}
		resultVector = append(resultVector, resultSample)
	}
	return resultVector
}
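For illustration, here is a minimal, self-contained sketch of the counter-reset correction that deltaImpl applies (correctedDelta is a hypothetical helper, not part of the Prometheus source): whenever a sample drops below its predecessor, the lost amount is added back before the overall delta is taken.

package main

import "fmt"

// correctedDelta mirrors the inner loop of deltaImpl on a plain float slice:
// every monotonicity break (counter reset) is compensated for before the
// difference between the last and first sample is computed.
func correctedDelta(values []float64) float64 {
	correction := 0.0
	lastValue := 0.0
	for _, v := range values {
		if v < lastValue {
			correction += lastValue - v
		}
		lastValue = v
	}
	return lastValue - values[0] + correction
}

func main() {
	// A counter climbs 5 -> 20 -> 30, resets to 0, then climbs to 10.
	// The raw difference would be 10 - 5 = 5; the corrected delta is 35.
	fmt.Println(correctedDelta([]float64{5, 20, 30, 0, 10})) // 35
}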
Example #2
func TestGetFingerprintsForLabelSetUsesAnd(t *testing.T) {
	temporaryDirectory, _ := ioutil.TempDir("", "test_get_fingerprints_for_label_set_uses_and")

	defer func() {
		err := os.RemoveAll(temporaryDirectory)
		if err != nil {
			t.Errorf("could not remove temporary directory: %f", err)
		}
	}()

	persistence, _ := NewLevelDBMetricPersistence(temporaryDirectory)
	defer persistence.Close()

	metrics := []map[string]string{
		{"name": "request_metrics_latency_equal_tallying_microseconds", "instance": "http://localhost:9090/metrics.json", "percentile": "0.010000"},
		{"name": "requests_metrics_latency_equal_accumulating_microseconds", "instance": "http://localhost:9090/metrics.json", "percentile": "0.010000"},
		{"name": "requests_metrics_latency_logarithmic_accumulating_microseconds", "instance": "http://localhost:9090/metrics.json", "percentile": "0.010000"},
		{"name": "requests_metrics_latency_logarithmic_tallying_microseconds", "instance": "http://localhost:9090/metrics.json", "percentile": "0.010000"},
		{"name": "targets_healthy_scrape_latency_ms", "instance": "http://localhost:9090/metrics.json", "percentile": "0.010000"},
	}

	for _, metric := range metrics {
		m := model.Metric{}

		for k, v := range metric {
			m[model.LabelName(k)] = model.LabelValue(v)
		}

		err := persistence.AppendSample(&model.Sample{
			Value:     model.SampleValue(0.0),
			Timestamp: time.Now(),
			Metric:    m,
		})

		if err != nil {
			t.Errorf("could not create base sample: %s", err)
		}
	}

	labelSet := model.LabelSet{
		"name":       "targets_healthy_scrape_latency_ms",
		"percentile": "0.010000",
	}

	fingerprints, err := persistence.GetFingerprintsForLabelSet(&labelSet)
	if err != nil {
		t.Errorf("could not get labels: %s", err)
	}

	if len(fingerprints) != 1 {
		t.Errorf("did not get a single metric as is expected, got %s", fingerprints)
	}
}
Example #3
// === rate(node *MatrixNode) ===
func rateImpl(timestamp *time.Time, args []Node) interface{} {
	args = append(args, &ScalarLiteral{value: 1})
	vector := deltaImpl(timestamp, args).(Vector)

	// TODO: could be other type of MatrixNode in the future (right now, only
	// MatrixLiteral exists). Find a better way of getting the duration of a
	// matrix, such as looking at the samples themselves.
	interval := args[0].(*MatrixLiteral).interval
	for _, sample := range vector {
		sample.Value /= model.SampleValue(interval / time.Second)
	}
	return vector
}
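The division at the end of rateImpl is what turns the counter-corrected delta into a per-second rate; the snippet below shows the same arithmetic in isolation (perSecondRate is a hypothetical helper, not part of the source).

package main

import (
	"fmt"
	"time"
)

// perSecondRate mirrors the final step of rateImpl: divide the delta over the
// matrix range by the length of that range in seconds.
func perSecondRate(delta float64, interval time.Duration) float64 {
	return delta / float64(interval/time.Second)
}

func main() {
	// A counter that grew by 600 over a 5-minute window rose at 2 per second.
	fmt.Println(perSecondRate(600, 5*time.Minute)) // 2
}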
Example #4
File: ast.go Project: grobie/prometheus
func (node *VectorAggregation) groupedAggregationsToVector(aggregations map[string]*groupedAggregation, timestamp *time.Time) Vector {
	vector := Vector{}
	for _, aggregation := range aggregations {
		if node.aggrType == AVG {
			aggregation.value = aggregation.value / model.SampleValue(aggregation.groupCount)
		}
		sample := &model.Sample{
			Metric:    aggregation.labels,
			Value:     aggregation.value,
			Timestamp: *timestamp,
		}
		vector = append(vector, sample)
	}
	return vector
}
Example #5
				}
			}()

			persistence, _ := NewLevelDBMetricPersistence(temporaryDirectory)

			defer func() {
				persistence.Close()
			}()

			m := model.Metric{
				"name": "age_in_years",
			}

			for j, value := range context.values {
				err := persistence.AppendSample(&model.Sample{
					Value:     model.SampleValue(value.value),
					Timestamp: time.Date(value.year, value.month, value.day, value.hour, 0, 0, 0, time.UTC),
					Metric:    m,
				})

				if err != nil {
					t.Errorf("%d.%d(%s). Could not create sample: %q\n", i, j, context.name, err)
				}
			}

			for j, behavior := range context.behaviors {
				input := behavior.input
				time := time.Date(input.year, input.month, input.day, input.hour, 0, 0, 0, time.UTC)
				p := metric.StalenessPolicy{
					DeltaAllowance: input.staleness,
				}
Example #6
	return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("\\-?([0-9])+(\\.([0-9])*)?"), nil, []yystartcondition{}, false, func() (yyar yyactionreturn) {
	defer func() {
		if r := recover(); r != nil {
			if r != "yyREJECT" {
				panic(r)
			}
			yyar.returnType = yyRT_REJECT
		}
	}()
	{
		num, err := strconv.ParseFloat(yytext, 32)
		if err != nil {
			rulesError("Invalid float %v", yytext)
		}
		yylval.num = model.SampleValue(num)
		return yyactionreturn{NUMBER, yyRT_USER_RETURN}
	}

	return yyactionreturn{0, yyRT_FALLTHROUGH}
}}, {regexp.MustCompile("\\\"(\\\\[^\\n]|[^\\\\\"])*\\\""), nil, []yystartcondition{}, false, func() (yyar yyactionreturn) {
	defer func() {
		if r := recover(); r != nil {
			if r != "yyREJECT" {
				panic(r)
			}
			yyar.returnType = yyRT_REJECT
		}
	}()
	{
		yylval.str = yytext[1 : len(yytext)-1]
Example #7
func (t *target) Scrape(earliest time.Time, results chan Result) (err error) {
	result := Result{}

	defer func() {
		futureState := t.state

		switch err {
		case nil:
			futureState = ALIVE
		default:
			futureState = UNREACHABLE
		}

		t.scheduler.Reschedule(earliest, futureState)

		result.Err = err
		results <- result
	}()

	done := make(chan bool)

	request := func() {
		ti := time.Now()
		resp, err := http.Get(t.Address())
		if err != nil {
			return
		}

		defer resp.Body.Close()

		raw, err := ioutil.ReadAll(resp.Body)
		if err != nil {
			return
		}

		intermediate := make(map[string]interface{})
		err = json.Unmarshal(raw, &intermediate)
		if err != nil {
			return
		}

		baseLabels := model.LabelSet{"instance": model.LabelValue(t.Address())}
		for baseK, baseV := range t.BaseLabels {
			baseLabels[baseK] = baseV
		}

		for name, v := range intermediate {
			asMap, ok := v.(map[string]interface{})

			if !ok {
				continue
			}

			switch asMap["type"] {
			case "counter":
				m := model.Metric{}
				m["name"] = model.LabelValue(name)
				asFloat, ok := asMap["value"].(float64)
				if !ok {
					continue
				}

				s := model.Sample{
					Metric:    m,
					Value:     model.SampleValue(asFloat),
					Timestamp: ti,
				}

				for baseK, baseV := range baseLabels {
					m[baseK] = baseV
				}

				result.Samples = append(result.Samples, s)
			case "histogram":
				values, ok := asMap["value"].(map[string]interface{})
				if !ok {
					continue
				}

				for p, pValue := range values {
					asString, ok := pValue.(string)
					if !ok {
						continue
					}

					float, err := strconv.ParseFloat(asString, 64)
					if err != nil {
						continue
					}

					m := model.Metric{}
					m["name"] = model.LabelValue(name)
					m["percentile"] = model.LabelValue(p)

					s := model.Sample{
						Metric:    m,
						Value:     model.SampleValue(float),
						Timestamp: ti,
					}

					for baseK, baseV := range baseLabels {
						m[baseK] = baseV
					}

					result.Samples = append(result.Samples, s)
				}
			}
		}

		done <- true
	}

	accumulator := func(d time.Duration) {
		ms := float64(d) / float64(time.Millisecond)
		labels := map[string]string{address: t.Address(), outcome: success}
		if err != nil {
			labels[outcome] = failure
		}

		targetOperationLatencies.Add(labels, ms)
		targetOperations.Increment(labels)
	}

	go metrics.InstrumentCall(request, accumulator)

	select {
	case <-done:
		break
	case <-time.After(t.Deadline):
		err = fmt.Errorf("Target %s exceeded %s deadline.", t, t.Deadline)
	}

	return
}
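Working backwards from the parsing logic above, Scrape expects a JSON document whose top-level keys are metric names: "counter" entries carry a plain numeric value, while "histogram" entries carry a map from percentile to a stringified float. The payload below is an illustration inferred from the code, with made-up metric names, not output captured from a real exporter.

package main

import (
	"encoding/json"
	"fmt"
)

// Hypothetical payload shape accepted by the parser in Scrape above.
const examplePayload = `{
  "requests_total": {"type": "counter", "value": 1027},
  "request_latency_microseconds": {
    "type": "histogram",
    "value": {"0.500000": "12.3", "0.990000": "87.5"}
  }
}`

func main() {
	// Decode the same way Scrape does and show each metric's declared type.
	intermediate := make(map[string]interface{})
	if err := json.Unmarshal([]byte(examplePayload), &intermediate); err != nil {
		panic(err)
	}
	for name, v := range intermediate {
		fmt.Println(name, v.(map[string]interface{})["type"])
	}
}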
Example #8
// === count(vector VectorNode) ===
func countImpl(timestamp *time.Time, args []Node) interface{} {
	return model.SampleValue(len(args[0].(VectorNode).Eval(timestamp)))
}
Example #9
// === time() ===
func timeImpl(timestamp *time.Time, args []Node) interface{} {
	return model.SampleValue(time.Now().Unix())
}
Example #10
func (l *LevelDBMetricPersistence) GetSamplesForMetric(metric model.Metric, interval model.Interval) ([]model.Samples, error) {
	metricDTO := model.MetricToDTO(&metric)

	if fingerprintDTO, fingerprintDTOErr := model.MessageToFingerprintDTO(metricDTO); fingerprintDTOErr == nil {
		if iterator, closer, iteratorErr := l.metricSamples.GetIterator(); iteratorErr == nil {
			defer closer.Close()

			start := &dto.SampleKey{
				Fingerprint: fingerprintDTO,
				Timestamp:   indexable.EncodeTime(interval.OldestInclusive),
			}

			emission := make([]model.Samples, 0)

			if encode, encodeErr := coding.NewProtocolBufferEncoder(start).Encode(); encodeErr == nil {
				iterator.Seek(encode)

				predicate := keyIsAtMostOld(interval.NewestInclusive)

				for ; iterator.Valid(); iterator.Next() {
					key := &dto.SampleKey{}
					value := &dto.SampleValue{}
					if keyUnmarshalErr := proto.Unmarshal(iterator.Key(), key); keyUnmarshalErr == nil {
						if valueUnmarshalErr := proto.Unmarshal(iterator.Value(), value); valueUnmarshalErr == nil {
							if fingerprintsEqual(fingerprintDTO, key.Fingerprint) {
								// Wart
								if predicate(key) {
									emission = append(emission, model.Samples{
										Value:     model.SampleValue(*value.Value),
										Timestamp: indexable.DecodeTime(key.Timestamp),
									})
								} else {
									break
								}
							} else {
								break
							}
						} else {
							return nil, valueUnmarshalErr
						}
					} else {
						return nil, keyUnmarshalErr
					}
				}

				return emission, nil

			} else {
				log.Printf("Could not encode the start key: %q\n", encodeErr)
				return nil, encodeErr
			}
		} else {
			log.Printf("Could not acquire iterator: %q\n", iteratorErr)
			return nil, iteratorErr
		}
	} else {
		log.Printf("Could not create fingerprint for the metric: %q\n", fingerprintDTOErr)
		return nil, fingerprintDTOErr
	}

	panic("unreachable")
}
Example #11
func (l *LevelDBMetricPersistence) GetRangeValues(m *model.Metric, i *model.Interval, s *metric.StalenessPolicy) (v *model.SampleSet, err error) {
	begin := time.Now()

	defer func() {
		duration := time.Now().Sub(begin)

		recordOutcome(storageOperations, storageLatency, duration, err, map[string]string{operation: getRangeValues, result: success}, map[string]string{operation: getRangeValues, result: failure})
	}()

	d := model.MetricToDTO(m)

	f, err := model.MessageToFingerprintDTO(d)
	if err != nil {
		return
	}

	k := &dto.SampleKey{
		Fingerprint: f,
		Timestamp:   indexable.EncodeTime(i.OldestInclusive),
	}

	e, err := coding.NewProtocolBufferEncoder(k).Encode()
	if err != nil {
		return
	}

	iterator, closer, err := l.metricSamples.GetIterator()
	if err != nil {
		return
	}
	defer closer.Close()

	iterator.Seek(e)

	predicate := keyIsOlderThan(i.NewestInclusive)

	for ; iterator.Valid(); iterator.Next() {
		retrievedKey := &dto.SampleKey{}

		retrievedKey, err = extractSampleKey(iterator)
		if err != nil {
			return
		}

		if predicate(retrievedKey) {
			break
		}

		if !fingerprintsEqual(retrievedKey.Fingerprint, k.Fingerprint) {
			break
		}

		retrievedValue, err := extractSampleValue(iterator)
		if err != nil {
			return nil, err
		}

		if v == nil {
			v = &model.SampleSet{}
		}

		v.Values = append(v.Values, model.SamplePair{
			Value:     model.SampleValue(*retrievedValue.Value),
			Timestamp: indexable.DecodeTime(retrievedKey.Timestamp),
		})
	}

	return
}
Example #12
File: ast.go Project: grobie/prometheus
func evalVectorBinop(opType BinOpType,
	lhs model.SampleValue,
	rhs model.SampleValue) (model.SampleValue, bool) {
	switch opType {
	case ADD:
		return lhs + rhs, true
	case SUB:
		return lhs - rhs, true
	case MUL:
		return lhs * rhs, true
	case DIV:
		if rhs != 0 {
			return lhs / rhs, true
		} else {
			return model.SampleValue(math.Inf(int(rhs))), true
		}
	case MOD:
		if rhs != 0 {
			return model.SampleValue(int(lhs) % int(rhs)), true
		} else {
			return model.SampleValue(math.Inf(int(rhs))), true
		}
	case EQ:
		if lhs == rhs {
			return lhs, true
		} else {
			return 0, false
		}
	case NE:
		if lhs != rhs {
			return lhs, true
		} else {
			return 0, false
		}
	case GT:
		if lhs > rhs {
			return lhs, true
		} else {
			return 0, false
		}
	case LT:
		if lhs < rhs {
			return lhs, true
		} else {
			return 0, false
		}
	case GE:
		if lhs >= rhs {
			return lhs, true
		} else {
			return 0, false
		}
	case LE:
		if lhs <= rhs {
			return lhs, true
		} else {
			return 0, false
		}
	case AND:
		return lhs, true
	case OR:
		return lhs, true // TODO: implement OR
	}
	panic("Not all enum values enumerated in switch")
}
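Note the boolean returned alongside the value: for comparison operators it decides whether the sample is kept at all, which is how comparisons act as filters on vectors (the scalar variant in the next example returns 1 or 0 instead). The sketch below, built around a hypothetical filterGreaterThan helper, shows the intended use of that (value, keep) pair.

package main

import "fmt"

// filterGreaterThan sketches how a caller consumes the (value, keep) pair for
// the GT case above: samples failing the comparison are dropped entirely
// rather than set to zero.
func filterGreaterThan(values []float64, threshold float64) []float64 {
	kept := []float64{}
	for _, v := range values {
		if v > threshold { // mirrors "case GT: return lhs, true / return 0, false"
			kept = append(kept, v)
		}
	}
	return kept
}

func main() {
	fmt.Println(filterGreaterThan([]float64{1, 5, 9, 3}, 4)) // [5 9]
}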
Example #13
File: ast.go Project: grobie/prometheus
func evalScalarBinop(opType BinOpType,
	lhs model.SampleValue,
	rhs model.SampleValue) model.SampleValue {
	switch opType {
	case ADD:
		return lhs + rhs
	case SUB:
		return lhs - rhs
	case MUL:
		return lhs * rhs
	case DIV:
		if rhs != 0 {
			return lhs / rhs
		} else {
			return model.SampleValue(math.Inf(int(rhs)))
		}
	case MOD:
		if rhs != 0 {
			return model.SampleValue(int(lhs) % int(rhs))
		} else {
			return model.SampleValue(math.Inf(int(rhs)))
		}
	case EQ:
		if lhs == rhs {
			return 1
		} else {
			return 0
		}
	case NE:
		if lhs != rhs {
			return 1
		} else {
			return 0
		}
	case GT:
		if lhs > rhs {
			return 1
		} else {
			return 0
		}
	case LT:
		if lhs < rhs {
			return 1
		} else {
			return 0
		}
	case GE:
		if lhs >= rhs {
			return 1
		} else {
			return 0
		}
	case LE:
		if lhs <= rhs {
			return 1
		} else {
			return 0
		}
	}
	panic("Not all enum values enumerated in switch")
}