Example #1
func (t *Target) scrape(sampleAppender storage.SampleAppender) (err error) {
	start := time.Now()
	baseLabels := t.BaseLabels()

	defer func() {
		t.status.setLastError(err)
		recordScrapeHealth(sampleAppender, clientmodel.TimestampFromTime(start), baseLabels, t.status.Health(), time.Since(start))
	}()

	req, err := http.NewRequest("GET", t.URL(), nil)
	if err != nil {
		panic(err)
	}
	req.Header.Add("Accept", acceptHeader)

	resp, err := t.httpClient.Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return fmt.Errorf("server returned HTTP status %s", resp.Status)
	}

	processor, err := extraction.ProcessorForRequestHeader(resp.Header)
	if err != nil {
		return err
	}

	t.ingestedSamples = make(chan clientmodel.Samples, ingestedSamplesCap)

	processOptions := &extraction.ProcessOptions{
		Timestamp: clientmodel.TimestampFromTime(start),
	}
	go func() {
		err = processor.ProcessSingle(resp.Body, t, processOptions)
		close(t.ingestedSamples)
	}()

	for samples := range t.ingestedSamples {
		for _, s := range samples {
			s.Metric.MergeFromLabelSet(baseLabels, clientmodel.ExporterLabelPrefix)
			// Avoid the copy in Relabel if there are no configs.
			if len(t.metricRelabelConfigs) > 0 {
				labels, err := Relabel(clientmodel.LabelSet(s.Metric), t.metricRelabelConfigs...)
				if err != nil {
					log.Errorf("error while relabeling metric %s of instance %s: ", s.Metric, t.url, err)
					continue
				}
				// Check if the timeseries was dropped.
				if labels == nil {
					continue
				}
				s.Metric = clientmodel.Metric(labels)
			}
			sampleAppender.Append(s)
		}
	}
	return err
}
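
A minimal standalone sketch (hypothetical names) of the named-return pattern the scrape method above relies on: because err is a named result, the deferred closure observes whatever error the function finally returns.

package main

import (
	"errors"
	"fmt"
)

// doWork mirrors the scrape's structure: the deferred closure reads the
// named return value err, so it sees the final error no matter which
// return path set it.
func doWork() (err error) {
	defer func() {
		fmt.Println("recording last error:", err)
	}()
	return errors.New("scrape failed")
}

func main() {
	if err := doWork(); err != nil {
		fmt.Println("caller saw:", err)
	}
}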
Example #2
// Regression test for https://github.com/prometheus/prometheus/issues/381.
//
// 1. Creates samples for two timeseries with one common labelpair.
// 2. Flushes memory storage such that only one series is dropped from memory.
// 3. Gets fingerprints for common labelpair.
// 4. Checks that exactly one fingerprint remains.
func TestDroppedSeriesIndexRegression(t *testing.T) {
	samples := clientmodel.Samples{
		&clientmodel.Sample{
			Metric: clientmodel.Metric{
				clientmodel.MetricNameLabel: "testmetric",
				"different":                 "differentvalue1",
				"common":                    "samevalue",
			},
			Value:     1,
			Timestamp: clientmodel.TimestampFromTime(time.Date(2000, 0, 0, 0, 0, 0, 0, time.UTC)),
		},
		&clientmodel.Sample{
			Metric: clientmodel.Metric{
				clientmodel.MetricNameLabel: "testmetric",
				"different":                 "differentvalue2",
				"common":                    "samevalue",
			},
			Value:     2,
			Timestamp: clientmodel.TimestampFromTime(time.Date(2002, 0, 0, 0, 0, 0, 0, time.UTC)),
		},
	}

	s := NewMemorySeriesStorage(MemorySeriesOptions{})
	s.AppendSamples(samples)

	common := clientmodel.LabelSet{"common": "samevalue"}
	fps, err := s.GetFingerprintsForLabelMatchers(labelMatchersFromLabelSet(common))
	if err != nil {
		t.Fatal(err)
	}
	if len(fps) != 2 {
		t.Fatalf("Got %d fingerprints, expected 2", len(fps))
	}

	toDisk := make(chan clientmodel.Samples, 2)
	s.Flush(clientmodel.TimestampFromTime(time.Date(2001, 0, 0, 0, 0, 0, 0, time.UTC)), toDisk)
	if len(toDisk) != 1 {
		t.Fatalf("Got %d disk sample lists, expected 1", len(toDisk))
	}
	diskSamples := <-toDisk
	if len(diskSamples) != 1 {
		t.Fatalf("Got %d disk samples, expected 1", len(diskSamples))
	}

	fps, err = s.GetFingerprintsForLabelMatchers(labelMatchersFromLabelSet(common))
	if err != nil {
		t.Fatal(err)
	}
	if len(fps) != 1 {
		t.Fatalf("Got %d fingerprints, expected 1", len(fps))
	}
}
Example #3
func (s *memorySeriesStorage) loop() {
	checkpointTimer := time.NewTimer(s.checkpointInterval)

	// We take the number of head chunks persisted since the last checkpoint
	// as an approximation for the number of series that are "dirty",
	// i.e. whose head chunk is different from the one in the most recent
	// checkpoint or for which the fact that the head chunk has been
	// persisted is not reflected in the most recent checkpoint. This count
	// could overestimate the number of dirty series, but it's good enough
	// as a heuristic.
	headChunksPersistedSinceLastCheckpoint := 0

	defer func() {
		checkpointTimer.Stop()
		glog.Info("Maintenance loop stopped.")
		close(s.loopStopped)
	}()

	memoryFingerprints := s.cycleThroughMemoryFingerprints()
	archivedFingerprints := s.cycleThroughArchivedFingerprints()

loop:
	for {
		select {
		case <-s.loopStopping:
			break loop
		case <-checkpointTimer.C:
			s.persistence.checkpointSeriesMapAndHeads(s.fpToSeries, s.fpLocker)
			headChunksPersistedSinceLastCheckpoint = 0
			checkpointTimer.Reset(s.checkpointInterval)
		case fp := <-memoryFingerprints:
			s.purgeSeries(fp, clientmodel.TimestampFromTime(time.Now()).Add(-1*s.purgeAfter))
			s.seriesOps.WithLabelValues(memoryMaintenance).Inc()
		case fp := <-archivedFingerprints:
			s.purgeSeries(fp, clientmodel.TimestampFromTime(time.Now()).Add(-1*s.purgeAfter))
			s.seriesOps.WithLabelValues(archiveMaintenance).Inc()
		case <-s.countPersistedHeadChunks:
			headChunksPersistedSinceLastCheckpoint++
			if headChunksPersistedSinceLastCheckpoint >= s.checkpointDirtySeriesLimit {
				checkpointTimer.Reset(0)
			}
		}
	}
	// Wait until both channels are closed.
	for range memoryFingerprints {
	}
	for range archivedFingerprints {
	}
}
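
A tiny standalone sketch of the Reset(0) trick in the loop above: resetting the checkpoint timer to zero makes its case fire on the next pass through the select.

package main

import (
	"fmt"
	"time"
)

func main() {
	checkpoint := time.NewTimer(time.Hour) // normally fires rarely
	defer checkpoint.Stop()

	// Pretend the dirty-series limit was just exceeded: resetting the
	// timer to zero makes it fire immediately on the next receive.
	checkpoint.Reset(0)

	<-checkpoint.C
	fmt.Println("early checkpoint triggered")
}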
Example #4
func (serv MetricsService) Query(w http.ResponseWriter, r *http.Request) {
	setAccessControlHeaders(w)

	params := http_utils.GetQueryParams(r)
	expr := params.Get("expr")
	asText := params.Get("asText")

	var format ast.OutputFormat
	// BUG(julius): Use Content-Type negotiation.
	if asText == "" {
		format = ast.JSON
		w.Header().Set("Content-Type", "application/json")
	} else {
		format = ast.TEXT
		w.Header().Set("Content-Type", "text/plain")
	}

	exprNode, err := rules.LoadExprFromString(expr)
	if err != nil {
		fmt.Fprint(w, ast.ErrorToJSON(err))
		return
	}

	timestamp := clientmodel.TimestampFromTime(serv.time.Now())

	queryStats := stats.NewTimerGroup()
	result := ast.EvalToString(exprNode, timestamp, format, serv.Storage, queryStats)
	glog.Infof("Instant query: %s\nQuery stats:\n%s\n", expr, queryStats)
	fmt.Fprint(w, result)
}
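
A minimal sketch of the Content-Type negotiation the BUG comment asks for, keying off the request's Accept header rather than the asText parameter; wantsJSON is a hypothetical helper, not the project's actual fix.

package main

import (
	"fmt"
	"net/http"
	"strings"
)

// wantsJSON inspects the Accept header, as the BUG(julius) comment
// suggests, instead of relying on an asText query parameter.
func wantsJSON(r *http.Request) bool {
	accept := r.Header.Get("Accept")
	return accept == "" || strings.Contains(accept, "application/json")
}

func main() {
	r, _ := http.NewRequest("GET", "/api/query?expr=up", nil)
	r.Header.Set("Accept", "text/plain")
	fmt.Println(wantsJSON(r)) // false -> would select ast.TEXT
}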
Example #5
func verifyStorage(t testing.TB, s Storage, samples clientmodel.Samples, maxAge time.Duration) bool {
	result := true
	for _, i := range rand.Perm(len(samples)) {
		sample := samples[i]
		if sample.Timestamp.Before(clientmodel.TimestampFromTime(time.Now().Add(-maxAge))) {
			continue
			// TODO: Once we have a guaranteed cutoff at the
			// retention period, we can verify here that no results
			// are returned.
		}
		fp := sample.Metric.Fingerprint()
		p := s.NewPreloader()
		p.PreloadRange(fp, sample.Timestamp, sample.Timestamp, time.Hour)
		found := s.NewIterator(fp).GetValueAtTime(sample.Timestamp)
		if len(found) != 1 {
			t.Errorf("Sample %#v: Expected exactly one value, found %d.", sample, len(found))
			result = false
			p.Close()
			continue
		}
		want := float64(sample.Value)
		got := float64(found[0].Value)
		if want != got || sample.Timestamp != found[0].Timestamp {
			t.Errorf(
				"Value (or timestamp) mismatch, want %f (at time %v), got %f (at time %v).",
				want, sample.Timestamp, got, found[0].Timestamp,
			)
			result = false
		}
		p.Close()
	}
	return result
}
Example #6
// cycleThroughArchivedFingerprints returns a channel that emits fingerprints
// for archived series in a throttled fashion. It continues to cycle through all
// archived fingerprints until s.loopStopping is closed.
func (s *memorySeriesStorage) cycleThroughArchivedFingerprints() chan clientmodel.Fingerprint {
	archivedFingerprints := make(chan clientmodel.Fingerprint)
	go func() {
		defer close(archivedFingerprints)

		for {
			archivedFPs, err := s.persistence.getFingerprintsModifiedBefore(
				clientmodel.TimestampFromTime(time.Now()).Add(-1 * s.purgeAfter),
			)
			if err != nil {
				glog.Error("Failed to lookup archived fingerprint ranges: ", err)
				s.waitForNextFP(0)
				continue
			}
			// Initial wait, also important if there are no FPs yet.
			if !s.waitForNextFP(len(archivedFPs)) {
				return
			}
			begin := time.Now()
			for _, fp := range archivedFPs {
				select {
				case archivedFingerprints <- fp:
				case <-s.loopStopping:
					return
				}
				s.waitForNextFP(len(archivedFPs))
			}
			glog.Infof("Completed maintenance sweep through archived fingerprints in %v.", time.Since(begin))
		}
	}()
	return archivedFingerprints
}
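
A minimal sketch of the send-or-stop select above, with plain ints standing in for fingerprints and the throttling omitted: each value is offered on the output channel, and the goroutine abandons the sweep once the stop channel closes.

package main

import "fmt"

// cycle emits values until stop is closed, mirroring the structure of
// cycleThroughArchivedFingerprints.
func cycle(stop <-chan struct{}) <-chan int {
	out := make(chan int)
	go func() {
		defer close(out)
		for i := 0; ; i++ {
			select {
			case <-stop:
				return
			default:
			}
			select {
			case out <- i:
			case <-stop:
				return
			}
		}
	}()
	return out
}

func main() {
	stop := make(chan struct{})
	out := cycle(stop)
	for i := 0; i < 3; i++ {
		fmt.Println(<-out)
	}
	close(stop)
	// Drain until closed, as the maintenance loop does on shutdown.
	for range out {
	}
}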
Example #7
func BenchmarkGetFingerprintsForNotEqualMatcher1000(b *testing.B) {
	numSeries := 1000
	samples := make(clientmodel.Samples, 0, numSeries)
	for i := 0; i < numSeries; i++ {
		samples = append(samples, &clientmodel.Sample{
			Metric: clientmodel.Metric{
				clientmodel.MetricNameLabel: "testmetric",
				"instance":                  clientmodel.LabelValue(fmt.Sprint("instance_", i)),
			},
			Value:     1,
			Timestamp: clientmodel.TimestampFromTime(time.Date(2000, 0, 0, 0, 0, 0, 0, time.UTC)),
		})
	}

	s := NewMemorySeriesStorage(MemorySeriesOptions{})
	if err := s.AppendSamples(samples); err != nil {
		b.Fatal(err)
	}

	m, err := metric.NewLabelMatcher(metric.NotEqual, "instance", "foo")
	if err != nil {
		b.Fatal(err)
	}

	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		s.GetFingerprintsForLabelMatchers(metric.LabelMatchers{m})
	}
}
Example #8
func (g *Prometheus) gatherURL(url string, acc plugins.Accumulator) error {
	resp, err := http.Get(url)
	if err != nil {
		return fmt.Errorf("error making HTTP request to %s: %s", url, err)
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return fmt.Errorf("%s returned HTTP status %s", url, resp.Status)
	}
	processor, err := extraction.ProcessorForRequestHeader(resp.Header)
	if err != nil {
		return fmt.Errorf("error getting extractor for %s: %s", url, err)
	}

	ingestor := &Ingester{
		acc: acc,
	}

	options := &extraction.ProcessOptions{
		Timestamp: model.TimestampFromTime(time.Now()),
	}

	err = processor.ProcessSingle(resp.Body, ingestor, options)
	if err != nil {
		return fmt.Errorf("error getting processing samples for %s: %s", url, err)
	}
	return nil
}
Example #9
func parseTime(s string) (clientmodel.Timestamp, error) {
	if t, err := strconv.ParseFloat(s, 64); err == nil {
		ts := int64(t * float64(time.Second))
		return clientmodel.TimestampFromUnixNano(ts), nil
	}
	if t, err := time.Parse(time.RFC3339Nano, s); err == nil {
		return clientmodel.TimestampFromTime(t), nil
	}
	return 0, fmt.Errorf("cannot parse %q to a valid timestamp", s)
}
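
A standalone sketch of the float branch above, with plain time.Time standing in for clientmodel.Timestamp: fractional Unix seconds become nanoseconds by multiplying with float64(time.Second).

package main

import (
	"fmt"
	"strconv"
	"time"
)

func main() {
	// "123.123" seconds -> nanoseconds, as in the float branch above.
	f, err := strconv.ParseFloat("123.123", 64)
	if err != nil {
		panic(err)
	}
	ns := int64(f * float64(time.Second))
	fmt.Println(ns)                     // the nanosecond count
	fmt.Println(time.Unix(0, ns).UTC()) // the resulting instant
}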
Example #10
func TestParseTime(t *testing.T) {
	ts, err := time.Parse(time.RFC3339Nano, "2015-06-03T13:21:58.555Z")
	if err != nil {
		panic(err)
	}

	var tests = []struct {
		input  string
		fail   bool
		result time.Time
	}{
		{
			input: "",
			fail:  true,
		}, {
			input: "abc",
			fail:  true,
		}, {
			input: "30s",
			fail:  true,
		}, {
			input:  "123",
			result: time.Unix(123, 0),
		}, {
			input:  "123.123",
			result: time.Unix(123, 123000000),
		}, {
			input:  "2015-06-03T13:21:58.555Z",
			result: ts,
		}, {
			input:  "2015-06-03T14:21:58.555+01:00",
			result: ts,
		},
	}

	for _, test := range tests {
		ts, err := parseTime(test.input)
		if err != nil && !test.fail {
			t.Errorf("Unexpected error for %q: %s", test.input, err)
			continue
		}
		if err == nil && test.fail {
			t.Errorf("Expected error for %q but got none", test.input)
			continue
		}
		res := clientmodel.TimestampFromTime(test.result)
		if !test.fail && ts != res {
			t.Errorf("Expected time %v for input %q but got %v", res, test.input, ts)
		}
	}
}
Example #11
func BenchmarkStreamAdd(b *testing.B) {
	b.StopTimer()
	s := newArrayStream(clientmodel.Metric{})
	// Allocate capacity only; appending to a length-b.N slice would
	// double the sample count with b.N leading zero values.
	samples := make(metric.Values, 0, b.N)
	for i := 0; i < b.N; i++ {
		samples = append(samples, metric.SamplePair{
			Timestamp: clientmodel.TimestampFromTime(time.Date(i, 0, 0, 0, 0, 0, 0, time.UTC)),
			Value:     clientmodel.SampleValue(i),
		})
	}

	b.StartTimer()
	s.add(samples)
}
Example #12
func benchmarkAppendSamples(b *testing.B, labels int) {
	b.StopTimer()
	s := NewMemorySeriesStorage(MemorySeriesOptions{})

	metric := clientmodel.Metric{}

	for i := 0; i < labels; i++ {
		metric[clientmodel.LabelName(fmt.Sprintf("label_%d", i))] = clientmodel.LabelValue(fmt.Sprintf("value_%d", i))
	}
	samples := make(clientmodel.Samples, 0, b.N)
	for i := 0; i < b.N; i++ {
		samples = append(samples, &clientmodel.Sample{
			Metric:    metric,
			Value:     clientmodel.SampleValue(i),
			Timestamp: clientmodel.TimestampFromTime(time.Date(i, 0, 0, 0, 0, 0, 0, time.UTC)),
		})
	}

	b.StartTimer()
	for i := 0; i < b.N; i++ {
		s.AppendSample(samples[i])
	}
}
Example #13
func (t *Target) scrape(sampleAppender storage.SampleAppender) (err error) {
	start := time.Now()
	baseLabels := t.BaseLabels()

	defer func() {
		t.status.setLastError(err)
		recordScrapeHealth(sampleAppender, clientmodel.TimestampFromTime(start), baseLabels, t.status.Health(), time.Since(start))
	}()

	req, err := http.NewRequest("GET", t.URL().String(), nil)
	if err != nil {
		panic(err)
	}
	req.Header.Add("Accept", acceptHeader)

	resp, err := t.httpClient.Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return fmt.Errorf("server returned HTTP status %s", resp.Status)
	}

	processor, err := extraction.ProcessorForRequestHeader(resp.Header)
	if err != nil {
		return err
	}

	t.ingestedSamples = make(chan clientmodel.Samples, ingestedSamplesCap)

	processOptions := &extraction.ProcessOptions{
		Timestamp: clientmodel.TimestampFromTime(start),
	}
	go func() {
		err = processor.ProcessSingle(resp.Body, t, processOptions)
		close(t.ingestedSamples)
	}()

	for samples := range t.ingestedSamples {
		for _, s := range samples {
			if t.honorLabels {
				// Merge the metric with the baseLabels for labels not already set in the
				// metric. This also considers labels explicitly set to the empty string.
				for ln, lv := range baseLabels {
					if _, ok := s.Metric[ln]; !ok {
						s.Metric[ln] = lv
					}
				}
			} else {
				// Merge the ingested metric with the base label set. On a collision the
				// value of the label is stored in a label prefixed with the exported prefix.
				for ln, lv := range baseLabels {
					if v, ok := s.Metric[ln]; ok && v != "" {
						s.Metric[clientmodel.ExportedLabelPrefix+ln] = v
					}
					s.Metric[ln] = lv
				}
			}
			// Avoid the copy in Relabel if there are no configs.
			if len(t.metricRelabelConfigs) > 0 {
				labels, err := Relabel(clientmodel.LabelSet(s.Metric), t.metricRelabelConfigs...)
				if err != nil {
					log.Errorf("Error while relabeling metric %s of instance %s: %s", s.Metric, t.url, err)
					continue
				}
				// Check if the timeseries was dropped.
				if labels == nil {
					continue
				}
				s.Metric = clientmodel.Metric(labels)
			}
			sampleAppender.Append(s)
		}
	}
	return err
}
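
A minimal sketch of the two merge strategies above, with ordinary string maps standing in for clientmodel.Metric and the literal "exported_" prefix standing in for clientmodel.ExportedLabelPrefix.

package main

import "fmt"

// merge applies base labels to a scraped metric. With honor=true the
// scraped value wins; otherwise the scraped value is moved aside under
// the "exported_" prefix and the base value takes the label.
func merge(metric, base map[string]string, honor bool) {
	for ln, lv := range base {
		if honor {
			if _, ok := metric[ln]; !ok {
				metric[ln] = lv
			}
			continue
		}
		if v, ok := metric[ln]; ok && v != "" {
			metric["exported_"+ln] = v
		}
		metric[ln] = lv
	}
}

func main() {
	m := map[string]string{"job": "scraped-job"}
	merge(m, map[string]string{"job": "config-job"}, false)
	fmt.Println(m) // map[exported_job:scraped-job job:config-job]
}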
Example #14
	clientmodel "github.com/prometheus/client_golang/model"

	"github.com/prometheus/prometheus/promql"
	"github.com/prometheus/prometheus/storage/local"
	"github.com/prometheus/prometheus/util/route"
)

// This is a bit annoying. On one hand, we have to choose a current timestamp
// because the storage doesn't have a mocked-out time yet and would otherwise
// immediately throw away "old" samples. On the other hand, we have to make
// sure that the float value survives the parsing and re-formatting in the
// query layer precisely without any change. Thus we round to seconds and then
// add known-good digits after the decimal point which behave well in
// parsing/re-formatting.
var testTimestamp = clientmodel.TimestampFromTime(time.Now().Round(time.Second)).Add(124 * time.Millisecond)

func testNow() clientmodel.Timestamp {
	return testTimestamp
}
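
// A small standalone check of the comment above, assuming the query layer
// round-trips timestamps through a float64 seconds value: a whole second
// plus exactly 124ms formats compactly and re-parses to the identical
// float64. (Sketch only; the constant below is an arbitrary example instant.)
package main

import (
	"fmt"
	"strconv"
)

func main() {
	secs := 1437000000.124 // whole seconds plus the known-good 124ms

	s := strconv.FormatFloat(secs, 'f', -1, 64)
	back, err := strconv.ParseFloat(s, 64)
	if err != nil {
		panic(err)
	}
	fmt.Println(s, back == secs) // prints 1437000000.124 true
}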

func TestQuery(t *testing.T) {
	scenarios := []struct {
		// URL query string.
		queryStr string
		// Expected HTTP response status code.
		status int
		// Regex to match against response body.
		bodyRe string
	}{
		{
Example #15
func GetRangeValuesTests(persistenceMaker func() (metric.ViewablePersistence, test.Closer), onlyBoundaries bool, t test.Tester) {
	type value struct {
		year  int
		month time.Month
		day   int
		hour  int
		value clientmodel.SampleValue
	}

	type input struct {
		openYear  int
		openMonth time.Month
		openDay   int
		openHour  int
		endYear   int
		endMonth  time.Month
		endDay    int
		endHour   int
	}

	type output struct {
		year  int
		month time.Month
		day   int
		hour  int
		value clientmodel.SampleValue
	}

	type behavior struct {
		name   string
		input  input
		output []output
	}

	var contexts = []struct {
		name      string
		values    []value
		behaviors []behavior
	}{
		{
			name:   "no values",
			values: []value{},
			behaviors: []behavior{
				{
					name: "non-existent interval",
					input: input{
						openYear:  1984,
						openMonth: 3,
						openDay:   30,
						openHour:  0,
						endYear:   1985,
						endMonth:  3,
						endDay:    30,
						endHour:   0,
					},
				},
			},
		},
		{
			name: "singleton value",
			values: []value{
				{
					year:  1984,
					month: 3,
					day:   30,
					hour:  0,
					value: 0,
				},
			},
			behaviors: []behavior{
				{
					name: "start on first value",
					input: input{
						openYear:  1984,
						openMonth: 3,
						openDay:   30,
						openHour:  0,
						endYear:   1985,
						endMonth:  3,
						endDay:    30,
						endHour:   0,
					},
					output: []output{
						{
							year:  1984,
							month: 3,
							day:   30,
							hour:  0,
							value: 0,
						},
					},
				},
				{
					name: "end on first value",
					input: input{
						openYear:  1983,
						openMonth: 3,
						openDay:   30,
						openHour:  0,
						endYear:   1984,
						endMonth:  3,
						endDay:    30,
						endHour:   0,
					},
					output: []output{
						{
							year:  1984,
							month: 3,
							day:   30,
							hour:  0,
							value: 0,
						},
					},
				},
				{
					name: "overlap on first value",
					input: input{
						openYear:  1983,
						openMonth: 3,
						openDay:   30,
						openHour:  0,
						endYear:   1985,
						endMonth:  3,
						endDay:    30,
						endHour:   0,
					},
					output: []output{
						{
							year:  1984,
							month: 3,
							day:   30,
							hour:  0,
							value: 0,
						},
					},
				},
			},
		},
		{
			name: "two values",
			values: []value{
				{
					year:  1984,
					month: 3,
					day:   30,
					hour:  0,
					value: 0,
				},
				{
					year:  1985,
					month: 3,
					day:   30,
					hour:  0,
					value: 1,
				},
			},
			behaviors: []behavior{
				{
					name: "start on first value",
					input: input{
						openYear:  1984,
						openMonth: 3,
						openDay:   30,
						openHour:  0,
						endYear:   1985,
						endMonth:  3,
						endDay:    30,
						endHour:   0,
					},
					output: []output{
						{
							year:  1984,
							month: 3,
							day:   30,
							hour:  0,
							value: 0,
						},
						{
							year:  1985,
							month: 3,
							day:   30,
							hour:  0,
							value: 1,
						},
					},
				},
				{
					name: "start on second value",
					input: input{
						openYear:  1985,
						openMonth: 3,
						openDay:   30,
						openHour:  0,
						endYear:   1986,
						endMonth:  3,
						endDay:    30,
						endHour:   0,
					},
					output: []output{
						{
							year:  1985,
							month: 3,
							day:   30,
							hour:  0,
							value: 1,
						},
					},
				},
				{
					name: "end on first value",
					input: input{
						openYear:  1983,
						openMonth: 3,
						openDay:   30,
						openHour:  0,
						endYear:   1984,
						endMonth:  3,
						endDay:    30,
						endHour:   0,
					},
					output: []output{
						{
							year:  1984,
							month: 3,
							day:   30,
							hour:  0,
							value: 0,
						},
					},
				},
				{
					name: "end on second value",
					input: input{
						openYear:  1985,
						openMonth: 1,
						openDay:   1,
						openHour:  0,
						endYear:   1985,
						endMonth:  3,
						endDay:    30,
						endHour:   0,
					},
					output: []output{
						{
							year:  1985,
							month: 3,
							day:   30,
							hour:  0,
							value: 1,
						},
					},
				},
				{
					name: "overlap on values",
					input: input{
						openYear:  1983,
						openMonth: 3,
						openDay:   30,
						openHour:  0,
						endYear:   1986,
						endMonth:  3,
						endDay:    30,
						endHour:   0,
					},
					output: []output{
						{
							year:  1984,
							month: 3,
							day:   30,
							hour:  0,
							value: 0,
						},
						{
							year:  1985,
							month: 3,
							day:   30,
							hour:  0,
							value: 1,
						},
					},
				},
			},
		},
		{
			name: "three values",
			values: []value{
				{
					year:  1984,
					month: 3,
					day:   30,
					hour:  0,
					value: 0,
				},
				{
					year:  1985,
					month: 3,
					day:   30,
					hour:  0,
					value: 1,
				},
				{
					year:  1986,
					month: 3,
					day:   30,
					hour:  0,
					value: 2,
				},
			},
			behaviors: []behavior{
				{
					name: "start on first value",
					input: input{
						openYear:  1984,
						openMonth: 3,
						openDay:   30,
						openHour:  0,
						endYear:   1985,
						endMonth:  3,
						endDay:    30,
						endHour:   0,
					},
					output: []output{
						{
							year:  1984,
							month: 3,
							day:   30,
							hour:  0,
							value: 0,
						},
						{
							year:  1985,
							month: 3,
							day:   30,
							hour:  0,
							value: 1,
						},
					},
				},
				{
					name: "start on second value",
					input: input{
						openYear:  1985,
						openMonth: 3,
						openDay:   30,
						openHour:  0,
						endYear:   1986,
						endMonth:  3,
						endDay:    30,
						endHour:   0,
					},
					output: []output{
						{
							year:  1985,
							month: 3,
							day:   30,
							hour:  0,
							value: 1,
						},
						{
							year:  1986,
							month: 3,
							day:   30,
							hour:  0,
							value: 2,
						},
					},
				},
				{
					name: "end on first value",
					input: input{
						openYear:  1983,
						openMonth: 3,
						openDay:   30,
						openHour:  0,
						endYear:   1984,
						endMonth:  3,
						endDay:    30,
						endHour:   0,
					},
					output: []output{
						{
							year:  1984,
							month: 3,
							day:   30,
							hour:  0,
							value: 0,
						},
					},
				},
				{
					name: "end on second value",
					input: input{
						openYear:  1985,
						openMonth: 1,
						openDay:   1,
						openHour:  0,
						endYear:   1985,
						endMonth:  3,
						endDay:    30,
						endHour:   0,
					},
					output: []output{
						{
							year:  1985,
							month: 3,
							day:   30,
							hour:  0,
							value: 1,
						},
					},
				},
				{
					name: "overlap on values",
					input: input{
						openYear:  1983,
						openMonth: 3,
						openDay:   30,
						openHour:  0,
						endYear:   1986,
						endMonth:  3,
						endDay:    30,
						endHour:   0,
					},
					output: []output{
						{
							year:  1984,
							month: 3,
							day:   30,
							hour:  0,
							value: 0,
						},
						{
							year:  1985,
							month: 3,
							day:   30,
							hour:  0,
							value: 1,
						},
						{
							year:  1986,
							month: 3,
							day:   30,
							hour:  0,
							value: 2,
						},
					},
				},
			},
		},
	}

	for i, context := range contexts {
		// Wrapping in function to enable garbage collection of resources.
		func() {
			p, closer := persistenceMaker()

			defer closer.Close()
			defer p.Close()

			m := clientmodel.Metric{
				clientmodel.MetricNameLabel: "age_in_years",
			}

			for _, value := range context.values {
				testAppendSamples(p, &clientmodel.Sample{
					Value:     clientmodel.SampleValue(value.value),
					Timestamp: clientmodel.TimestampFromTime(time.Date(value.year, value.month, value.day, value.hour, 0, 0, 0, time.UTC)),
					Metric:    m,
				}, t)
			}

			for j, behavior := range context.behaviors {
				input := behavior.input
				open := clientmodel.TimestampFromTime(time.Date(input.openYear, input.openMonth, input.openDay, input.openHour, 0, 0, 0, time.UTC))
				end := clientmodel.TimestampFromTime(time.Date(input.endYear, input.endMonth, input.endDay, input.endHour, 0, 0, 0, time.UTC))
				in := metric.Interval{
					OldestInclusive: open,
					NewestInclusive: end,
				}

				actualValues := metric.Values{}
				expectedValues := []output{}
				fp := &clientmodel.Fingerprint{}
				fp.LoadFromMetric(m)
				if onlyBoundaries {
					actualValues = p.GetBoundaryValues(fp, in)
					l := len(behavior.output)
					if l == 1 {
						expectedValues = behavior.output[0:1]
					}
					if l > 1 {
						// Take first and last explicitly; appending to a
						// sub-slice of behavior.output would overwrite its
						// second element in place.
						expectedValues = []output{behavior.output[0], behavior.output[l-1]}
					}
				} else {
					actualValues = p.GetRangeValues(fp, in)
					expectedValues = behavior.output
				}

				if actualValues == nil && len(expectedValues) != 0 {
					t.Fatalf("%d.%d(%s). Expected %v but got: %v\n", i, j, behavior.name, expectedValues, actualValues)
				}

				if expectedValues == nil {
					if actualValues != nil {
						t.Fatalf("%d.%d(%s). Expected nil values but got: %s\n", i, j, behavior.name, actualValues)
					}
				} else {
					if len(expectedValues) != len(actualValues) {
						t.Fatalf("%d.%d(%s). Expected length %d but got: %d\n", i, j, behavior.name, len(expectedValues), len(actualValues))
					}

					for k, actual := range actualValues {
						expected := expectedValues[k]

						if actual.Value != clientmodel.SampleValue(expected.value) {
							t.Fatalf("%d.%d.%d(%s). Expected %v but got: %v\n", i, j, k, behavior.name, expected.value, actual.Value)
						}

						if actual.Timestamp.Time().Year() != expected.year {
							t.Fatalf("%d.%d.%d(%s). Expected %d but got: %d\n", i, j, k, behavior.name, expected.year, actual.Timestamp.Time().Year())
						}
						if actual.Timestamp.Time().Month() != expected.month {
							t.Fatalf("%d.%d.%d(%s). Expected %d but got: %d\n", i, j, k, behavior.name, expected.month, actual.Timestamp.Time().Month())
						}
						// XXX: Find the problem here.
						// Mismatches have occurred here for a long time in the
						// LevelDB case, but not in the in-memory case.
						//
						// if actual.Timestamp.Day() != expected.day {
						// 	t.Fatalf("%d.%d.%d(%s). Expected %d but got: %d\n", i, j, k, behavior.name, expected.day, actual.Timestamp.Day())
						// }
						// if actual.Timestamp.Hour() != expected.hour {
						// 	t.Fatalf("%d.%d.%d(%s). Expected %d but got: %d\n", i, j, k, behavior.name, expected.hour, actual.Timestamp.Hour())
						// }
					}
				}
			}
		}()
	}
}
Example #16
func GetValueAtTimeTests(persistenceMaker func() (metric.ViewablePersistence, test.Closer), t test.Tester) {
	type value struct {
		year  int
		month time.Month
		day   int
		hour  int
		value clientmodel.SampleValue
	}

	type input struct {
		year  int
		month time.Month
		day   int
		hour  int
	}

	type output []clientmodel.SampleValue

	type behavior struct {
		name   string
		input  input
		output output
	}

	var contexts = []struct {
		name      string
		values    []value
		behaviors []behavior
	}{
		{
			name:   "no values",
			values: []value{},
			behaviors: []behavior{
				{
					name: "random target",
					input: input{
						year:  1984,
						month: 3,
						day:   30,
						hour:  0,
					},
				},
			},
		},
		{
			name: "singleton",
			values: []value{
				{
					year:  1984,
					month: 3,
					day:   30,
					hour:  0,
					value: 0,
				},
			},
			behaviors: []behavior{
				{
					name: "exact",
					input: input{
						year:  1984,
						month: 3,
						day:   30,
						hour:  0,
					},
					output: output{
						0,
					},
				},
				{
					name: "before",
					input: input{
						year:  1984,
						month: 3,
						day:   29,
						hour:  0,
					},
					output: output{
						0,
					},
				},
				{
					name: "after",
					input: input{
						year:  1984,
						month: 3,
						day:   31,
						hour:  0,
					},
					output: output{
						0,
					},
				},
			},
		},
		{
			name: "double",
			values: []value{
				{
					year:  1984,
					month: 3,
					day:   30,
					hour:  0,
					value: 0,
				},
				{
					year:  1985,
					month: 3,
					day:   30,
					hour:  0,
					value: 1,
				},
			},
			behaviors: []behavior{
				{
					name: "exact first",
					input: input{
						year:  1984,
						month: 3,
						day:   30,
						hour:  0,
					},
					output: output{
						0,
					},
				},
				{
					name: "exact second",
					input: input{
						year:  1985,
						month: 3,
						day:   30,
						hour:  0,
					},
					output: output{
						1,
					},
				},
				{
					name: "before first",
					input: input{
						year:  1983,
						month: 9,
						day:   29,
						hour:  12,
					},
					output: output{
						0,
					},
				},
				{
					name: "after second",
					input: input{
						year:  1985,
						month: 9,
						day:   28,
						hour:  12,
					},
					output: output{
						1,
					},
				},
				{
					name: "middle",
					input: input{
						year:  1984,
						month: 9,
						day:   28,
						hour:  12,
					},
					output: output{
						0,
						1,
					},
				},
			},
		},
		{
			name: "triple",
			values: []value{
				{
					year:  1984,
					month: 3,
					day:   30,
					hour:  0,
					value: 0,
				},
				{
					year:  1985,
					month: 3,
					day:   30,
					hour:  0,
					value: 1,
				},
				{
					year:  1986,
					month: 3,
					day:   30,
					hour:  0,
					value: 2,
				},
			},
			behaviors: []behavior{
				{
					name: "exact first",
					input: input{
						year:  1984,
						month: 3,
						day:   30,
						hour:  0,
					},
					output: output{
						0,
					},
				},
				{
					name: "exact second",
					input: input{
						year:  1985,
						month: 3,
						day:   30,
						hour:  0,
					},
					output: output{
						1,
					},
				},
				{
					name: "exact third",
					input: input{
						year:  1986,
						month: 3,
						day:   30,
						hour:  0,
					},
					output: output{
						2,
					},
				},
				{
					name: "before first",
					input: input{
						year:  1983,
						month: 9,
						day:   29,
						hour:  12,
					},
					output: output{
						0,
					},
				},
				{
					name: "after third",
					input: input{
						year:  1986,
						month: 9,
						day:   28,
						hour:  12,
					},
					output: output{
						2,
					},
				},
				{
					name: "first middle",
					input: input{
						year:  1984,
						month: 9,
						day:   28,
						hour:  12,
					},
					output: output{
						0,
						1,
					},
				},
				{
					name: "second middle",
					input: input{
						year:  1985,
						month: 9,
						day:   28,
						hour:  12,
					},
					output: output{
						1,
						2,
					},
				},
			},
		},
	}

	for i, context := range contexts {
		// Wrapping in function to enable garbage collection of resources.
		func() {
			p, closer := persistenceMaker()

			defer closer.Close()
			defer p.Close()

			m := clientmodel.Metric{
				clientmodel.MetricNameLabel: "age_in_years",
			}

			for _, value := range context.values {
				testAppendSamples(p, &clientmodel.Sample{
					Value:     clientmodel.SampleValue(value.value),
					Timestamp: clientmodel.TimestampFromTime(time.Date(value.year, value.month, value.day, value.hour, 0, 0, 0, time.UTC)),
					Metric:    m,
				}, t)
			}

			for j, behavior := range context.behaviors {
				input := behavior.input
				// Use a distinct name to avoid shadowing the time package.
				ts := clientmodel.TimestampFromTime(time.Date(input.year, input.month, input.day, input.hour, 0, 0, 0, time.UTC))
				fingerprint := &clientmodel.Fingerprint{}
				fingerprint.LoadFromMetric(m)
				actual := p.GetValueAtTime(fingerprint, ts)

				if len(behavior.output) != len(actual) {
					t.Fatalf("%d.%d(%s.%s). Expected %d samples but got: %v\n", i, j, context.name, behavior.name, len(behavior.output), actual)
				}
				for k, samplePair := range actual {
					if samplePair.Value != behavior.output[k] {
						t.Fatalf("%d.%d.%d(%s.%s). Expected %s but got %s\n", i, j, k, context.name, behavior.name, behavior.output[k], samplePair)

					}
				}
			}
		}()
	}
}
Example #17
func testMakeView(t test.Tester, flushToDisk bool) {
	type in struct {
		atTime     []getValuesAtTimeOp
		atInterval []getValuesAtIntervalOp
		alongRange []getValuesAlongRangeOp
	}

	type out struct {
		atTime     []metric.Values
		atInterval []metric.Values
		alongRange []metric.Values
	}
	m := clientmodel.Metric{clientmodel.MetricNameLabel: "request_count"}
	fingerprint := &clientmodel.Fingerprint{}
	fingerprint.LoadFromMetric(m)
	var (
		instant   = clientmodel.TimestampFromTime(time.Date(1984, 3, 30, 0, 0, 0, 0, time.Local))
		scenarios = []struct {
			data     clientmodel.Samples
			in       in
			out      out
			diskOnly bool
		}{
			// No sample, but query asks for one.
			{
				in: in{
					atTime: []getValuesAtTimeOp{
						{
							baseOp: baseOp{current: instant},
						},
					},
				},
				out: out{
					atTime: []metric.Values{{}},
				},
			},
			// Single sample, query asks for exact sample time.
			{
				data: clientmodel.Samples{
					{
						Metric:    m,
						Value:     0,
						Timestamp: instant,
					},
				},
				in: in{
					atTime: []getValuesAtTimeOp{
						{
							baseOp: baseOp{current: instant},
						},
					},
				},
				out: out{
					atTime: []metric.Values{
						{
							{
								Timestamp: instant,
								Value:     0,
							},
						},
					},
				},
			},
			// Two samples, query time before both samples.
			{
				data: clientmodel.Samples{
					{
						Metric:    m,
						Value:     0,
						Timestamp: instant.Add(time.Second),
					},
					{
						Metric:    m,
						Value:     1,
						Timestamp: instant.Add(time.Second * 2),
					},
				},
				in: in{
					atTime: []getValuesAtTimeOp{
						{
							baseOp: baseOp{current: instant},
						},
					},
				},
				out: out{
					atTime: []metric.Values{
						{
							{
								Timestamp: instant.Add(time.Second),
								Value:     0,
							},
						},
					},
				},
			},
			// Single sample, query time after the sample.
			{
				data: clientmodel.Samples{
					{
						Metric:    m,
						Value:     0,
						Timestamp: instant,
					},
				},
				in: in{
					atTime: []getValuesAtTimeOp{
						{
							baseOp: baseOp{current: instant.Add(time.Second)},
						},
					},
				},
				out: out{
					atTime: []metric.Values{
						{
							{
								Timestamp: instant,
								Value:     0,
							},
						},
					},
				},
			},
			// Two samples, query asks for first sample time.
			{
				data: clientmodel.Samples{
					{
						Metric:    m,
						Value:     0,
						Timestamp: instant,
					},
					{
						Metric:    m,
						Value:     1,
						Timestamp: instant.Add(time.Second),
					},
				},
				in: in{
					atTime: []getValuesAtTimeOp{
						{
							baseOp: baseOp{current: instant},
						},
					},
				},
				out: out{
					atTime: []metric.Values{
						{
							{
								Timestamp: instant,
								Value:     0,
							},
						},
					},
				},
			},
			// Three samples, query asks for second sample time.
			{
				data: clientmodel.Samples{
					{
						Metric:    m,
						Value:     0,
						Timestamp: instant,
					},
					{
						Metric:    m,
						Value:     1,
						Timestamp: instant.Add(time.Second),
					},
					{
						Metric:    m,
						Value:     2,
						Timestamp: instant.Add(time.Second * 2),
					},
				},
				in: in{
					atTime: []getValuesAtTimeOp{
						{
							baseOp: baseOp{current: instant.Add(time.Second)},
						},
					},
				},
				out: out{
					atTime: []metric.Values{
						{
							{
								Timestamp: instant.Add(time.Second),
								Value:     1,
							},
						},
					},
				},
			},
			// Three samples, query asks for time between first and second samples.
			{
				data: clientmodel.Samples{
					{
						Metric:    m,
						Value:     0,
						Timestamp: instant,
					},
					{
						Metric:    m,
						Value:     1,
						Timestamp: instant.Add(time.Second * 2),
					},
					{
						Metric:    m,
						Value:     2,
						Timestamp: instant.Add(time.Second * 4),
					},
				},
				in: in{
					atTime: []getValuesAtTimeOp{
						{
							baseOp: baseOp{current: instant.Add(time.Second)},
						},
					},
				},
				out: out{
					atTime: []metric.Values{
						{
							{
								Timestamp: instant,
								Value:     0,
							},
							{
								Timestamp: instant.Add(time.Second * 2),
								Value:     1,
							},
						},
					},
				},
			},
			// Three samples, query asks for time between second and third samples.
			{
				data: clientmodel.Samples{
					{
						Metric:    m,
						Value:     0,
						Timestamp: instant,
					},
					{
						Metric:    m,
						Value:     1,
						Timestamp: instant.Add(time.Second * 2),
					},
					{
						Metric:    m,
						Value:     2,
						Timestamp: instant.Add(time.Second * 4),
					},
				},
				in: in{
					atTime: []getValuesAtTimeOp{
						{
							baseOp: baseOp{current: instant.Add(time.Second * 3)},
						},
					},
				},
				out: out{
					atTime: []metric.Values{
						{
							{
								Timestamp: instant.Add(time.Second * 2),
								Value:     1,
							},
							{
								Timestamp: instant.Add(time.Second * 4),
								Value:     2,
							},
						},
					},
				},
			},
			// Two chunks of samples, query asks for values from second chunk.
			{
				data: buildSamples(
					instant,
					instant.Add(time.Duration(*leveldbChunkSize*4)*time.Second),
					2*time.Second,
					m,
				),
				in: in{
					atTime: []getValuesAtTimeOp{
						{
							baseOp: baseOp{current: instant.Add(time.Second*time.Duration(*leveldbChunkSize*2) + clientmodel.MinimumTick)},
						},
					},
				},
				out: out{
					atTime: []metric.Values{
						{
							{
								Timestamp: instant.Add(time.Second * time.Duration(*leveldbChunkSize*2)),
								Value:     200,
							},
							{
								Timestamp: instant.Add(time.Second * (time.Duration(*leveldbChunkSize*2) + 2)),
								Value:     201,
							},
						},
					},
				},
			},
			// Two chunks of samples, query asks for values between both chunks.
			{
				data: buildSamples(
					instant,
					instant.Add(time.Duration(*leveldbChunkSize*4)*time.Second),
					2*time.Second,
					m,
				),
				in: in{
					atTime: []getValuesAtTimeOp{
						{
							baseOp: baseOp{current: instant.Add(time.Second*time.Duration(*leveldbChunkSize*2) - clientmodel.MinimumTick)},
						},
					},
				},
				out: out{
					atTime: []metric.Values{
						{
							{
								Timestamp: instant.Add(time.Second * (time.Duration(*leveldbChunkSize*2) - 2)),
								Value:     199,
							},
							{
								Timestamp: instant.Add(time.Second * time.Duration(*leveldbChunkSize*2)),
								Value:     200,
							},
						},
					},
				},
			},
			// Two chunks of samples, getValuesAtIntervalOp spanning both.
			{
				data: buildSamples(
					instant,
					instant.Add(time.Duration(*leveldbChunkSize*6)*time.Second),
					2*time.Second,
					m,
				),
				in: in{
					atInterval: []getValuesAtIntervalOp{
						{
							getValuesAlongRangeOp: getValuesAlongRangeOp{
								baseOp:  baseOp{current: instant.Add(time.Second*time.Duration(*leveldbChunkSize*2-4) - clientmodel.MinimumTick)},
								through: instant.Add(time.Second*time.Duration(*leveldbChunkSize*2+4) + clientmodel.MinimumTick),
							},
							interval: time.Second * 6,
						},
					},
				},
				out: out{
					atInterval: []metric.Values{
						{
							{
								Timestamp: instant.Add(time.Second * time.Duration(*leveldbChunkSize*2-6)),
								Value:     197,
							},
							{
								Timestamp: instant.Add(time.Second * time.Duration(*leveldbChunkSize*2-4)),
								Value:     198,
							},
							{
								Timestamp: instant.Add(time.Second * time.Duration(*leveldbChunkSize*2)),
								Value:     200,
							},
							{
								Timestamp: instant.Add(time.Second * time.Duration(*leveldbChunkSize*2+2)),
								Value:     201,
							},
						},
					},
				},
			},
			// Three chunks of samples, getValuesAlongRangeOp spanning all of them.
			{
				data: buildSamples(
					instant,
					instant.Add(time.Duration(*leveldbChunkSize*6)*time.Second),
					2*time.Second,
					m,
				),
				in: in{
					alongRange: []getValuesAlongRangeOp{
						{
							baseOp:  baseOp{current: instant.Add(time.Second*time.Duration(*leveldbChunkSize*2-4) - clientmodel.MinimumTick)},
							through: instant.Add(time.Second*time.Duration(*leveldbChunkSize*4+2) + clientmodel.MinimumTick),
						},
					},
				},
				out: out{
					alongRange: []metric.Values{buildValues(
						clientmodel.SampleValue(198),
						instant.Add(time.Second*time.Duration(*leveldbChunkSize*2-4)),
						instant.Add(time.Second*time.Duration(*leveldbChunkSize*4+2)+clientmodel.MinimumTick),
						2*time.Second,
					)},
				},
			},
			// Three chunks of samples and a getValuesAlongIntervalOp with an
			// interval larger than the natural sample interval, spanning the gap
			// between the second and third chunks. To test two consecutive
			// ExtractSamples() calls for the same op, we need three on-disk chunks,
			// because the first two chunks are loaded from disk together and passed
			// as one unit into ExtractSamples(). Especially, we want to test that
			// the first sample of the last chunk is included in the result.
			//
			// This is a regression test for an interval operator advancing too far
			// past the end of the currently available chunk, effectively skipping
			// over a value which is only available in the next chunk passed to
			// ExtractSamples().
			//
			// Chunk and operator layout, assuming 200 samples per chunk:
			//
			//         Chunk 1      Chunk 2        Chunk 3
			// Values: 0......199   200......399   400......599
			// Times:  0......398   400......798   800......1198
			//              |                          |
			//              |_________ Operator _______|
			//             395 399 ......  795  799  803
			{
				data: buildSamples(
					instant,
					instant.Add(time.Duration(*leveldbChunkSize*6)*time.Second),
					2*time.Second,
					m,
				),
				in: in{
					atInterval: []getValuesAtIntervalOp{
						{
							getValuesAlongRangeOp: getValuesAlongRangeOp{
								baseOp:  baseOp{current: instant.Add(time.Second * time.Duration(*leveldbChunkSize*2-5))},
								through: instant.Add(time.Second * time.Duration(*leveldbChunkSize*4+3)),
							},
							interval: time.Second * 4,
						},
					},
				},
				out: out{
					atInterval: []metric.Values{
						// We need two overlapping buildValues() calls here since the last
						// value of the second chunk is extracted twice (value 399, time
						// offset 798s).
						append(
							// Values 197...399.
							// Times  394...798.
							buildValues(
								clientmodel.SampleValue(197),
								instant.Add(time.Second*time.Duration(*leveldbChunkSize*2-6)),
								instant.Add(time.Second*time.Duration(*leveldbChunkSize*4)),
								2*time.Second,
							),
							// Values 399...402.
							// Times  798...804.
							buildValues(
								clientmodel.SampleValue(399),
								instant.Add(time.Second*time.Duration(*leveldbChunkSize*4-2)),
								instant.Add(time.Second*time.Duration(*leveldbChunkSize*4+6)),
								2*time.Second,
							)...,
						),
					},
				},
				// This example only works with on-disk chunks due to the repeatedly
				// extracted value at the end of the second chunk.
				diskOnly: true,
			},
			// Single sample, getValuesAtIntervalOp starting after the sample.
			{
				data: clientmodel.Samples{
					{
						Metric:    m,
						Value:     0,
						Timestamp: instant,
					},
				},
				in: in{
					atInterval: []getValuesAtIntervalOp{
						{
							getValuesAlongRangeOp: getValuesAlongRangeOp{
								baseOp:  baseOp{current: instant.Add(time.Second)},
								through: instant.Add(time.Second * 2),
							},
							interval: time.Second,
						},
					},
				},
				out: out{
					atInterval: []metric.Values{
						{
							{
								Timestamp: instant,
								Value:     0,
							},
						},
					},
				},
			},
			// Single sample, getValuesAtIntervalOp starting before the sample.
			{
				data: clientmodel.Samples{
					{
						Metric:    m,
						Value:     0,
						Timestamp: instant.Add(time.Second),
					},
				},
				in: in{
					atInterval: []getValuesAtIntervalOp{
						{
							getValuesAlongRangeOp: getValuesAlongRangeOp{
								baseOp:  baseOp{current: instant},
								through: instant.Add(time.Second * 2),
							},
							interval: time.Second,
						},
					},
				},
				out: out{
					atInterval: []metric.Values{
						{
							{
								Timestamp: instant.Add(time.Second),
								Value:     0,
							},
							{
								Timestamp: instant.Add(time.Second),
								Value:     0,
							},
						},
					},
				},
			},
		}
	)

	for i, scenario := range scenarios {
		if scenario.diskOnly && !flushToDisk {
			continue
		}

		tiered, closer := NewTestTieredStorage(t)

		err := tiered.AppendSamples(scenario.data)
		if err != nil {
			t.Fatalf("%d. failed to add fixture data: %s", i, err)
		}

		if flushToDisk {
			tiered.Flush()
		}

		requestBuilder := tiered.NewViewRequestBuilder()

		for _, atTime := range scenario.in.atTime {
			requestBuilder.GetMetricAtTime(fingerprint, atTime.current)
		}

		for _, atInterval := range scenario.in.atInterval {
			requestBuilder.GetMetricAtInterval(fingerprint, atInterval.current, atInterval.through, atInterval.interval)
		}

		for _, alongRange := range scenario.in.alongRange {
			requestBuilder.GetMetricRange(fingerprint, alongRange.current, alongRange.through)
		}

		v, err := requestBuilder.Execute(time.Second*5, stats.NewTimerGroup())

		if err != nil {
			t.Fatalf("%d. failed due to %s", i, err)
		}

		// To get all values in the View, ask for the 'forever' interval.
		interval := metric.Interval{OldestInclusive: math.MinInt64, NewestInclusive: math.MaxInt64}

		for j, atTime := range scenario.out.atTime {
			actual := v.GetRangeValues(fingerprint, interval)

			if len(actual) != len(atTime) {
				t.Fatalf("%d.%d. expected %d output, got %d", i, j, len(atTime), len(actual))
			}

			for k, value := range atTime {
				if value.Value != actual[k].Value {
					t.Errorf("%d.%d.%d expected %v value, got %v", i, j, k, value.Value, actual[k].Value)
				}
				if !value.Timestamp.Equal(actual[k].Timestamp) {
					t.Errorf("%d.%d.%d expected %s (offset %ss) timestamp, got %s (offset %ss)", i, j, k, value.Timestamp, value.Timestamp.Sub(instant), actual[k].Timestamp, actual[k].Timestamp.Sub(instant))
				}
			}
		}

		for j, atInterval := range scenario.out.atInterval {
			actual := v.GetRangeValues(fingerprint, interval)

			if len(actual) != len(atInterval) {
				t.Fatalf("%d.%d. expected %d output, got %d", i, j, len(atInterval), len(actual))
			}

			for k, value := range atInterval {
				if value.Value != actual[k].Value {
					t.Errorf("%d.%d.%d expected %v value, got %v", i, j, k, value.Value, actual[k].Value)
				}
				if !value.Timestamp.Equal(actual[k].Timestamp) {
					t.Errorf("%d.%d.%d expected %s (offset %ds) timestamp, got %s (offset %ds, value %s)", i, j, k, value.Timestamp, int(value.Timestamp.Sub(instant)/time.Second), actual[k].Timestamp, int(actual[k].Timestamp.Sub(instant)/time.Second), actual[k].Value)
				}
			}
		}

		for j, alongRange := range scenario.out.alongRange {
			actual := v.GetRangeValues(fingerprint, interval)

			if len(actual) != len(alongRange) {
				t.Fatalf("%d.%d. expected %d output, got %d", i, j, len(alongRange), len(actual))
			}

			for k, value := range alongRange {
				if value.Value != actual[k].Value {
					t.Fatalf("%d.%d.%d expected %v value, got %v", i, j, k, value.Value, actual[k].Value)
				}
				if !value.Timestamp.Equal(actual[k].Timestamp) {
					t.Fatalf("%d.%d.%d expected %s (offset %ss) timestamp, got %s (offset %ss)", i, j, k, value.Timestamp, value.Timestamp.Sub(instant), actual[k].Timestamp, actual[k].Timestamp.Sub(instant))
				}
			}
		}

		closer.Close()
	}
}
Example #18
package tiered

import (
	"fmt"
	"time"

	clientmodel "github.com/prometheus/client_golang/model"

	"github.com/prometheus/prometheus/storage/metric"
	"github.com/prometheus/prometheus/utility/test"
)

var (
	// ``hg clone https://code.google.com/p/go ; cd go ; hg log | tail -n 20``
	usEastern, _ = time.LoadLocation("US/Eastern")
	testInstant  = clientmodel.TimestampFromTime(time.Date(1972, 7, 18, 19, 5, 45, 0, usEastern).In(time.UTC))
)

func testAppendSamples(p metric.Persistence, s *clientmodel.Sample, t test.Tester) {
	err := p.AppendSamples(clientmodel.Samples{s})
	if err != nil {
		t.Fatal(err)
	}
}

func buildLevelDBTestPersistencesMaker(name string, t test.Tester) func() (metric.Persistence, test.Closer) {
	return func() (metric.Persistence, test.Closer) {
		temporaryDirectory := test.NewTemporaryDirectory("get_value_at_time", t)

		p, err := NewLevelDBPersistence(temporaryDirectory.Path())
		if err != nil {