Example 1
// assertModelCorrect asserts that the model data being maintained by this
// testModel is equivalent to the actual time series data stored in the
// engine. If the actual data does not match the model, this method will print
// out detailed information about the differences between the two data sets.
func (tm *testModel) assertModelCorrect() {
	actualData := tm.getActualData()
	if !reflect.DeepEqual(tm.modelData, actualData) {
		// Provide a detailed diff of the actual data and the expected model.
		// This is done by comparing individual keys, and printing human
		// readable information about any keys which differ in value between
		// the two data sets.
		var buf bytes.Buffer
		buf.WriteString("Found unexpected differences in model data and actual data:\n")
		for k, vActual := range actualData {
			n, s, r, ts, err := DecodeDataKey([]byte(k))
			if err != nil {
				tm.t.Fatal(err)
			}
			if vModel, ok := tm.modelData[k]; !ok {
				fmt.Fprintf(&buf, "\tKey %s/%s@%d, r:%d from actual data was not found in model\n", n, s, ts, r)
			} else {
				if !gogoproto.Equal(vActual, vModel) {
					fmt.Fprintf(&buf, "\tKey %s/%s@%d, r:%d differs between model and actual:\n", n, s, ts, r)
					if its, err := proto.InternalTimeSeriesDataFromValue(vActual); err != nil {
						fmt.Fprintf(&buf, "\tActual value is not a valid time series: %v\n", vActual)
					} else {
						fmt.Fprintf(&buf, "\tActual value: %v\n", its)
					}
					if its, err := proto.InternalTimeSeriesDataFromValue(vModel); err != nil {
						fmt.Fprintf(&buf, "\tModel value is not a valid time series: %v\n", vModel)
					} else {
						fmt.Fprintf(&buf, "\tModel value: %v\n", its)
					}
				}
			}
		}

		// Detect keys in model which were not present in the actual data.
		for k := range tm.modelData {
			n, s, r, ts, err := DecodeDataKey([]byte(k))
			if err != nil {
				tm.t.Fatal(err)
			}
			if _, ok := actualData[k]; !ok {
				fmt.Fprintf(&buf, "Key %s/%s@%d, r:%d from model was not found in actual data", n, s, ts, r)
			}
		}

		tm.t.Fatal(buf.String())
	}
}
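// The key diffing above relies on DecodeDataKey reversing MakeDataKey (used
// in the later examples). A minimal round-trip sketch, assuming only the
// signatures visible in these snippets and that the timestamp passed to
// MakeDataKey is already aligned to the resolution's key duration:
func roundTripDataKey(t testing.TB, name, source string, res Resolution, startNanos int64) {
	// startNanos is assumed to be aligned to res.KeyDuration(), as in the
	// calls to MakeDataKey elsewhere in these examples.
	key := MakeDataKey(name, source, res, startNanos)
	n, s, r, ts, err := DecodeDataKey([]byte(key))
	if err != nil {
		t.Fatal(err)
	}
	if n != name || s != source || r != res || ts != startNanos {
		t.Fatalf("decoded key %s/%s@%d, r:%d does not match input %s/%s@%d, r:%d",
			n, s, ts, r, name, source, startNanos, res)
	}
}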
Example 2
// unmarshalTimeSeries unmarshals the time series value stored in the given
// byte slice. It is assumed that the time series value was originally
// marshalled as an MVCCMetadata with an inline value.
func unmarshalTimeSeries(t testing.TB, b []byte) *proto.InternalTimeSeriesData {
	if b == nil {
		return nil
	}
	var mvccValue MVCCMetadata
	if err := gogoproto.Unmarshal(b, &mvccValue); err != nil {
		t.Fatalf("error unmarshalling time series in text: %s", err.Error())
	}
	valueTS, err := proto.InternalTimeSeriesDataFromValue(mvccValue.Value)
	if err != nil {
		t.Fatalf("error unmarshalling time series in text: %s", err.Error())
	}
	return valueTS
}
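// For reference, the inverse wrapping (series -> Value -> inline MVCCMetadata
// -> bytes) appears in MergeInternalTimeSeriesData below. A hypothetical
// counterpart, using only calls shown in these snippets, so that
// unmarshalTimeSeries(t, marshalTimeSeries(t, ts)) round-trips:
func marshalTimeSeries(t testing.TB, ts *proto.InternalTimeSeriesData) []byte {
	val, err := ts.ToValue()
	if err != nil {
		t.Fatal(err)
	}
	b, err := gogoproto.Marshal(&MVCCMetadata{Value: val})
	if err != nil {
		t.Fatal(err)
	}
	return b
}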
Example 3
// MergeInternalTimeSeriesData exports the engine's C++ merge logic for
// InternalTimeSeriesData to higher level packages. This is intended primarily
// for consumption by high level testing of time series functionality.
func MergeInternalTimeSeriesData(sources ...*proto.InternalTimeSeriesData) (
	*proto.InternalTimeSeriesData, error) {
	// Wrap each proto in an inlined MVCC value, and marshal each wrapped value
	// to bytes. This is the format required by the engine.
	srcBytes := make([][]byte, 0, len(sources))
	for _, src := range sources {
		val, err := src.ToValue()
		if err != nil {
			return nil, err
		}
		bytes, err := gogoproto.Marshal(&MVCCMetadata{
			Value: val,
		})
		if err != nil {
			return nil, err
		}
		srcBytes = append(srcBytes, bytes)
	}

	// Merge every element into a nil byte slice, one at a time.
	var (
		mergedBytes []byte
		err         error
	)
	for _, bytes := range srcBytes {
		mergedBytes, err = goMerge(mergedBytes, bytes)
		if err != nil {
			return nil, err
		}
	}

	// Unmarshal merged bytes and extract the time series value within.
	var mvccValue MVCCMetadata
	if err := gogoproto.Unmarshal(mergedBytes, &mvccValue); err != nil {
		return nil, err
	}
	mergedTS, err := proto.InternalTimeSeriesDataFromValue(mvccValue.Value)
	if err != nil {
		return nil, err
	}
	return mergedTS, nil
}
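// Hypothetical usage sketch: convert raw TimeSeriesData into internal slabs
// (as storeInModel does in the next example) and merge them. This assumes all
// datapoints fall within a single key duration; as the bad combinations in
// the final example show, slabs with differing start timestamps or sample
// durations are rejected by the merge.
func mergeSingleSlab(data proto.TimeSeriesData, keyDuration, sampleDuration int64) (
	*proto.InternalTimeSeriesData, error) {
	slabs, err := data.ToInternal(keyDuration, sampleDuration)
	if err != nil {
		return nil, err
	}
	return MergeInternalTimeSeriesData(slabs...)
}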
Example 4
// storeInModel converts the given time series data to internal format and
// stores it in the model, merging it with any data already present at the
// same key.
func (tm *testModel) storeInModel(r Resolution, data proto.TimeSeriesData) {
	// Note the source, used to construct keys for model queries.
	tm.seenSources[data.Source] = true

	// Process and store data in the model.
	internalData, err := data.ToInternal(r.KeyDuration(), r.SampleDuration())
	if err != nil {
		tm.t.Fatalf("test could not convert time series to internal format: %s", err.Error())
	}

	for _, idata := range internalData {
		key := MakeDataKey(data.Name, data.Source, r, idata.StartTimestampNanos)
		keyStr := string(key)

		existing, ok := tm.modelData[keyStr]
		var newTs *proto.InternalTimeSeriesData
		if ok {
			existingTs, err := proto.InternalTimeSeriesDataFromValue(existing)
			if err != nil {
				tm.t.Fatalf("test could not extract time series from existing model value: %s", err.Error())
			}
			newTs, err = engine.MergeInternalTimeSeriesData(existingTs, idata)
			if err != nil {
				tm.t.Fatalf("test could not merge time series into model value: %s", err.Error())
			}
		} else {
			newTs, err = engine.MergeInternalTimeSeriesData(idata)
			if err != nil {
				tm.t.Fatalf("test could not merge time series into model value: %s", err.Error())
			}
		}
		val, err := newTs.ToValue()
		if err != nil {
			tm.t.Fatal(err)
		}
		tm.modelData[keyStr] = val
	}
}
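// Hypothetical usage of storeInModel. Only Name, Source and the datapoint
// fields TimestampNanos/Value are confirmed by these snippets; the Datapoints
// field name is an assumption, and r stands for whichever Resolution value
// the test uses.
func exampleStore(tm *testModel, r Resolution) {
	data := proto.TimeSeriesData{
		Name:   "test.metric",
		Source: "source1",
		Datapoints: []*proto.TimeSeriesDatapoint{
			{TimestampNanos: 1e9, Value: 100.0},
			{TimestampNanos: 2e9, Value: 200.0},
		},
	}
	tm.storeInModel(r, data)
	// Storing more data under the same key later merges with the existing
	// model value via engine.MergeInternalTimeSeriesData rather than
	// overwriting it.
}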
Example 5
// assertQuery generates a query result from the local test model and compares
// it against the result returned by the server.
func (tm *testModel) assertQuery(name string, agg *proto.TimeSeriesQueryAggregator,
	r Resolution, start, end int64, expectedDatapointCount int, expectedSourceCount int) {
	// Query the actual server.
	q := proto.TimeSeriesQueryRequest_Query{
		Name:       name,
		Aggregator: agg,
	}
	actualDatapoints, actualSources, err := tm.DB.Query(q, r, start, end)
	if err != nil {
		tm.t.Fatal(err)
	}
	if a, e := len(actualDatapoints), expectedDatapointCount; a != e {
		tm.t.Fatalf("query expected %d datapoints, got %d", e, a)
	}
	if a, e := len(actualSources), expectedSourceCount; a != e {
		tm.t.Fatalf("query expected %d sources, got %d", e, a)
	}

	// Construct an expected result for comparison.
	var expectedDatapoints []*proto.TimeSeriesDatapoint
	expectedSources := make([]string, 0)
	dataSpans := make(map[string]*dataSpan)

	// Iterate over all possible sources which may have data for this query.
	for sourceName := range tm.seenSources {
		// Iterate over all possible key times at which query data may be present.
		for time := start - (start % r.KeyDuration()); time < end; time += r.KeyDuration() {
			// Construct a key for this source/time and retrieve it from the model.
			key := MakeDataKey(name, sourceName, r, time)
			value := tm.modelData[string(key)]
			if value == nil {
				continue
			}

			// Add data from the key to the correct dataSpan.
			data, err := proto.InternalTimeSeriesDataFromValue(value)
			if err != nil {
				tm.t.Fatal(err)
			}
			ds, ok := dataSpans[sourceName]
			if !ok {
				ds = &dataSpan{
					startNanos:  start - (start % r.SampleDuration()),
					sampleNanos: r.SampleDuration(),
				}
				dataSpans[sourceName] = ds
				expectedSources = append(expectedSources, sourceName)
			}
			if err := ds.addData(data); err != nil {
				tm.t.Fatal(err)
			}
		}
	}

	// Iterate over data in all dataSpans and construct expected datapoints.
	var iters unionIterator
	for _, ds := range dataSpans {
		iters = append(iters, ds.newIterator())
	}
	iters.init()
	for iters.isValid() {
		var value float64
		switch q.GetAggregator() {
		case proto.TimeSeriesQueryAggregator_AVG:
			value = iters.avg()
		case proto.TimeSeriesQueryAggregator_AVG_RATE:
			value = iters.dAvg()
		}
		expectedDatapoints = append(expectedDatapoints, &proto.TimeSeriesDatapoint{
			TimestampNanos: iters.timestamp(),
			Value:          value,
		})
		iters.advance()
	}

	sort.Strings(expectedSources)
	sort.Strings(actualSources)
	if !reflect.DeepEqual(actualSources, expectedSources) {
		tm.t.Errorf("actual source list: %v, expected: %v", actualSources, expectedSources)
	}
	if !reflect.DeepEqual(actualDatapoints, expectedDatapoints) {
		tm.t.Errorf("actual datapoints: %v, expected: %v", actualDatapoints, expectedDatapoints)
	}
}
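// Hypothetical call illustrating the parameters. In a real test the same data
// must also have been written through tm.DB so that the server and the model
// agree; that plumbing is not shown in these snippets. The Enum() accessor on
// the aggregator is assumed from gogoproto's generated code.
func exampleAssertQuery(tm *testModel, r Resolution) {
	agg := proto.TimeSeriesQueryAggregator_AVG.Enum()
	tm.assertQuery(
		"test.metric", // series name
		agg,           // aggregator; AVG and AVG_RATE are the cases handled above
		r,             // resolution, as in the previous sketch
		0, 60*1e9,     // query window in nanoseconds
		2,             // expected datapoint count
		1,             // expected source count
	)
}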
Example 6
// TestGoMerge tests the function goMerge but not the integration with
// the storage engines. For that, see the engine tests.
func TestGoMerge(t *testing.T) {
	// Let's start with stuff that should go wrong.
	badCombinations := []struct {
		existing, update []byte
	}{
		{counter(0), appender("")},
		{appender(""), counter(0)},
		{counter(0), nil},
		{appender(""), nil},
		{
			timeSeriesInt(testtime, 1000, []tsIntSample{
				{1, 1, 5, 5, 5},
			}...),
			nil,
		},
		{
			timeSeriesInt(testtime, 1000, []tsIntSample{
				{1, 1, 5, 5, 5},
			}...),
			appender("a"),
		},
		{
			appender("a"),
			timeSeriesInt(testtime, 1000, []tsIntSample{
				{1, 1, 5, 5, 5},
			}...),
		},
		{
			timeSeriesInt(testtime, 1000, []tsIntSample{
				{1, 1, 5, 5, 5},
			}...),
			timeSeriesInt(testtime+1, 1000, []tsIntSample{
				{1, 1, 5, 5, 5},
			}...),
		},
		{
			timeSeriesInt(testtime, 1000, []tsIntSample{
				{1, 1, 5, 5, 5},
			}...),
			timeSeriesInt(testtime, 100, []tsIntSample{
				{1, 1, 5, 5, 5},
			}...),
		},
	}
	for i, c := range badCombinations {
		_, err := goMerge(c.existing, c.update)
		if err == nil {
			t.Errorf("goMerge: %d: expected error", i)
		}
	}

	testCasesCounter := []struct {
		existing, update, expected int64
		wantError                  bool
	}{
		{0, 10, 10, false},
		{10, 20, 30, false},
		{595, -600, -5, false},
		// Close to overflow, but not quite there.
		{math.MinInt64 + 3, -3, math.MinInt64, false},
		{math.MaxInt64, 0, math.MaxInt64, false},
		// Overflows.
		{math.MaxInt64, 1, 0, true},
		{-1, math.MinInt64, 0, true},
	}
	for i, c := range testCasesCounter {
		result, err := goMerge(counter(c.existing), counter(c.update))
		if c.wantError {
			if err == nil {
				t.Errorf("goMerge: %d: wanted error but got success", i)
			}
			continue
		}
		if err != nil {
			t.Errorf("goMerge error: %d: %v", i, err)
			continue
		}
		var v proto.MVCCMetadata
		if err := gogoproto.Unmarshal(result, &v); err != nil {
			t.Errorf("goMerge error unmarshalling: %s", err)
			continue
		}
		if v.Value.GetInteger() != c.expected {
			t.Errorf("goMerge error: %d: want %v, got %v", i, c.expected, v.Value.GetInteger())
		}
	}

	gibber1, gibber2 := gibberishString(100), gibberishString(200)

	testCasesAppender := []struct {
		existing, update, expected []byte
	}{
		{appender(""), appender(""), appender("")},
		{nil, appender(""), appender("")},
		{nil, nil, mustMarshal(&proto.MVCCMetadata{Value: &proto.Value{}})},
		{appender("\n "), appender(" \t "), appender("\n  \t ")},
		{appender("ქართული"), appender("\nKhartuli"), appender("ქართული\nKhartuli")},
		{appender(gibber1), appender(gibber2), appender(gibber1 + gibber2)},
	}

	for i, c := range testCasesAppender {
		result, err := goMerge(c.existing, c.update)
		if err != nil {
			t.Errorf("goMerge error: %d: %v", i, err)
			continue
		}
		var resultV, expectedV proto.MVCCMetadata
		if err := gogoproto.Unmarshal(result, &resultV); err != nil {
			t.Errorf("goMerge error unmarshalling result: %d: %v", i, err)
			continue
		}
		if err := gogoproto.Unmarshal(c.expected, &expectedV); err != nil {
			t.Errorf("goMerge error unmarshalling expected: %d: %v", i, err)
			continue
		}
		if !reflect.DeepEqual(resultV, expectedV) {
			t.Errorf("goMerge error: %d: want %+v, got %+v", i, expectedV, resultV)
		}
	}

	testCasesTimeSeries := []struct {
		existing, update, expected []byte
	}{
		{
			nil,
			timeSeriesInt(testtime, 1000, []tsIntSample{
				{1, 1, 5, 5, 5},
			}...),
			timeSeriesInt(testtime, 1000, []tsIntSample{
				{1, 1, 5, 5, 5},
			}...),
		},
		{
			timeSeriesInt(testtime, 1000, []tsIntSample{
				{1, 1, 5, 5, 5},
			}...),
			timeSeriesInt(testtime, 1000, []tsIntSample{
				{2, 1, 5, 5, 5},
			}...),
			timeSeriesInt(testtime, 1000, []tsIntSample{
				{1, 1, 5, 5, 5},
				{2, 1, 5, 5, 5},
			}...),
		},
		{
			timeSeriesInt(testtime, 1000, []tsIntSample{
				{1, 1, 5, 5, 5},
				{3, 1, 5, 5, 5},
			}...),
			timeSeriesInt(testtime, 1000, []tsIntSample{
				{2, 1, 5, 5, 5},
			}...),
			timeSeriesInt(testtime, 1000, []tsIntSample{
				{1, 1, 5, 5, 5},
				{2, 1, 5, 5, 5},
				{3, 1, 5, 5, 5},
			}...),
		},
		{
			timeSeriesInt(testtime, 1000, []tsIntSample{
				{1, 1, 10, 10, 10},
				{1, 1, 5, 5, 5},
				{2, 1, 5, 5, 5},
			}...),
			timeSeriesInt(testtime, 1000, []tsIntSample{
				{1, 1, 100, 100, 100},
				{2, 1, 5, 5, 5},
				{3, 1, 5, 5, 5},
			}...),
			timeSeriesInt(testtime, 1000, []tsIntSample{
				{1, 3, 115, 100, 5},
				{2, 2, 10, 5, 5},
				{3, 1, 5, 5, 5},
			}...),
		},
		{
			timeSeriesInt(testtime, 1000, []tsIntSample{
				{1, 1, 5, 5, 5},
			}...),
			timeSeriesInt(testtime, 1000, []tsIntSample{
				{2, 1, 5, 5, 5},
			}...),
			timeSeriesInt(testtime, 1000, []tsIntSample{
				{1, 1, 5, 5, 5},
				{2, 1, 5, 5, 5},
			}...),
		},
		{
			timeSeriesFloat(testtime, 1000, []tsFloatSample{
				{1, 1, 5, 5, 5},
				{3, 1, 5, 5, 5},
			}...),
			timeSeriesFloat(testtime, 1000, []tsFloatSample{
				{2, 1, 5, 5, 5},
			}...),
			timeSeriesFloat(testtime, 1000, []tsFloatSample{
				{1, 1, 5, 5, 5},
				{2, 1, 5, 5, 5},
				{3, 1, 5, 5, 5},
			}...),
		},
		{
			timeSeriesFloat(testtime, 1000, []tsFloatSample{
				{1, 1, 10, 10, 10},
				{1, 1, 5, 5, 5},
				{2, 1, 5, 5, 5},
			}...),
			timeSeriesFloat(testtime, 1000, []tsFloatSample{
				{1, 1, 100, 100, 100},
				{2, 1, 5, 5, 5},
				{3, 1, 5, 5, 5},
			}...),
			timeSeriesFloat(testtime, 1000, []tsFloatSample{
				{1, 3, 115, 100, 5},
				{2, 2, 10, 5, 5},
				{3, 1, 5, 5, 5},
			}...),
		},
	}

	for i, c := range testCasesTimeSeries {
		result, err := goMerge(c.existing, c.update)
		if err != nil {
			t.Errorf("goMerge error: %d: %v", i, err)
			continue
		}

		// Extract the time series and compare.
		var resultV, expectedV proto.MVCCMetadata
		if err := gogoproto.Unmarshal(result, &resultV); err != nil {
			t.Errorf("goMerge error unmarshalling result: %d: %v", i, err)
			continue
		}
		if err := gogoproto.Unmarshal(c.expected, &expectedV); err != nil {
			t.Errorf("goMerge error unmarshalling expected: %d: %v", i, err)
			continue
		}
		resultTS, errResult := proto.InternalTimeSeriesDataFromValue(resultV.Value)
		expectedTS, errExpected := proto.InternalTimeSeriesDataFromValue(expectedV.Value)
		if errResult != nil || errExpected != nil {
			t.Errorf("goMerge error extracting time series: %d: %v, %v", i, errResult, errExpected)
			continue
		}
		if !reflect.DeepEqual(resultTS, expectedTS) {
			t.Errorf("goMerge error: %d: want %v, got %v", i, expectedTS, resultTS)
		}
	}
}