Example #1
// assertModelCorrect asserts that the model data being maintained by this
// testModel is equivalent to the actual time series data stored in the
// engine. If the actual data does not match the model, this method will print
// out detailed information about the differences between the two data sets.
func (tm *testModel) assertModelCorrect() {
	actualData := tm.getActualData()
	if !reflect.DeepEqual(tm.modelData, actualData) {
		// Provide a detailed diff of the actual data and the expected model.
		// This is done by comparing individual keys and printing human-readable
		// information about any keys which differ in value between the two data
		// sets.
		var buf bytes.Buffer
		buf.WriteString("Found unexpected differences in model data and actual data:\n")
		for k, vActual := range actualData {
			n, s, r, ts, err := DecodeDataKey([]byte(k))
			if err != nil {
				tm.t.Fatal(err)
			}
			if vModel, ok := tm.modelData[k]; !ok {
				fmt.Fprintf(&buf, "\tKey %s/%s@%d, r:%d from actual data was not found in model", n, s, ts, r)
			} else {
				if !proto.Equal(vActual, vModel) {
					fmt.Fprintf(&buf, "\tKey %s/%s@%d, r:%d differs between model and actual:", n, s, ts, r)
					if its, err := roachpb.InternalTimeSeriesDataFromValue(vActual); err != nil {
						fmt.Fprintf(&buf, "\tActual value is not a valid time series: %v", vActual)
					} else {
						fmt.Fprintf(&buf, "\tActual value: %v", its)
					}
					if its, err := roachpb.InternalTimeSeriesDataFromValue(vModel); err != nil {
						fmt.Fprintf(&buf, "\tModel value is not a valid time series: %v", vModel)
					} else {
						fmt.Fprintf(&buf, "\tModel value: %v", its)
					}
				}
			}
		}

		// Detect keys in model which were not present in the actual data.
		for k := range tm.modelData {
			n, s, r, ts, err := DecodeDataKey([]byte(k))
			if err != nil {
				tm.t.Fatal(err)
			}
			if _, ok := actualData[k]; !ok {
				fmt.Fprintf(&buf, "Key %s/%s@%d, r:%d from model was not found in actual data", n, s, ts, r)
			}
		}

		tm.t.Fatal(buf.String())
	}
}
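
A minimal usage sketch for this assertion, assuming a hypothetical newTestModel fixture constructor and a hypothetical storeTimeSeriesData helper that writes the same data through both the engine and tm.modelData (neither appears in these snippets):

func TestModelMatchesEngine(t *testing.T) {
	// newTestModel and storeTimeSeriesData are assumed fixture helpers; the
	// Datapoints field name and the Resolution10s constant are also assumptions.
	tm := newTestModel(t)

	tm.storeTimeSeriesData(Resolution10s, []TimeSeriesData{
		{
			Name:   "test.metric",
			Source: "source1",
			Datapoints: []*TimeSeriesDatapoint{
				{TimestampNanos: 1e9, Value: 100},
				{TimestampNanos: 15 * 1e9, Value: 200},
			},
		},
	})

	// Fails the test with a detailed diff if the engine and model diverge.
	tm.assertModelCorrect()
}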
Example #2
// unmarshalTimeSeries unmarshals the time series value stored in the given
// byte slice. It is assumed that the time series value was originally
// marshalled as an MVCCMetadata with an inline value.
func unmarshalTimeSeries(t testing.TB, b []byte) *roachpb.InternalTimeSeriesData {
	if b == nil {
		return nil
	}
	var mvccValue MVCCMetadata
	if err := proto.Unmarshal(b, &mvccValue); err != nil {
		t.Fatalf("error unmarshalling time series in text: %s", err.Error())
	}
	valueTS, err := roachpb.InternalTimeSeriesDataFromValue(mvccValue.Value)
	if err != nil {
		t.Fatalf("error unmarshalling time series in text: %s", err.Error())
	}
	return valueTS
}
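
A sketch of how this helper round-trips with the wrapping performed in Example #3, assuming the SampleDurationNanos field name on the proto (only StartTimestampNanos appears in these snippets):

func TestUnmarshalTimeSeriesRoundTrip(t *testing.T) {
	// SampleDurationNanos is an assumed field name for illustration.
	original := &roachpb.InternalTimeSeriesData{
		StartTimestampNanos: 0,
		SampleDurationNanos: 10 * 1e9,
	}

	// Wrap the proto in an inline MVCC value and marshal it, mirroring the
	// format produced in MergeInternalTimeSeriesData (Example #3).
	val, err := original.ToValue()
	if err != nil {
		t.Fatal(err)
	}
	b, err := proto.Marshal(&MVCCMetadata{Value: val})
	if err != nil {
		t.Fatal(err)
	}

	if roundTripped := unmarshalTimeSeries(t, b); !proto.Equal(original, roundTripped) {
		t.Fatalf("round trip mismatch: %v vs %v", original, roundTripped)
	}
}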
Example #3
// MergeInternalTimeSeriesData exports the engine's C++ merge logic for
// InternalTimeSeriesData to higher-level packages. This is intended primarily
// for consumption by high-level testing of time series functionality.
func MergeInternalTimeSeriesData(sources ...*roachpb.InternalTimeSeriesData) (
	*roachpb.InternalTimeSeriesData, error) {
	// Wrap each proto in an inlined MVCC value, and marshal each wrapped value
	// to bytes. This is the format required by the engine.
	srcBytes := make([][]byte, 0, len(sources))
	for _, src := range sources {
		val, err := src.ToValue()
		if err != nil {
			return nil, err
		}
		bytes, err := proto.Marshal(&MVCCMetadata{
			Value: val,
		})
		if err != nil {
			return nil, err
		}
		srcBytes = append(srcBytes, bytes)
	}

	// Merge every element into a nil byte slice, one at a time.
	var (
		mergedBytes []byte
		err         error
	)
	for _, bytes := range srcBytes {
		mergedBytes, err = goMerge(mergedBytes, bytes)
		if err != nil {
			return nil, err
		}
	}

	// Unmarshal merged bytes and extract the time series value within.
	var mvccValue MVCCMetadata
	if err := proto.Unmarshal(mergedBytes, &mvccValue); err != nil {
		return nil, err
	}
	mergedTS, err := roachpb.InternalTimeSeriesDataFromValue(mvccValue.Value)
	if err != nil {
		return nil, err
	}
	return mergedTS, nil
}
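
A hedged usage sketch; the Samples field and the Offset/Count/Sum sample fields are assumed proto field names that do not appear in these snippets:

// mergeTwoSlabs merges two slabs covering the same key duration. The engine's
// C++ merge operator should combine the samples from both inputs.
func mergeTwoSlabs() (*roachpb.InternalTimeSeriesData, error) {
	left := &roachpb.InternalTimeSeriesData{
		StartTimestampNanos: 0,
		SampleDurationNanos: 10 * 1e9,
		Samples: []roachpb.InternalTimeSeriesSample{
			{Offset: 0, Count: 1, Sum: 5},
		},
	}
	right := &roachpb.InternalTimeSeriesData{
		StartTimestampNanos: 0,
		SampleDurationNanos: 10 * 1e9,
		Samples: []roachpb.InternalTimeSeriesSample{
			{Offset: 1, Count: 1, Sum: 10},
		},
	}
	return MergeInternalTimeSeriesData(left, right)
}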
Example #4
// storeInModel stores the given time series data in the test model, merging it
// with any value already present under the same data key.
func (tm *testModel) storeInModel(r Resolution, data TimeSeriesData) {
	// Note the source, used to construct keys for model queries.
	tm.seenSources[data.Source] = struct{}{}

	// Process and store data in the model.
	internalData, err := data.ToInternal(r.KeyDuration(), r.SampleDuration())
	if err != nil {
		tm.t.Fatalf("test could not convert time series to internal format: %s", err.Error())
	}

	for _, idata := range internalData {
		key := MakeDataKey(data.Name, data.Source, r, idata.StartTimestampNanos)
		keyStr := string(key)

		existing, ok := tm.modelData[keyStr]
		var newTs *roachpb.InternalTimeSeriesData
		if ok {
			existingTs, err := roachpb.InternalTimeSeriesDataFromValue(existing)
			if err != nil {
				tm.t.Fatalf("test could not extract time series from existing model value: %s", err.Error())
			}
			newTs, err = engine.MergeInternalTimeSeriesData(existingTs, idata)
			if err != nil {
				tm.t.Fatalf("test could not merge time series into model value: %s", err.Error())
			}
		} else {
			newTs, err = engine.MergeInternalTimeSeriesData(idata)
			if err != nil {
				tm.t.Fatalf("test could not merge time series into model value: %s", err.Error())
			}
		}
		val, err := newTs.ToValue()
		if err != nil {
			tm.t.Fatal(err)
		}
		tm.modelData[keyStr] = val
	}
}
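
A sketch of how two writes within the same key duration exercise the merge branch above; Resolution10s and the Datapoints field are assumed names:

func storeTwice(tm *testModel) {
	data := TimeSeriesData{
		Name:   "test.metric",
		Source: "source1",
		Datapoints: []*TimeSeriesDatapoint{
			{TimestampNanos: 1e9, Value: 1},
		},
	}
	tm.storeInModel(Resolution10s, data)

	// The second datapoint maps to the same data key, so storeInModel merges it
	// into the existing model value rather than overwriting it.
	data.Datapoints = []*TimeSeriesDatapoint{
		{TimestampNanos: 2 * 1e9, Value: 2},
	}
	tm.storeInModel(Resolution10s, data)
}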
Example #5
// assertQuery generates a query result from the local test model and compares
// it against the result returned from the server.
func (tm *testModel) assertQuery(name string, agg *TimeSeriesQueryAggregator,
	r Resolution, start, end int64, expectedDatapointCount int, expectedSourceCount int) {
	// Query the actual server.
	q := TimeSeriesQueryRequest_Query{
		Name:       name,
		Aggregator: agg,
	}
	actualDatapoints, actualSources, err := tm.DB.Query(q, r, start, end)
	if err != nil {
		tm.t.Fatal(err)
	}
	if a, e := len(actualDatapoints), expectedDatapointCount; a != e {
		tm.t.Fatalf("query expected %d datapoints, got %d", e, a)
	}
	if a, e := len(actualSources), expectedSourceCount; a != e {
		tm.t.Fatalf("query expected %d sources, got %d", e, a)
	}

	// Construct an expected result for comparison.
	var expectedDatapoints []*TimeSeriesDatapoint
	expectedSources := make([]string, 0, 0)
	dataSpans := make(map[string]*dataSpan)

	// Iterate over all possible sources which may have data for this query.
	for sourceName := range tm.seenSources {
		// Iterate over all possible key times at which query data may be present.
		for time := start - (start % r.KeyDuration()); time < end; time += r.KeyDuration() {
			// Construct a key for this source/time and retrieve it from model.
			key := MakeDataKey(name, sourceName, r, time)
			value := tm.modelData[string(key)]
			if value == nil {
				continue
			}

			// Add data from the key to the correct dataSpan.
			data, err := roachpb.InternalTimeSeriesDataFromValue(value)
			if err != nil {
				tm.t.Fatal(err)
			}
			ds, ok := dataSpans[sourceName]
			if !ok {
				ds = &dataSpan{
					startNanos:  start - (start % r.SampleDuration()),
					sampleNanos: r.SampleDuration(),
				}
				dataSpans[sourceName] = ds
				expectedSources = append(expectedSources, sourceName)
			}
			if err := ds.addData(data); err != nil {
				tm.t.Fatal(err)
			}
		}
	}

	// Iterate over data in all dataSpans and construct expected datapoints.
	var iters unionIterator
	for _, ds := range dataSpans {
		iters = append(iters, ds.newIterator())
	}
	iters.init()
	for iters.isValid() {
		var value float64
		switch q.GetAggregator() {
		case TimeSeriesQueryAggregator_AVG:
			value = iters.avg()
		case TimeSeriesQueryAggregator_AVG_RATE:
			value = iters.dAvg()
		}
		expectedDatapoints = append(expectedDatapoints, &TimeSeriesDatapoint{
			TimestampNanos: iters.timestamp(),
			Value:          value,
		})
		iters.advance()
	}

	sort.Strings(expectedSources)
	sort.Strings(actualSources)
	if !reflect.DeepEqual(actualSources, expectedSources) {
		tm.t.Errorf("actual source list: %v, expected: %v", actualSources, expectedSources)
	}
	if !reflect.DeepEqual(actualDatapoints, expectedDatapoints) {
		tm.t.Errorf("actual datapoints: %v, expected: %v", actualDatapoints, expectedDatapoints)
	}
}
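
A hedged usage sketch; the expected datapoint and source counts are purely illustrative, Resolution10s is an assumed constant, and the generated Enum() helper on the aggregator enum is assumed to be available:

func queryExample(tm *testModel) {
	// Verify an AVG query over the first minute of stored data. The expected
	// counts (6 datapoints, 1 source) depend entirely on what was stored.
	tm.assertQuery("test.metric", TimeSeriesQueryAggregator_AVG.Enum(),
		Resolution10s, 0, 60*1e9, 6, 1)
}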