Пример #1
0
// addMetricToMap adds a new metric (time-value pair) to a map of DayStore.
// addMetricToMap accepts as arguments the metric name, timestamp, value and the DayStore map.
// The timestamp argument needs to be already rounded to the cluster resolution.
// addMetricToMap adds a new metric (time-value pair) to a map of DayStore.
// addMetricToMap accepts as arguments the metric name, timestamp, value and the DayStore map.
// The timestamp argument needs to be already rounded to the cluster resolution.
// Returns an error if the underlying DayStore rejects the point.
func (rc *realModel) addMetricToMap(metric string, timestamp time.Time, value uint64, dict map[string]*daystore.DayStore) error {
	point := statstore.TimePoint{
		Timestamp: timestamp,
		Value:     value,
	}
	if ts, ok := dict[metric]; ok {
		// Put through the stored pointer directly. The previous code
		// dereferenced into a local copy (`ts := *val`) and called Put on
		// that copy, which mutates the copy — not the DayStore held in the
		// map — and copies any internal locks along the way.
		if err := ts.Put(point); err != nil {
			return fmt.Errorf("failed to add metric to DayStore: %s", err)
		}
		return nil
	}
	// First observation of this metric: create a DayStore sized by the
	// metric-specific epsilon and the model resolution, then register it.
	newTS := daystore.NewDayStore(epsilonFromMetric(metric), rc.resolution)
	if err := newTS.Put(point); err != nil {
		return fmt.Errorf("failed to add metric to DayStore: %s", err)
	}
	dict[metric] = newTS
	return nil
}
Пример #2
0
// aggregateMetrics populates an InfoType by adding metrics across a slice of InfoTypes.
// Only metrics taken after the cluster timestamp are affected.
// Assumes an appropriate lock is already taken by the caller.
// aggregateMetrics populates an InfoType by adding metrics across a slice of InfoTypes.
// Only metrics taken after the cluster timestamp are affected.
// Assumes an appropriate lock is already taken by the caller.
//
// target is the InfoType receiving the summed timeseries; sources are the
// InfoTypes being reduced; latestTime is the timestamp at which the latest
// aggregated value is repeated so the metric reads as constant until the
// next cluster timestamp. Returns an error on invalid arguments or if any
// DayStore Put fails.
func (rc *realModel) aggregateMetrics(target *InfoType, sources []*InfoType, latestTime time.Time) error {
	if target == nil {
		return fmt.Errorf("nil InfoType pointer provided as aggregation target")
	}
	if len(sources) == 0 {
		return fmt.Errorf("empty sources slice provided")
	}
	for _, source := range sources {
		if source == nil {
			return fmt.Errorf("nil InfoType pointer provided as an aggregation source")
		}
		if source == target {
			return fmt.Errorf("target InfoType pointer is provided as a source")
		}
	}
	if latestTime.IsZero() {
		return fmt.Errorf("aggregateMetrics called with a zero latestTime argument")
	}

	// Create a map of []TimePoint as a timeseries accumulator per metric.
	newMetrics := make(map[string][]statstore.TimePoint)

	// Reduce the sources slice with timeseries addition for each metric.
	for _, info := range sources {
		for key, ds := range info.Metrics {
			if _, ok := newMetrics[key]; !ok {
				// Metric does not exist on the accumulator yet; seed an
				// empty timeseries.
				newMetrics[key] = []statstore.TimePoint{}
			}
			// Fetch everything after the cluster timestamp and fold it
			// into the accumulator via timeseries addition.
			sourceDS := ds.Hour.Get(rc.timestamp, time.Time{})
			newMetrics[key] = addMatchingTimeseries(newMetrics[key], sourceDS)
		}
	}

	// Put all the new values in the DayStores under target.
	for key, tpSlice := range newMetrics {
		if len(tpSlice) == 0 {
			continue
		}
		ds, ok := target.Metrics[key]
		if !ok {
			// Metric does not exist on target InfoType; create its DayStore.
			ds = daystore.NewDayStore(epsilonFromMetric(key), rc.resolution)
			target.Metrics[key] = ds
		}

		// tpSlice is in time-descending order; Put the added timeseries in
		// the corresponding DayStore in time-ascending order.
		for i := len(tpSlice) - 1; i >= 0; i-- {
			if err := ds.Put(tpSlice[i]); err != nil {
				return fmt.Errorf("error while performing aggregation: %s", err)
			}
		}

		// Put a TimePoint with the latest aggregated value at the latest model
		// resolution. Causes the DayStore to assume the aggregated metric
		// remained constant until the next cluster timestamp.
		newTP := statstore.TimePoint{
			Timestamp: latestTime,
			Value:     tpSlice[0].Value,
		}
		if err := ds.Put(newTP); err != nil {
			return fmt.Errorf("error while performing aggregation: %s", err)
		}
	}

	// Set the creation time of the entity to the earliest non-zero one found
	// across the sources. A zero Creation means "unknown" and must never
	// displace a known timestamp (the previous code let an all-zero sources
	// slice overwrite a valid target.Creation with the zero value).
	earliestCreation := sources[0].Creation
	for _, info := range sources[1:] {
		if !info.Creation.IsZero() && (earliestCreation.IsZero() || info.Creation.Before(earliestCreation)) {
			earliestCreation = info.Creation
		}
	}
	if !earliestCreation.IsZero() && (earliestCreation.Before(target.Creation) || target.Creation.IsZero()) {
		target.Creation = earliestCreation
	}

	return nil
}
Пример #3
0
// newDayStore constructs a DayStore using the default epsilon and a
// one-minute resolution.
func newDayStore() *daystore.DayStore {
	const resolution = time.Minute
	return daystore.NewDayStore(defaultEpsilon, resolution)
}