// keyIsAtMostOld yields a predicate that is true for keys whose timestamp is
// at or before t (compared at Unix-second granularity).
func keyIsAtMostOld(t time.Time) sampleKeyPredicate {
	unix := t.Unix()

	return func(k *dto.SampleKey) bool {
		return indexable.DecodeTime(k.Timestamp).Unix() <= unix
	}
}

// keyIsOlderThan yields a predicate that is true for keys whose timestamp is
// strictly after t, i.e. t is older than the key.  Callers use it to detect
// that a scan has walked past the requested interval.
func keyIsOlderThan(t time.Time) sampleKeyPredicate {
	unix := t.Unix()

	return func(k *dto.SampleKey) bool {
		return indexable.DecodeTime(k.Timestamp).Unix() > unix
	}
}

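// sketchPredicateUsage is a hypothetical, illustrative sketch (not used by the
// persistence layer) of how the two predicates bound a walk over one series,
// assuming the keys arrive in ascending timestamp order as a LevelDB scan
// would yield them: keyIsAtMostOld(t) keeps keys stamped at or before t, while
// keyIsOlderThan(t) fires only for keys stamped strictly after t, which is the
// signal to stop scanning.
func sketchPredicateUsage(t time.Time, keys []*dto.SampleKey) (inRange []*dto.SampleKey) {
	keep := keyIsAtMostOld(t)
	stop := keyIsOlderThan(t)

	for _, k := range keys {
		if stop(k) {
			// The key lies beyond t; a range scan would break here.
			break
		}
		if keep(k) {
			inRange = append(inRange, k)
		}
	}

	return
}
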
func (l *LevelDBMetricPersistence) GetSamplesForMetric(metric model.Metric, interval model.Interval) ([]model.Samples, error) {
	metricDTO := model.MetricToDTO(&metric)

	if fingerprintDTO, fingerprintDTOErr := model.MessageToFingerprintDTO(metricDTO); fingerprintDTOErr == nil {
		if iterator, closer, iteratorErr := l.metricSamples.GetIterator(); iteratorErr == nil {
			defer closer.Close()

			start := &dto.SampleKey{
				Fingerprint: fingerprintDTO,
				Timestamp:   indexable.EncodeTime(interval.OldestInclusive),
			}

			emission := make([]model.Samples, 0)

			if encode, encodeErr := coding.NewProtocolBufferEncoder(start).Encode(); encodeErr == nil {
				iterator.Seek(encode)

				predicate := keyIsAtMostOld(interval.NewestInclusive)

				for ; iterator.Valid(); iterator.Next() {
					key := &dto.SampleKey{}
					value := &dto.SampleValue{}
					if keyUnmarshalErr := proto.Unmarshal(iterator.Key(), key); keyUnmarshalErr == nil {
						if valueUnmarshalErr := proto.Unmarshal(iterator.Value(), value); valueUnmarshalErr == nil {
							if fingerprintsEqual(fingerprintDTO, key.Fingerprint) {
								// Wart
								if predicate(key) {
									emission = append(emission, model.Samples{
										Value:     model.SampleValue(*value.Value),
										Timestamp: indexable.DecodeTime(key.Timestamp),
									})
								} else {
									break
								}
							} else {
								break
							}
						} else {
							return nil, valueUnmarshalErr
						}
					} else {
						return nil, keyUnmarshalErr
					}
				}

				return emission, nil
			} else {
				log.Printf("Could not encode the start key: %q\n", encodeErr)
				return nil, encodeErr
			}
		} else {
			log.Printf("Could not acquire iterator: %q\n", iteratorErr)
			return nil, iteratorErr
		}
	} else {
		log.Printf("Could not create fingerprint for the metric: %q\n", fingerprintDTOErr)
		return nil, fingerprintDTOErr
	}

	panic("unreachable")
}

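// sketchGetSamplesForMetric is a hypothetical caller of GetSamplesForMetric,
// shown only to document the contract assumed above: both interval bounds are
// inclusive, and the scan stops at the first key past NewestInclusive or at
// the first key belonging to a different fingerprint.
func sketchGetSamplesForMetric(l *LevelDBMetricPersistence, m model.Metric, oldest, newest time.Time) ([]model.Samples, error) {
	return l.GetSamplesForMetric(m, model.Interval{
		OldestInclusive: oldest,
		NewestInclusive: newest,
	})
}
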
func (l *LevelDBMetricPersistence) GetRangeValues(m *model.Metric, i *model.Interval, s *metric.StalenessPolicy) (v *model.SampleSet, err error) {
	begin := time.Now()

	defer func() {
		duration := time.Now().Sub(begin)

		recordOutcome(storageOperations, storageLatency, duration, err,
			map[string]string{operation: getRangeValues, result: success},
			map[string]string{operation: getRangeValues, result: failure})
	}()

	d := model.MetricToDTO(m)

	f, err := model.MessageToFingerprintDTO(d)
	if err != nil {
		return
	}

	k := &dto.SampleKey{
		Fingerprint: f,
		Timestamp:   indexable.EncodeTime(i.OldestInclusive),
	}

	e, err := coding.NewProtocolBufferEncoder(k).Encode()
	if err != nil {
		return
	}

	iterator, closer, err := l.metricSamples.GetIterator()
	if err != nil {
		return
	}
	defer closer.Close()

	iterator.Seek(e)

	predicate := keyIsOlderThan(i.NewestInclusive)

	for ; iterator.Valid(); iterator.Next() {
		retrievedKey := &dto.SampleKey{}

		retrievedKey, err = extractSampleKey(iterator)
		if err != nil {
			return
		}

		if predicate(retrievedKey) {
			break
		}

		if !fingerprintsEqual(retrievedKey.Fingerprint, k.Fingerprint) {
			break
		}

		retrievedValue, err := extractSampleValue(iterator)
		if err != nil {
			return nil, err
		}

		if v == nil {
			v = &model.SampleSet{}
		}

		v.Values = append(v.Values, model.SamplePair{
			Value:     model.SampleValue(*retrievedValue.Value),
			Timestamp: indexable.DecodeTime(retrievedKey.Timestamp),
		})
	}

	return
}

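// sketchGetRangeValues is a hypothetical caller of GetRangeValues documenting
// the shape of the result assumed above: a *model.SampleSet whose Values hold
// one pair per matching sample in ascending key order, or nil when nothing
// for the fingerprint falls inside the (inclusive) interval.
func sketchGetRangeValues(l *LevelDBMetricPersistence, m *model.Metric, oldest, newest time.Time, s *metric.StalenessPolicy) (count int, err error) {
	set, err := l.GetRangeValues(m, &model.Interval{
		OldestInclusive: oldest,
		NewestInclusive: newest,
	}, s)
	if err != nil || set == nil {
		return
	}

	// Each pair carries the decoded timestamp and the sample value for the
	// requested fingerprint.
	for _, pair := range set.Values {
		_ = pair.Timestamp
		_ = pair.Value
		count++
	}

	return
}
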
func (l *LevelDBMetricPersistence) GetValueAtTime(m *model.Metric, t *time.Time, s *metric.StalenessPolicy) (sample *model.Sample, err error) {
	begin := time.Now()

	defer func() {
		duration := time.Now().Sub(begin)

		recordOutcome(storageOperations, storageLatency, duration, err,
			map[string]string{operation: getValueAtTime, result: success},
			map[string]string{operation: getValueAtTime, result: failure})
	}()

	d := model.MetricToDTO(m)

	f, err := model.MessageToFingerprintDTO(d)
	if err != nil {
		return
	}

	// Candidate for Refactoring
	k := &dto.SampleKey{
		Fingerprint: f,
		Timestamp:   indexable.EncodeTime(*t),
	}

	e, err := coding.NewProtocolBufferEncoder(k).Encode()
	if err != nil {
		return
	}

	iterator, closer, err := l.metricSamples.GetIterator()
	if err != nil {
		return
	}
	defer closer.Close()

	iterator.Seek(e)

	if !iterator.Valid() {
		/*
		 * Two cases for this:
		 * 1.) Corruption in LevelDB.
		 * 2.) Key seek after AND outside known range.
		 *
		 * Once a LevelDB iterator goes invalid, it cannot be recovered; thusly,
		 * we need to create a new one in order to check whether the last value in
		 * the database is sufficient for our purposes.  This is, in all reality, a
		 * corner case but one that could bring down the system.
		 */
		iterator, closer, err = l.metricSamples.GetIterator()
		if err != nil {
			return
		}
		defer closer.Close()

		iterator.SeekToLast()
		if !iterator.Valid() {
			/*
			 * For whatever reason, the LevelDB cannot be recovered.
			 */
			return
		}
	}

	var (
		firstKey   *dto.SampleKey
		firstValue *dto.SampleValue
	)

	firstKey, err = extractSampleKey(iterator)
	if err != nil {
		return
	}

	peekAhead := false

	if !fingerprintsEqual(firstKey.Fingerprint, k.Fingerprint) {
		/*
		 * This allows us to grab values for metrics if our request time is after
		 * the last recorded time, subject to the staleness policy, due to the
		 * nuances of LevelDB storage:
		 *
		 * # Assumptions:
		 * - K0 < K1 in terms of sorting.
		 * - T0 < T1 in terms of sorting.
		 *
		 * # Data
		 *
		 * K0-T0
		 * K0-T1
		 * K0-T2
		 * K1-T0
		 * K1-T1
		 *
		 * # Scenario
		 * K0-T3, which does not exist, is requested.  LevelDB will thusly seek to
		 * K1-T1, when K0-T2 exists as a perfectly good candidate to check subject
		 * to the provided staleness policy and such.
		 */
		peekAhead = true
	}

	firstTime := indexable.DecodeTime(firstKey.Timestamp)
	if t.Before(firstTime) || peekAhead {
		iterator.Prev()
		if !iterator.Valid() {
			/*
			 * Two cases for this:
			 * 1.) Corruption in LevelDB.
			 * 2.) Key seek before AND outside known range.
			 *
			 * This is an explicit validation to ensure that if no previous values
			 * for the series are found, the query aborts.
			 */
			return
		}

		var (
			alternativeKey   *dto.SampleKey
			alternativeValue *dto.SampleValue
		)

		alternativeKey, err = extractSampleKey(iterator)
		if err != nil {
			return
		}

		if !fingerprintsEqual(alternativeKey.Fingerprint, k.Fingerprint) {
			return
		}

		/*
		 * At this point, we found a previous value in the same series in the
		 * database.  LevelDB originally seeked to the subsequent element given
		 * the key, but we need to consider this adjacency instead.
		 */
		alternativeTime := indexable.DecodeTime(alternativeKey.Timestamp)

		firstKey = alternativeKey
		firstValue = alternativeValue
		firstTime = alternativeTime
	}

	firstDelta := firstTime.Sub(*t)
	if firstDelta < 0 {
		firstDelta *= -1
	}
	if firstDelta > s.DeltaAllowance {
		return
	}

	firstValue, err = extractSampleValue(iterator)
	if err != nil {
		return
	}

	sample = model.SampleFromDTO(m, t, firstValue)

	if firstDelta == time.Duration(0) {
		return
	}

	iterator.Next()
	if !iterator.Valid() {
		/*
		 * Two cases for this:
		 * 1.) Corruption in LevelDB.
		 * 2.) Key seek after AND outside known range.
		 *
		 * This means that there are no more values left in the storage; and if
		 * this point is reached, we know that the one that has been found is
		 * within the allowed staleness limits.
		 */
		return
	}

	var secondKey *dto.SampleKey

	secondKey, err = extractSampleKey(iterator)
	if err != nil {
		return
	}

	if !fingerprintsEqual(secondKey.Fingerprint, k.Fingerprint) {
		return
	} else {
		/*
		 * At this point, the current entry in the database has the same key as
		 * the previous one.  For this reason, the validation logic will expect
		 * that the distance between the two points shall not exceed the limit
		 * allowed by the staleness policy, to reduce interpolation errors.
		 *
		 * For this reason, the sample is reset in case of other subsequent
		 * validation behaviors.
		 */
		sample = nil
	}

	secondTime := indexable.DecodeTime(secondKey.Timestamp)

	totalDelta := secondTime.Sub(firstTime)
	if totalDelta > s.DeltaAllowance {
		return
	}

	var secondValue *dto.SampleValue

	secondValue, err = extractSampleValue(iterator)
	if err != nil {
		return
	}

	interpolated := interpolate(firstTime, secondTime, *firstValue.Value, *secondValue.Value, *t)

	sampleValue := &dto.SampleValue{
		Value: &interpolated,
	}

	sample = model.SampleFromDTO(m, t, sampleValue)

	return
}
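
// interpolateSketch is an illustrative stand-in for the interpolate helper
// referenced above (the real implementation lives elsewhere in this package);
// assuming it performs simple linear interpolation, the value at t between
// (t1, v1) and (t2, v2) is v1 + (v2 - v1) * (t - t1) / (t2 - t1).
func interpolateSketch(t1, t2 time.Time, v1, v2 float64, t time.Time) float64 {
	if t2.Equal(t1) {
		// Degenerate span: nothing to interpolate across.
		return v1
	}

	fraction := float64(t.Sub(t1)) / float64(t2.Sub(t1))

	return v1 + (v2-v1)*fraction
}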