Example #1
func (b *blueflood) FetchSingleSeries(request api.FetchSeriesRequest) (api.Timeseries, error) {
	sampler, ok := samplerMap[request.SampleMethod]
	if !ok {
		return api.Timeseries{}, fmt.Errorf("unsupported SampleMethod %s", request.SampleMethod.String())
	}

	queryUrl, err := b.constructURL(request, sampler)
	if err != nil {
		return api.Timeseries{}, err
	}

	// Issue GET to fetch metrics
	parsedResult, err := b.fetch(request, queryUrl)
	if err != nil {
		return api.Timeseries{}, err
	}

	values := processResult(parsedResult, request.Timerange, sampler)
	log.Debugf("Constructed timeseries from result: %v", values)

	return api.Timeseries{
		Values: values,
		TagSet: request.Metric.TagSet,
	}, nil
}
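The lookup at the top of FetchSingleSeries assumes a package-level samplerMap keyed by api.SampleMethod; its definition is not part of this example. A minimal sketch of what such a map might look like, with a hypothetical sampler struct and an assumed api.SampleMean constant, could be:

// A minimal sketch only; the sampler type and the SampleMethod constant used
// as a key are assumptions, not the package's actual definitions.
type sampler struct {
	fieldName    string                  // which Blueflood rollup field to read, e.g. "average"
	bucketSample func([]float64) float64 // collapses the points that fall into one output bucket
}

var samplerMap = map[api.SampleMethod]sampler{
	api.SampleMean: {
		fieldName: "average",
		bucketSample: func(xs []float64) float64 {
			sum := 0.0
			for _, x := range xs {
				sum += x
			}
			return sum / float64(len(xs))
		},
	},
	// ... entries for min, max, etc. would follow the same shape ...
}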
Example #2
// Blueflood keys the resolution param to a Java enum, so we have to convert
// between them.
func (c Config) bluefloodResolution(
	desiredResolution time.Duration,
	startMs int64) Resolution {
	log.Debugf("Desired resolution in minutes: %d\n", desiredResolution/time.Minute)
	now := c.TimeSource().Unix() * 1000 // milliseconds
	// Choose the appropriate resolution based on TTL, fetching the highest resolution data we can
	age := time.Duration(now-startMs) * time.Millisecond // age of the start time, as a Duration
	log.Debugf("Age of the start time in minutes: %d\n", age/time.Minute)

	for _, current := range Resolutions {
		maxAge := c.oldestViableDataForResolution(current)
		log.Debugf("Considering resolution %v\n", current)
		log.Debugf("Oldest viable data at this resolution is %v old\n", maxAge)
		log.Debugf("Is the desired resolution less than or equal to the current? %t\n", desiredResolution <= current.duration)
		log.Debugf("Is the start time within the TTL window? %t\n", age < maxAge)

		// Use this resolution if it is at least as coarse as the desired
		// resolution and the start time is still within the window of data
		// retained at this resolution.
		if desiredResolution <= current.duration &&
			age < maxAge {
			log.Debugf("Choosing resolution: %v\n", current)
			return current
		}
	}
	// If none of the above matched, we choose the coarsest
	return Resolutions[len(Resolutions)-1]
}
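bluefloodResolution iterates over a package-level Resolutions slice and falls back to its last element, so the slice is presumably ordered from finest to coarsest. A minimal sketch of that assumption, using Blueflood's standard rollup names but hypothetical Go field names apart from duration (which the loop above reads), could be:

// A minimal sketch only: the field names (other than duration) and the exact
// bucket widths are assumptions; the enum values FULL/MIN5/... are Blueflood's
// standard rollup granularities.
type Resolution struct {
	bluefloodEnum string        // value passed as Blueflood's "resolution" query parameter
	duration      time.Duration // width of one rollup bucket at this granularity
}

var Resolutions = []Resolution{
	{"FULL", 30 * time.Second},
	{"MIN5", 5 * time.Minute},
	{"MIN20", 20 * time.Minute},
	{"MIN60", time.Hour},
	{"MIN240", 4 * time.Hour},
	{"MIN1440", 24 * time.Hour},
}

// ResolutionFull, referenced by Example #5, is assumed here to be the finest entry.
var ResolutionFull = Resolutions[0]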
Example #3
// Blueflood keys the resolution param to a Java enum, so we have to convert
// between them.
func (c Config) bluefloodResolution(
	desiredResolution time.Duration,
	startMs int64) Resolution {
	now := time.Now().Unix() * 1000
	// Choose the appropriate resolution based on TTL, fetching the highest resolution data we can
	for _, current := range Resolutions {
		age := time.Duration(now-startMs) * time.Millisecond
		maxAge := c.getTTL(current)
		log.Debugf("Desired (s): %d\n", desiredResolution/time.Second)
		log.Debugf("Current (s): %d\n", current.duration/time.Second)
		log.Debugf("age (s): %d\n", age/time.Second)
		log.Debugf("ttl (s): %d\n", maxAge/time.Second)
		if desiredResolution <= current.duration &&
			age < maxAge {
			return current
		}
	}
	// If none of the above matched, return the coarsest resolution.
	return Resolutions[len(Resolutions)-1]
}
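Note that Example #2 reads the clock through c.TimeSource() while this version calls time.Now() directly. One plausible reading is that TimeSource is an injectable clock on Config so tests can pin "now" when exercising the TTL arithmetic; a small sketch of that pattern, with everything beyond the c.TimeSource() call site assumed, could be:

// A sketch only: the func type and the fixedClock helper are hypothetical;
// only the c.TimeSource() call appears in the example above.
type TimeSource func() time.Time

type Config struct {
	// ... other Blueflood settings elided ...
	TimeSource TimeSource
}

// fixedClock pins the clock to a single instant, which makes
// bluefloodResolution deterministic in tests.
func fixedClock(t time.Time) TimeSource {
	return func() time.Time { return t }
}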
Example #4
// fetch issues the GET against the backend and decodes the response. On error, it returns an instance of api.TimeseriesStorageError.
func (b *Blueflood) fetch(request api.FetchTimeseriesRequest, queryUrl *url.URL) (queryResponse, []byte, error) {
	log.Debugf("Blueflood fetch: %s", queryUrl.String())
	success := make(chan queryResponse, 1)
	failure := make(chan error, 1)
	timeout := time.After(b.config.Timeout)
	var rawResponse []byte
	go func() {
		resp, err := b.client.Get(queryUrl.String())
		if err != nil {
			failure <- api.TimeseriesStorageError{request.Metric, api.FetchIOError, "error while fetching - http connection"}
			return
		}
		defer resp.Body.Close()

		body, err := ioutil.ReadAll(resp.Body)
		rawResponse = body
		if err != nil {
			failure <- api.TimeseriesStorageError{request.Metric, api.FetchIOError, "error while fetching - reading"}
			return
		}

		log.Debugf("Fetch result: %s", string(body))

		var parsedJson queryResponse
		err = json.Unmarshal(body, &parsedJson)
		if err != nil {
			failure <- api.TimeseriesStorageError{request.Metric, api.FetchIOError, "error while fetching - json decoding"}
			return
		}
		success <- parsedJson
	}()
	select {
	case response := <-success:
		return response, rawResponse, nil
	case err := <-failure:
		return queryResponse{}, rawResponse, err
	case <-timeout:
		// The fetching goroutine may still be writing rawResponse when the
		// timeout fires, so return nil rather than racing on the slice.
		return queryResponse{}, nil, api.TimeseriesStorageError{request.Metric, api.FetchTimeoutError, ""}
	}
}
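fetch decodes the response body into a queryResponse, of which the callers shown here only use the Values field (Example #5 treats its elements as metricPoint values). A sketch of the decoded shape, assuming Blueflood's usual rollup JSON fields; the exact field set and tags are an assumption:

// A minimal sketch of the decoded types; only Values is actually referenced by
// the examples above, and the individual point fields are assumed.
type metricPoint struct {
	Points    int     `json:"numPoints"`
	Timestamp int64   `json:"timestamp"` // milliseconds since the epoch
	Average   float64 `json:"average"`
	Max       float64 `json:"max"`
	Min       float64 `json:"min"`
}

type queryResponse struct {
	Values []metricPoint `json:"values"`
}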
Example #5
func (b *Blueflood) FetchSingleTimeseries(request api.FetchTimeseriesRequest) (api.Timeseries, error) {
	defer request.Profiler.RecordWithDescription("Blueflood FetchSingleTimeseries", request.Metric.String())()
	sampler, ok := samplerMap[request.SampleMethod]
	if !ok {
		return api.Timeseries{}, fmt.Errorf("unsupported SampleMethod %s", request.SampleMethod.String())
	}
	queryResolution := b.config.bluefloodResolution(
		request.Timerange.Resolution(),
		request.Timerange.Start(),
	)
	log.Debugf("Blueflood resolution: %s\n", queryResolution.String())

	// Sample the data at the given `queryResolution`
	queryUrl, err := b.constructURL(request, sampler, queryResolution)
	if err != nil {
		return api.Timeseries{}, err
	}

	rawResults := make([][]byte, 1)
	parsedResult, rawResult, err := b.fetch(request, queryUrl)
	rawResults[0] = rawResult
	if err != nil {
		return api.Timeseries{}, err
	}

	// combinedResult contains the requested data, along with higher-resolution data intended to fill in gaps.
	combinedResult := parsedResult.Values

	// Sample the data at the FULL resolution.
	// We clip the timerange so that it's only #{config.FullResolutionOverlap} seconds long.
	// This limits the amount of data to be fetched.
	fullResolutionParsedResult := func() []metricPoint {
		// If an error occurs, we just return nothing. We don't return the error.
		// This is so that errors while fetching the FULL-resolution data don't impact the requested data.
		fullResolutionRequest := request // Copy the request
		if request.Timerange.End()-request.Timerange.Start() > b.config.FullResolutionOverlap*1000 {
			// Clip the timerange
			newTimerange, err := api.NewSnappedTimerange(request.Timerange.End()-b.config.FullResolutionOverlap*1000, request.Timerange.End(), request.Timerange.ResolutionMillis())
			if err != nil {
				log.Infof("FULL resolution data errored while building timerange: %s", err.Error())
				return nil
			}
			fullResolutionRequest.Timerange = newTimerange
		}
		fullResolutionQueryURL, err := b.constructURL(fullResolutionRequest, sampler, ResolutionFull)
		if err != nil {
			log.Infof("FULL resolution data errored while building url: %s", err.Error())
			return nil
		}
		fullResolutionParsedResult, rawResult, err := b.fetch(request, fullResolutionQueryURL)
		rawResults = append(rawResults, rawResult)
		if err != nil {
			log.Infof("FULL resolution data errored while parsing result: %s", err.Error())
			return nil
		}
		// The higher-resolution data will likely overlap with the requested data.
		// This isn't a problem - the requested, higher-resolution data will be downsampled by this code.
		// This downsampling should arrive at the same answer as Blueflood's built-in rollups.
		return fullResolutionParsedResult.Values
	}()

	combinedResult = append(combinedResult, fullResolutionParsedResult...)

	values := processResult(combinedResult, request.Timerange, sampler, queryResolution)
	log.Debugf("Constructed timeseries from result: %v", values)

	if request.UserSpecifiableConfig.IncludeRawData {
		return api.Timeseries{
			Values: values,
			TagSet: request.Metric.TagSet,
			Raw:    rawResults,
		}, nil
	} else {
		return api.Timeseries{
			Values: values,
			TagSet: request.Metric.TagSet,
		}, nil
	}
}
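The clipping step inside FetchSingleTimeseries only re-fetches FULL-resolution data for the trailing FullResolutionOverlap seconds of the query window (the *1000 converts those seconds to the millisecond timestamps the Timerange uses). A small runnable sketch of that arithmetic with made-up numbers, independent of the api package:

package main

import "fmt"

func main() {
	// Mirrors the overlap clipping in FetchSingleTimeseries, assuming
	// FullResolutionOverlap is expressed in seconds; all values are made up.
	start := int64(1400000000000) // requested start, in milliseconds
	end := start + 6*60*60*1000   // a six-hour query window
	overlapSeconds := int64(300)  // hypothetical FullResolutionOverlap value

	if end-start > overlapSeconds*1000 {
		// Only the trailing five minutes are re-fetched at FULL resolution;
		// the rest of the window keeps the rolled-up data.
		clippedStart := end - overlapSeconds*1000
		fmt.Printf("FULL-resolution window: [%d, %d] ms\n", clippedStart, end)
	}
}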