func (p WOFPointInPolygon) LoadPolygonsForFeature(feature *geojson.WOFFeature) ([]*geojson.WOFPolygon, error) {

	id := feature.Id()

	polygons := feature.GeomToPolygons()
	var points int

	for _, pl := range polygons {
		points += pl.CountPoints()
	}

	p.Logger.Debug("%d has %d points", id, points)

	if points >= p.CacheTrigger {

		p.Logger.Debug("caching %d because it has E_EXCESSIVE_POINTS (%d)", id, points)

		var c metrics.Counter
		c = *p.Metrics.CountCacheSet

		evicted := p.Cache.Add(id, polygons)

		if evicted {

			cache_size := p.CacheSize
			cache_set := c.Count()

			p.Logger.Warning("starting to push things out of the cache: %d sets on a cache size of %d", cache_set, cache_size)
		}

		go c.Inc(1)
	}

	return polygons, nil
}
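The Cache and Metrics fields used above are not defined in this snippet. Below is a minimal sketch of how they could be wired up, assuming an LRU cache in the style of hashicorp/golang-lru (whose Add reports evictions) and counters from rcrowley/go-metrics; the struct name, registry key, and cache size are illustrative only, not taken from the original package.

package main

import (
	"fmt"

	lru "github.com/hashicorp/golang-lru"
	metrics "github.com/rcrowley/go-metrics"
)

// PIPMetrics mirrors the Metrics field used in the example above; the
// field type matches the *metrics.Counter dereference seen there.
type PIPMetrics struct {
	CountCacheSet *metrics.Counter
}

func main() {
	// Add reports whether an older entry was evicted to make room, which
	// is what the evicted check in LoadPolygonsForFeature relies on.
	cache, err := lru.New(1024)
	if err != nil {
		panic(err)
	}

	counter := metrics.NewCounter()
	metrics.Register("pip.cache.set", counter) // placeholder registry key
	m := &PIPMetrics{CountCacheSet: &counter}

	evicted := cache.Add(int64(101712563), "placeholder polygons")
	(*m.CountCacheSet).Inc(1)
	fmt.Println("evicted:", evicted, "cache sets:", (*m.CountCacheSet).Count())
}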
// addResultToTile adds the Digests from the DMResults to the tile at the given offset.
func addResultToTile(res *DMResults, tile *tiling.Tile, offset int, counter metrics.Counter) {
	res.ForEach(func(traceID, digest string, params map[string]string) {
		if ext, ok := params["ext"]; !ok || ext != "png" {
			return // Skip non-PNG results; they will be converted to PNG by a separate process.
		}

		var trace *types.GoldenTrace
		needsUpdate := false
		if tr, ok := tile.Traces[traceID]; !ok {
			trace = types.NewGoldenTrace()
			tile.Traces[traceID] = trace
			needsUpdate = true
		} else {
			trace = tr.(*types.GoldenTrace)
			if !util.MapsEqual(params, tile.Traces[traceID].Params()) {
				needsUpdate = true
			}
		}
		trace.Params_ = params

		if needsUpdate {
			// Update the Tile's ParamSet with any new keys or values we see.
			for k, v := range params {
				if _, ok := tile.ParamSet[k]; !ok {
					tile.ParamSet[k] = []string{v}
				} else if !util.In(v, tile.ParamSet[k]) {
					tile.ParamSet[k] = append(tile.ParamSet[k], v)
				}
			}
		}
		trace.Values[offset] = digest
		counter.Inc(1)
	})
}
func (p WOFPointInPolygon) GetByLatLonForPlacetype(lat float64, lon float64, placetype string) ([]*geojson.WOFSpatial, []*WOFPointInPolygonTiming) {

	var c metrics.Counter
	c = *p.Metrics.CountLookups
	go c.Inc(1)

	t := time.Now()

	timings := make([]*WOFPointInPolygonTiming, 0)

	intersects, duration := p.GetIntersectsByLatLon(lat, lon)
	timings = append(timings, NewWOFPointInPolygonTiming("intersects", duration))

	inflated, duration := p.InflateSpatialResults(intersects)
	timings = append(timings, NewWOFPointInPolygonTiming("inflate", duration))

	// See what's going on here? We are filtering by placetype before
	// doing the final point-in-polygon lookup so we don't try to load
	// country records while only searching for localities.

	filtered := make([]*geojson.WOFSpatial, 0)

	if placetype != "" {
		filtered, duration = p.FilterByPlacetype(inflated, placetype)
		timings = append(timings, NewWOFPointInPolygonTiming("filter", duration))
	} else {
		filtered = inflated
	}

	contained, duration := p.EnsureContained(lat, lon, filtered)
	timings = append(timings, NewWOFPointInPolygonTiming("contain", duration))

	d := time.Since(t)

	var tm metrics.Timer
	tm = *p.Metrics.TimeToProcess
	go tm.Update(d)

	ttp := float64(d) / 1e9

	if ttp > 0.5 {
		p.Logger.Warning("time to process %f,%f (%s) exceeds 0.5 seconds: %f", lat, lon, placetype, ttp)

		for _, t := range timings {
			p.Logger.Info("[%s] %f", t.Event, t.Duration)
		}
	}

	return contained, timings
}
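A typical call site for the lookup above might look like the fragment below; the coordinates and placetype are arbitrary, and it assumes p is an already constructed WOFPointInPolygon and that the fmt package is imported.

// places and timings match the return types shown above.
places, timings := p.GetByLatLonForPlacetype(40.6840959, -73.9996875, "neighbourhood")

for _, place := range places {
	fmt.Printf("candidate WOF record: %d\n", place.Id)
}

// Each timing records a named stage (intersects, inflate, filter, contain)
// and how long it took.
for _, tm := range timings {
	fmt.Printf("[%s] %f\n", tm.Event, tm.Duration)
}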
Example #4
// See the ingester.ResultIngester interface.
func (t *TrybotResultIngester) Ingest(_ *ingester.TileTracker, opener ingester.Opener, fileInfo *ingester.ResultsFileLocation, counter metrics.Counter) error {
	r, err := opener()
	if err != nil {
		return fmt.Errorf("Unable to open reader: %s", err)
	}
	dmResults, err := goldingester.ParseDMResultsFromReader(r)
	if err != nil {
		return err
	}

	if _, ok := t.resultsByIssue[dmResults.Issue]; !ok {
		t.resultsByIssue[dmResults.Issue] = NewTryBotResults()
	}

	// Add the entire file to our current knowledge about this issue.
	t.resultsByIssue[dmResults.Issue].update(dmResults.Key, dmResults.Patchset, dmResults.Results, fileInfo.LastUpdated)
	counter.Inc(1)
	glog.Infof("Finished processing file %s.", fileInfo.Name)
	return nil
}
Example #5
// See the ingester.ResultIngester interface.
func (t *TrybotResultIngester) Ingest(_ *ingester.TileTracker, opener ingester.Opener, fileInfo *ingester.ResultsFileLocation, counter metrics.Counter) error {
	r, err := opener()
	if err != nil {
		return fmt.Errorf("Unable to open reader: %s", err)
	}
	dmResults, err := goldingester.ParseDMResultsFromReader(r)
	if err != nil {
		return err
	}

	dmResults.ForEach(func(key, value string, params map[string]string) {
		if _, ok := t.resultsByIssue[dmResults.Issue]; !ok {
			t.resultsByIssue[dmResults.Issue] = types.NewTryBotResults()
		}
		t.resultsByIssue[dmResults.Issue].Update(key, params[types.PRIMARY_KEY_FIELD], value, params, fileInfo.LastUpdated)
	})

	counter.Inc(1)
	glog.Infof("Finished processing file %s.", fileInfo.Name)
	return nil
}
Example #6
// addTryData copies the data from the ResultsFileLocation into the TryBotResults.
func addTryData(res *types.TryBotResults, opener ingester.Opener, counter metrics.Counter) {
	r, err := opener()
	if err != nil {
		glog.Errorf("Error opening input reader: %s", err)
		return
	}

	benchData, err := perfingester.ParseBenchDataFromReader(r)
	if err != nil {
		// Don't fall over for a single corrupt file.
		glog.Errorf("Unable to parse trybot data: %s", err)
		return
	}

	// cb is the anonymous closure we'll pass over all the trace values found in benchData.
	cb := func(key string, value float64, params map[string]string) {
		res.Values[key] = value
		counter.Inc(1)
	}

	benchData.ForEach(cb)
}
func (p WOFPointInPolygon) LoadPolygons(wof *geojson.WOFSpatial) ([]*geojson.WOFPolygon, error) {

	id := wof.Id

	cache, ok := p.Cache.Get(id)

	if ok {

		var c metrics.Counter
		c = *p.Metrics.CountCacheHit
		go c.Inc(1)

		polygons := cache.([]*geojson.WOFPolygon)
		return polygons, nil
	}

	var c metrics.Counter
	c = *p.Metrics.CountCacheMiss
	go c.Inc(1)

	abs_path := utils.Id2AbsPath(p.Source, id)
	feature, err := p.LoadGeoJSON(abs_path)

	if err != nil {
		return nil, err
	}

	polygons, poly_err := p.LoadPolygonsForFeature(feature)

	if poly_err != nil {
		return nil, poly_err
	}

	return polygons, nil
}
// addBenchDataToTile adds BenchData to a Tile.
//
// See the description at the top of this file for how the mapping works.
func addBenchDataToTile(benchData *BenchData, tile *tiling.Tile, offset int, counter metrics.Counter) {

	// cb is the anonymous closure we'll pass over all the trace values found in benchData.
	cb := func(key string, value float64, params map[string]string) {
		needsUpdate := false
		var trace *types.PerfTrace
		if tr, ok := tile.Traces[key]; !ok {
			trace = types.NewPerfTrace()
			tile.Traces[key] = trace
			needsUpdate = true
		} else {
			trace = tr.(*types.PerfTrace)
			if !util.MapsEqual(params, tile.Traces[key].Params()) {
				needsUpdate = true
			}
		}
		trace.Params_ = params
		trace.Values[offset] = value
		counter.Inc(1)

		if needsUpdate {
			// Update the Tile's ParamSet with any new keys or values we see.
			//
			// TODO(jcgregorio) Maybe defer this until we are about to Put the Tile
			// back to disk and rebuild ParamSet from scratch over all the Traces.
			for k, v := range params {
				if _, ok := tile.ParamSet[k]; !ok {
					tile.ParamSet[k] = []string{v}
				} else if !util.In(v, tile.ParamSet[k]) {
					tile.ParamSet[k] = append(tile.ParamSet[k], v)
				}
			}
		}
	}

	benchData.ForEach(cb)
}
func (p WOFPointInPolygon) LoadGeoJSON(path string) (*geojson.WOFFeature, error) {

	t := time.Now()

	feature, err := geojson.UnmarshalFile(path)

	d := time.Since(t)

	var tm metrics.Timer
	tm = *p.Metrics.TimeToUnmarshal

	go tm.Update(d)

	if err != nil {
		p.Logger.Error("failed to unmarshal %s, because %s", path, err)
		return nil, err
	}

	var c metrics.Counter
	c = *p.Metrics.CountUnmarshal
	go c.Inc(1)

	return feature, err
}
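The TimeToUnmarshal timer and CountUnmarshal counter follow the same pattern as the other metrics in these examples. A short fragment showing how they might be created and fed, assuming rcrowley/go-metrics and made-up registry names:

// Assumes the rcrowley/go-metrics and time imports; the metric names are placeholders.
timeToUnmarshal := metrics.NewTimer()
metrics.Register("pip.unmarshal.time", timeToUnmarshal)

countUnmarshal := metrics.NewCounter()
metrics.Register("pip.unmarshal.count", countUnmarshal)

start := time.Now()
// ... unmarshal the GeoJSON file here ...
timeToUnmarshal.Update(time.Since(start)) // Timer.Update records one observed duration
countUnmarshal.Inc(1)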
Example #10
func dumpCounter(name string, metric gometrics.Counter) {
	logger.WithFields(logrus.Fields{
		"name":  name,
		"count": metric.Count(),
	}).Info()
}
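dumpCounter handles a single counter, so dumping every counter means walking a registry. A sketch assuming the rcrowley/go-metrics registry API, where the Each callback receives an untyped metric:

// Walk the default registry and log every Counter through dumpCounter;
// non-counter metrics are skipped by the type assertion.
gometrics.DefaultRegistry.Each(func(name string, m interface{}) {
	if c, ok := m.(gometrics.Counter); ok {
		dumpCounter(name, c)
	}
})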
Example #11
// AddCounterMetric adds various metrics based on a Counter, according to the
// supplied MetricConfig.
func AddCounterMetric(plugin *newrelic.Plugin, counter metrics.Counter, config MetricConfig) {
	if config.Count {
		plugin.AddMetric(newrelic.NewMetric(
			config.Name+"/Count",
			config.Unit,
			func() (float64, error) { return float64(counter.Snapshot().Count()), nil },
		))
	}
}
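A hypothetical call, using only the MetricConfig fields visible above (Name, Unit, Count); the plugin and counter values are assumed to be constructed elsewhere by the surrounding application.

// Reports the counter as "Requests/Count" because config.Count is set;
// plugin and requestCounter are assumed to exist already.
cfg := MetricConfig{
	Name:  "Requests",
	Unit:  "requests",
	Count: true,
}
AddCounterMetric(plugin, requestCounter, cfg)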
func (r *reporter) reportCounter(name string, val metrics.Counter) {
	r.reportMetric(name, Int, val.Count())
}