Example #1
package main

// The import paths for the instrumentation library (its metrics and maths
// packages and the root registry package) are assumptions; adjust them to
// wherever the library lives in your workspace.
import (
	"math/rand"
	"net/http"
	"time"

	registry "github.com/matttproud/golang_instrumentation"
	"github.com/matttproud/golang_instrumentation/maths"
	"github.com/matttproud/golang_instrumentation/metrics"
)

func main() {
	// Each simulated RPC gets a latency histogram with four equally sized
	// buckets spanning [0, 200) microseconds and a gauge counting calls.
	foo_rpc_latency := metrics.CreateHistogram(&metrics.HistogramSpecification{
		Starts:                metrics.EquallySizedBucketsFor(0, 200, 4),
		BucketMaker:           metrics.AccumulatingBucketBuilder(metrics.EvictAndReplaceWith(10, maths.Average), 50),
		ReportablePercentiles: []float64{0.01, 0.05, 0.5, 0.90, 0.99},
	})
	foo_rpc_calls := &metrics.GaugeMetric{}
	bar_rpc_latency := metrics.CreateHistogram(&metrics.HistogramSpecification{
		Starts:                metrics.EquallySizedBucketsFor(0, 200, 4),
		BucketMaker:           metrics.AccumulatingBucketBuilder(metrics.EvictAndReplaceWith(10, maths.Average), 50),
		ReportablePercentiles: []float64{0.01, 0.05, 0.5, 0.90, 0.99},
	})
	bar_rpc_calls := &metrics.GaugeMetric{}
	zed_rpc_latency := metrics.CreateHistogram(&metrics.HistogramSpecification{
		Starts:                metrics.EquallySizedBucketsFor(0, 200, 4),
		BucketMaker:           metrics.AccumulatingBucketBuilder(metrics.EvictAndReplaceWith(10, maths.Average), 50),
		ReportablePercentiles: []float64{0.01, 0.05, 0.5, 0.90, 0.99},
	})
	zed_rpc_calls := &metrics.GaugeMetric{}

	// Register each metric with a registry under its exported name.
	reg := registry.NewRegistry()
	reg.Register("rpc_latency_foo_microseconds", foo_rpc_latency)
	reg.Register("rpc_calls_foo_total", foo_rpc_calls)
	reg.Register("rpc_latency_bar_microseconds", bar_rpc_latency)
	reg.Register("rpc_calls_bar_total", bar_rpc_calls)
	reg.Register("rpc_latency_zed_microseconds", zed_rpc_latency)
	reg.Register("rpc_calls_zed_total", zed_rpc_calls)

	// Generate synthetic traffic: uniform, normal, and exponential latency
	// samples, recorded ten times per second.
	go func() {
		for {
			foo_rpc_latency.Add(rand.Float64() * 200)
			foo_rpc_calls.Increment()

			bar_rpc_latency.Add((rand.NormFloat64() * 10.0) + 100.0)
			bar_rpc_calls.Increment()

			zed_rpc_latency.Add(rand.ExpFloat64())
			zed_rpc_calls.Increment()

			time.Sleep(100 * time.Millisecond)
		}
	}()

	// Expose the registered metrics over HTTP via the registry's exporter.
	exporter := reg.YieldExporter()

	http.Handle("/metrics.json", exporter)
	http.ListenAndServe(":8080", nil)
}
	contentType     = "Content-Type"
	jsonSuffix      = ".json"
)

/*
Boilerplate metrics about the metrics reporting subservice.  These are only
exposed if the DefaultRegistry's exporter is hooked into the HTTP request
handler.
*/
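
/*
A minimal usage sketch of that hookup from a client program, assuming the
DefaultRegistry mentioned above is exported as a package-level value (an
assumption; only its existence is implied by the comment) and offers the same
YieldExporter method used in the example:

	exporter := registry.DefaultRegistry.YieldExporter()
	http.Handle("/metrics.json", exporter)
	http.ListenAndServe(":8080", nil)

Once the exporter is mounted this way, the request count and latency
histograms declared below are reported alongside the user's own metrics.
*/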

var requestCount *metrics.GaugeMetric = &metrics.GaugeMetric{}
var requestLatencyLogarithmicBuckets []float64 = metrics.LogarithmicSizedBucketsFor(0, 1000)
var requestLatencyEqualBuckets []float64 = metrics.EquallySizedBucketsFor(0, 1000, 10)
var requestLatencyLogarithmicAccumulating *metrics.Histogram = metrics.CreateHistogram(&metrics.HistogramSpecification{
	Starts:                requestLatencyLogarithmicBuckets,
	BucketMaker:           metrics.AccumulatingBucketBuilder(metrics.EvictAndReplaceWith(50, maths.Average), 1000),
	ReportablePercentiles: []float64{0.01, 0.05, 0.5, 0.9, 0.99},
})
var requestLatencyEqualAccumulating *metrics.Histogram = metrics.CreateHistogram(&metrics.HistogramSpecification{
	Starts:                requestLatencyEqualBuckets,
	BucketMaker:           metrics.AccumulatingBucketBuilder(metrics.EvictAndReplaceWith(50, maths.Average), 1000),
	ReportablePercentiles: []float64{0.01, 0.05, 0.5, 0.9, 0.99},
})
var requestLatencyLogarithmicTallying *metrics.Histogram = metrics.CreateHistogram(&metrics.HistogramSpecification{
	Starts:                requestLatencyLogarithmicBuckets,
	BucketMaker:           metrics.TallyingBucketBuilder,
	ReportablePercentiles: []float64{0.01, 0.05, 0.5, 0.9, 0.99},
})
var requestLatencyEqualTallying *metrics.Histogram = metrics.CreateHistogram(&metrics.HistogramSpecification{
	Starts:                requestLatencyEqualBuckets,
	BucketMaker:           metrics.TallyingBucketBuilder,