// ClassifyMetric reports how a Graphite metric name fares under round-trip conversion:
// Unmatched if it cannot be converted to a tagged name, ReverseFailed if the tagged name
// cannot be converted back, ReverseChanged if the round trip alters the name, and
// Matched otherwise.
func ClassifyMetric(metric string, graphiteConverter util.RuleBasedGraphiteConverter) ConversionStatus {
	graphiteMetric := util.GraphiteMetric(metric)
	taggedMetric, err := graphiteConverter.ToTaggedName(graphiteMetric)
	if err != nil {
		return Unmatched
	}
	reversedMetric, err := graphiteConverter.ToGraphiteName(taggedMetric)
	if err != nil {
		return ReverseFailed
	}
	if reversedMetric != graphiteMetric {
		return ReverseChanged
	}
	return Matched
}
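// The ConversionStatus type returned above is not part of this excerpt. The following is
// a minimal sketch, assuming it is a plain iota-based enum with the four values used by
// ClassifyMetric; the actual declaration in the repository may differ.
type ConversionStatus int

const (
	Matched        ConversionStatus = iota // round trip succeeded without changes
	Unmatched                              // ToTaggedName failed
	ReverseFailed                          // ToGraphiteName failed on the converted name
	ReverseChanged                         // round trip succeeded but produced a different name
)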
// Integration test for the query execution.
package query

import (
	"testing"
	"time"

	"github.com/square/metrics/api"
	"github.com/square/metrics/function"
	"github.com/square/metrics/optimize"
	"github.com/square/metrics/testing_support/assert"
	"github.com/square/metrics/testing_support/mocks"
	"github.com/square/metrics/util"
)

var emptyGraphiteName = util.GraphiteMetric("")

func TestCommand_Describe(t *testing.T) {
	fakeAPI := mocks.NewFakeMetricMetadataAPI()
	fakeAPI.AddPairWithoutGraphite(api.TaggedMetric{"series_0", api.ParseTagSet("dc=west,env=production,host=a")}, emptyGraphiteName)
	fakeAPI.AddPairWithoutGraphite(api.TaggedMetric{"series_0", api.ParseTagSet("dc=west,env=staging,host=b")}, emptyGraphiteName)
	fakeAPI.AddPairWithoutGraphite(api.TaggedMetric{"series_0", api.ParseTagSet("dc=east,env=production,host=c")}, emptyGraphiteName)
	fakeAPI.AddPairWithoutGraphite(api.TaggedMetric{"series_0", api.ParseTagSet("dc=east,env=staging,host=d")}, emptyGraphiteName)

	for _, test := range []struct {
		query          string
		metricmetadata api.MetricMetadataAPI
		expected       map[string][]string
	}{
		{"describe series_0", fakeAPI, map[string][]string{"dc": {"east", "west"}, "env": {"production", "staging"}, "host": {"a", "b", "c", "d"}}},
		{"describe`series_0`", fakeAPI, map[string][]string{"dc": {"east", "west"}, "env": {"production", "staging"}, "host": {"a", "b", "c", "d"}}},
func TestProfilerIntegration(t *testing.T) {
	myAPI := mocks.NewFakeMetricMetadataAPI()
	fakeTimeStorage := mocks.FakeTimeseriesStorageAPI{}
	// myAPI := fakeAPI{
	// 	tagSets: map[string][]api.TagSet{
	// 		"A": []api.TagSet{
	// 			{"x": "1", "y": "2"},
	// 			{"x": "2", "y": "2"},
	// 			{"x": "3", "y": "1"},
	// 		},
	// 		"B": []api.TagSet{
	// 			{"q": "foo"},
	// 			{"q": "bar"},
	// 		},
	// 		"C": []api.TagSet{
	// 			{"c": "1"},
	// 			{"c": "2"},
	// 			{"c": "3"},
	// 			{"c": "4"},
	// 			{"c": "5"},
	// 			{"c": "6"},
	// 		},
	// 	},
	// }
	emptyGraphiteName := util.GraphiteMetric("")
	myAPI.AddPairWithoutGraphite(api.TaggedMetric{"A", api.ParseTagSet("x=1,y=2")}, emptyGraphiteName)
	myAPI.AddPairWithoutGraphite(api.TaggedMetric{"A", api.ParseTagSet("x=2,y=2")}, emptyGraphiteName)
	myAPI.AddPairWithoutGraphite(api.TaggedMetric{"A", api.ParseTagSet("x=3,y=1")}, emptyGraphiteName)
	myAPI.AddPairWithoutGraphite(api.TaggedMetric{"B", api.ParseTagSet("q=foo")}, emptyGraphiteName)
	myAPI.AddPairWithoutGraphite(api.TaggedMetric{"B", api.ParseTagSet("q=bar")}, emptyGraphiteName)
	myAPI.AddPairWithoutGraphite(api.TaggedMetric{"C", api.ParseTagSet("c=1")}, emptyGraphiteName)
	myAPI.AddPairWithoutGraphite(api.TaggedMetric{"C", api.ParseTagSet("c=2")}, emptyGraphiteName)
	myAPI.AddPairWithoutGraphite(api.TaggedMetric{"C", api.ParseTagSet("c=3")}, emptyGraphiteName)
	myAPI.AddPairWithoutGraphite(api.TaggedMetric{"C", api.ParseTagSet("c=4")}, emptyGraphiteName)
	myAPI.AddPairWithoutGraphite(api.TaggedMetric{"C", api.ParseTagSet("c=5")}, emptyGraphiteName)
	myAPI.AddPairWithoutGraphite(api.TaggedMetric{"C", api.ParseTagSet("c=6")}, emptyGraphiteName)

	testCases := []struct {
		query    string
		expected map[string]int
	}{
		{
			query: "describe all",
			expected: map[string]int{
				"describe all.Execute": 1,
				"Mock GetAllMetrics":   1,
			},
		},
		{
			query: "select A from 0 to 0",
			expected: map[string]int{
				"select.Execute":               1,
				"Mock FetchMultipleTimeseries": 1,
				"Mock GetAllTags":              1,
				"Mock FetchSingleTimeseries":   3,
			},
		},
		{
			query: "select A+A from 0 to 0",
			expected: map[string]int{
				"select.Execute":               1,
				"Mock FetchMultipleTimeseries": 2,
				"Mock GetAllTags":              2,
				"Mock FetchSingleTimeseries":   6,
			},
		},
		{
			query: "select A+2 from 0 to 0",
			expected: map[string]int{
				"select.Execute":               1,
				"Mock FetchMultipleTimeseries": 1,
				"Mock GetAllTags":              1,
				"Mock FetchSingleTimeseries":   3,
			},
		},
		{
			query: "select A where y = '2' from 0 to 0",
			expected: map[string]int{
				"select.Execute":               1,
				"Mock FetchMultipleTimeseries": 1,
				"Mock GetAllTags":              1,
				"Mock FetchSingleTimeseries":   2,
			},
		},
		{
			query: "describe A",
			expected: map[string]int{
				"describe.Execute": 1,
				"Mock GetAllTags":  1,
			},
		},
		{
			query: "describe metrics where y='2'",
			expected: map[string]int{
				"describe metrics.Execute": 1,
				"Mock GetMetricsForTag":    1,
			},
		},
		{
			query: "describe all",
			expected: map[string]int{
				"describe all.Execute": 1,
				"Mock GetAllMetrics":   1,
			},
		},
	}

	for _, test := range testCases {
		cmd, err := Parse(test.query)
		if err != nil {
			t.Error(err.Error())
			continue
		}
		profilingCommand, profiler := NewProfilingCommand(cmd)
		_, err = profilingCommand.Execute(ExecutionContext{
			TimeseriesStorageAPI:      fakeTimeStorage,
			MetricMetadataAPI:         myAPI,
			FetchLimit:                10000,
			Timeout:                   time.Second * 4,
			OptimizationConfiguration: optimize.NewOptimizationConfiguration(),
		})
		if err != nil {
			t.Fatal(err.Error())
		}
		list := profiler.All()
		counts := map[string]int{}
		for _, node := range list {
			counts[node.Name()]++
		}
		if len(test.expected) != len(counts) {
			t.Errorf("The number of calls doesn't match the expected amount.")
			t.Errorf("Expected %+v, but got %+v", test.expected, counts)
		}
		for name, count := range test.expected {
			if counts[name] != count {
				t.Errorf("Expected `%s` to have %d occurrences, but had %d\n", name, count, counts[name])
				t.Errorf("Expected: %+v\nBut got: %+v\n", test.expected, counts)
				break
			}
		}
	}
}
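// For reference, the profiling flow exercised by the test above, reduced to its
// essentials: parse a query, wrap the command in a profiling command, execute it, and
// tally profiler entries by name. This helper is a sketch assembled only from calls that
// appear in the test and is not part of the repository; the ExecutionContext passed in
// still needs real TimeseriesStorageAPI and MetricMetadataAPI implementations.
func profileQuery(query string, context ExecutionContext) (map[string]int, error) {
	cmd, err := Parse(query)
	if err != nil {
		return nil, err
	}
	profilingCommand, profiler := NewProfilingCommand(cmd)
	if _, err := profilingCommand.Execute(context); err != nil {
		return nil, err
	}
	// Count how many times each profiled operation was recorded.
	counts := map[string]int{}
	for _, node := range profiler.All() {
		counts[node.Name()]++
	}
	return counts, nil
}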
func Test_Blueflood(t *testing.T) {
	timerange, err := api.NewTimerange(12000, 13000, 1000)
	if err != nil {
		t.Fatalf("invalid testcase timerange")
		return
	}
	graphite := mocks.FakeGraphiteConverter{
		MetricMap: map[util.GraphiteMetric]api.TaggedMetric{
			util.GraphiteMetric("some.key.graphite"): api.TaggedMetric{
				MetricKey: api.MetricKey("some.key"),
				TagSet:    api.ParseTagSet("tag=value"),
			},
		},
	}

	defaultClientConfig := Config{
		BaseUrl:                 "https://blueflood.url",
		TenantId:                "square",
		Ttls:                    make(map[string]int64),
		Timeout:                 time.Millisecond,
		FullResolutionOverlap:   0,
		GraphiteMetricConverter: &graphite,
	}
	// Not really MIN1440, but that's what default TTLs will get with the Timerange we use.
	defaultQueryUrl := "https://blueflood.url/v2.0/square/views/some.key.graphite?from=12000&resolution=MIN1440&select=numPoints%2Caverage&to=14000"

	for _, test := range []struct {
		name               string
		metricMap          map[util.GraphiteMetric]api.TaggedMetric
		queryMetric        api.TaggedMetric
		sampleMethod       api.SampleMethod
		timerange          api.Timerange
		clientConfig       Config
		queryUrl           string
		queryResponse      string
		queryResponseCode  int
		queryDelay         time.Duration
		expectedErrorCode  api.TimeseriesStorageErrorCode
		expectedSeriesList api.Timeseries
	}{
		{
			name: "Success case",
			queryMetric: api.TaggedMetric{
				MetricKey: api.MetricKey("some.key"),
				TagSet:    api.ParseTagSet("tag=value"),
			},
			sampleMethod: api.SampleMean,
			timerange:    timerange,
			queryUrl:     defaultQueryUrl,
			clientConfig: defaultClientConfig,
			queryResponse: `{
				"unit": "unknown",
				"values": [
					{ "numPoints": 1, "timestamp": 12000, "average": 5 },
					{ "numPoints": 1, "timestamp": 13000, "average": 3 }
				],
				"metadata": { "limit": null, "next_href": null, "count": 2, "marker": null }
			}`,
			expectedSeriesList: api.Timeseries{
				Values: []float64{5, 3},
				TagSet: api.ParseTagSet("tag=value"),
			},
		},
		{
			name: "Failure case - invalid JSON",
			queryMetric: api.TaggedMetric{
				MetricKey: api.MetricKey("some.key"),
				TagSet:    api.ParseTagSet("tag=value"),
			},
			sampleMethod:      api.SampleMean,
			timerange:         timerange,
			clientConfig:      defaultClientConfig,
			queryUrl:          defaultQueryUrl,
			queryResponse:     `{invalid}`,
			expectedErrorCode: api.FetchIOError,
		},
		{
			name: "Failure case - HTTP error",
			queryMetric: api.TaggedMetric{
				MetricKey: api.MetricKey("some.key"),
				TagSet:    api.ParseTagSet("tag=value"),
			},
			sampleMethod:      api.SampleMean,
			timerange:         timerange,
			clientConfig:      defaultClientConfig,
			queryUrl:          defaultQueryUrl,
			queryResponse:     `{}`,
			queryResponseCode: 400,
			expectedErrorCode: api.FetchIOError,
		},
		{
			name: "Failure case - timeout",
			queryMetric: api.TaggedMetric{
				MetricKey: api.MetricKey("some.key"),
				TagSet:    api.ParseTagSet("tag=value"),
			},
			sampleMethod:      api.SampleMean,
			timerange:         timerange,
			clientConfig:      defaultClientConfig,
			queryUrl:          defaultQueryUrl,
			queryResponse:     `{}`,
			queryDelay:        1 * time.Second,
			expectedErrorCode: api.FetchTimeoutError,
		},
	} {
		a := assert.New(t).Contextf("%s", test.name)

		fakeApi := mocks.NewFakeMetricMetadataAPI()
		for k, v := range test.metricMap {
			fakeApi.AddPair(v, k, &graphite)
		}

		fakeHttpClient := mocks.NewFakeHttpClient()
		code := test.queryResponseCode
		if code == 0 {
			code = http.StatusOK
		}
		fakeHttpClient.SetResponse(test.queryUrl, mocks.Response{test.queryResponse, test.queryDelay, code})

		b := NewBlueflood(test.clientConfig).(*Blueflood)
		b.client = fakeHttpClient

		seriesList, err := b.FetchSingleTimeseries(api.FetchTimeseriesRequest{
			Metric:         test.queryMetric,
			SampleMethod:   test.sampleMethod,
			Timerange:      test.timerange,
			MetricMetadata: fakeApi,
			Cancellable:    api.NewCancellable(),
		})

		if test.expectedErrorCode != 0 {
			if err == nil {
				a.Errorf("Expected error, but was successful.")
				continue
			}
			berr, ok := err.(api.TimeseriesStorageError)
			if !ok {
				a.Errorf("Failed to cast error to TimeseriesStorageError")
				continue
			}
			a.Eq(berr.Code, test.expectedErrorCode)
		} else {
			if err != nil {
				a.CheckError(err)
				continue
			}
			a.Eq(seriesList, test.expectedSeriesList)
		}
	}
}
// TestFullResolutionDataFilling checks that recent points missing from the coarse
// MIN5 response are filled in from a FULL-resolution query.
func TestFullResolutionDataFilling(t *testing.T) {
	graphite := mocks.FakeGraphiteConverter{
		MetricMap: map[util.GraphiteMetric]api.TaggedMetric{
			util.GraphiteMetric("some.key.value"): api.TaggedMetric{
				MetricKey: api.MetricKey("some.key"),
				TagSet:    api.ParseTagSet("tag=value"),
			},
		},
	}

	fakeApi := mocks.NewFakeMetricMetadataAPI()
	fakeApi.AddPair(
		api.TaggedMetric{
			MetricKey: api.MetricKey("some.key"),
			TagSet:    api.ParseTagSet("tag=value"),
		},
		util.GraphiteMetric("some.key.value"),
		&graphite,
	)

	now := time.Unix(1438734300000, 0)
	baseTime := now.Unix() * 1000
	timeSource := func() time.Time { return now }

	// The queries have to be relative to "now".
	queryTimerange, err := api.NewSnappedTimerange(
		int64(baseTime)-300*1000*10, // 50 minutes ago
		int64(baseTime)-300*1000*4,  // 20 minutes ago
		300*1000,                    // 5 minute resolution
	)
	if err != nil {
		t.Fatalf("timerange error: %s", err.Error())
	}

	defaultClientConfig := Config{
		BaseUrl:                 "https://blueflood.url",
		TenantId:                "square",
		Ttls:                    make(map[string]int64),
		Timeout:                 time.Millisecond,
		FullResolutionOverlap:   14400,
		GraphiteMetricConverter: &graphite,
		TimeSource:              timeSource,
	}

	regularQueryURL := fmt.Sprintf(
		"https://blueflood.url/v2.0/square/views/some.key.value?from=%d&resolution=MIN5&select=numPoints%%2Caverage&to=%d",
		queryTimerange.Start(),
		queryTimerange.End()+queryTimerange.ResolutionMillis(),
	)
	regularResponse := fmt.Sprintf(`{
		"unit": "unknown",
		"values": [
			{ "numPoints": 28, "timestamp": %d, "average": 100 },
			{ "numPoints": 29, "timestamp": %d, "average": 142 },
			{ "numPoints": 27, "timestamp": %d, "average": 138 },
			{ "numPoints": 28, "timestamp": %d, "average": 182 }
		],
		"metadata": { "limit": null, "next_href": null, "count": 4, "marker": null }
	}`,
		baseTime-300*1000*10, // 50 minutes ago
		baseTime-300*1000*9,  // 45 minutes ago
		baseTime-300*1000*8,  // 40 minutes ago
		baseTime-300*1000*7,  // 35 minutes ago
	)

	fullResolutionQueryURL := fmt.Sprintf(
		"https://blueflood.url/v2.0/square/views/some.key.value?from=%d&resolution=FULL&select=numPoints%%2Caverage&to=%d",
		queryTimerange.Start(),
		queryTimerange.End()+queryTimerange.ResolutionMillis(),
	)
	fullResolutionResponse := fmt.Sprintf(`{
		"unit": "unknown",
		"values": [
			{ "numPoints": 28, "timestamp": %d, "average": 13 },
			{ "numPoints": 29, "timestamp": %d, "average": 16 },
			{ "numPoints": 27, "timestamp": %d, "average": 19 },
			{ "numPoints": 28, "timestamp": %d, "average": 27 }
		],
		"metadata": { "limit": null, "next_href": null, "count": 4, "marker": null }
	}`,
		baseTime-300*1000*6,      // 30 minutes ago
		baseTime-300*1000*5+17,   // 25 minutes ago, with slight timestamp jitter
		baseTime-300*1000*4+2821, // 20 minutes ago, with slight timestamp jitter
		baseTime-300*1000*3,      // 15 minutes ago
	)

	fakeHttpClient := mocks.NewFakeHttpClient()
	fakeHttpClient.SetResponse(regularQueryURL, mocks.Response{regularResponse, 0, http.StatusOK})
	fakeHttpClient.SetResponse(fullResolutionQueryURL, mocks.Response{fullResolutionResponse, 0, http.StatusOK})
	defaultClientConfig.HttpClient = fakeHttpClient
	defaultClientConfig.TimeSource = timeSource

	b := NewBlueflood(defaultClientConfig)

	seriesList, err := b.FetchSingleTimeseries(api.FetchTimeseriesRequest{
		Metric: api.TaggedMetric{
			MetricKey: api.MetricKey("some.key"),
			TagSet:    api.ParseTagSet("tag=value"),
		},
		SampleMethod:   api.SampleMean,
		Timerange:      queryTimerange,
		MetricMetadata: fakeApi,
		Cancellable:    api.NewCancellable(),
	})
	if err != nil {
		t.Fatalf("Expected success, but got error: %s", err.Error())
	}

	// Four points come from the coarse MIN5 response (50 to 35 minutes ago) and the
	// remaining three from the FULL-resolution response (30 to 20 minutes ago),
	// covering the 50-to-20-minute query range at 5-minute resolution.
	expected := []float64{100, 142, 138, 182, 13, 16, 19}
	if len(seriesList.Values) != len(expected) {
		t.Fatalf("Expected %+v but got %+v", expected, seriesList)
	}
	for i, expect := range expected {
		if seriesList.Values[i] != expect {
			t.Fatalf("Expected %+v but got %+v", expected, seriesList)
		}
	}
}
// TestIncludeRawPayload checks that the raw Blueflood response body is attached to the
// fetched timeseries when IncludeRawData is requested.
func TestIncludeRawPayload(t *testing.T) {
	graphite := mocks.FakeGraphiteConverter{
		MetricMap: map[util.GraphiteMetric]api.TaggedMetric{
			util.GraphiteMetric("some.key.value"): api.TaggedMetric{
				MetricKey: api.MetricKey("some.key"),
				TagSet:    api.ParseTagSet("tag=value"),
			},
		},
	}

	fakeApi := mocks.NewFakeMetricMetadataAPI()
	fakeApi.AddPair(
		api.TaggedMetric{
			MetricKey: api.MetricKey("some.key"),
			TagSet:    api.ParseTagSet("tag=value"),
		},
		util.GraphiteMetric("some.key.value"),
		&graphite,
	)

	now := time.Unix(1438734300000, 0)
	baseTime := now.Unix() * 1000
	timeSource := func() time.Time { return now }

	// The queries have to be relative to "now".
	queryTimerange, err := api.NewSnappedTimerange(
		int64(baseTime)-300*1000*10, // 50 minutes ago
		int64(baseTime)-300*1000*4,  // 20 minutes ago
		300*1000,                    // 5 minute resolution
	)
	if err != nil {
		t.Fatalf("timerange error: %s", err.Error())
	}

	defaultClientConfig := Config{
		BaseUrl:                 "https://blueflood.url",
		TenantId:                "square",
		Ttls:                    make(map[string]int64),
		Timeout:                 time.Millisecond,
		FullResolutionOverlap:   14400,
		GraphiteMetricConverter: &graphite,
		TimeSource:              timeSource,
	}

	regularQueryURL := fmt.Sprintf(
		"https://blueflood.url/v2.0/square/views/some.key.value?from=%d&resolution=MIN5&select=numPoints%%2Caverage&to=%d",
		queryTimerange.Start(),
		queryTimerange.End()+queryTimerange.ResolutionMillis(),
	)
	regularResponse := fmt.Sprintf(`{
		"unit": "unknown",
		"values": [
			{ "numPoints": 28, "timestamp": %d, "average": 100 },
			{ "numPoints": 29, "timestamp": %d, "average": 142 },
			{ "numPoints": 27, "timestamp": %d, "average": 138 },
			{ "numPoints": 28, "timestamp": %d, "average": 182 }
		],
		"metadata": { "limit": null, "next_href": null, "count": 4, "marker": null }
	}`,
		baseTime-300*1000*10, // 50 minutes ago
		baseTime-300*1000*9,  // 45 minutes ago
		baseTime-300*1000*8,  // 40 minutes ago
		baseTime-300*1000*7,  // 35 minutes ago
	)

	fakeHttpClient := mocks.NewFakeHttpClient()
	fakeHttpClient.SetResponse(regularQueryURL, mocks.Response{regularResponse, 0, http.StatusOK})
	// fakeHttpClient.SetResponse(fullResolutionQueryURL, mocks.Response{fullResolutionResponse, 0, http.StatusOK})
	defaultClientConfig.HttpClient = fakeHttpClient
	defaultClientConfig.TimeSource = timeSource

	b := NewBlueflood(defaultClientConfig)

	userConfig := api.UserSpecifiableConfig{
		IncludeRawData: true,
	}

	timeSeries, err := b.FetchSingleTimeseries(api.FetchTimeseriesRequest{
		Metric: api.TaggedMetric{
			MetricKey: api.MetricKey("some.key"),
			TagSet:    api.ParseTagSet("tag=value"),
		},
		SampleMethod:          api.SampleMean,
		Timerange:             queryTimerange,
		MetricMetadata:        fakeApi,
		Cancellable:           api.NewCancellable(),
		UserSpecifiableConfig: userConfig,
	})
	if err != nil {
		t.Fatalf("Expected success, but got error: %s", err.Error())
	}

	// Check length before indexing so a missing raw payload fails cleanly instead of panicking.
	if len(timeSeries.Raw) == 0 || string(timeSeries.Raw[0]) != regularResponse {
		t.Fatalf("Didn't fill in the raw result correctly, got: %+v\n", timeSeries.Raw)
	}
}