func TestRecordingRuleHTMLSnippet(t *testing.T) {
	expr, err := promql.ParseExpr(`foo{html="<b>BOLD<b>"}`)
	if err != nil {
		t.Fatal(err)
	}
	rule := NewRecordingRule("testrule", expr, model.LabelSet{"html": "<b>BOLD</b>"})

	const want = `<a href="/test/prefix/graph?g0.expr=testrule&g0.tab=0">testrule</a>{html="<b>BOLD</b>"} = <a href="/test/prefix/graph?g0.expr=foo%7Bhtml%3D%22%3Cb%3EBOLD%3Cb%3E%22%7D&g0.tab=0">foo{html="<b>BOLD<b>"}</a>`

	got := rule.HTMLSnippet("/test/prefix")
	if got != want {
		t.Fatalf("incorrect HTML snippet; want:\n\n%s\n\ngot:\n\n%s", want, got)
	}
}
func TestAlertingRuleHTMLSnippet(t *testing.T) {
	expr, err := promql.ParseExpr(`foo{html="<b>BOLD<b>"}`)
	if err != nil {
		t.Fatal(err)
	}
	rule := NewAlertingRule("testrule", expr, 0, model.LabelSet{"html": "<b>BOLD</b>"}, model.LabelSet{"html": "<b>BOLD</b>"})

	const want = `ALERT <a href="/test/prefix/graph?g0.expr=ALERTS%7Balertname%3D%22testrule%22%7D&g0.tab=0">testrule</a> IF <a href="/test/prefix/graph?g0.expr=foo%7Bhtml%3D%22%3Cb%3EBOLD%3Cb%3E%22%7D&g0.tab=0">foo{html="<b>BOLD<b>"}</a> LABELS {html="<b>BOLD</b>"} ANNOTATIONS {html="<b>BOLD</b>"}`

	got := rule.HTMLSnippet("/test/prefix")
	if got != want {
		t.Fatalf("incorrect HTML snippet; want:\n\n|%v|\n\ngot:\n\n|%v|", want, got)
	}
}
func TestAlertingRule(t *testing.T) {
	// Labels in expected output need to be alphabetically sorted.
	var evalOutputs = [][]string{
		{
			`ALERTS{alertname="HttpRequestRateLow", alertstate="pending", group="canary", instance="0", job="app-server", severity="critical"} => 1 @[%v]`,
			`ALERTS{alertname="HttpRequestRateLow", alertstate="pending", group="canary", instance="1", job="app-server", severity="critical"} => 1 @[%v]`,
		},
		{
			`ALERTS{alertname="HttpRequestRateLow", alertstate="pending", group="canary", instance="0", job="app-server", severity="critical"} => 0 @[%v]`,
			`ALERTS{alertname="HttpRequestRateLow", alertstate="firing", group="canary", instance="0", job="app-server", severity="critical"} => 1 @[%v]`,
			`ALERTS{alertname="HttpRequestRateLow", alertstate="pending", group="canary", instance="1", job="app-server", severity="critical"} => 0 @[%v]`,
			`ALERTS{alertname="HttpRequestRateLow", alertstate="firing", group="canary", instance="1", job="app-server", severity="critical"} => 1 @[%v]`,
		},
		{
			`ALERTS{alertname="HttpRequestRateLow", alertstate="firing", group="canary", instance="1", job="app-server", severity="critical"} => 0 @[%v]`,
			`ALERTS{alertname="HttpRequestRateLow", alertstate="firing", group="canary", instance="0", job="app-server", severity="critical"} => 0 @[%v]`,
		},
		{ /* empty */ },
		{ /* empty */ },
	}

	storage, closer := local.NewTestStorage(t, 1)
	defer closer.Close()

	storeMatrix(storage, testMatrix)

	engine := promql.NewEngine(storage, nil)
	defer engine.Stop()

	expr, err := promql.ParseExpr(`http_requests{group="canary", job="app-server"} < 100`)
	if err != nil {
		t.Fatalf("Unable to parse alert expression: %s", err)
	}

	alertLabels := clientmodel.LabelSet{
		"severity": "critical",
	}
	rule := NewAlertingRule("HttpRequestRateLow", expr, time.Minute, alertLabels, "summary", "description")

	for i, expectedLines := range evalOutputs {
		evalTime := testStartTime.Add(testSampleInterval * time.Duration(i))

		res, err := rule.eval(evalTime, engine)
		if err != nil {
			t.Fatalf("Error during alerting rule evaluation: %s", err)
		}

		actualLines := strings.Split(res.String(), "\n")
		expectedLines := annotateWithTime(expectedLines, evalTime)
		if actualLines[0] == "" {
			actualLines = []string{}
		}

		failed := false
		if len(actualLines) != len(expectedLines) {
			t.Errorf("%d. Number of samples in expected and actual output don't match (%d vs. %d)", i, len(expectedLines), len(actualLines))
			failed = true
		}

		for j, expectedSample := range expectedLines {
			found := false
			for _, actualSample := range actualLines {
				if actualSample == expectedSample {
					found = true
				}
			}
			if !found {
				t.Errorf("%d.%d. Couldn't find expected sample in output: '%v'", i, j, expectedSample)
				failed = true
			}
		}

		if failed {
			t.Fatalf("%d. Expected and actual outputs don't match:\n%v", i, vectorComparisonString(expectedLines, actualLines))
		}
	}
}
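// vectorComparisonString is referenced by the test above when reporting a
// mismatch. This is a minimal sketch of what such a helper could look like,
// assuming it only joins the expected and actual output lines with a visual
// separator; the actual definition lives elsewhere in the rules test files.
func vectorComparisonString(expected []string, actual []string) string {
	separator := "\n--------------\n"
	return fmt.Sprintf("Expected:%v%v%v\nActual:%v%v%v",
		separator, strings.Join(expected, "\n"), separator,
		separator, strings.Join(actual, "\n"), separator)
}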
func TestAlertingRule(t *testing.T) {
	suite, err := promql.NewTest(t, `
		load 5m
			http_requests{job="app-server", instance="0", group="canary", severity="overwrite-me"}  75 85  95 105 105  95  85
			http_requests{job="app-server", instance="1", group="canary", severity="overwrite-me"}  80 90 100 110 120 130 140
	`)
	if err != nil {
		t.Fatal(err)
	}
	defer suite.Close()

	if err := suite.Run(); err != nil {
		t.Fatal(err)
	}

	expr, err := promql.ParseExpr(`http_requests{group="canary", job="app-server"} < 100`)
	if err != nil {
		t.Fatalf("Unable to parse alert expression: %s", err)
	}

	rule := NewAlertingRule(
		"HTTPRequestRateLow",
		expr,
		time.Minute,
		model.LabelSet{"severity": "{{\"c\"}}ritical"},
		model.LabelSet{},
	)

	var tests = []struct {
		time   time.Duration
		result []string
	}{
		{
			time: 0,
			result: []string{
				`ALERTS{alertname="HTTPRequestRateLow", alertstate="pending", group="canary", instance="0", job="app-server", severity="critical"} => 1 @[%v]`,
				`ALERTS{alertname="HTTPRequestRateLow", alertstate="pending", group="canary", instance="1", job="app-server", severity="critical"} => 1 @[%v]`,
			},
		},
		{
			time: 5 * time.Minute,
			result: []string{
				`ALERTS{alertname="HTTPRequestRateLow", alertstate="pending", group="canary", instance="0", job="app-server", severity="critical"} => 0 @[%v]`,
				`ALERTS{alertname="HTTPRequestRateLow", alertstate="firing", group="canary", instance="0", job="app-server", severity="critical"} => 1 @[%v]`,
				`ALERTS{alertname="HTTPRequestRateLow", alertstate="pending", group="canary", instance="1", job="app-server", severity="critical"} => 0 @[%v]`,
				`ALERTS{alertname="HTTPRequestRateLow", alertstate="firing", group="canary", instance="1", job="app-server", severity="critical"} => 1 @[%v]`,
			},
		},
		{
			time: 10 * time.Minute,
			result: []string{
				`ALERTS{alertname="HTTPRequestRateLow", alertstate="firing", group="canary", instance="0", job="app-server", severity="critical"} => 1 @[%v]`,
				`ALERTS{alertname="HTTPRequestRateLow", alertstate="firing", group="canary", instance="1", job="app-server", severity="critical"} => 0 @[%v]`,
			},
		},
		{
			time: 15 * time.Minute,
			result: []string{
				`ALERTS{alertname="HTTPRequestRateLow", alertstate="firing", group="canary", instance="0", job="app-server", severity="critical"} => 0 @[%v]`,
			},
		},
		{
			time:   20 * time.Minute,
			result: []string{},
		},
		{
			time: 25 * time.Minute,
			result: []string{
				`ALERTS{alertname="HTTPRequestRateLow", alertstate="pending", group="canary", instance="0", job="app-server", severity="critical"} => 1 @[%v]`,
			},
		},
		{
			time: 30 * time.Minute,
			result: []string{
				`ALERTS{alertname="HTTPRequestRateLow", alertstate="pending", group="canary", instance="0", job="app-server", severity="critical"} => 0 @[%v]`,
				`ALERTS{alertname="HTTPRequestRateLow", alertstate="firing", group="canary", instance="0", job="app-server", severity="critical"} => 1 @[%v]`,
			},
		},
	}

	for i, test := range tests {
		evalTime := model.Time(0).Add(test.time)

		res, err := rule.eval(evalTime, suite.QueryEngine(), "")
		if err != nil {
			t.Fatalf("Error during alerting rule evaluation: %s", err)
		}

		actual := strings.Split(res.String(), "\n")
		expected := annotateWithTime(test.result, evalTime)
		if actual[0] == "" {
			actual = []string{}
		}

		if len(actual) != len(expected) {
			t.Errorf("%d. Number of samples in expected and actual output don't match (%d vs. %d)", i, len(expected), len(actual))
		}

		for j, expectedSample := range expected {
			found := false
			for _, actualSample := range actual {
				if actualSample == expectedSample {
					found = true
				}
			}
			if !found {
				t.Errorf("%d.%d. Couldn't find expected sample in output: '%v'", i, j, expectedSample)
			}
		}

		if t.Failed() {
			t.Errorf("%d. Expected and actual outputs don't match:", i)
			t.Fatalf("Expected:\n%v\n----\nActual:\n%v", strings.Join(expected, "\n"), strings.Join(actual, "\n"))
		}

		for _, aa := range rule.ActiveAlerts() {
			if _, ok := aa.Labels[model.MetricNameLabel]; ok {
				t.Fatalf("%s label set on active alert: %s", model.MetricNameLabel, aa.Labels)
			}
		}
	}
}
func TestAlertingRule(t *testing.T) {
	suite, err := promql.NewTest(t, `
		load 5m
			http_requests{job="api-server", instance="0", group="production"}  0+10x10
			http_requests{job="api-server", instance="1", group="production"}  0+20x10
			http_requests{job="api-server", instance="0", group="canary"}      0+30x10
			http_requests{job="api-server", instance="1", group="canary"}      0+40x10
			http_requests{job="app-server", instance="0", group="production"}  0+50x10
			http_requests{job="app-server", instance="1", group="production"}  0+60x10
			http_requests{job="app-server", instance="0", group="canary"}      0+70x10
			http_requests{job="app-server", instance="1", group="canary"}      0+80x10
	`)
	if err != nil {
		t.Fatal(err)
	}
	defer suite.Close()

	if err := suite.Run(); err != nil {
		t.Fatal(err)
	}

	expr, err := promql.ParseExpr(`http_requests{group="canary", job="app-server"} < 100`)
	if err != nil {
		t.Fatalf("Unable to parse alert expression: %s", err)
	}

	rule := NewAlertingRule(
		"HTTPRequestRateLow",
		expr,
		time.Minute,
		model.LabelSet{"severity": "critical"},
		"summary", "description", "runbook",
	)

	var tests = []struct {
		time   time.Duration
		result []string
	}{
		{
			time: 0,
			result: []string{
				`ALERTS{alertname="HTTPRequestRateLow", alertstate="pending", group="canary", instance="0", job="app-server", severity="critical"} => 1 @[%v]`,
				`ALERTS{alertname="HTTPRequestRateLow", alertstate="pending", group="canary", instance="1", job="app-server", severity="critical"} => 1 @[%v]`,
			},
		},
		{
			time: 5 * time.Minute,
			result: []string{
				`ALERTS{alertname="HTTPRequestRateLow", alertstate="pending", group="canary", instance="0", job="app-server", severity="critical"} => 0 @[%v]`,
				`ALERTS{alertname="HTTPRequestRateLow", alertstate="firing", group="canary", instance="0", job="app-server", severity="critical"} => 1 @[%v]`,
				`ALERTS{alertname="HTTPRequestRateLow", alertstate="pending", group="canary", instance="1", job="app-server", severity="critical"} => 0 @[%v]`,
				`ALERTS{alertname="HTTPRequestRateLow", alertstate="firing", group="canary", instance="1", job="app-server", severity="critical"} => 1 @[%v]`,
			},
		},
		{
			time: 10 * time.Minute,
			result: []string{
				`ALERTS{alertname="HTTPRequestRateLow", alertstate="firing", group="canary", instance="1", job="app-server", severity="critical"} => 0 @[%v]`,
				`ALERTS{alertname="HTTPRequestRateLow", alertstate="firing", group="canary", instance="0", job="app-server", severity="critical"} => 0 @[%v]`,
			},
		},
		{
			time:   15 * time.Minute,
			result: nil,
		},
		{
			time:   20 * time.Minute,
			result: nil,
		},
	}

	for i, test := range tests {
		evalTime := model.Time(0).Add(test.time)

		res, err := rule.eval(evalTime, suite.QueryEngine())
		if err != nil {
			t.Fatalf("Error during alerting rule evaluation: %s", err)
		}

		actual := strings.Split(res.String(), "\n")
		expected := annotateWithTime(test.result, evalTime)
		if actual[0] == "" {
			actual = []string{}
		}

		if len(actual) != len(expected) {
			t.Errorf("%d. Number of samples in expected and actual output don't match (%d vs. %d)", i, len(expected), len(actual))
		}

		for j, expectedSample := range expected {
			found := false
			for _, actualSample := range actual {
				if actualSample == expectedSample {
					found = true
				}
			}
			if !found {
				t.Errorf("%d.%d. Couldn't find expected sample in output: '%v'", i, j, expectedSample)
			}
		}

		if t.Failed() {
			t.Errorf("%d. Expected and actual outputs don't match:", i)
			t.Fatalf("Expected:\n%v\n----\nActual:\n%v", strings.Join(expected, "\n"), strings.Join(actual, "\n"))
		}
	}
}
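// annotateWithTime is called by the tests above to substitute the evaluation
// timestamp into the %v placeholders of the expected output lines. This is a
// minimal sketch under that assumption; the actual helper is defined elsewhere
// in the rules test files (older versions take a clientmodel timestamp).
func annotateWithTime(lines []string, timestamp model.Time) []string {
	annotatedLines := []string{}
	for _, line := range lines {
		annotatedLines = append(annotatedLines, fmt.Sprintf(line, timestamp))
	}
	return annotatedLines
}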
func TestExpandAlerts(t *testing.T) {
	u, err := url.Parse("https://www.google.com/")
	if err != nil {
		t.Fatal(err)
	}
	m := NewManager(&ManagerOptions{ExternalURL: u})

	labels := map[model.LabelName]model.LabelValue{
		"summary":     "This",
		"description": "is",
		"runbook":     "Sparta!",
	}
	alert := &Alert{
		Name:   "testalert",
		Labels: labels,
		State:  StateFiring,
	}

	expr, err := promql.ParseExpr(`http_requests{group="canary", job="app-server"} < 100`)
	if err != nil {
		t.Fatalf("Unable to parse alert expression: %s", err)
	}
	rule := NewAlertingRule(
		"HTTPRequestRateLow",
		expr,
		time.Minute,
		model.LabelSet{"severity": "critical"},
		"summary: {{index $labels \"summary\"}}",
		"description: {{index $labels \"description\"}}",
		"runbook: {{index $labels \"runbook\"}}",
	)

	evalTime := model.Time(0)

	// Expected fields.
	expSummary := "summary: This"
	expDescription := "description: is"
	expRunbook := "runbook: Sparta!"

	notification := m.expandAlerts(*alert, rule, evalTime)
	if notification.Summary != expSummary {
		t.Fatalf("Summary expansion failed: '%s' != '%s'", notification.Summary, expSummary)
	}
	if notification.Description != expDescription {
		t.Fatalf("Description expansion failed: '%s' != '%s'", notification.Description, expDescription)
	}
	if notification.Runbook != expRunbook {
		t.Fatalf("Runbook expansion failed: '%s' != '%s'", notification.Runbook, expRunbook)
	}
}