func (node *MatrixSelector) String() string { vecSelector := &VectorSelector{ Name: node.Name, LabelMatchers: node.LabelMatchers, } offset := "" if node.Offset != time.Duration(0) { offset = fmt.Sprintf(" OFFSET %s", model.Duration(node.Offset)) } return fmt.Sprintf("%s[%s]%s", vecSelector.String(), model.Duration(node.Range), offset) }
func TestNewHTTPCACert(t *testing.T) { server := httptest.NewUnstartedServer( http.HandlerFunc( func(w http.ResponseWriter, r *http.Request) { w.Header().Set("Content-Type", `text/plain; version=0.0.4`) w.Write([]byte{}) }, ), ) server.TLS = newTLSConfig(t) server.StartTLS() defer server.Close() cfg := &config.ScrapeConfig{ ScrapeTimeout: model.Duration(1 * time.Second), TLSConfig: config.TLSConfig{ CAFile: "testdata/ca.cer", }, } c, err := newHTTPClient(cfg) if err != nil { t.Fatal(err) } _, err = c.Get(server.URL) if err != nil { t.Fatal(err) } }
func TestMarathonSDRunAndStop(t *testing.T) { var ( refreshInterval = model.Duration(time.Millisecond * 10) conf = config.MarathonSDConfig{Servers: testServers, RefreshInterval: refreshInterval} ch = make(chan []*config.TargetGroup) ) md, err := NewDiscovery(&conf) if err != nil { t.Fatalf("%s", err) } md.appsClient = func(client *http.Client, url string) (*AppList, error) { return marathonTestAppList(marathonValidLabel, 1), nil } ctx, cancel := context.WithCancel(context.Background()) go func() { for { select { case _, ok := <-ch: if !ok { return } cancel() case <-time.After(md.refreshInterval * 3): cancel() t.Fatalf("Update took too long.") } } }() md.Run(ctx, ch) }
func TestNewHTTPBasicAuth(t *testing.T) { server := httptest.NewServer( http.HandlerFunc( func(w http.ResponseWriter, r *http.Request) { username, password, ok := r.BasicAuth() if !(ok && username == "user" && password == "password123") { t.Fatalf("Basic authorization header was not set correctly: expected '%v:%v', got '%v:%v'", "user", "password123", username, password) } }, ), ) defer server.Close() cfg := &config.ScrapeConfig{ ScrapeTimeout: model.Duration(1 * time.Second), BasicAuth: &config.BasicAuth{ Username: "******", Password: "******", }, } c, err := newHTTPClient(cfg) if err != nil { t.Fatal(err) } _, err = c.Get(server.URL) if err != nil { t.Fatal(err) } }
func TestNewHTTPWithBadServerName(t *testing.T) { server := httptest.NewUnstartedServer( http.HandlerFunc( func(w http.ResponseWriter, r *http.Request) { w.Header().Set("Content-Type", `text/plain; version=0.0.4`) w.Write([]byte{}) }, ), ) server.TLS = newTLSConfig("servername", t) server.StartTLS() defer server.Close() cfg := &config.ScrapeConfig{ ScrapeTimeout: model.Duration(1 * time.Second), TLSConfig: config.TLSConfig{ CAFile: caCertPath, ServerName: "badname", }, } c, err := NewHTTPClient(cfg) if err != nil { t.Fatal(err) } _, err = c.Get(server.URL) if err == nil { t.Fatal("Expected error, got nil.") } }
func newTestTarget(targetURL string, deadline time.Duration, baseLabels model.LabelSet) *Target { cfg := &config.ScrapeConfig{ ScrapeTimeout: model.Duration(deadline), } c, _ := newHTTPClient(cfg) t := &Target{ url: &url.URL{ Scheme: "http", Host: strings.TrimLeft(targetURL, "http://"), Path: "/metrics", }, status: &TargetStatus{}, scrapeInterval: 1 * time.Millisecond, httpClient: c, scraperStopping: make(chan struct{}), scraperStopped: make(chan struct{}), } t.baseLabels = model.LabelSet{ model.InstanceLabel: model.LabelValue(t.InstanceIdentifier()), } for baseLabel, baseValue := range baseLabels { t.baseLabels[baseLabel] = baseValue } return t }
func TestNewHTTPBearerTokenFile(t *testing.T) { server := httptest.NewServer( http.HandlerFunc( func(w http.ResponseWriter, r *http.Request) { expected := "Bearer 12345" received := r.Header.Get("Authorization") if expected != received { t.Fatalf("Authorization header was not set correctly: expected '%v', got '%v'", expected, received) } }, ), ) defer server.Close() cfg := &config.ScrapeConfig{ ScrapeTimeout: model.Duration(1 * time.Second), BearerTokenFile: "testdata/bearertoken.txt", } c, err := newHTTPClient(cfg) if err != nil { t.Fatal(err) } _, err = c.Get(server.URL) if err != nil { t.Fatal(err) } }
func TestNewHTTPClientCert(t *testing.T) { server := httptest.NewUnstartedServer( http.HandlerFunc( func(w http.ResponseWriter, r *http.Request) { w.Header().Set("Content-Type", `text/plain; version=0.0.4`) w.Write([]byte{}) }, ), ) tlsConfig := newTLSConfig(t) tlsConfig.ClientAuth = tls.RequireAndVerifyClientCert tlsConfig.ClientCAs = tlsConfig.RootCAs tlsConfig.BuildNameToCertificate() server.TLS = tlsConfig server.StartTLS() defer server.Close() cfg := &config.ScrapeConfig{ ScrapeTimeout: model.Duration(1 * time.Second), TLSConfig: config.TLSConfig{ CAFile: "testdata/ca.cer", CertFile: "testdata/client.cer", KeyFile: "testdata/client.key", }, } c, err := newHTTPClient(cfg) if err != nil { t.Fatal(err) } _, err = c.Get(server.URL) if err != nil { t.Fatal(err) } }
func TestURLParams(t *testing.T) { server := httptest.NewServer( http.HandlerFunc( func(w http.ResponseWriter, r *http.Request) { w.Header().Set("Content-Type", `text/plain; version=0.0.4`) w.Write([]byte{}) r.ParseForm() if r.Form["foo"][0] != "bar" { t.Fatalf("URL parameter 'foo' had unexpected first value '%v'", r.Form["foo"][0]) } if r.Form["foo"][1] != "baz" { t.Fatalf("URL parameter 'foo' had unexpected second value '%v'", r.Form["foo"][1]) } }, ), ) defer server.Close() serverURL, err := url.Parse(server.URL) if err != nil { t.Fatal(err) } target, err := NewTarget( &config.ScrapeConfig{ JobName: "test_job1", ScrapeInterval: model.Duration(1 * time.Minute), ScrapeTimeout: model.Duration(1 * time.Second), Scheme: serverURL.Scheme, Params: url.Values{ "foo": []string{"bar", "baz"}, }, }, model.LabelSet{ model.SchemeLabel: model.LabelValue(serverURL.Scheme), model.AddressLabel: model.LabelValue(serverURL.Host), "__param_foo": "bar", }, nil, ) if err != nil { t.Fatal(err) } app := &collectResultAppender{} if err = target.scrape(app); err != nil { t.Fatal(err) } }
func (r *AlertingRule) String() string { s := fmt.Sprintf("ALERT %s", r.name) s += fmt.Sprintf("\n\tIF %s", r.vector) if r.holdDuration > 0 { s += fmt.Sprintf("\n\tFOR %s", model.Duration(r.holdDuration)) } if len(r.labels) > 0 { s += fmt.Sprintf("\n\tLABELS %s", r.labels) } if len(r.annotations) > 0 { s += fmt.Sprintf("\n\tANNOTATIONS %s", r.annotations) } return s }
func (node *AlertStmt) String() string { s := fmt.Sprintf("ALERT %s", node.Name) s += fmt.Sprintf("\n\tIF %s", node.Expr) if node.Duration > 0 { s += fmt.Sprintf("\n\tFOR %s", model.Duration(node.Duration)) } if len(node.Labels) > 0 { s += fmt.Sprintf("\n\tLABELS %s", node.Labels) } if len(node.Annotations) > 0 { s += fmt.Sprintf("\n\tANNOTATIONS %s", node.Annotations) } return s }
func TestNewTargetWithBadTLSConfig(t *testing.T) { cfg := &config.ScrapeConfig{ ScrapeTimeout: model.Duration(1 * time.Second), TLSConfig: config.TLSConfig{ CAFile: "testdata/nonexistent_ca.cer", CertFile: "testdata/nonexistent_client.cer", KeyFile: "testdata/nonexistent_client.key", }, } _, err := NewTarget(cfg, nil, nil) if err == nil { t.Fatalf("Expected error, got nil.") } }
func newTestTarget(targetURL string, deadline time.Duration, labels model.LabelSet) *Target { labels = labels.Clone() labels[model.SchemeLabel] = "http" labels[model.AddressLabel] = model.LabelValue(strings.TrimLeft(targetURL, "http://")) labels[model.MetricsPathLabel] = "/metrics" t := &Target{ scrapeConfig: &config.ScrapeConfig{ ScrapeInterval: model.Duration(time.Millisecond), ScrapeTimeout: model.Duration(deadline), }, labels: labels, status: &TargetStatus{}, scraperStopping: make(chan struct{}), scraperStopped: make(chan struct{}), } var err error if t.httpClient, err = t.client(); err != nil { panic(err) } return t }
// HTMLSnippet returns an HTML snippet representing this alerting rule. The // resulting snippet is expected to be presented in a <pre> element, so that // line breaks and other returned whitespace is respected. func (r *AlertingRule) HTMLSnippet(pathPrefix string) html_template.HTML { alertMetric := model.Metric{ model.MetricNameLabel: alertMetricName, alertNameLabel: model.LabelValue(r.name), } s := fmt.Sprintf("ALERT <a href=%q>%s</a>", pathPrefix+strutil.GraphLinkForExpression(alertMetric.String()), r.name) s += fmt.Sprintf("\n IF <a href=%q>%s</a>", pathPrefix+strutil.GraphLinkForExpression(r.vector.String()), r.vector) if r.holdDuration > 0 { s += fmt.Sprintf("\n FOR %s", model.Duration(r.holdDuration)) } if len(r.labels) > 0 { s += fmt.Sprintf("\n LABELS %s", r.labels) } if len(r.annotations) > 0 { s += fmt.Sprintf("\n ANNOTATIONS %s", r.annotations) } return html_template.HTML(s) }
func (node *VectorSelector) String() string { labelStrings := make([]string, 0, len(node.LabelMatchers)-1) for _, matcher := range node.LabelMatchers { // Only include the __name__ label if its no equality matching. if matcher.Name == model.MetricNameLabel && matcher.Type == metric.Equal { continue } labelStrings = append(labelStrings, matcher.String()) } offset := "" if node.Offset != time.Duration(0) { offset = fmt.Sprintf(" OFFSET %s", model.Duration(node.Offset)) } if len(labelStrings) == 0 { return fmt.Sprintf("%s%s", node.Name, offset) } sort.Strings(labelStrings) return fmt.Sprintf("%s{%s}%s", node.Name, strings.Join(labelStrings, ","), offset) }
return nil, err } resolveFilepaths(filepath.Dir(filename), cfg) return cfg, nil } // The defaults applied before parsing the respective config sections. var ( // DefaultConfig is the default top-level configuration. DefaultConfig = Config{ GlobalConfig: DefaultGlobalConfig, } // DefaultGlobalConfig is the default global configuration. DefaultGlobalConfig = GlobalConfig{ ScrapeInterval: model.Duration(1 * time.Minute), ScrapeTimeout: model.Duration(10 * time.Second), EvaluationInterval: model.Duration(1 * time.Minute), } // DefaultScrapeConfig is the default scrape configuration. DefaultScrapeConfig = ScrapeConfig{ // ScrapeTimeout and ScrapeInterval default to the // configured globals. MetricsPath: "/metrics", Scheme: "http", HonorLabels: false, } // DefaultRelabelConfig is the default Relabel configuration. DefaultRelabelConfig = RelabelConfig{
// TestTargetManagerConfigUpdate applies a sequence of scrape configurations
// to a running TargetManager and, after each step, checks that the derived
// target sources and their full label sets match expectations.
func TestTargetManagerConfigUpdate(t *testing.T) {
	// Job with URL parameters and relabeling that copies a parameter out.
	testJob1 := &config.ScrapeConfig{
		JobName:        "test_job1",
		ScrapeInterval: model.Duration(1 * time.Minute),
		Params: url.Values{
			"testParam": []string{"paramValue", "secondValue"},
		},
		TargetGroups: []*config.TargetGroup{{
			Targets: []model.LabelSet{
				{model.AddressLabel: "example.org:80"},
				{model.AddressLabel: "example.com"},
			},
		}},
		RelabelConfigs: []*config.RelabelConfig{
			{
				// Copy out the URL parameter.
				SourceLabels: model.LabelNames{"__param_testParam"},
				Regex:        config.MustNewRegexp("(.*)"),
				TargetLabel:  "testParam",
				Replacement:  "$1",
				Action:       config.RelabelReplace,
			},
			{
				// The port number is added after relabeling, so
				// this relabel rule should have no effect.
				SourceLabels: model.LabelNames{model.AddressLabel},
				Regex:        config.MustNewRegexp("example.com:80"),
				Action:       config.RelabelDrop,
			},
		},
	}
	// Job with several target groups and relabeling that rewrites addresses,
	// synthesizes a "new" label, and clears the "boom" label.
	testJob2 := &config.ScrapeConfig{
		JobName:        "test_job2",
		ScrapeInterval: model.Duration(1 * time.Minute),
		TargetGroups: []*config.TargetGroup{
			{
				Targets: []model.LabelSet{
					{model.AddressLabel: "example.org:8080"},
					{model.AddressLabel: "example.com:8081"},
				},
				Labels: model.LabelSet{
					"foo":  "bar",
					"boom": "box",
				},
			},
			{
				Targets: []model.LabelSet{
					{model.AddressLabel: "test.com:1234"},
				},
			},
			{
				Targets: []model.LabelSet{
					{model.AddressLabel: "test.com:1235"},
				},
				Labels: model.LabelSet{"instance": "fixed"},
			},
		},
		RelabelConfigs: []*config.RelabelConfig{
			{
				SourceLabels: model.LabelNames{model.AddressLabel},
				Regex:        config.MustNewRegexp(`test\.(.*?):(.*)`),
				Replacement:  "foo.${1}:${2}",
				TargetLabel:  model.AddressLabel,
				Action:       config.RelabelReplace,
			},
			{
				// Add a new label for example.* targets.
				SourceLabels: model.LabelNames{model.AddressLabel, "boom", "foo"},
				Regex:        config.MustNewRegexp("example.*?-b([a-z-]+)r"),
				TargetLabel:  "new",
				Replacement:  "$1",
				Separator:    "-",
				Action:       config.RelabelReplace,
			},
			{
				// Drop an existing label.
				SourceLabels: model.LabelNames{"boom"},
				Regex:        config.MustNewRegexp(".*"),
				TargetLabel:  "boom",
				Replacement:  "",
				Action:       config.RelabelReplace,
			},
		},
	}
	// Test that targets without host:port addresses are dropped.
	testJob3 := &config.ScrapeConfig{
		JobName:        "test_job1",
		ScrapeInterval: model.Duration(1 * time.Minute),
		TargetGroups: []*config.TargetGroup{{
			Targets: []model.LabelSet{
				{model.AddressLabel: "example.net:80"},
			},
		}},
		RelabelConfigs: []*config.RelabelConfig{
			{
				SourceLabels: model.LabelNames{model.AddressLabel},
				Regex:        config.MustNewRegexp("(.*)"),
				TargetLabel:  "__address__",
				Replacement:  "http://$1",
				Action:       config.RelabelReplace,
			},
		},
	}
	// Each step is a full configuration to apply and the exact targets (keyed
	// by source identifier) the manager must expose afterwards.
	sequence := []struct {
		scrapeConfigs []*config.ScrapeConfig
		expected      map[string][]model.LabelSet
	}{
		{
			scrapeConfigs: []*config.ScrapeConfig{testJob1},
			expected: map[string][]model.LabelSet{
				"test_job1:static:0:0": {
					{model.JobLabel: "test_job1", model.InstanceLabel: "example.org:80", "testParam": "paramValue", model.SchemeLabel: "", model.MetricsPathLabel: "", model.AddressLabel: "example.org:80", model.ParamLabelPrefix + "testParam": "paramValue"},
					{model.JobLabel: "test_job1", model.InstanceLabel: "example.com:80", "testParam": "paramValue", model.SchemeLabel: "", model.MetricsPathLabel: "", model.AddressLabel: "example.com:80", model.ParamLabelPrefix + "testParam": "paramValue"},
				},
			},
		},
		// Re-applying the same configuration must be a no-op.
		{
			scrapeConfigs: []*config.ScrapeConfig{testJob1},
			expected: map[string][]model.LabelSet{
				"test_job1:static:0:0": {
					{model.JobLabel: "test_job1", model.InstanceLabel: "example.org:80", "testParam": "paramValue", model.SchemeLabel: "", model.MetricsPathLabel: "", model.AddressLabel: "example.org:80", model.ParamLabelPrefix + "testParam": "paramValue"},
					{model.JobLabel: "test_job1", model.InstanceLabel: "example.com:80", "testParam": "paramValue", model.SchemeLabel: "", model.MetricsPathLabel: "", model.AddressLabel: "example.com:80", model.ParamLabelPrefix + "testParam": "paramValue"},
				},
			},
		},
		{
			scrapeConfigs: []*config.ScrapeConfig{testJob1, testJob2},
			expected: map[string][]model.LabelSet{
				"test_job1:static:0:0": {
					{model.JobLabel: "test_job1", model.InstanceLabel: "example.org:80", "testParam": "paramValue", model.SchemeLabel: "", model.MetricsPathLabel: "", model.AddressLabel: "example.org:80", model.ParamLabelPrefix + "testParam": "paramValue"},
					{model.JobLabel: "test_job1", model.InstanceLabel: "example.com:80", "testParam": "paramValue", model.SchemeLabel: "", model.MetricsPathLabel: "", model.AddressLabel: "example.com:80", model.ParamLabelPrefix + "testParam": "paramValue"},
				},
				"test_job2:static:0:0": {
					{model.JobLabel: "test_job2", model.InstanceLabel: "example.org:8080", "foo": "bar", "new": "ox-ba", model.SchemeLabel: "", model.MetricsPathLabel: "", model.AddressLabel: "example.org:8080"},
					{model.JobLabel: "test_job2", model.InstanceLabel: "example.com:8081", "foo": "bar", "new": "ox-ba", model.SchemeLabel: "", model.MetricsPathLabel: "", model.AddressLabel: "example.com:8081"},
				},
				"test_job2:static:0:1": {
					{model.JobLabel: "test_job2", model.InstanceLabel: "foo.com:1234", model.SchemeLabel: "", model.MetricsPathLabel: "", model.AddressLabel: "foo.com:1234"},
				},
				"test_job2:static:0:2": {
					{model.JobLabel: "test_job2", model.InstanceLabel: "fixed", model.SchemeLabel: "", model.MetricsPathLabel: "", model.AddressLabel: "foo.com:1235"},
				},
			},
		},
		// An empty configuration must remove all targets.
		{
			scrapeConfigs: []*config.ScrapeConfig{},
			expected:      map[string][]model.LabelSet{},
		},
		{
			scrapeConfigs: []*config.ScrapeConfig{testJob2},
			expected: map[string][]model.LabelSet{
				"test_job2:static:0:0": {
					{model.JobLabel: "test_job2", model.InstanceLabel: "example.org:8080", "foo": "bar", "new": "ox-ba", model.SchemeLabel: "", model.MetricsPathLabel: "", model.AddressLabel: "example.org:8080"},
					{model.JobLabel: "test_job2", model.InstanceLabel: "example.com:8081", "foo": "bar", "new": "ox-ba", model.SchemeLabel: "", model.MetricsPathLabel: "", model.AddressLabel: "example.com:8081"},
				},
				"test_job2:static:0:1": {
					{model.JobLabel: "test_job2", model.InstanceLabel: "foo.com:1234", model.SchemeLabel: "", model.MetricsPathLabel: "", model.AddressLabel: "foo.com:1234"},
				},
				"test_job2:static:0:2": {
					{model.JobLabel: "test_job2", model.InstanceLabel: "fixed", model.SchemeLabel: "", model.MetricsPathLabel: "", model.AddressLabel: "foo.com:1235"},
				},
			},
		},
		// testJob3 rewrites addresses to non-host:port form; all its targets
		// must be dropped.
		{
			scrapeConfigs: []*config.ScrapeConfig{testJob3},
			expected:      map[string][]model.LabelSet{},
		},
	}
	conf := &config.Config{}
	*conf = config.DefaultConfig

	targetManager := NewTargetManager(nopAppender{})
	targetManager.ApplyConfig(conf)

	targetManager.Run()
	defer targetManager.Stop()

	for i, step := range sequence {
		conf.ScrapeConfigs = step.scrapeConfigs
		targetManager.ApplyConfig(conf)

		// Give the manager time to settle before inspecting its state.
		time.Sleep(50 * time.Millisecond)

		if len(targetManager.targets) != len(step.expected) {
			t.Fatalf("step %d: sources mismatch: expected %v, got %v", i, step.expected, targetManager.targets)
		}

		for source, actTargets := range targetManager.targets {
			expTargets, ok := step.expected[source]
			if !ok {
				t.Fatalf("step %d: unexpected source %q: %v", i, source, actTargets)
			}
			for _, expt := range expTargets {
				found := false
				for _, actt := range actTargets {
					if reflect.DeepEqual(expt, actt.fullLabels()) {
						found = true
						break
					}
				}
				if !found {
					t.Errorf("step %d: expected target %v for %q not found in actual targets", i, expt, source)
				}
			}
		}
	}
}
import ( "encoding/json" "io/ioutil" "net/url" "reflect" "strings" "testing" "time" "github.com/prometheus/common/model" "gopkg.in/yaml.v2" ) var expectedConf = &Config{ GlobalConfig: GlobalConfig{ ScrapeInterval: model.Duration(15 * time.Second), ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout, EvaluationInterval: model.Duration(30 * time.Second), ExternalLabels: model.LabelSet{ "monitor": "codelab", "foo": "bar", }, }, RuleFiles: []string{ "testdata/first.rules", "/absolute/second.rules", "testdata/my/*.rules", },
// TestTargetManagerChan feeds a sequence of target-group updates through a
// fake provider's channel and verifies that the TargetManager's target map
// converges to the expected per-source label sets after each update.
func TestTargetManagerChan(t *testing.T) {
	testJob1 := &config.ScrapeConfig{
		JobName:        "test_job1",
		ScrapeInterval: model.Duration(1 * time.Minute),
		TargetGroups: []*config.TargetGroup{{
			Targets: []model.LabelSet{
				{model.AddressLabel: "example.org:80"},
				{model.AddressLabel: "example.com:80"},
			},
		}},
	}
	prov1 := &fakeTargetProvider{
		sources: []string{"src1", "src2"},
		update:  make(chan *config.TargetGroup),
	}

	// Wire the manager directly with the fake provider rather than going
	// through ApplyConfig, so updates can be injected synchronously.
	targetManager := &TargetManager{
		sampleAppender: nopAppender{},
		providers: map[*config.ScrapeConfig][]TargetProvider{
			testJob1: {prov1},
		},
		targets: make(map[string][]*Target),
	}
	go targetManager.Run()
	defer targetManager.Stop()

	sequence := []struct {
		tgroup   *config.TargetGroup
		expected map[string][]model.LabelSet
	}{
		{
			tgroup: &config.TargetGroup{
				Source: "src1",
				Targets: []model.LabelSet{
					{model.AddressLabel: "test-1:1234"},
					{model.AddressLabel: "test-2:1234", "label": "set"},
					{model.AddressLabel: "test-3:1234"},
				},
			},
			expected: map[string][]model.LabelSet{
				"src1": {
					{model.JobLabel: "test_job1", model.InstanceLabel: "test-1:1234"},
					{model.JobLabel: "test_job1", model.InstanceLabel: "test-2:1234", "label": "set"},
					{model.JobLabel: "test_job1", model.InstanceLabel: "test-3:1234"},
				},
			},
		},
		// A second source appears; the first must remain untouched and the
		// group label must be attached to every target of the new source.
		{
			tgroup: &config.TargetGroup{
				Source: "src2",
				Targets: []model.LabelSet{
					{model.AddressLabel: "test-1:1235"},
					{model.AddressLabel: "test-2:1235"},
					{model.AddressLabel: "test-3:1235"},
				},
				Labels: model.LabelSet{"group": "label"},
			},
			expected: map[string][]model.LabelSet{
				"src1": {
					{model.JobLabel: "test_job1", model.InstanceLabel: "test-1:1234"},
					{model.JobLabel: "test_job1", model.InstanceLabel: "test-2:1234", "label": "set"},
					{model.JobLabel: "test_job1", model.InstanceLabel: "test-3:1234"},
				},
				"src2": {
					{model.JobLabel: "test_job1", model.InstanceLabel: "test-1:1235", "group": "label"},
					{model.JobLabel: "test_job1", model.InstanceLabel: "test-2:1235", "group": "label"},
					{model.JobLabel: "test_job1", model.InstanceLabel: "test-3:1235", "group": "label"},
				},
			},
		},
		// An empty update for src2 must remove that source entirely.
		{
			tgroup: &config.TargetGroup{
				Source:  "src2",
				Targets: []model.LabelSet{},
			},
			expected: map[string][]model.LabelSet{
				"src1": {
					{model.JobLabel: "test_job1", model.InstanceLabel: "test-1:1234"},
					{model.JobLabel: "test_job1", model.InstanceLabel: "test-2:1234", "label": "set"},
					{model.JobLabel: "test_job1", model.InstanceLabel: "test-3:1234"},
				},
			},
		},
		// Updating src1 must replace its target set (test-2 disappears,
		// test-4 appears, test-1 gains a label).
		{
			tgroup: &config.TargetGroup{
				Source: "src1",
				Targets: []model.LabelSet{
					{model.AddressLabel: "test-1:1234", "added": "label"},
					{model.AddressLabel: "test-3:1234"},
					{model.AddressLabel: "test-4:1234", "fancy": "label"},
				},
			},
			expected: map[string][]model.LabelSet{
				"src1": {
					{model.JobLabel: "test_job1", model.InstanceLabel: "test-1:1234", "added": "label"},
					{model.JobLabel: "test_job1", model.InstanceLabel: "test-3:1234"},
					{model.JobLabel: "test_job1", model.InstanceLabel: "test-4:1234", "fancy": "label"},
				},
			},
		},
	}

	for i, step := range sequence {
		prov1.update <- step.tgroup

		// Give the manager a moment to process the update before asserting.
		time.Sleep(20 * time.Millisecond)

		if len(targetManager.targets) != len(step.expected) {
			t.Fatalf("step %d: sources mismatch %v, %v", i, targetManager.targets, step.expected)
		}

		for source, actTargets := range targetManager.targets {
			expTargets, ok := step.expected[source]
			if !ok {
				t.Fatalf("step %d: unexpected source %q: %v", i, source, actTargets)
			}
			for _, expt := range expTargets {
				found := false
				for _, actt := range actTargets {
					if reflect.DeepEqual(expt, actt.BaseLabels()) {
						found = true
						break
					}
				}
				if !found {
					t.Errorf("step %d: expected target %v not found in actual targets", i, expt)
				}
			}
		}
	}
}
// TestScrapePoolReload verifies that reloading a scrape pool stops every old
// scrape loop before starting its replacement, applies the new interval and
// timeout to the new loops, and leaves the target set itself untouched.
func TestScrapePoolReload(t *testing.T) {
	var mtx sync.Mutex
	numTargets := 20

	// Records which scrapers' loops have been stopped, keyed by target hash.
	stopped := map[uint64]bool{}

	reloadCfg := &config.ScrapeConfig{
		ScrapeInterval: model.Duration(3 * time.Second),
		ScrapeTimeout:  model.Duration(2 * time.Second),
	}
	// On starting to run, new loops created on reload check whether their preceeding
	// equivalents have been stopped.
	newLoop := func(ctx context.Context, s scraper, app, reportApp storage.SampleAppender) loop {
		l := &testLoop{}
		l.startFunc = func(interval, timeout time.Duration, errc chan<- error) {
			// The new loop must see the reloaded interval/timeout.
			if interval != 3*time.Second {
				t.Errorf("Expected scrape interval %d but got %d", 3*time.Second, interval)
			}
			if timeout != 2*time.Second {
				t.Errorf("Expected scrape timeout %d but got %d", 2*time.Second, timeout)
			}
			mtx.Lock()
			if !stopped[s.(*targetScraper).hash()] {
				t.Errorf("Scrape loop for %v not stopped yet", s.(*targetScraper))
			}
			mtx.Unlock()
		}
		return l
	}
	sp := &scrapePool{
		targets: map[uint64]*Target{},
		loops:   map[uint64]loop{},
		newLoop: newLoop,
	}

	// Reloading a scrape pool with a new scrape configuration must stop all scrape
	// loops and start new ones. A new loop must not be started before the preceeding
	// one terminated.

	for i := 0; i < numTargets; i++ {
		// NOTE(review): `t` here shadows the *testing.T parameter for the
		// remainder of the loop body, and the stopFunc closure below captures
		// the loop variable `i` by reference (pre-Go-1.22 semantics), so all
		// closures observe the final value of `i` when they eventually run —
		// confirm this matches the intended sleep staggering.
		t := &Target{
			labels: model.LabelSet{
				model.AddressLabel: model.LabelValue(fmt.Sprintf("example.com:%d", i)),
			},
		}
		l := &testLoop{}
		l.stopFunc = func() {
			time.Sleep(time.Duration(i*20) * time.Millisecond)

			mtx.Lock()
			stopped[t.hash()] = true
			mtx.Unlock()
		}

		sp.targets[t.hash()] = t
		sp.loops[t.hash()] = l
	}
	done := make(chan struct{})

	// Snapshot the target map so we can assert reload() did not mutate it.
	beforeTargets := map[uint64]*Target{}
	for h, t := range sp.targets {
		beforeTargets[h] = t
	}

	reloadTime := time.Now()

	go func() {
		sp.reload(reloadCfg)
		close(done)
	}()

	select {
	case <-time.After(5 * time.Second):
		t.Fatalf("scrapeLoop.reload() did not return as expected")
	case <-done:
		// This should have taken at least as long as the last target slept.
		if time.Since(reloadTime) < time.Duration(numTargets*20)*time.Millisecond {
			t.Fatalf("scrapeLoop.stop() exited before all targets stopped")
		}
	}

	mtx.Lock()
	if len(stopped) != numTargets {
		t.Fatalf("Expected 20 stopped loops, got %d", len(stopped))
	}
	mtx.Unlock()

	if !reflect.DeepEqual(sp.targets, beforeTargets) {
		t.Fatalf("Reloading affected target states unexpectedly")
	}
	if len(sp.loops) != numTargets {
		t.Fatalf("Expected %d loops after reload but got %d", numTargets, len(sp.loops))
	}
}
// references a receiver not in the given map.
// (NOTE(review): the first line of this doc comment lies outside this chunk.
// checkReceiver recursively walks the routing tree and returns an error as
// soon as any route names a receiver that is absent from `receivers`.)
func checkReceiver(r *Route, receivers map[string]struct{}) error {
	if _, ok := receivers[r.Receiver]; !ok {
		return fmt.Errorf("Undefined receiver %q used in route", r.Receiver)
	}
	// Recurse into child routes; the first failure aborts the walk.
	for _, sr := range r.Routes {
		if err := checkReceiver(sr, receivers); err != nil {
			return err
		}
	}
	return nil
}

// DefaultGlobalConfig provides global default values.
var DefaultGlobalConfig = GlobalConfig{
	ResolveTimeout: model.Duration(5 * time.Minute),

	PagerdutyURL:    "https://events.pagerduty.com/generic/2010-04-15/create_event.json",
	HipchatURL:      "https://api.hipchat.com/",
	OpsGenieAPIHost: "https://api.opsgenie.com/",
}

// GlobalConfig defines configuration parameters that are valid globally
// unless overwritten.
type GlobalConfig struct {
	// ResolveTimeout is the time after which an alert is declared resolved
	// if it has not been updated.
	ResolveTimeout model.Duration `yaml:"resolve_timeout"`

	SMTPFrom      string `yaml:"smtp_from"`
	SMTPSmarthost string `yaml:"smtp_smarthost"`
// testFileSD exercises file-based service discovery for the given file
// extension: it copies a fixture into the watched glob, expects two target
// groups to be emitted, then overwrites the file with garbage and checks that
// the previously discovered groups are not retracted.
func testFileSD(t *testing.T, ext string) {
	// As interval refreshing is more of a fallback, we only want to test
	// whether file watches work as expected.
	var conf config.FileSDConfig
	conf.Files = []string{"fixtures/_*" + ext}
	conf.RefreshInterval = model.Duration(1 * time.Hour)

	var (
		fsd         = NewDiscovery(&conf)
		ch          = make(chan []*config.TargetGroup)
		ctx, cancel = context.WithCancel(context.Background())
	)
	go fsd.Run(ctx, ch)

	// No matching file exists yet, so no groups may be emitted.
	select {
	case <-time.After(25 * time.Millisecond):
		// Expected.
	case tgs := <-ch:
		t.Fatalf("Unexpected target groups in file discovery: %s", tgs)
	}

	newf, err := os.Create("fixtures/_test" + ext)
	if err != nil {
		t.Fatal(err)
	}
	defer newf.Close()

	f, err := os.Open("fixtures/target_groups" + ext)
	if err != nil {
		t.Fatal(err)
	}
	defer f.Close()
	_, err = io.Copy(newf, f)
	if err != nil {
		t.Fatal(err)
	}
	// Close explicitly so the watcher observes a completed write; the
	// deferred Close above then becomes a no-op on an already-closed file.
	newf.Close()

	timeout := time.After(15 * time.Second)
	// The files contain two target groups.
retry:
	for {
		select {
		case <-timeout:
			t.Fatalf("Expected new target group but got none")
		case tgs := <-ch:
			if len(tgs) != 2 {
				continue retry // Potentially a partial write, just retry.
			}
			tg := tgs[0]
			if _, ok := tg.Labels["foo"]; !ok {
				t.Fatalf("Label not parsed")
			}
			if tg.String() != fmt.Sprintf("fixtures/_test%s:0", ext) {
				t.Fatalf("Unexpected target group %s", tg)
			}

			tg = tgs[1]
			if tg.String() != fmt.Sprintf("fixtures/_test%s:1", ext) {
				t.Fatalf("Unexpected target groups %s", tg)
			}
			break retry
		}
	}

	// Based on unknown circumstances, sometimes fsnotify will trigger more events in
	// some runs (which might be empty, chains of different operations etc.).
	// We have to drain those (as the target manager would) to avoid deadlocking and must
	// not try to make sense of it all...
	drained := make(chan struct{})
	go func() {
	Loop:
		for {
			select {
			case tgs := <-ch:
				// Below we will change the file to a bad syntax. Previously extracted target
				// groups must not be deleted via sending an empty target group.
				if len(tgs[0].Targets) == 0 {
					// Errorf (not Fatalf): this runs off the test goroutine.
					t.Errorf("Unexpected empty target groups received: %s", tgs)
				}
			case <-time.After(500 * time.Millisecond):
				break Loop
			}
		}
		close(drained)
	}()

	newf, err = os.Create("fixtures/_test.new")
	if err != nil {
		t.Fatal(err)
	}
	defer os.Remove(newf.Name())

	if _, err := newf.Write([]byte("]gibberish\n][")); err != nil {
		t.Fatal(err)
	}
	newf.Close()

	// Atomically replace the watched file with invalid content.
	os.Rename(newf.Name(), "fixtures/_test"+ext)

	cancel()
	// Wait for the draining goroutine to finish before returning.
	<-drained
}