func TestAddEngineStatsPartial(t *testing.T) { engine := &Engine{ ClientConns: 0, ClientActive: 0, QueriesPerSec: 0, ReadsPerSec: 0, WritesPerSec: 0, } var acc testutil.Accumulator keys := []string{ "active_clients", "clients", "queries_per_sec", "read_docs_per_sec", "written_docs_per_sec", } missing_keys := []string{ "total_queries", "total_reads", "total_writes", } engine.AddEngineStats(keys, &acc, tags) for _, metric := range missing_keys { assert.False(t, acc.HasIntField("rethinkdb", metric)) } }
func TestMemcachedGeneratesMetrics(t *testing.T) { if testing.Short() { t.Skip("Skipping integration test in short mode") } m := &Memcached{ Servers: []string{testutil.GetLocalHost()}, } var acc testutil.Accumulator err := m.Gather(&acc) require.NoError(t, err) intMetrics := []string{"get_hits", "get_misses", "evictions", "limit_maxbytes", "bytes", "uptime", "curr_items", "total_items", "curr_connections", "total_connections", "connection_structures", "cmd_get", "cmd_set", "delete_hits", "delete_misses", "incr_hits", "incr_misses", "decr_hits", "decr_misses", "cas_hits", "cas_misses", "evictions", "bytes_read", "bytes_written", "threads", "conn_yields"} for _, metric := range intMetrics { assert.True(t, acc.HasIntField("memcached", metric), metric) } }
func testMain(t *testing.T, code string, endpoint string, serverType ServerType) { // Build the fake snmpwalk for test src := makeFakeSNMPSrc(code) defer os.Remove(src) buildFakeSNMPCmd(src) defer os.Remove("./snmpwalk") envPathOrigin := os.Getenv("PATH") // Refer to the fake snmpwalk os.Setenv("PATH", ".") defer os.Setenv("PATH", envPathOrigin) l := &LeoFS{ Servers: []string{endpoint}, } var acc testutil.Accumulator acc.SetDebug(true) err := l.Gather(&acc) require.NoError(t, err) floatMetrics := KeyMapping[serverType] for _, metric := range floatMetrics { assert.True(t, acc.HasFloatField("leofs", metric), metric) } }
func TestAerospikeStatistics(t *testing.T) { if testing.Short() { t.Skip("Skipping integration test in short mode") } a := &Aerospike{ Servers: []string{testutil.GetLocalHost() + ":3000"}, } var acc testutil.Accumulator err := a.Gather(&acc) require.NoError(t, err) // Only use a few of the metrics asMetrics := []string{ "transactions", "stat_write_errs", "stat_read_reqs", "stat_write_reqs", } for _, metric := range asMetrics { assert.True(t, acc.HasIntField("aerospike", metric), metric) } }
func TestAddNonReplStats(t *testing.T) { d := NewMongodbData( &StatLine{ StorageEngine: "", Time: time.Now(), Insert: 0, Query: 0, Update: 0, Delete: 0, GetMore: 0, Command: 0, Flushes: 0, Virtual: 0, Resident: 0, QueuedReaders: 0, QueuedWriters: 0, ActiveReaders: 0, ActiveWriters: 0, NetIn: 0, NetOut: 0, NumConnections: 0, }, tags, ) var acc testutil.Accumulator d.AddDefaultStats() d.flush(&acc) for key, _ := range DefaultStats { assert.True(t, acc.HasIntField("mongodb", key)) } }
func TestAddTableStats(t *testing.T) { var acc testutil.Accumulator err := server.addTableStats(&acc) require.NoError(t, err) for _, metric := range TableTracking { assert.True(t, acc.HasIntValue(metric)) } keys := []string{ "cache_bytes_in_use", "disk_read_bytes_per_sec", "disk_read_bytes_total", "disk_written_bytes_per_sec", "disk_written_bytes_total", "disk_usage_data_bytes", "disk_usage_garbage_bytes", "disk_usage_metadata_bytes", "disk_usage_preallocated_bytes", } for _, metric := range keys { assert.True(t, acc.HasIntValue(metric)) } }
func TestPrometheusGeneratesMetrics(t *testing.T) { ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { fmt.Fprintln(w, sampleTextFormat) })) defer ts.Close() p := &Prometheus{ Urls: []string{ts.URL}, } var acc testutil.Accumulator err := p.Gather(&acc) require.NoError(t, err) expected := []struct { name string value float64 tags map[string]string }{ {"prometheus_go_gc_duration_seconds_count", 7, map[string]string{}}, {"prometheus_go_goroutines", 15, map[string]string{}}, } for _, e := range expected { assert.True(t, acc.HasFloatField(e.name, "value")) } }
func TestZookeeperGeneratesMetrics(t *testing.T) { if testing.Short() { t.Skip("Skipping integration test in short mode") } z := &Zookeeper{ Servers: []string{testutil.GetLocalHost() + ":2181"}, } var acc testutil.Accumulator err := z.Gather(&acc) require.NoError(t, err) intMetrics := []string{ "avg_latency", "max_latency", "min_latency", "packets_received", "packets_sent", "outstanding_requests", "znode_count", "watch_count", "ephemerals_count", "approximate_data_size", "open_file_descriptor_count", "max_file_descriptor_count", } for _, metric := range intMetrics { assert.True(t, acc.HasIntField("zookeeper", metric), metric) } }
// Test response to empty string as response object: gathering must fail and
// no fields may be accumulated.
func TestHttpJsonEmptyResponse(t *testing.T) {
	httpjson := genMockHttpJson(empty, 200)

	var acc testutil.Accumulator
	err := httpjson[0].Gather(&acc)

	assert.NotNil(t, err)
	assert.Equal(t, 0, acc.NFields())
}
// Test response to malformed JSON func TestHttpJsonBadJson(t *testing.T) { httpjson := genMockHttpJson(invalidJSON, 200) var acc testutil.Accumulator err := httpjson[0].Gather(&acc) assert.NotNil(t, err) assert.Equal(t, 0, acc.NFields()) }
// Test response to HTTP 405 func TestHttpJsonBadMethod(t *testing.T) { httpjson := genMockHttpJson(validJSON, 200) httpjson[0].Method = "NOT_A_REAL_METHOD" var acc testutil.Accumulator err := httpjson[0].Gather(&acc) assert.NotNil(t, err) assert.Equal(t, 0, acc.NFields()) }
func TestAddMemberStats(t *testing.T) { var acc testutil.Accumulator err := server.addMemberStats(&acc) require.NoError(t, err) for _, metric := range MemberTracking { assert.True(t, acc.HasIntValue(metric)) } }
// TestHaproxyGeneratesMetricsWithoutAuthentication serves a canned HAProxy
// CSV stats page from an unauthenticated stub HTTP server and verifies that
// parsing produces the exact field values for the "be_app"/"host0" row,
// tagged with the proxy, backend server name, and the stub's address.
func TestHaproxyGeneratesMetricsWithoutAuthentication(t *testing.T) {
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprint(w, csvOutputSample)
	}))
	defer ts.Close()

	r := &haproxy{
		Servers: []string{ts.URL},
	}

	var acc testutil.Accumulator
	err := r.Gather(&acc)
	require.NoError(t, err)

	// "server" is the stub listener's host:port, so the expected tag set is
	// derived from the live test server rather than hard-coded.
	tags := map[string]string{
		"proxy":  "be_app",
		"server": ts.Listener.Addr().String(),
		"sv":     "host0",
	}

	// Exact values transcribed from csvOutputSample; all counters are uint64.
	fields := map[string]interface{}{
		"active_servers":    uint64(1),
		"backup_servers":    uint64(0),
		"bin":               uint64(510913516),
		"bout":              uint64(2193856571),
		"check_duration":    uint64(10),
		"cli_abort":         uint64(73),
		"ctime":             uint64(2),
		"downtime":          uint64(0),
		"dresp":             uint64(0),
		"econ":              uint64(0),
		"eresp":             uint64(1),
		"http_response.1xx": uint64(0),
		"http_response.2xx": uint64(119534),
		"http_response.3xx": uint64(48051),
		"http_response.4xx": uint64(2345),
		"http_response.5xx": uint64(1056),
		"lbtot":             uint64(171013),
		"qcur":              uint64(0),
		"qmax":              uint64(0),
		"qtime":             uint64(0),
		"rate":              uint64(3),
		"rate_max":          uint64(12),
		"rtime":             uint64(312),
		"scur":              uint64(1),
		"smax":              uint64(32),
		"srv_abort":         uint64(1),
		"stot":              uint64(171014),
		"ttime":             uint64(2341),
		"wredis":            uint64(0),
		"wretr":             uint64(1),
	}
	acc.AssertContainsTaggedFields(t, "haproxy", fields, tags)
}
func TestExecMalformed(t *testing.T) { e := &Exec{ runner: newRunnerMock([]byte(malformedJson), nil), Command: "badcommand arg1", } var acc testutil.Accumulator err := e.Gather(&acc) require.Error(t, err) assert.Equal(t, acc.NFields(), 0, "No new points should have been added") }
func TestCommandError(t *testing.T) { e := &Exec{ runner: newRunnerMock(nil, fmt.Errorf("exit status code 1")), Command: "badcommand", } var acc testutil.Accumulator err := e.Gather(&acc) require.Error(t, err) assert.Equal(t, acc.NFields(), 0, "No new points should have been added") }
// Test that the proper values are ignored or collected func TestHttpJsonOn404(t *testing.T) { jolokia := genJolokiaClientStub(validMultiValueJSON, 404, Servers, []Metric{UsedHeapMetric}) var acc testutil.Accumulator acc.SetDebug(true) err := jolokia.Gather(&acc) assert.Nil(t, err) assert.Equal(t, 0, len(acc.Points)) }
func TestNginxGeneratesMetrics(t *testing.T) { ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { var rsp string if r.URL.Path == "/stub_status" { rsp = sampleResponse } else { panic("Cannot handle request") } fmt.Fprintln(w, rsp) })) defer ts.Close() n := &Nginx{ Urls: []string{fmt.Sprintf("%s/stub_status", ts.URL)}, } var acc testutil.Accumulator err := n.Gather(&acc) require.NoError(t, err) fields := map[string]interface{}{ "active": uint64(585), "accepts": uint64(85340), "handled": uint64(85340), "requests": uint64(35085), "reading": uint64(4), "writing": uint64(135), "waiting": uint64(446), } addr, err := url.Parse(ts.URL) if err != nil { panic(err) } host, port, err := net.SplitHostPort(addr.Host) if err != nil { host = addr.Host if addr.Scheme == "http" { port = "80" } else if addr.Scheme == "https" { port = "443" } else { port = "" } } tags := map[string]string{"server": host, "port": port} acc.AssertContainsTaggedFields(t, "nginx", fields, tags) }
// TestRedis_ParseMetrics feeds a canned INFO response through
// gatherInfoOutput and checks the exact parsed values: integer counters as
// uint64, ratios/rates as float64, plus the per-keyspace measurement.
func TestRedis_ParseMetrics(t *testing.T) {
	var acc testutil.Accumulator
	tags := map[string]string{"host": "redis.net"}
	rdr := bufio.NewReader(strings.NewReader(testOutput))

	err := gatherInfoOutput(rdr, &acc, tags)
	require.NoError(t, err)

	// Expected values transcribed from the testOutput fixture.
	fields := map[string]interface{}{
		"uptime":                      uint64(238),
		"clients":                     uint64(1),
		"used_memory":                 uint64(1003936),
		"used_memory_rss":             uint64(811008),
		"used_memory_peak":            uint64(1003936),
		"used_memory_lua":             uint64(33792),
		"rdb_changes_since_last_save": uint64(0),
		"total_connections_received":  uint64(2),
		"total_commands_processed":    uint64(1),
		"instantaneous_ops_per_sec":   uint64(0),
		"sync_full":                   uint64(0),
		"sync_partial_ok":             uint64(0),
		"sync_partial_err":            uint64(0),
		"expired_keys":                uint64(0),
		"evicted_keys":                uint64(0),
		"keyspace_hits":               uint64(1),
		"keyspace_misses":             uint64(1),
		"pubsub_channels":             uint64(0),
		"pubsub_patterns":             uint64(0),
		"latest_fork_usec":            uint64(0),
		"connected_slaves":            uint64(0),
		"master_repl_offset":          uint64(0),
		"repl_backlog_active":         uint64(0),
		"repl_backlog_size":           uint64(1048576),
		"repl_backlog_histlen":        uint64(0),
		"mem_fragmentation_ratio":     float64(0.81),
		"instantaneous_input_kbps":    float64(876.16),
		"instantaneous_output_kbps":   float64(3010.23),
		"used_cpu_sys":                float64(0.14),
		"used_cpu_user":               float64(0.05),
		"used_cpu_sys_children":       float64(0.00),
		"used_cpu_user_children":      float64(0.00),
		// keyspace_hitrate is derived: hits / (hits + misses) = 1 / 2.
		"keyspace_hitrate": float64(0.50),
	}
	// Per-database stats land in a separate "redis_keyspace" measurement.
	keyspaceFields := map[string]interface{}{
		"avg_ttl": uint64(0),
		"expires": uint64(0),
		"keys":    uint64(2),
	}
	acc.AssertContainsTaggedFields(t, "redis", fields, tags)
	acc.AssertContainsTaggedFields(t, "redis_keyspace", keyspaceFields, tags)
}
func TestAddDefaultStats(t *testing.T) { var acc testutil.Accumulator err := server.gatherData(&acc) require.NoError(t, err) time.Sleep(time.Duration(1) * time.Second) // need to call this twice so it can perform the diff err = server.gatherData(&acc) require.NoError(t, err) for key, _ := range DefaultStats { assert.True(t, acc.HasIntValue(key)) } }
// Test that the parser parses kafka messages into points func TestRunParserAndGather(t *testing.T) { k, in := NewTestKafka() defer close(k.done) go k.parser() in <- saramaMsg(testMsg) time.Sleep(time.Millisecond) acc := testutil.Accumulator{} k.Gather(&acc) assert.Equal(t, len(acc.Points), 1) acc.AssertContainsFields(t, "cpu_load_short", map[string]interface{}{"value": float64(23422)}) }
// Test that the proper values are ignored or collected func TestHttpJson200(t *testing.T) { httpjson := genMockHttpJson(validJSON, 200) for _, service := range httpjson { var acc testutil.Accumulator err := service.Gather(&acc) require.NoError(t, err) assert.Equal(t, 4, acc.NFields()) for _, srv := range service.Servers { tags := map[string]string{"server": srv} mname := "httpjson_" + service.Name acc.AssertContainsTaggedFields(t, mname, expectedFields, tags) } } }
func TestGather(t *testing.T) { var acc testutil.Accumulator pid := os.Getpid() file, err := ioutil.TempFile(os.TempDir(), "telegraf") require.NoError(t, err) file.Write([]byte(strconv.Itoa(pid))) file.Close() defer os.Remove(file.Name()) p := Procstat{ PidFile: file.Name(), Prefix: "foo", } p.Gather(&acc) assert.True(t, acc.HasFloatField("procstat", "foo_cpu_time_user")) assert.True(t, acc.HasUIntField("procstat", "foo_memory_vms")) }
func TestMysqlDefaultsToLocal(t *testing.T) { if testing.Short() { t.Skip("Skipping integration test in short mode") } m := &Mysql{ Servers: []string{fmt.Sprintf("root@tcp(%s:3306)/", testutil.GetLocalHost())}, } var acc testutil.Accumulator err := m.Gather(&acc) require.NoError(t, err) assert.True(t, acc.HasMeasurement("mysql")) }
// TestStateTag verifies that a StatLine with NodeType set produces metrics
// carrying a "state" tag (here "PRI" for primary) and that every default
// counter is emitted with its zero value alongside the member_status field.
func TestStateTag(t *testing.T) {
	d := NewMongodbData(
		&StatLine{
			StorageEngine: "",
			Time:          time.Now(),
			Insert:        0,
			Query:         0,
			NodeType:      "PRI",
		},
		tags,
	)

	// NodeType is expected to surface as the "state" tag on the measurement.
	stateTags := make(map[string]string)
	stateTags["state"] = "PRI"

	var acc testutil.Accumulator

	d.AddDefaultStats()
	d.flush(&acc)

	// All counters are zero because the StatLine above is all zero values;
	// member_status mirrors NodeType as a string field.
	fields := map[string]interface{}{
		"active_reads":          int64(0),
		"active_writes":         int64(0),
		"commands_per_sec":      int64(0),
		"deletes_per_sec":       int64(0),
		"flushes_per_sec":       int64(0),
		"getmores_per_sec":      int64(0),
		"inserts_per_sec":       int64(0),
		"member_status":         "PRI",
		"net_in_bytes":          int64(0),
		"net_out_bytes":         int64(0),
		"open_connections":      int64(0),
		"queries_per_sec":       int64(0),
		"queued_reads":          int64(0),
		"queued_writes":         int64(0),
		"repl_commands_per_sec": int64(0),
		"repl_deletes_per_sec":  int64(0),
		"repl_getmores_per_sec": int64(0),
		"repl_inserts_per_sec":  int64(0),
		"repl_queries_per_sec":  int64(0),
		"repl_updates_per_sec":  int64(0),
		"resident_megabytes":    int64(0),
		"updates_per_sec":       int64(0),
		"vsize_megabytes":       int64(0),
	}
	acc.AssertContainsTaggedFields(t, "mongodb", fields, stateTags)
}
// Test that Gather works on a ping with no transmitted packets, even though the // command returns an error func TestBadPingGather(t *testing.T) { var acc testutil.Accumulator p := Ping{ Urls: []string{"www.amazon.com"}, pingHost: mockErrorHostPinger, } p.Gather(&acc) tags := map[string]string{"url": "www.amazon.com"} fields := map[string]interface{}{ "packets_transmitted": 2, "packets_received": 0, "percent_packet_loss": 100.0, "average_response_ms": 0.0, } acc.AssertContainsTaggedFields(t, "ping", fields, tags) }
// Test that Gather works on a ping with lossy packets func TestLossyPingGather(t *testing.T) { var acc testutil.Accumulator p := Ping{ Urls: []string{"www.google.com"}, pingHost: mockLossyHostPinger, } p.Gather(&acc) tags := map[string]string{"url": "www.google.com"} fields := map[string]interface{}{ "packets_transmitted": 5, "packets_received": 3, "percent_packet_loss": 40.0, "average_response_ms": 44.033, } acc.AssertContainsTaggedFields(t, "ping", fields, tags) }
func TestReadAerospikeStatsNamespace(t *testing.T) { var acc testutil.Accumulator stats := map[string]string{ "stat_write_errs": "12345", "stat_read_reqs": "12345", } readAerospikeStats(stats, &acc, "host1", "test") fields := map[string]interface{}{ "stat_write_errs": int64(12345), "stat_read_reqs": int64(12345), } tags := map[string]string{ "aerospike_host": "host1", "namespace": "test", } acc.AssertContainsTaggedFields(t, "aerospike", fields, tags) }
// Test that the proper values are ignored or collected func TestHttpJson200Tags(t *testing.T) { httpjson := genMockHttpJson(validJSONTags, 200) for _, service := range httpjson { if service.Name == "other_webapp" { var acc testutil.Accumulator err := service.Gather(&acc) require.NoError(t, err) assert.Equal(t, 2, acc.NFields()) for _, srv := range service.Servers { tags := map[string]string{"server": srv, "role": "master", "build": "123"} fields := map[string]interface{}{"value": float64(15)} mname := "httpjson_" + service.Name acc.AssertContainsTaggedFields(t, mname, fields, tags) } } } }
func TestAddWiredTigerStats(t *testing.T) { d := NewMongodbData( &StatLine{ StorageEngine: "wiredTiger", CacheDirtyPercent: 0, CacheUsedPercent: 0, }, tags, ) var acc testutil.Accumulator d.AddDefaultStats() d.flush(&acc) for key, _ := range WiredTigerStats { assert.True(t, acc.HasFloatField("mongodb", key)) } }
func TestExec(t *testing.T) { e := &Exec{ runner: newRunnerMock([]byte(validJson), nil), Command: "testcommand arg1", } var acc testutil.Accumulator err := e.Gather(&acc) require.NoError(t, err) assert.Equal(t, acc.NFields(), 4, "non-numeric measurements should be ignored") fields := map[string]interface{}{ "num_processes": float64(82), "cpu_used": float64(8234), "cpu_free": float64(32), "percent": float64(0.81), } acc.AssertContainsFields(t, "exec", fields) }