func TestPhpFpmGeneratesMetrics_From_Http(t *testing.T) {
	sv := statServer{}
	ts := httptest.NewServer(sv)
	defer ts.Close()

	r := &phpfpm{
		Urls: []string{ts.URL},
	}

	var acc testutil.Accumulator
	err := r.Gather(&acc)
	require.NoError(t, err)

	tags := map[string]string{
		"pool": "www",
	}

	fields := map[string]interface{}{
		"accepted_conn":        int64(3),
		"listen_queue":         int64(1),
		"max_listen_queue":     int64(0),
		"listen_queue_len":     int64(0),
		"idle_processes":       int64(1),
		"active_processes":     int64(1),
		"total_processes":      int64(2),
		"max_active_processes": int64(1),
		"max_children_reached": int64(2),
		"slow_requests":        int64(1),
	}

	acc.AssertContainsTaggedFields(t, "phpfpm", fields, tags)
}
func TestHaproxyGeneratesMetricsWithoutAuthentication(t *testing.T) {
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprint(w, csvOutputSample)
	}))
	defer ts.Close()

	r := &haproxy{
		Servers: []string{ts.URL},
	}

	var acc testutil.Accumulator
	err := r.Gather(&acc)
	require.NoError(t, err)

	tags := map[string]string{
		"proxy":  "be_app",
		"server": ts.Listener.Addr().String(),
		"sv":     "host0",
	}

	fields := map[string]interface{}{
		"active_servers":    uint64(1),
		"backup_servers":    uint64(0),
		"bin":               uint64(510913516),
		"bout":              uint64(2193856571),
		"check_duration":    uint64(10),
		"cli_abort":         uint64(73),
		"ctime":             uint64(2),
		"downtime":          uint64(0),
		"dresp":             uint64(0),
		"econ":              uint64(0),
		"eresp":             uint64(1),
		"http_response.1xx": uint64(0),
		"http_response.2xx": uint64(119534),
		"http_response.3xx": uint64(48051),
		"http_response.4xx": uint64(2345),
		"http_response.5xx": uint64(1056),
		"lbtot":             uint64(171013),
		"qcur":              uint64(0),
		"qmax":              uint64(0),
		"qtime":             uint64(0),
		"rate":              uint64(3),
		"rate_max":          uint64(12),
		"rtime":             uint64(312),
		"scur":              uint64(1),
		"smax":              uint64(32),
		"srv_abort":         uint64(1),
		"stot":              uint64(171014),
		"ttime":             uint64(2341),
		"wredis":            uint64(0),
		"wretr":             uint64(1),
	}
	acc.AssertContainsTaggedFields(t, "haproxy", fields, tags)
}
func TestNginxGeneratesMetrics(t *testing.T) {
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		var rsp string

		if r.URL.Path == "/stub_status" {
			rsp = sampleResponse
		} else {
			panic("Cannot handle request")
		}

		fmt.Fprintln(w, rsp)
	}))
	defer ts.Close()

	n := &Nginx{
		Urls: []string{fmt.Sprintf("%s/stub_status", ts.URL)},
	}

	var acc testutil.Accumulator
	err := n.Gather(&acc)
	require.NoError(t, err)

	fields := map[string]interface{}{
		"active":   uint64(585),
		"accepts":  uint64(85340),
		"handled":  uint64(85340),
		"requests": uint64(35085),
		"reading":  uint64(4),
		"writing":  uint64(135),
		"waiting":  uint64(446),
	}

	addr, err := url.Parse(ts.URL)
	if err != nil {
		panic(err)
	}

	host, port, err := net.SplitHostPort(addr.Host)
	if err != nil {
		host = addr.Host
		if addr.Scheme == "http" {
			port = "80"
		} else if addr.Scheme == "https" {
			port = "443"
		} else {
			port = ""
		}
	}

	tags := map[string]string{"server": host, "port": port}
	acc.AssertContainsTaggedFields(t, "nginx", fields, tags)
}
func TestRedis_ParseMetrics(t *testing.T) {
	var acc testutil.Accumulator
	tags := map[string]string{"host": "redis.net"}
	rdr := bufio.NewReader(strings.NewReader(testOutput))

	err := gatherInfoOutput(rdr, &acc, tags)
	require.NoError(t, err)

	fields := map[string]interface{}{
		"uptime":                      uint64(238),
		"clients":                     uint64(1),
		"used_memory":                 uint64(1003936),
		"used_memory_rss":             uint64(811008),
		"used_memory_peak":            uint64(1003936),
		"used_memory_lua":             uint64(33792),
		"rdb_changes_since_last_save": uint64(0),
		"total_connections_received":  uint64(2),
		"total_commands_processed":    uint64(1),
		"instantaneous_ops_per_sec":   uint64(0),
		"sync_full":                   uint64(0),
		"sync_partial_ok":             uint64(0),
		"sync_partial_err":            uint64(0),
		"expired_keys":                uint64(0),
		"evicted_keys":                uint64(0),
		"keyspace_hits":               uint64(1),
		"keyspace_misses":             uint64(1),
		"pubsub_channels":             uint64(0),
		"pubsub_patterns":             uint64(0),
		"latest_fork_usec":            uint64(0),
		"connected_slaves":            uint64(0),
		"master_repl_offset":          uint64(0),
		"repl_backlog_active":         uint64(0),
		"repl_backlog_size":           uint64(1048576),
		"repl_backlog_histlen":        uint64(0),
		"mem_fragmentation_ratio":     float64(0.81),
		"instantaneous_input_kbps":    float64(876.16),
		"instantaneous_output_kbps":   float64(3010.23),
		"used_cpu_sys":                float64(0.14),
		"used_cpu_user":               float64(0.05),
		"used_cpu_sys_children":       float64(0.00),
		"used_cpu_user_children":      float64(0.00),
		"keyspace_hitrate":            float64(0.50),
	}
	keyspaceFields := map[string]interface{}{
		"avg_ttl": uint64(0),
		"expires": uint64(0),
		"keys":    uint64(2),
	}
	acc.AssertContainsTaggedFields(t, "redis", fields, tags)
	acc.AssertContainsTaggedFields(t, "redis_keyspace", keyspaceFields, tags)
}
// Test that the proper values are ignored or collected
func TestHttpJson200(t *testing.T) {
	httpjson := genMockHttpJson(validJSON, 200)

	for _, service := range httpjson {
		var acc testutil.Accumulator
		err := service.Gather(&acc)
		require.NoError(t, err)
		assert.Equal(t, 4, acc.NFields())

		for _, srv := range service.Servers {
			tags := map[string]string{"server": srv}
			mname := "httpjson_" + service.Name
			acc.AssertContainsTaggedFields(t, mname, expectedFields, tags)
		}
	}
}
// Test that Gather works on a ping with no transmitted packets, even though the
// command returns an error
func TestBadPingGather(t *testing.T) {
	var acc testutil.Accumulator
	p := Ping{
		Urls:     []string{"www.amazon.com"},
		pingHost: mockErrorHostPinger,
	}

	p.Gather(&acc)
	tags := map[string]string{"url": "www.amazon.com"}
	fields := map[string]interface{}{
		"packets_transmitted": 2,
		"packets_received":    0,
		"percent_packet_loss": 100.0,
	}
	acc.AssertContainsTaggedFields(t, "ping", fields, tags)
}
// Test that Gather works on a ping with lossy packets
func TestLossyPingGather(t *testing.T) {
	var acc testutil.Accumulator
	p := Ping{
		Urls:     []string{"www.google.com"},
		pingHost: mockLossyHostPinger,
	}

	p.Gather(&acc)
	tags := map[string]string{"url": "www.google.com"}
	fields := map[string]interface{}{
		"packets_transmitted": 5,
		"packets_received":    3,
		"percent_packet_loss": 40.0,
		"average_response_ms": 44.033,
	}
	acc.AssertContainsTaggedFields(t, "ping", fields, tags)
}
func TestStateTag(t *testing.T) {
	d := NewMongodbData(
		&StatLine{
			StorageEngine: "",
			Time:          time.Now(),
			Insert:        0,
			Query:         0,
			NodeType:      "PRI",
		},
		tags,
	)

	stateTags := make(map[string]string)
	stateTags["state"] = "PRI"

	var acc testutil.Accumulator

	d.AddDefaultStats()
	d.flush(&acc)

	fields := map[string]interface{}{
		"active_reads":          int64(0),
		"active_writes":         int64(0),
		"commands_per_sec":      int64(0),
		"deletes_per_sec":       int64(0),
		"flushes_per_sec":       int64(0),
		"getmores_per_sec":      int64(0),
		"inserts_per_sec":       int64(0),
		"member_status":         "PRI",
		"net_in_bytes":          int64(0),
		"net_out_bytes":         int64(0),
		"open_connections":      int64(0),
		"queries_per_sec":       int64(0),
		"queued_reads":          int64(0),
		"queued_writes":         int64(0),
		"repl_commands_per_sec": int64(0),
		"repl_deletes_per_sec":  int64(0),
		"repl_getmores_per_sec": int64(0),
		"repl_inserts_per_sec":  int64(0),
		"repl_queries_per_sec":  int64(0),
		"repl_updates_per_sec":  int64(0),
		"resident_megabytes":    int64(0),
		"updates_per_sec":       int64(0),
		"vsize_megabytes":       int64(0),
	}
	acc.AssertContainsTaggedFields(t, "mongodb", fields, stateTags)
}
func TestReadAerospikeStatsNamespace(t *testing.T) {
	var acc testutil.Accumulator
	stats := map[string]string{
		"stat_write_errs": "12345",
		"stat_read_reqs":  "12345",
	}
	readAerospikeStats(stats, &acc, "host1", "test")

	fields := map[string]interface{}{
		"stat_write_errs": int64(12345),
		"stat_read_reqs":  int64(12345),
	}
	tags := map[string]string{
		"aerospike_host": "host1",
		"namespace":      "test",
	}
	acc.AssertContainsTaggedFields(t, "aerospike", fields, tags)
}
// Test that the proper values are ignored or collected
func TestHttpJson200Tags(t *testing.T) {
	httpjson := genMockHttpJson(validJSONTags, 200)

	for _, service := range httpjson {
		if service.Name == "other_webapp" {
			var acc testutil.Accumulator
			err := service.Gather(&acc)
			require.NoError(t, err)
			assert.Equal(t, 2, acc.NFields())
			for _, srv := range service.Servers {
				tags := map[string]string{"server": srv, "role": "master", "build": "123"}
				fields := map[string]interface{}{"value": float64(15)}
				mname := "httpjson_" + service.Name
				acc.AssertContainsTaggedFields(t, mname, fields, tags)
			}
		}
	}
}
func TestPhpFpmGeneratesMetrics_From_Socket_Custom_Status_Path(t *testing.T) {
	// Create the socket in /tmp: it is always writable, and if removing the
	// socket fails we won't leave junk files around, since /tmp is cleared
	// on system restart.
	var randomNumber int64
	binary.Read(rand.Reader, binary.LittleEndian, &randomNumber)
	tcp, err := net.Listen("unix", fmt.Sprintf("/tmp/test-fpm%d.sock", randomNumber))
	if err != nil {
		t.Fatal("Cannot initialize server")
	}
	defer tcp.Close()

	s := statServer{}
	go fcgi.Serve(tcp, s)

	r := &phpfpm{
		Urls: []string{tcp.Addr().String() + ":custom-status-path"},
	}

	var acc testutil.Accumulator
	err = r.Gather(&acc)
	require.NoError(t, err)

	tags := map[string]string{
		"pool": "www",
	}

	fields := map[string]interface{}{
		"accepted_conn":        int64(3),
		"listen_queue":         int64(1),
		"max_listen_queue":     int64(0),
		"listen_queue_len":     int64(0),
		"idle_processes":       int64(1),
		"active_processes":     int64(1),
		"total_processes":      int64(2),
		"max_active_processes": int64(1),
		"max_children_reached": int64(2),
		"slow_requests":        int64(1),
	}

	acc.AssertContainsTaggedFields(t, "phpfpm", fields, tags)
}
func TestGather(t *testing.T) {
	var acc testutil.Accumulator

	pa := PuppetAgent{
		Location: "last_run_summary.yaml",
	}
	pa.Gather(&acc)

	tags := map[string]string{"location": "last_run_summary.yaml"}
	fields := map[string]interface{}{
		"events_failure":            int64(0),
		"events_total":              int64(0),
		"events_success":            int64(0),
		"resources_failed":          int64(0),
		"resources_scheduled":       int64(0),
		"resources_changed":         int64(0),
		"resources_skipped":         int64(0),
		"resources_total":           int64(109),
		"resources_failedtorestart": int64(0),
		"resources_restarted":       int64(0),
		"resources_outofsync":       int64(0),
		"changes_total":             int64(0),
		"time_lastrun":              int64(1444936531),
		"version_config":            int64(1444936521),
		"time_user":                 float64(0.004331),
		"time_schedule":             float64(0.001123),
		"time_filebucket":           float64(0.000353),
		"time_file":                 float64(0.441472),
		"time_exec":                 float64(0.508123),
		"time_anchor":               float64(0.000555),
		"time_sshauthorizedkey":     float64(0.000764),
		"time_service":              float64(1.807795),
		"time_package":              float64(1.325788),
		"time_total":                float64(8.85354707064819),
		"time_configretrieval":      float64(4.75567007064819),
		"time_cron":                 float64(0.000584),
		"version_puppet":            "3.7.5",
	}

	acc.AssertContainsTaggedFields(t, "puppetagent", fields, tags)
}
// Test that the proper values are ignored or collected
func TestHttpJsonMultiValue(t *testing.T) {
	jolokia := genJolokiaClientStub(validMultiValueJSON, 200, Servers, []Metric{HeapMetric})

	var acc testutil.Accumulator
	err := jolokia.Gather(&acc)

	assert.Nil(t, err)
	assert.Equal(t, 1, len(acc.Points))

	fields := map[string]interface{}{
		"heap_memory_usage_init":      67108864.0,
		"heap_memory_usage_committed": 456130560.0,
		"heap_memory_usage_max":       477626368.0,
		"heap_memory_usage_used":      203288528.0,
	}
	tags := map[string]string{
		"host":   "127.0.0.1",
		"port":   "8080",
		"server": "as1",
	}
	acc.AssertContainsTaggedFields(t, "jolokia", fields, tags)
}
func TestPhpFpmGeneratesMetrics_From_Fcgi(t *testing.T) {
	// Let the OS choose an available port
	tcp, err := net.Listen("tcp", "127.0.0.1:0")
	if err != nil {
		t.Fatal("Cannot initialize test server")
	}
	defer tcp.Close()

	s := statServer{}
	go fcgi.Serve(tcp, s)

	// Query the test server above over FastCGI
	r := &phpfpm{
		Urls: []string{"fcgi://" + tcp.Addr().String() + "/status"},
	}

	var acc testutil.Accumulator
	err = r.Gather(&acc)
	require.NoError(t, err)

	tags := map[string]string{
		"pool": "www",
	}

	fields := map[string]interface{}{
		"accepted_conn":        int64(3),
		"listen_queue":         int64(1),
		"max_listen_queue":     int64(0),
		"listen_queue_len":     int64(0),
		"idle_processes":       int64(1),
		"active_processes":     int64(1),
		"total_processes":      int64(2),
		"max_active_processes": int64(1),
		"max_children_reached": int64(2),
		"slow_requests":        int64(1),
	}

	acc.AssertContainsTaggedFields(t, "phpfpm", fields, tags)
}
func TestZfsPoolMetrics(t *testing.T) {
	err := os.MkdirAll(testKstatPath, 0755)
	require.NoError(t, err)

	err = os.MkdirAll(testKstatPath+"/HOME", 0755)
	require.NoError(t, err)

	err = ioutil.WriteFile(testKstatPath+"/HOME/io", []byte(pool_ioContents), 0644)
	require.NoError(t, err)

	err = ioutil.WriteFile(testKstatPath+"/arcstats", []byte(arcstatsContents), 0644)
	require.NoError(t, err)

	poolMetrics := getPoolMetrics()

	var acc testutil.Accumulator

	z := &Zfs{KstatPath: testKstatPath, KstatMetrics: []string{"arcstats"}}
	err = z.Gather(&acc)
	require.NoError(t, err)

	require.False(t, acc.HasMeasurement("zfs_pool"))
	acc.Points = nil

	z = &Zfs{KstatPath: testKstatPath, KstatMetrics: []string{"arcstats"}, PoolMetrics: true}
	err = z.Gather(&acc)
	require.NoError(t, err)

	// one pool, all metrics
	tags := map[string]string{
		"pool": "HOME",
	}

	acc.AssertContainsTaggedFields(t, "zfs_pool", poolMetrics, tags)

	err = os.RemoveAll(os.TempDir() + "/telegraf")
	require.NoError(t, err)
}
func TestPhpFpmGeneratesMetrics(t *testing.T) {
	// Create a fake server that returns the test data
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprint(w, outputSample)
	}))
	defer ts.Close()

	// Query the test server above
	r := &phpfpm{
		Urls: []string{ts.URL},
	}

	var acc testutil.Accumulator
	err := r.Gather(&acc)
	require.NoError(t, err)

	tags := map[string]string{
		"url":  ts.Listener.Addr().String(),
		"pool": "www",
	}

	fields := map[string]interface{}{
		"accepted_conn":        int64(3),
		"listen_queue":         int64(1),
		"max_listen_queue":     int64(0),
		"listen_queue_len":     int64(0),
		"idle_processes":       int64(1),
		"active_processes":     int64(1),
		"total_processes":      int64(2),
		"max_active_processes": int64(1),
		"max_children_reached": int64(2),
		"slow_requests":        int64(1),
	}

	acc.AssertContainsTaggedFields(t, "phpfpm", fields, tags)
}
func TestGatherClusterStats(t *testing.T) {
	es := NewElasticsearch()
	es.Servers = []string{"http://example.com:9200"}
	es.ClusterHealth = true
	es.client.Transport = newTransportMock(http.StatusOK, clusterResponse)

	var acc testutil.Accumulator
	require.NoError(t, es.Gather(&acc))

	acc.AssertContainsTaggedFields(t, "elasticsearch_cluster_health",
		clusterHealthExpected,
		map[string]string{"name": "elasticsearch_telegraf"})

	acc.AssertContainsTaggedFields(t, "elasticsearch_indices",
		v1IndexExpected,
		map[string]string{"index": "v1"})

	acc.AssertContainsTaggedFields(t, "elasticsearch_indices",
		v2IndexExpected,
		map[string]string{"index": "v2"})
}
func TestDiskStats(t *testing.T) {
	var mps MockPS
	defer mps.AssertExpectations(t)
	var acc testutil.Accumulator
	var err error

	du := []*disk.DiskUsageStat{
		{
			Path:        "/",
			Fstype:      "ext4",
			Total:       128,
			Free:        23,
			InodesTotal: 1234,
			InodesFree:  234,
		},
		{
			Path:        "/home",
			Fstype:      "ext4",
			Total:       256,
			Free:        46,
			InodesTotal: 2468,
			InodesFree:  468,
		},
	}

	mps.On("DiskUsage").Return(du, nil)

	err = (&DiskStats{ps: &mps}).Gather(&acc)
	require.NoError(t, err)

	numDiskPoints := acc.NFields()
	expectedAllDiskPoints := 12
	assert.Equal(t, expectedAllDiskPoints, numDiskPoints)

	tags1 := map[string]string{
		"path":   "/",
		"fstype": "ext4",
	}
	tags2 := map[string]string{
		"path":   "/home",
		"fstype": "ext4",
	}

	fields1 := map[string]interface{}{
		"total":        uint64(128),
		"used":         uint64(105),
		"free":         uint64(23),
		"inodes_total": uint64(1234),
		"inodes_free":  uint64(234),
		"inodes_used":  uint64(1000),
	}
	fields2 := map[string]interface{}{
		"total":        uint64(256),
		"used":         uint64(210),
		"free":         uint64(46),
		"inodes_total": uint64(2468),
		"inodes_free":  uint64(468),
		"inodes_used":  uint64(2000),
	}
	acc.AssertContainsTaggedFields(t, "disk", fields1, tags1)
	acc.AssertContainsTaggedFields(t, "disk", fields2, tags2)

	// We expect 6 more DiskPoints to show up with an explicit match on "/"
	// and /home not matching the /dev in Mountpoints
	err = (&DiskStats{ps: &mps, Mountpoints: []string{"/", "/dev"}}).Gather(&acc)
	assert.Equal(t, expectedAllDiskPoints+6, acc.NFields())

	// We should see all the diskpoints as Mountpoints includes both
	// / and /home
	err = (&DiskStats{ps: &mps, Mountpoints: []string{"/", "/home"}}).Gather(&acc)
	assert.Equal(t, 2*expectedAllDiskPoints+6, acc.NFields())
}
func TestDockerStats_GenerateStats(t *testing.T) {
	var mps MockPS
	var acc testutil.Accumulator

	ds := &DockerContainerStat{
		Name: "blah",
		CPU: &cpu.CPUTimesStat{
			CPU:       "all",
			User:      3.1,
			System:    8.2,
			Idle:      80.1,
			Nice:      1.3,
			Iowait:    0.2,
			Irq:       0.1,
			Softirq:   0.11,
			Steal:     0.0001,
			Guest:     8.1,
			GuestNice: 0.324,
		},
		Mem: &docker.CgroupMemStat{
			ContainerID:             "blah",
			Cache:                   1,
			RSS:                     2,
			RSSHuge:                 3,
			MappedFile:              4,
			Pgpgin:                  5,
			Pgpgout:                 6,
			Pgfault:                 7,
			Pgmajfault:              8,
			InactiveAnon:            9,
			ActiveAnon:              10,
			InactiveFile:            11,
			ActiveFile:              12,
			Unevictable:             13,
			HierarchicalMemoryLimit: 14,
			TotalCache:              15,
			TotalRSS:                16,
			TotalRSSHuge:            17,
			TotalMappedFile:         18,
			TotalPgpgIn:             19,
			TotalPgpgOut:            20,
			TotalPgFault:            21,
			TotalPgMajFault:         22,
			TotalInactiveAnon:       23,
			TotalActiveAnon:         24,
			TotalInactiveFile:       25,
			TotalActiveFile:         26,
			TotalUnevictable:        27,
		},
	}

	mps.On("DockerStat").Return([]*DockerContainerStat{ds}, nil)

	err := (&DockerStats{&mps}).Gather(&acc)
	require.NoError(t, err)

	dockertags := map[string]string{
		"name":    "blah",
		"id":      "",
		"command": "",
	}

	fields := map[string]interface{}{
		"user":       3.1,
		"system":     8.2,
		"idle":       80.1,
		"nice":       1.3,
		"iowait":     0.2,
		"irq":        0.1,
		"softirq":    0.11,
		"steal":      0.0001,
		"guest":      8.1,
		"guest_nice": 0.324,

		"cache":                  uint64(1),
		"rss":                    uint64(2),
		"rss_huge":               uint64(3),
		"mapped_file":            uint64(4),
		"swap_in":                uint64(5),
		"swap_out":               uint64(6),
		"page_fault":             uint64(7),
		"page_major_fault":       uint64(8),
		"inactive_anon":          uint64(9),
		"active_anon":            uint64(10),
		"inactive_file":          uint64(11),
		"active_file":            uint64(12),
		"unevictable":            uint64(13),
		"memory_limit":           uint64(14),
		"total_cache":            uint64(15),
		"total_rss":              uint64(16),
		"total_rss_huge":         uint64(17),
		"total_mapped_file":      uint64(18),
		"total_swap_in":          uint64(19),
		"total_swap_out":         uint64(20),
		"total_page_fault":       uint64(21),
		"total_page_major_fault": uint64(22),
		"total_inactive_anon":    uint64(23),
		"total_active_anon":      uint64(24),
		"total_inactive_file":    uint64(25),
		"total_active_file":      uint64(26),
		"total_unevictable":      uint64(27),
	}

	acc.AssertContainsTaggedFields(t, "docker", fields, dockertags)
}
func TestMailChimpGatherReport(t *testing.T) {
	ts := httptest.NewServer(
		http.HandlerFunc(
			func(w http.ResponseWriter, r *http.Request) {
				w.WriteHeader(http.StatusOK)
				fmt.Fprintln(w, sampleReport)
			},
		))
	defer ts.Close()

	u, err := url.ParseRequestURI(ts.URL)
	require.NoError(t, err)

	api := &ChimpAPI{
		url:   u,
		Debug: true,
	}
	m := MailChimp{
		api:        api,
		CampaignId: "test",
	}

	var acc testutil.Accumulator
	err = m.Gather(&acc)
	require.NoError(t, err)

	tags := make(map[string]string)
	tags["id"] = "42694e9e57"
	tags["campaign_title"] = "Freddie's Jokes Vol. 1"

	fields := map[string]interface{}{
		"emails_sent":              int(200),
		"abuse_reports":            int(0),
		"unsubscribed":             int(2),
		"hard_bounces":             int(0),
		"soft_bounces":             int(2),
		"syntax_errors":            int(0),
		"forwards_count":           int(0),
		"forwards_opens":           int(0),
		"opens_total":              int(186),
		"unique_opens":             int(100),
		"clicks_total":             int(42),
		"unique_clicks":            int(400),
		"unique_subscriber_clicks": int(42),
		"facebook_recipient_likes": int(5),
		"facebook_unique_likes":    int(8),
		"facebook_likes":           int(42),
		"open_rate":                float64(42),
		"click_rate":               float64(42),
		"industry_open_rate":       float64(0.17076777144396),
		"industry_click_rate":      float64(0.027431311866951),
		"industry_bounce_rate":     float64(0.0063767751251474),
		"industry_unopen_rate":     float64(0.82285545343089),
		"industry_unsub_rate":      float64(0.001436957032815),
		"industry_abuse_rate":      float64(0.00021111996110887),
		"list_stats_sub_rate":      float64(10),
		"list_stats_unsub_rate":    float64(20),
		"list_stats_open_rate":     float64(42),
		"list_stats_click_rate":    float64(42),
		"industry_type":            "Social Networks and Online Communities",
	}
	acc.AssertContainsTaggedFields(t, "mailchimp", fields, tags)
}
func TestElasticsearch(t *testing.T) {
	es := NewElasticsearch()
	es.Servers = []string{"http://example.com:9200"}
	es.client.Transport = newTransportMock(http.StatusOK, statsResponse)

	var acc testutil.Accumulator
	if err := es.Gather(&acc); err != nil {
		t.Fatal(err)
	}

	tags := map[string]string{
		"cluster_name":          "es-testcluster",
		"node_attribute_master": "true",
		"node_id":               "SDFsfSDFsdfFSDSDfSFDSDF",
		"node_name":             "test.host.com",
		"node_host":             "test",
	}

	acc.AssertContainsTaggedFields(t, "elasticsearch_indices", indicesExpected, tags)
	acc.AssertContainsTaggedFields(t, "elasticsearch_os", osExpected, tags)
	acc.AssertContainsTaggedFields(t, "elasticsearch_process", processExpected, tags)
	acc.AssertContainsTaggedFields(t, "elasticsearch_jvm", jvmExpected, tags)
	acc.AssertContainsTaggedFields(t, "elasticsearch_thread_pool", threadPoolExpected, tags)
	acc.AssertContainsTaggedFields(t, "elasticsearch_fs", fsExpected, tags)
	acc.AssertContainsTaggedFields(t, "elasticsearch_transport", transportExpected, tags)
	acc.AssertContainsTaggedFields(t, "elasticsearch_http", httpExpected, tags)
	acc.AssertContainsTaggedFields(t, "elasticsearch_breakers", breakersExpected, tags)
}
func TestLustre2GeneratesMetrics(t *testing.T) {
	tempdir := os.TempDir() + "/telegraf/proc/fs/lustre/"
	ost_name := "OST0001"

	mdtdir := tempdir + "/mdt/"
	err := os.MkdirAll(mdtdir+"/"+ost_name, 0755)
	require.NoError(t, err)

	osddir := tempdir + "/osd-ldiskfs/"
	err = os.MkdirAll(osddir+"/"+ost_name, 0755)
	require.NoError(t, err)

	obddir := tempdir + "/obdfilter/"
	err = os.MkdirAll(obddir+"/"+ost_name, 0755)
	require.NoError(t, err)

	err = ioutil.WriteFile(mdtdir+"/"+ost_name+"/md_stats", []byte(mdtProcContents), 0644)
	require.NoError(t, err)

	err = ioutil.WriteFile(osddir+"/"+ost_name+"/stats", []byte(osdldiskfsProcContents), 0644)
	require.NoError(t, err)

	err = ioutil.WriteFile(obddir+"/"+ost_name+"/stats", []byte(obdfilterProcContents), 0644)
	require.NoError(t, err)

	m := &Lustre2{
		Ost_procfiles: []string{obddir + "/*/stats", osddir + "/*/stats"},
		Mds_procfiles: []string{mdtdir + "/*/md_stats"},
	}

	var acc testutil.Accumulator

	err = m.Gather(&acc)
	require.NoError(t, err)

	tags := map[string]string{
		"name": ost_name,
	}

	fields := map[string]interface{}{
		"cache_access":    uint64(19047063027),
		"cache_hit":       uint64(7393729777),
		"cache_miss":      uint64(11653333250),
		"close":           uint64(873243496),
		"crossdir_rename": uint64(369571),
		"getattr":         uint64(1503663097),
		"getxattr":        uint64(6145349681),
		"link":            uint64(445),
		"mkdir":           uint64(705499),
		"mknod":           uint64(349042),
		"open":            uint64(1024577037),
		"read_bytes":      uint64(78026117632000),
		"read_calls":      uint64(203238095),
		"rename":          uint64(629196),
		"rmdir":           uint64(227434),
		"samedir_rename":  uint64(259625),
		"setattr":         uint64(1898364),
		"setxattr":        uint64(83969),
		"statfs":          uint64(2916320),
		"sync":            uint64(434081),
		"unlink":          uint64(3549417),
		"write_bytes":     uint64(15201500833981),
		"write_calls":     uint64(71893382),
	}

	acc.AssertContainsTaggedFields(t, "lustre2", fields, tags)

	err = os.RemoveAll(os.TempDir() + "/telegraf")
	require.NoError(t, err)
}
func TestBcacheGeneratesMetrics(t *testing.T) {
	err := os.MkdirAll(testBcacheUuidPath, 0755)
	require.NoError(t, err)

	err = os.MkdirAll(testBcacheDevPath, 0755)
	require.NoError(t, err)

	err = os.MkdirAll(testBcacheBackingDevPath+"/bcache", 0755)
	require.NoError(t, err)

	err = os.Symlink(testBcacheBackingDevPath+"/bcache", testBcacheUuidPath+"/bdev0")
	require.NoError(t, err)

	err = os.Symlink(testBcacheDevPath, testBcacheUuidPath+"/bdev0/dev")
	require.NoError(t, err)

	err = os.MkdirAll(testBcacheUuidPath+"/bdev0/stats_total", 0755)
	require.NoError(t, err)

	err = ioutil.WriteFile(testBcacheUuidPath+"/bdev0/dirty_data", []byte(dirty_data), 0644)
	require.NoError(t, err)

	err = ioutil.WriteFile(testBcacheUuidPath+"/bdev0/stats_total/bypassed", []byte(bypassed), 0644)
	require.NoError(t, err)

	err = ioutil.WriteFile(testBcacheUuidPath+"/bdev0/stats_total/cache_bypass_hits", []byte(cache_bypass_hits), 0644)
	require.NoError(t, err)

	err = ioutil.WriteFile(testBcacheUuidPath+"/bdev0/stats_total/cache_bypass_misses", []byte(cache_bypass_misses), 0644)
	require.NoError(t, err)

	err = ioutil.WriteFile(testBcacheUuidPath+"/bdev0/stats_total/cache_hit_ratio", []byte(cache_hit_ratio), 0644)
	require.NoError(t, err)

	err = ioutil.WriteFile(testBcacheUuidPath+"/bdev0/stats_total/cache_hits", []byte(cache_hits), 0644)
	require.NoError(t, err)

	err = ioutil.WriteFile(testBcacheUuidPath+"/bdev0/stats_total/cache_miss_collisions", []byte(cache_miss_collisions), 0644)
	require.NoError(t, err)

	err = ioutil.WriteFile(testBcacheUuidPath+"/bdev0/stats_total/cache_misses", []byte(cache_misses), 0644)
	require.NoError(t, err)

	err = ioutil.WriteFile(testBcacheUuidPath+"/bdev0/stats_total/cache_readaheads", []byte(cache_readaheads), 0644)
	require.NoError(t, err)

	fields := map[string]interface{}{
		"dirty_data":            uint64(1610612736),
		"bypassed":              uint64(5167704440832),
		"cache_bypass_hits":     uint64(146155333),
		"cache_bypass_misses":   uint64(0),
		"cache_hit_ratio":       uint64(90),
		"cache_hits":            uint64(511469583),
		"cache_miss_collisions": uint64(157567),
		"cache_misses":          uint64(50616331),
		"cache_readaheads":      uint64(2),
	}

	tags := map[string]string{
		"backing_dev": "md10",
		"bcache_dev":  "bcache0",
	}

	var acc testutil.Accumulator

	// all devices
	b := &Bcache{BcachePath: testBcachePath}

	err = b.Gather(&acc)
	require.NoError(t, err)
	acc.AssertContainsTaggedFields(t, "bcache", fields, tags)

	// restrict to one existing device
	b = &Bcache{BcachePath: testBcachePath, BcacheDevs: []string{"bcache0"}}

	err = b.Gather(&acc)
	require.NoError(t, err)
	acc.AssertContainsTaggedFields(t, "bcache", fields, tags)

	err = os.RemoveAll(os.TempDir() + "/telegraf")
	require.NoError(t, err)
}
func TestZfsGeneratesMetrics(t *testing.T) {
	err := os.MkdirAll(testKstatPath, 0755)
	require.NoError(t, err)

	err = os.MkdirAll(testKstatPath+"/HOME", 0755)
	require.NoError(t, err)

	err = ioutil.WriteFile(testKstatPath+"/HOME/io", []byte(""), 0644)
	require.NoError(t, err)

	err = ioutil.WriteFile(testKstatPath+"/arcstats", []byte(arcstatsContents), 0644)
	require.NoError(t, err)

	err = ioutil.WriteFile(testKstatPath+"/zfetchstats", []byte(zfetchstatsContents), 0644)
	require.NoError(t, err)

	err = ioutil.WriteFile(testKstatPath+"/vdev_cache_stats", []byte(vdev_cache_statsContents), 0644)
	require.NoError(t, err)

	intMetrics := getKstatMetricsAll()

	var acc testutil.Accumulator

	// one pool, all metrics
	tags := map[string]string{
		"pools": "HOME",
	}

	z := &Zfs{KstatPath: testKstatPath}
	err = z.Gather(&acc)
	require.NoError(t, err)

	acc.AssertContainsTaggedFields(t, "zfs", intMetrics, tags)
	acc.Points = nil

	// two pools, all metrics
	err = os.MkdirAll(testKstatPath+"/STORAGE", 0755)
	require.NoError(t, err)

	err = ioutil.WriteFile(testKstatPath+"/STORAGE/io", []byte(""), 0644)
	require.NoError(t, err)

	tags = map[string]string{
		"pools": "HOME::STORAGE",
	}

	z = &Zfs{KstatPath: testKstatPath}
	acc = testutil.Accumulator{}
	err = z.Gather(&acc)
	require.NoError(t, err)

	acc.AssertContainsTaggedFields(t, "zfs", intMetrics, tags)
	acc.Points = nil

	intMetrics = getKstatMetricsArcOnly()

	// two pools, one metric
	z = &Zfs{KstatPath: testKstatPath, KstatMetrics: []string{"arcstats"}}
	acc = testutil.Accumulator{}
	err = z.Gather(&acc)
	require.NoError(t, err)

	acc.AssertContainsTaggedFields(t, "zfs", intMetrics, tags)

	err = os.RemoveAll(os.TempDir() + "/telegraf")
	require.NoError(t, err)
}
func TestHaproxyGeneratesMetricsWithAuthentication(t *testing.T) {
	// Create a fake server that returns the test data
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		username, password, ok := r.BasicAuth()
		if !ok {
			w.WriteHeader(http.StatusNotFound)
			fmt.Fprint(w, "Unauthorized")
			return
		}

		if username == "user" && password == "password" {
			fmt.Fprint(w, csvOutputSample)
		} else {
			w.WriteHeader(http.StatusNotFound)
			fmt.Fprint(w, "Unauthorized")
		}
	}))
	defer ts.Close()

	// Query the test server above, passing the credentials it expects
	r := &haproxy{
		Servers: []string{strings.Replace(ts.URL, "http://", "http://user:password@", 1)},
	}

	var acc testutil.Accumulator

	err := r.Gather(&acc)
	require.NoError(t, err)

	tags := map[string]string{
		"server": ts.Listener.Addr().String(),
		"proxy":  "be_app",
		"sv":     "host0",
	}

	fields := map[string]interface{}{
		"active_servers":    uint64(1),
		"backup_servers":    uint64(0),
		"bin":               uint64(510913516),
		"bout":              uint64(2193856571),
		"check_duration":    uint64(10),
		"cli_abort":         uint64(73),
		"ctime":             uint64(2),
		"downtime":          uint64(0),
		"dresp":             uint64(0),
		"econ":              uint64(0),
		"eresp":             uint64(1),
		"http_response.1xx": uint64(0),
		"http_response.2xx": uint64(119534),
		"http_response.3xx": uint64(48051),
		"http_response.4xx": uint64(2345),
		"http_response.5xx": uint64(1056),
		"lbtot":             uint64(171013),
		"qcur":              uint64(0),
		"qmax":              uint64(0),
		"qtime":             uint64(0),
		"rate":              uint64(3),
		"rate_max":          uint64(12),
		"rtime":             uint64(312),
		"scur":              uint64(1),
		"smax":              uint64(32),
		"srv_abort":         uint64(1),
		"stot":              uint64(171014),
		"ttime":             uint64(2341),
		"wredis":            uint64(0),
		"wretr":             uint64(1),
	}
	acc.AssertContainsTaggedFields(t, "haproxy", fields, tags)

	// Without authentication data we should get an error
	r = &haproxy{
		Servers: []string{ts.URL},
	}

	err = r.Gather(&acc)
	require.Error(t, err)
}
func TestPassengerGenerateMetric(t *testing.T) {
	fakePassengerStatus(sampleStat)
	defer teardown()

	// Gather from the fake passenger-status command created above
	r := &passenger{
		Command: "/tmp/passenger-status",
	}

	var acc testutil.Accumulator

	err := r.Gather(&acc)
	require.NoError(t, err)

	tags := map[string]string{
		"passenger_version": "5.0.17",
	}
	fields := map[string]interface{}{
		"process_count":      23,
		"max":                23,
		"capacity_used":      23,
		"get_wait_list_size": 3,
	}
	acc.AssertContainsTaggedFields(t, "passenger", fields, tags)

	tags = map[string]string{
		"name":     "/var/app/current/public",
		"app_root": "/var/app/current",
		"app_type": "rack",
	}
	fields = map[string]interface{}{
		"processes_being_spawned": 2,
		"capacity_used":           23,
		"get_wait_list_size":      3,
	}
	acc.AssertContainsTaggedFields(t, "passenger_group", fields, tags)

	tags = map[string]string{
		"name": "/var/app/current/public",
	}
	fields = map[string]interface{}{
		"capacity_used":      23,
		"get_wait_list_size": 3,
	}
	acc.AssertContainsTaggedFields(t, "passenger_supergroup", fields, tags)

	tags = map[string]string{
		"app_root":         "/var/app/current",
		"group_name":       "/var/app/current/public",
		"supergroup_name":  "/var/app/current/public",
		"pid":              "11553",
		"code_revision":    "899ac7f",
		"life_status":      "ALIVE",
		"process_group_id": "13608",
	}
	fields = map[string]interface{}{
		"concurrency":           1,
		"sessions":              0,
		"busyness":              0,
		"processed":             951,
		"spawner_creation_time": int64(1452746835922747),
		"spawn_start_time":      int64(1452746844946982),
		"spawn_end_time":        int64(1452746845013365),
		"last_used":             int64(1452747071764940),
		"uptime":                int64(226), // in seconds of 3m 46s
		"cpu":                   int64(58),
		"rss":                   int64(418548),
		"pss":                   int64(319391),
		"private_dirty":         int64(314900),
		"swap":                  int64(0),
		"real_memory":           int64(314900),
		"vmsize":                int64(1563580),
	}
	acc.AssertContainsTaggedFields(t, "passenger_process", fields, tags)
}
func TestNSQStats(t *testing.T) {
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(http.StatusOK)
		fmt.Fprintln(w, response)
	}))
	defer ts.Close()

	n := &NSQ{
		Endpoints: []string{ts.URL},
	}

	var acc testutil.Accumulator
	err := n.Gather(&acc)
	require.NoError(t, err)

	u, err := url.Parse(ts.URL)
	require.NoError(t, err)
	host := u.Host

	// actually validate the tests
	tests := []struct {
		m string
		f map[string]interface{}
		g map[string]string
	}{
		{
			"nsq_server",
			map[string]interface{}{
				"server_count": int64(1),
				"topic_count":  int64(2),
			},
			map[string]string{
				"server_host":    host,
				"server_version": "0.3.6",
			},
		},
		{
			"nsq_topic",
			map[string]interface{}{
				"depth":         int64(12),
				"backend_depth": int64(13),
				"message_count": int64(14),
				"channel_count": int64(1),
			},
			map[string]string{
				"server_host":    host,
				"server_version": "0.3.6",
				"topic":          "t1",
			},
		},
		{
			"nsq_channel",
			map[string]interface{}{
				"depth":          int64(0),
				"backend_depth":  int64(1),
				"inflight_count": int64(2),
				"deferred_count": int64(3),
				"message_count":  int64(4),
				"requeue_count":  int64(5),
				"timeout_count":  int64(6),
				"client_count":   int64(1),
			},
			map[string]string{
				"server_host":    host,
				"server_version": "0.3.6",
				"topic":          "t1",
				"channel":        "c1",
			},
		},
		{
			"nsq_client",
			map[string]interface{}{
				"ready_count":    int64(200),
				"inflight_count": int64(7),
				"message_count":  int64(8),
				"finish_count":   int64(9),
				"requeue_count":  int64(10),
			},
			map[string]string{
				"server_host":       host,
				"server_version":    "0.3.6",
				"topic":             "t1",
				"channel":           "c1",
				"client_name":       "373a715cd990",
				"client_id":         "373a715cd990",
				"client_hostname":   "373a715cd990",
				"client_version":    "V2",
				"client_address":    "172.17.0.11:35560",
				"client_tls":        "false",
				"client_snappy":     "false",
				"client_deflate":    "false",
				"client_user_agent": "nsq_to_nsq/0.3.6 go-nsq/1.0.5",
			},
		},
		{
			"nsq_topic",
			map[string]interface{}{
				"depth":         int64(28),
				"backend_depth": int64(29),
				"message_count": int64(30),
				"channel_count": int64(1),
			},
			map[string]string{
				"server_host":    host,
				"server_version": "0.3.6",
				"topic":          "t2",
			},
		},
		{
			"nsq_channel",
			map[string]interface{}{
				"depth":          int64(15),
				"backend_depth":  int64(16),
				"inflight_count": int64(17),
				"deferred_count": int64(18),
				"message_count":  int64(19),
				"requeue_count":  int64(20),
				"timeout_count":  int64(21),
				"client_count":   int64(1),
			},
			map[string]string{
				"server_host":    host,
				"server_version": "0.3.6",
				"topic":          "t2",
				"channel":        "c2",
			},
		},
		{
			"nsq_client",
			map[string]interface{}{
				"ready_count":    int64(22),
				"inflight_count": int64(23),
				"message_count":  int64(24),
				"finish_count":   int64(25),
				"requeue_count":  int64(26),
			},
			map[string]string{
				"server_host":       host,
				"server_version":    "0.3.6",
				"topic":             "t2",
				"channel":           "c2",
				"client_name":       "377569bd462b",
				"client_id":         "377569bd462b",
				"client_hostname":   "377569bd462b",
				"client_version":    "V2",
				"client_address":    "172.17.0.8:48145",
				"client_user_agent": "go-nsq/1.0.5",
				"client_tls":        "true",
				"client_snappy":     "true",
				"client_deflate":    "true",
			},
		},
	}

	for _, test := range tests {
		acc.AssertContainsTaggedFields(t, test.m, test.f, test.g)
	}
}
func TestNetStats(t *testing.T) {
	var mps MockPS
	var err error
	defer mps.AssertExpectations(t)
	var acc testutil.Accumulator

	netio := net.NetIOCountersStat{
		Name:        "eth0",
		BytesSent:   1123,
		BytesRecv:   8734422,
		PacketsSent: 781,
		PacketsRecv: 23456,
		Errin:       832,
		Errout:      8,
		Dropin:      7,
		Dropout:     1,
	}

	mps.On("NetIO").Return([]net.NetIOCountersStat{netio}, nil)

	netprotos := []net.NetProtoCountersStat{
		net.NetProtoCountersStat{
			Protocol: "Udp",
			Stats: map[string]int64{
				"InDatagrams": 4655,
				"NoPorts":     892592,
			},
		},
	}
	mps.On("NetProto").Return(netprotos, nil)

	netstats := []net.NetConnectionStat{
		net.NetConnectionStat{
			Type: syscall.SOCK_DGRAM,
		},
		net.NetConnectionStat{
			Status: "ESTABLISHED",
		},
		net.NetConnectionStat{
			Status: "ESTABLISHED",
		},
		net.NetConnectionStat{
			Status: "CLOSE",
		},
	}
	mps.On("NetConnections").Return(netstats, nil)

	err = (&NetIOStats{ps: &mps, skipChecks: true}).Gather(&acc)
	require.NoError(t, err)

	ntags := map[string]string{
		"interface": "eth0",
	}

	fields1 := map[string]interface{}{
		"bytes_sent":   uint64(1123),
		"bytes_recv":   uint64(8734422),
		"packets_sent": uint64(781),
		"packets_recv": uint64(23456),
		"err_in":       uint64(832),
		"err_out":      uint64(8),
		"drop_in":      uint64(7),
		"drop_out":     uint64(1),
	}
	acc.AssertContainsTaggedFields(t, "net", fields1, ntags)

	fields2 := map[string]interface{}{
		"udp_noports":     int64(892592),
		"udp_indatagrams": int64(4655),
	}
	ntags = map[string]string{
		"interface": "all",
	}
	acc.AssertContainsTaggedFields(t, "net", fields2, ntags)

	acc.Points = nil

	err = (&NetStats{&mps}).Gather(&acc)
	require.NoError(t, err)

	fields3 := map[string]interface{}{
		"tcp_established": 2,
		"tcp_syn_sent":    0,
		"tcp_syn_recv":    0,
		"tcp_fin_wait1":   0,
		"tcp_fin_wait2":   0,
		"tcp_time_wait":   0,
		"tcp_close":       1,
		"tcp_close_wait":  0,
		"tcp_last_ack":    0,
		"tcp_listen":      0,
		"tcp_closing":     0,
		"tcp_none":        0,
		"udp_socket":      1,
	}
	acc.AssertContainsTaggedFields(t, "netstat", fields3, make(map[string]string))
}
func TestMemStats(t *testing.T) {
	var mps MockPS
	var err error
	defer mps.AssertExpectations(t)
	var acc testutil.Accumulator

	vms := &mem.VirtualMemoryStat{
		Total:     12400,
		Available: 7600,
		Used:      5000,
		Free:      1235,
		// Active:      8134,
		// Inactive:    1124,
		// Buffers:     771,
		// Cached:      4312,
		// Wired:       134,
		// Shared:      2142,
	}

	mps.On("VMStat").Return(vms, nil)

	sms := &mem.SwapMemoryStat{
		Total:       8123,
		Used:        1232,
		Free:        6412,
		UsedPercent: 12.2,
		Sin:         7,
		Sout:        830,
	}

	mps.On("SwapStat").Return(sms, nil)

	err = (&MemStats{&mps}).Gather(&acc)
	require.NoError(t, err)

	memfields := map[string]interface{}{
		"total":             uint64(12400),
		"available":         uint64(7600),
		"used":              uint64(5000),
		"available_percent": float64(7600) / float64(12400) * 100,
		"used_percent":      float64(5000) / float64(12400) * 100,
		"free":              uint64(1235),
		"cached":            uint64(0),
		"buffered":          uint64(0),
	}
	acc.AssertContainsTaggedFields(t, "mem", memfields, make(map[string]string))

	acc.Points = nil

	err = (&SwapStats{&mps}).Gather(&acc)
	require.NoError(t, err)

	swapfields := map[string]interface{}{
		"total":        uint64(8123),
		"used":         uint64(1232),
		"used_percent": float64(12.2),
		"free":         uint64(6412),
		"in":           uint64(7),
		"out":          uint64(830),
	}
	acc.AssertContainsTaggedFields(t, "swap", swapfields, make(map[string]string))
}
func TestBasic(t *testing.T) {
	js := `
{
  "_1": {
    "name": "foo",
    "tags": {
      "id": "ex1"
    },
    "values": {
      "i": -1,
      "f": 0.5,
      "b": true,
      "s": "string"
    }
  },
  "ignored": {
    "willBeRecorded": false
  },
  "ignoredAndNested": {
    "hash": {
      "is": "nested"
    }
  },
  "array": [
    "makes parsing more difficult than necessary"
  ],
  "string": "makes parsing more difficult than necessary",
  "_2": {
    "name": "bar",
    "tags": {
      "id": "ex2"
    },
    "values": {
      "x": "x"
    }
  },
  "pointWithoutFields_willNotBeIncluded": {
    "name": "asdf",
    "tags": {
      "id": "ex3"
    },
    "values": {}
  }
}
`
	fakeServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		if r.URL.Path == "/endpoint" {
			_, _ = w.Write([]byte(js))
		} else {
			w.WriteHeader(http.StatusNotFound)
		}
	}))
	defer fakeServer.Close()

	plugin := &influxdb.InfluxDB{
		URLs: []string{fakeServer.URL + "/endpoint"},
	}

	var acc testutil.Accumulator
	require.NoError(t, plugin.Gather(&acc))

	require.Len(t, acc.Points, 2)

	fields := map[string]interface{}{
		// JSON will truncate floats to integer representations.
		// Since there's no distinction in JSON, we can't assume it's an int.
		"i": -1.0,
		"f": 0.5,
		"b": true,
		"s": "string",
	}
	tags := map[string]string{
		"id":  "ex1",
		"url": fakeServer.URL + "/endpoint",
	}
	acc.AssertContainsTaggedFields(t, "foo", fields, tags)

	fields = map[string]interface{}{
		"x": "x",
	}
	tags = map[string]string{
		"id":  "ex2",
		"url": fakeServer.URL + "/endpoint",
	}
	acc.AssertContainsTaggedFields(t, "bar", fields, tags)
}