func TestMesosSlave(t *testing.T) { var acc testutil.Accumulator m := Mesos{ Masters: []string{}, Slaves: []string{slaveTestServer.Listener.Addr().String()}, SlaveTasks: true, Timeout: 10, } err := m.Gather(&acc) if err != nil { t.Errorf(err.Error()) } acc.AssertContainsFields(t, "mesos", slaveMetrics) jf := jsonparser.JSONFlattener{} err = jf.FlattenJSON("", slaveTaskMetrics) if err != nil { t.Errorf(err.Error()) } acc.AssertContainsFields(t, "mesos-tasks", jf.Fields) }
func TestPartialVmStatProcFile(t *testing.T) { tmpfile := makeFakeStatFile([]byte(vmStatFile_Partial)) defer os.Remove(tmpfile) k := KernelVmstat{ statFile: tmpfile, } acc := testutil.Accumulator{} err := k.Gather(&acc) assert.NoError(t, err) fields := map[string]interface{}{ "unevictable_pgs_culled": int64(1531), "unevictable_pgs_scanned": int64(0), "unevictable_pgs_rescued": int64(5426), "unevictable_pgs_mlocked": int64(6988), "unevictable_pgs_munlocked": int64(6988), "unevictable_pgs_cleared": int64(0), "unevictable_pgs_stranded": int64(0), "unevictable_pgs_mlockfreed": int64(0), "thp_fault_alloc": int64(346219), "thp_fault_fallback": int64(895453), "thp_collapse_alloc": int64(24857), "thp_collapse_alloc_failed": int64(102214), "thp_split": int64(9817), } acc.AssertContainsFields(t, "kernel_vmstat", fields) }
func TestMesosSlave(t *testing.T) { var acc testutil.Accumulator m := Mesos{ Masters: []string{}, Slaves: []string{slaveTestServer.Listener.Addr().String()}, // SlaveTasks: true, Timeout: 10, } err := m.Gather(&acc) if err != nil { t.Errorf(err.Error()) } acc.AssertContainsFields(t, "mesos", slaveMetrics) // expectedFields := make(map[string]interface{}, len(slaveTaskMetrics["statistics"].(map[string]interface{}))+1) // for k, v := range slaveTaskMetrics["statistics"].(map[string]interface{}) { // expectedFields[k] = v // } // expectedFields["executor_id"] = slaveTaskMetrics["executor_id"] // acc.AssertContainsTaggedFields( // t, // "mesos_tasks", // expectedFields, // map[string]string{"server": "127.0.0.1", "framework_id": slaveTaskMetrics["framework_id"].(string)}) }
func TestExec(t *testing.T) { parser, _ := parsers.NewJSONParser("exec", []string{}, nil) e := &Exec{ runner: newRunnerMock([]byte(validJson), nil), Commands: []string{"testcommand arg1"}, parser: parser, } var acc testutil.Accumulator err := e.Gather(&acc) require.NoError(t, err) assert.Equal(t, acc.NFields(), 8, "non-numeric measurements should be ignored") fields := map[string]interface{}{ "num_processes": float64(82), "cpu_used": float64(8234), "cpu_free": float64(32), "percent": float64(0.81), "users_0": float64(0), "users_1": float64(1), "users_2": float64(2), "users_3": float64(3), } acc.AssertContainsFields(t, "exec", fields) }
func TestExecCommandWithoutGlobAndPath(t *testing.T) { parser, _ := parsers.NewValueParser("metric", "string", nil) e := NewExec() e.Commands = []string{"echo metric_value"} e.SetParser(parser) var acc testutil.Accumulator err := e.Gather(&acc) require.NoError(t, err) fields := map[string]interface{}{ "value": "metric_value", } acc.AssertContainsFields(t, "metric", fields) }
// Test that the parser parses kafka messages into points func TestRunParserAndGather(t *testing.T) { k, in := NewTestKafka() defer close(k.done) go k.parser() in <- saramaMsg(testMsg) time.Sleep(time.Millisecond) acc := testutil.Accumulator{} k.Gather(&acc) assert.Equal(t, len(acc.Points), 1) acc.AssertContainsFields(t, "cpu_load_short", map[string]interface{}{"value": float64(23422)}) }
func TestMesosMaster(t *testing.T) { var acc testutil.Accumulator m := Mesos{ Masters: []string{masterTestServer.Listener.Addr().String()}, Timeout: 10, } err := m.Gather(&acc) if err != nil { t.Errorf(err.Error()) } acc.AssertContainsFields(t, "mesos", masterMetrics) }
// Test that the parser parses line format messages into metrics func TestRunParserAndGather(t *testing.T) { n, in := newTestNatsConsumer() acc := testutil.Accumulator{} n.acc = &acc defer close(n.done) n.parser, _ = parsers.NewInfluxParser() go n.receiver() in <- natsMsg(testMsg) time.Sleep(time.Millisecond * 25) n.Gather(&acc) acc.AssertContainsFields(t, "cpu_load_short", map[string]interface{}{"value": float64(23422)}) }
// Test that the parser parses graphite format messages into metrics func TestRunParserAndGatherGraphite(t *testing.T) { n, in := newTestMQTTConsumer() acc := testutil.Accumulator{} n.acc = &acc defer close(n.done) n.parser, _ = parsers.NewGraphiteParser("_", []string{}, nil) go n.receiver() in <- mqttMsg(testMsgGraphite) time.Sleep(time.Millisecond * 25) n.Gather(&acc) acc.AssertContainsFields(t, "cpu_load_short_graphite", map[string]interface{}{"value": float64(23422)}) }
// Test that the parser parses kafka messages into points func TestRunParserAndGatherGraphite(t *testing.T) { k, in := NewTestKafka() defer close(k.done) k.parser, _ = parsers.NewGraphiteParser("_", []string{}, nil) go k.receiver() in <- saramaMsg(testMsgGraphite) time.Sleep(time.Millisecond) acc := testutil.Accumulator{} k.Gather(&acc) assert.Equal(t, len(acc.Metrics), 1) acc.AssertContainsFields(t, "cpu_load_short_graphite", map[string]interface{}{"value": float64(23422)}) }
// Test that the parser parses kafka messages into points func TestRunParserAndGather(t *testing.T) { k, in := newTestKafka() acc := testutil.Accumulator{} k.acc = &acc defer close(k.done) k.parser, _ = parsers.NewInfluxParser() go k.receiver() in <- saramaMsg(testMsg) time.Sleep(time.Millisecond * 5) k.Gather(&acc) assert.Equal(t, acc.NFields(), 1) acc.AssertContainsFields(t, "cpu_load_short", map[string]interface{}{"value": float64(23422)}) }
// Test that the parser parses line format messages into metrics func TestRunParserAndGather(t *testing.T) { n, in := newTestNatsConsumer() defer close(n.done) n.parser, _ = parsers.NewInfluxParser() go n.receiver() in <- natsMsg(testMsg) time.Sleep(time.Millisecond) acc := testutil.Accumulator{} n.Gather(&acc) if a := len(acc.Metrics); a != 1 { t.Errorf("got %v, expected %v", a, 1) } acc.AssertContainsFields(t, "cpu_load_short", map[string]interface{}{"value": float64(23422)}) }
// TestHTTPApache serves a canned mod_status page from a stub HTTP server
// and checks every parsed field. The same URL is listed twice so Gather
// fetches concurrently, which would surface data races under -race.
func TestHTTPApache(t *testing.T) {
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(http.StatusOK)
		fmt.Fprintln(w, apacheStatus)
	}))
	defer ts.Close()

	a := Apache{
		// Fetch it 2 times to catch possible data races.
		Urls: []string{ts.URL, ts.URL},
	}

	var acc testutil.Accumulator
	err := a.Gather(&acc)
	require.NoError(t, err)

	// Expected values correspond one-to-one to the apacheStatus fixture.
	fields := map[string]interface{}{
		"TotalAccesses":       float64(1.29811861e+08),
		"TotalkBytes":         float64(5.213701865e+09),
		"CPULoad":             float64(6.51929),
		"Uptime":              float64(941553),
		"ReqPerSec":           float64(137.87),
		"BytesPerSec":         float64(5.67024e+06),
		"BytesPerReq":         float64(41127.4),
		"BusyWorkers":         float64(270),
		"IdleWorkers":         float64(630),
		"ConnsTotal":          float64(1451),
		"ConnsAsyncWriting":   float64(32),
		"ConnsAsyncKeepAlive": float64(945),
		"ConnsAsyncClosing":   float64(205),
		"scboard_waiting":     float64(630),
		"scboard_starting":    float64(0),
		"scboard_reading":     float64(157),
		"scboard_sending":     float64(113),
		"scboard_keepalive":   float64(0),
		"scboard_dnslookup":   float64(0),
		"scboard_closing":     float64(0),
		"scboard_logging":     float64(0),
		"scboard_finishing":   float64(0),
		"scboard_idle_cleanup": float64(0),
		"scboard_open":        float64(2850),
	}
	acc.AssertContainsFields(t, "apache", fields)
}
// Test that the parser parses json format messages into metrics func TestRunParserAndGatherJSON(t *testing.T) { n, in := newTestMQTTConsumer() acc := testutil.Accumulator{} n.acc = &acc defer close(n.done) n.parser, _ = parsers.NewJSONParser("nats_json_test", []string{}, nil) go n.receiver() in <- mqttMsg(testMsgJSON) time.Sleep(time.Millisecond * 25) n.Gather(&acc) acc.AssertContainsFields(t, "nats_json_test", map[string]interface{}{ "a": float64(5), "b_c": float64(6), }) }
// Test that the parser parses kafka messages into points func TestRunParserAndGatherJSON(t *testing.T) { k, in := NewTestKafka() defer close(k.done) k.parser, _ = parsers.NewJSONParser("kafka_json_test", []string{}, nil) go k.receiver() in <- saramaMsg(testMsgJSON) time.Sleep(time.Millisecond) acc := testutil.Accumulator{} k.Gather(&acc) assert.Equal(t, len(acc.Metrics), 1) acc.AssertContainsFields(t, "kafka_json_test", map[string]interface{}{ "a": float64(5), "b_c": float64(6), }) }
func TestRunParserGraphiteMsg(t *testing.T) { var testmsg = []byte("cpu.load.graphite 12 1454780029") listener, in := newTestTcpListener() acc := testutil.Accumulator{} listener.acc = &acc defer close(listener.done) listener.parser, _ = parsers.NewGraphiteParser("_", []string{}, nil) listener.wg.Add(1) go listener.tcpParser() in <- testmsg time.Sleep(time.Millisecond * 25) listener.Gather(&acc) acc.AssertContainsFields(t, "cpu_load_graphite", map[string]interface{}{"value": float64(12)}) }
func TestTrig(t *testing.T) { s := &Trig{ Amplitude: 10.0, } for i := 0.0; i < 10.0; i++ { var acc testutil.Accumulator sine := math.Sin((i*math.Pi)/5.0) * s.Amplitude cosine := math.Cos((i*math.Pi)/5.0) * s.Amplitude s.Gather(&acc) fields := make(map[string]interface{}) fields["sine"] = sine fields["cosine"] = cosine acc.AssertContainsFields(t, "trig", fields) } }
func TestPartialProcFile(t *testing.T) { tmpfile := makeFakeStatFile([]byte(statFile_Partial)) defer os.Remove(tmpfile) k := Kernel{ statFile: tmpfile, } acc := testutil.Accumulator{} err := k.Gather(&acc) assert.NoError(t, err) fields := map[string]interface{}{ "boot_time": int64(1457505775), "context_switches": int64(2626618), "disk_pages_in": int64(5741), "disk_pages_out": int64(1808), "interrupts": int64(1472736), } acc.AssertContainsFields(t, "kernel", fields) }
// Test that the parser parses json format messages into metrics func TestRunParserAndGatherJSON(t *testing.T) { n, in := newTestNatsConsumer() defer close(n.done) n.parser, _ = parsers.NewJSONParser("nats_json_test", []string{}, nil) go n.receiver() in <- natsMsg(testMsgJSON) time.Sleep(time.Millisecond) acc := testutil.Accumulator{} n.Gather(&acc) if a := len(acc.Metrics); a != 1 { t.Errorf("got %v, expected %v", a, 1) } acc.AssertContainsFields(t, "nats_json_test", map[string]interface{}{ "a": float64(5), "b_c": float64(6), }) }
// TestPrometheusWritePointTag is an integration test: it writes two tagged
// metrics through the prometheus client output (pTesting, assumed started
// elsewhere on :9126) and scrapes them back via the prometheus input.
func TestPrometheusWritePointTag(t *testing.T) {
	if testing.Short() {
		t.Skip("Skipping integration test in short mode")
	}
	p := &prometheus.Prometheus{
		Urls: []string{"http://localhost:9126/metrics"},
	}
	tags := make(map[string]string)
	tags["testtag"] = "testvalue"
	pt1, _ := telegraf.NewMetric(
		"test_point_3",
		tags,
		map[string]interface{}{"value": 0.0})
	pt2, _ := telegraf.NewMetric(
		"test_point_4",
		tags,
		map[string]interface{}{"value": 1.0})
	var metrics = []telegraf.Metric{
		pt1,
		pt2,
	}
	require.NoError(t, pTesting.Write(metrics))

	expected := []struct {
		name  string
		value float64
	}{
		{"test_point_3", 0.0},
		{"test_point_4", 1.0},
	}

	var acc testutil.Accumulator
	require.NoError(t, p.Gather(&acc))
	for _, e := range expected {
		// Scraped names come back prefixed with "prometheus_".
		acc.AssertContainsFields(t, "prometheus_"+e.name,
			map[string]interface{}{"value": e.value})
	}
}
// TestPrometheusWritePointEmptyTag is an integration test: it writes two
// untagged points through the prometheus client output (pTesting, assumed
// started elsewhere on :9126) and scrapes them back via the prometheus input.
func TestPrometheusWritePointEmptyTag(t *testing.T) {
	if testing.Short() {
		t.Skip("Skipping integration test in short mode")
	}
	p := &prometheus.Prometheus{
		Urls: []string{"http://localhost:9126/metrics"},
	}
	// Empty tag map: points carry no tags at all.
	tags := make(map[string]string)
	pt1, _ := client.NewPoint(
		"test_point_1",
		tags,
		map[string]interface{}{"value": 0.0})
	pt2, _ := client.NewPoint(
		"test_point_2",
		tags,
		map[string]interface{}{"value": 1.0})
	var points = []*client.Point{
		pt1,
		pt2,
	}
	require.NoError(t, pTesting.Write(points))

	expected := []struct {
		name  string
		value float64
		tags  map[string]string
	}{
		{"test_point_1", 0.0, tags},
		{"test_point_2", 1.0, tags},
	}

	var acc testutil.Accumulator
	require.NoError(t, p.Gather(&acc))
	for _, e := range expected {
		// Scraped names come back prefixed with "prometheus_".
		acc.AssertContainsFields(t, "prometheus_"+e.name,
			map[string]interface{}{"value": e.value})
	}
}
// TestAddAndPushOnePeriod runs the aggregator loop in a goroutine, feeds it
// a single metric, and waits for the 500ms period to flush the aggregate.
func TestAddAndPushOnePeriod(t *testing.T) {
	a := &TestAggregator{}
	ra := NewRunningAggregator(a, &AggregatorConfig{
		Name: "TestRunningAggregator",
		Filter: Filter{
			NamePass: []string{"*"},
		},
		Period: time.Millisecond * 500,
	})
	assert.NoError(t, ra.Config.Filter.Compile())
	acc := testutil.Accumulator{}
	shutdown := make(chan struct{})

	var wg sync.WaitGroup
	wg.Add(1)
	go func() {
		defer wg.Done()
		ra.Run(&acc, shutdown)
	}()

	// Timestamp is slightly in the future so the metric falls inside the
	// aggregation window that is about to flush.
	m := ra.MakeMetric(
		"RITest",
		map[string]interface{}{"value": int(101)},
		map[string]string{},
		telegraf.Untyped,
		time.Now().Add(time.Millisecond*100),
	)
	// NOTE(review): asserting Add returns false — presumably false means
	// "not dropped/accepted"; confirm against RunningAggregator.Add.
	assert.False(t, ra.Add(m))

	// Poll until the period fires and the aggregate lands in the accumulator.
	for {
		time.Sleep(time.Millisecond)
		if acc.NMetrics() > 0 {
			break
		}
	}
	acc.AssertContainsFields(t, "TestMetric",
		map[string]interface{}{"sum": int64(101)})

	// Stop the aggregator loop and wait for it to exit cleanly.
	close(shutdown)
	wg.Wait()
}
func TestRunParserJSONMsg(t *testing.T) { var testmsg = []byte("{\"a\": 5, \"b\": {\"c\": 6}}\n") listener, in := newTestTcpListener() acc := testutil.Accumulator{} listener.acc = &acc defer close(listener.done) listener.parser, _ = parsers.NewJSONParser("udp_json_test", []string{}, nil) listener.wg.Add(1) go listener.tcpParser() in <- testmsg time.Sleep(time.Millisecond * 25) listener.Gather(&acc) acc.AssertContainsFields(t, "udp_json_test", map[string]interface{}{ "a": float64(5), "b_c": float64(6), }) }
// TestFullVmStatProcFile checks that a complete /proc/vmstat fixture is
// parsed into the full set of kernel_vmstat fields.
func TestFullVmStatProcFile(t *testing.T) {
	tmpfile := makeFakeStatFile([]byte(vmStatFile_Full))
	defer os.Remove(tmpfile)

	k := KernelVmstat{
		statFile: tmpfile,
	}

	acc := testutil.Accumulator{}
	err := k.Gather(&acc)
	assert.NoError(t, err)

	// Expected values correspond one-to-one to the vmStatFile_Full fixture.
	fields := map[string]interface{}{
		"nr_free_pages":                 int64(78730),
		"nr_inactive_anon":              int64(426259),
		"nr_active_anon":                int64(2515657),
		"nr_inactive_file":              int64(2366791),
		"nr_active_file":                int64(2244914),
		"nr_unevictable":                int64(0),
		"nr_mlock":                      int64(0),
		"nr_anon_pages":                 int64(1358675),
		"nr_mapped":                     int64(558821),
		"nr_file_pages":                 int64(5153546),
		"nr_dirty":                      int64(5690),
		"nr_writeback":                  int64(0),
		"nr_slab_reclaimable":           int64(459806),
		"nr_slab_unreclaimable":         int64(47859),
		"nr_page_table_pages":           int64(11115),
		"nr_kernel_stack":               int64(579),
		"nr_unstable":                   int64(0),
		"nr_bounce":                     int64(0),
		"nr_vmscan_write":               int64(6206),
		"nr_writeback_temp":             int64(0),
		"nr_isolated_anon":              int64(0),
		"nr_isolated_file":              int64(0),
		"nr_shmem":                      int64(541689),
		"numa_hit":                      int64(5113399878),
		"numa_miss":                     int64(0),
		"numa_foreign":                  int64(0),
		"numa_interleave":               int64(35793),
		"numa_local":                    int64(5113399878),
		"numa_other":                    int64(0),
		"nr_anon_transparent_hugepages": int64(2034),
		"pgpgin":                        int64(219717626),
		"pgpgout":                       int64(3495885510),
		"pswpin":                        int64(2092),
		"pswpout":                       int64(6206),
		"pgalloc_dma":                   int64(0),
		"pgalloc_dma32":                 int64(122480220),
		"pgalloc_normal":                int64(5233176719),
		"pgalloc_movable":               int64(0),
		"pgfree":                        int64(5359765021),
		"pgactivate":                    int64(375664931),
		"pgdeactivate":                  int64(122735906),
		"pgfault":                       int64(8699921410),
		"pgmajfault":                    int64(122210),
		"pgrefill_dma":                  int64(0),
		"pgrefill_dma32":                int64(1180010),
		"pgrefill_normal":               int64(119866676),
		"pgrefill_movable":              int64(0),
		"pgsteal_dma":                   int64(0),
		"pgsteal_dma32":                 int64(4466436),
		"pgsteal_normal":                int64(318463755),
		"pgsteal_movable":               int64(0),
		"pgscan_kswapd_dma":             int64(0),
		"pgscan_kswapd_dma32":           int64(4480608),
		"pgscan_kswapd_normal":          int64(287857984),
		"pgscan_kswapd_movable":         int64(0),
		"pgscan_direct_dma":             int64(0),
		"pgscan_direct_dma32":           int64(12256),
		"pgscan_direct_normal":          int64(31501600),
		"pgscan_direct_movable":         int64(0),
		"zone_reclaim_failed":           int64(0),
		"pginodesteal":                  int64(9188431),
		"slabs_scanned":                 int64(93775616),
		"kswapd_steal":                  int64(291534428),
		"kswapd_inodesteal":             int64(29770874),
		"kswapd_low_wmark_hit_quickly":  int64(8756),
		"kswapd_high_wmark_hit_quickly": int64(25439),
		"kswapd_skip_congestion_wait":   int64(0),
		"pageoutrun":                    int64(505006),
		"allocstall":                    int64(81496),
		"pgrotated":                     int64(60620),
		"compact_blocks_moved":          int64(238196),
		"compact_pages_moved":           int64(6370588),
		"compact_pagemigrate_failed":    int64(0),
		"compact_stall":                 int64(142092),
		"compact_fail":                  int64(135220),
		"compact_success":               int64(6872),
		"htlb_buddy_alloc_success":      int64(0),
		"htlb_buddy_alloc_fail":         int64(0),
		"unevictable_pgs_culled":        int64(1531),
		"unevictable_pgs_scanned":       int64(0),
		"unevictable_pgs_rescued":       int64(5426),
		"unevictable_pgs_mlocked":       int64(6988),
		"unevictable_pgs_munlocked":     int64(6988),
		"unevictable_pgs_cleared":       int64(0),
		"unevictable_pgs_stranded":      int64(0),
		"unevictable_pgs_mlockfreed":    int64(0),
		"thp_fault_alloc":               int64(346219),
		"thp_fault_fallback":            int64(895453),
		"thp_collapse_alloc":            int64(24857),
		"thp_collapse_alloc_failed":     int64(102214),
		"thp_split":                     int64(9817),
	}
	acc.AssertContainsFields(t, "kernel_vmstat", fields)
}
// TestDisqueCanPullStatsFromMultipleServers gathers from a fake disque
// server that answers each "info" command with a canned payload.
func TestDisqueCanPullStatsFromMultipleServers(t *testing.T) {
	if testing.Short() {
		t.Skip("Skipping integration test in short mode")
	}
	l, err := net.Listen("tcp", "localhost:0")
	require.NoError(t, err)
	defer l.Close()

	// Minimal fake server: accept one connection, and for every "info"
	// request reply with a bulk-length header followed by the fixture.
	go func() {
		c, err := l.Accept()
		if err != nil {
			return
		}
		buf := bufio.NewReader(c)
		for {
			line, err := buf.ReadString('\n')
			if err != nil {
				return
			}
			// Any command other than "info" terminates the fake server.
			if line != "info\r\n" {
				return
			}
			fmt.Fprintf(c, "$%d\n", len(testOutput))
			c.Write([]byte(testOutput))
		}
	}()

	addr := fmt.Sprintf("disque://%s", l.Addr().String())
	r := &Disque{
		Servers: []string{addr},
	}

	var acc testutil.Accumulator
	err = r.Gather(&acc)
	require.NoError(t, err)

	// Expected values correspond one-to-one to the testOutput fixture.
	fields := map[string]interface{}{
		"uptime":                     uint64(1452705),
		"clients":                    uint64(31),
		"blocked_clients":            uint64(13),
		"used_memory":                uint64(1840104),
		"used_memory_rss":            uint64(3227648),
		"used_memory_peak":           uint64(89603656),
		"total_connections_received": uint64(5062777),
		"total_commands_processed":   uint64(12308396),
		"instantaneous_ops_per_sec":  uint64(18),
		"latest_fork_usec":           uint64(1644),
		"registered_jobs":            uint64(360),
		"registered_queues":          uint64(12),
		"mem_fragmentation_ratio":    float64(1.75),
		"used_cpu_sys":               float64(19585.73),
		"used_cpu_user":              float64(11255.96),
		"used_cpu_sys_children":      float64(1.75),
		"used_cpu_user_children":     float64(1.91),
	}
	acc.AssertContainsFields(t, "disque", fields)
}
// Test that POST Parameters are applied properly
func TestHttpJsonPOST(t *testing.T) {
	params := map[string]string{
		"api_key": "mykey",
	}
	// Stub server verifies the form-encoded POST body before replying with
	// the canned JSON payload.
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		body, err := ioutil.ReadAll(r.Body)
		assert.NoError(t, err)
		assert.Equal(t, "api_key=mykey", string(body))
		w.WriteHeader(http.StatusOK)
		fmt.Fprintln(w, validJSON2)
	}))
	defer ts.Close()

	a := HttpJson{
		Servers:    []string{ts.URL},
		Name:       "",
		Method:     "POST",
		Parameters: params,
		client:     &RealHTTPClient{client: &http.Client{}},
	}

	var acc testutil.Accumulator
	err := a.Gather(&acc)
	require.NoError(t, err)

	// remove response_time from gathered fields because it's non-deterministic
	delete(acc.Metrics[0].Fields, "response_time")

	// Expected values correspond one-to-one to the validJSON2 fixture.
	fields := map[string]interface{}{
		"market_btc_usd":                  float64(422.852),
		"market_ltc_btc":                  float64(0.00798),
		"market_ltc_cny":                  float64(21.3883),
		"market_ltc_eur":                  float64(3.113),
		"market_ltc_gbp":                  float64(2.32807),
		"market_ltc_rub":                  float64(241.796),
		"market_ltc_usd":                  float64(3.37801),
		"network_block_number":            float64(944895),
		"network_difficulty":              float64(51825.72835216),
		"network_hash_rate":               float64(1.426117703e+09),
		"network_next_difficulty":         float64(51916.15249019),
		"network_retarget_time":           float64(95053),
		"network_time_per_block":          float64(156),
		"pool_active_users":               float64(843),
		"pool_hash_rate":                  float64(1.141e+08),
		"pool_pps_rate":                   float64(7.655e-09),
		"pool_pps_ratio":                  float64(1.04),
		"user_blocks_found":               float64(0),
		"user_expected_24h_rewards":       float64(0),
		"user_hash_rate":                  float64(0),
		"user_paid_rewards":               float64(0),
		"user_past_24h_rewards":           float64(0),
		"user_total_rewards":              float64(0.000595109232),
		"user_unpaid_rewards":             float64(0.000595109232),
		"workers_brminer.1_hash_rate":     float64(0),
		"workers_brminer.1_hash_rate_24h": float64(0),
		"workers_brminer.1_reset_time":    float64(1.45540995e+09),
		"workers_brminer.1_rewards":       float64(4.5506464e-05),
		"workers_brminer.1_rewards_24h":   float64(0),
		"workers_brminer.2_hash_rate":     float64(0),
		"workers_brminer.2_hash_rate_24h": float64(0),
		"workers_brminer.2_reset_time":    float64(1.455936726e+09),
		"workers_brminer.2_rewards":       float64(0),
		"workers_brminer.2_rewards_24h":   float64(0),
		"workers_brminer.3_hash_rate":     float64(0),
		"workers_brminer.3_hash_rate_24h": float64(0),
		"workers_brminer.3_reset_time":    float64(1.455936733e+09),
		"workers_brminer.3_rewards":       float64(0),
		"workers_brminer.3_rewards_24h":   float64(0),
	}
	acc.AssertContainsFields(t, "httpjson", fields)
}
func TestPrometheusWritePointEmptyTag(t *testing.T) { if testing.Short() { t.Skip("Skipping integration test in short mode") } now := time.Now() pTesting = &PrometheusClient{Listen: "localhost:9127"} err := pTesting.Start() time.Sleep(time.Millisecond * 200) require.NoError(t, err) defer pTesting.Stop() p := &prometheus.Prometheus{ Urls: []string{"http://localhost:9127/metrics"}, } tags := make(map[string]string) pt1, _ := telegraf.NewMetric( "test_point_1", tags, map[string]interface{}{"value": 0.0}, now) pt2, _ := telegraf.NewMetric( "test_point_2", tags, map[string]interface{}{"value": 1.0}, now) var metrics = []telegraf.Metric{ pt1, pt2, } require.NoError(t, pTesting.Write(metrics)) expected := []struct { name string value float64 tags map[string]string }{ {"test_point_1", 0.0, tags}, {"test_point_2", 1.0, tags}, } var acc testutil.Accumulator require.NoError(t, p.Gather(&acc)) for _, e := range expected { acc.AssertContainsFields(t, e.name, map[string]interface{}{"value": e.value}) } tags = make(map[string]string) tags["testtag"] = "testvalue" pt3, _ := telegraf.NewMetric( "test_point_3", tags, map[string]interface{}{"value": 0.0}, now) pt4, _ := telegraf.NewMetric( "test_point_4", tags, map[string]interface{}{"value": 1.0}, now) metrics = []telegraf.Metric{ pt3, pt4, } require.NoError(t, pTesting.Write(metrics)) expected2 := []struct { name string value float64 }{ {"test_point_3", 0.0}, {"test_point_4", 1.0}, } require.NoError(t, p.Gather(&acc)) for _, e := range expected2 { acc.AssertContainsFields(t, e.name, map[string]interface{}{"value": e.value}) } }