// Run a testcase. Settings are specified in Seconds! func (test *TestScenario) Run(name string, testcase func(*Meta, Settings), delay float64, runfor float64, rampup float64, users int, pacing float64, settings Settings) { test.wg.Add(1) // the "Scheduler" itself is a goroutine! go func(test *TestScenario) { // ramp up the users defer test.wg.Done() time.Sleep(time.Duration(delay * float64(time.Second))) userStart := time.Now() test.wg.Add(int(users)) for i := 0; i < users; i++ { // start user go func(nbr int) { defer test.wg.Done() time.Sleep(time.Duration(float64(nbr) * rampup * float64(time.Second))) for j := 0; time.Now().Sub(userStart) < time.Duration(runfor*float64(time.Second)); j++ { // next iteration start := time.Now() meta := &Meta{Testcase: name, Iteration: j, User: nbr} if test.status == Stopping { break } testcase(meta, settings) if test.status == Stopping { break } test.paceMaker(time.Duration(pacing*float64(time.Second)), time.Now().Sub(start)) } }(i) } }(test) }
func TestRunSequential(t *testing.T) { time.Freeze(time.Now()) defer time.Unfreeze() fake := NewTest() fake.config["Scenario"] = "scenario1" var counter int = 0 // assemble testcase tc1 := func(meta *Meta, s Settings) { // check meta if meta.Iteration != counter { t.Errorf("Iteration %d but expected %d!", meta.Iteration, counter) } if meta.User != 0 { t.Error("User meta not as expected!") } time.Sleep(20) counter++ } // run the testcase start := time.Now() fake.DoIterations(tc1, 20, 0, false) if time.Now().Sub(start) != 400 { t.Error("Testcase execution time not as expected!") } if counter != 20 { t.Error("Testcase iteration counter not as expected!") } }
func TestThinktimeVariance(t *testing.T) { // create a fake loadmodel for testing var fake = NewTest() fake.status = Running fake.config["Scenario"] = "scenario1" fake.config["ThinkTimeFactor"] = 2.0 fake.config["ThinkTimeVariance"] = 0.1 min, max, avg := 20.0, 20.0, 0.0 time.Freeze(time.Now()) defer time.Unfreeze() for i := 0; i < 1000; i++ { start := time.Now() fake.Thinktime(0.010) sleep := float64(time.Now().Sub(start)) / float64(time.Millisecond) if sleep < min { min = sleep } if max < sleep { max = sleep } avg += sleep } avg = avg / 1000 if min < 18.0 { t.Errorf("Minimum sleep time %f out of defined range!\n", min) } if max >= 22.0 { t.Errorf("Maximum sleep time %f out of defined range!", max) } if avg < 19.9 || avg > 20.1 { t.Fatalf("Average sleep time %f out of defined range!", avg) } }
func TestAScenarioAvoidingConcurrency(t *testing.T) { time.Freeze(time.Now()) defer time.Unfreeze() bak := stdout stdout = new(bytes.Buffer) defer func() { stdout = bak }() // init gg.Testscenario("scenario1", scenario1) // main part err := gg.ReadConfigValidate(noConcurrencyLoadmodel, LoadmodelSchema) if err != nil { t.Fatalf("Error while reading loadmodel config: %s!", err.Error()) } start := time.Now() gg.Exec() // exec the scenario that has been selected in the config file execution := time.Now().Sub(start) // verify total run time of the baseline senario if execution != 360*time.Second { t.Errorf("Error: execution time of scenario1 not as expected: %v\n", execution) } gg.Report(stdout) // verify Report! report := stdout.(*bytes.Buffer).String() if report != ("01_01_teststep, 50.000000, 50.000000, 50.000000, 2000, 0\n") { t.Fatalf("Report output of scenario1 not as expected: %s", report) } }
func (test *TestScenario) DoIterations(testcase func(*Meta, Settings), iterations int, pacing float64, parallel bool) { f := func(test *TestScenario) { settings := test.GetSettings() defer test.wg.Done() for i := 0; i < iterations; i++ { start := time.Now() meta := &Meta{Iteration: i, User: 0} if test.status == Stopping { break } testcase(meta, settings) if test.status == Stopping { break } test.paceMaker(time.Duration(pacing*float64(time.Second)), time.Now().Sub(start)) } } if parallel { test.wg.Add(1) go f(test) } else { test.wg.Wait() // sequential processing: wait for running goroutines to finish test.wg.Add(1) f(test) } }
func TestPaceMakerNegativeValue(t *testing.T) { time.Freeze(time.Now()) defer time.Unfreeze() var fake = NewTest() fake.config["Scenario"] = "scenario1" fake.status = Running start := time.Now() fake.paceMaker(-10, 0) if time.Now().Sub(start) != 0 { t.Fatal("Function paceMaker sleep out of range!") } }
func TestPaceMakerStops(t *testing.T) { time.Freeze(time.Now()) defer time.Unfreeze() // create a fake loadmodel for testing var fake = NewTest() fake.status = Stopping fake.config["Scenario"] = "scenario1" start := time.Now() fake.paceMaker(time.Duration(10*time.Second), time.Duration(0)) sleep := float64(time.Now().Sub(start)) / float64(time.Millisecond) if sleep != 0 { t.Errorf("PaceMaker did not stop! It sleept: %v\n", sleep) } }
func TestThinktimeStops(t *testing.T) { time.Freeze(time.Now()) defer time.Unfreeze() // create a fake loadmodel for testing var fake = NewTest() fake.status = Stopping fake.config["Scenario"] = "scenario1" start := time.Now() fake.Thinktime(10.0) sleep := float64(time.Now().Sub(start)) / float64(time.Millisecond) if sleep != 0 { t.Errorf("Thinktime did not stop! It sleept: %v\n", sleep) } }
func TestThinktimeNoVariance(t *testing.T) { // create a fake loadmodel for testing var fake = NewTest() fake.status = Running fake.config["Scenario"] = "scenario1" time.Freeze(time.Now()) defer time.Unfreeze() start := time.Now() fake.Thinktime(0.020) sleep := time.Now().Sub(start) if sleep != 20*time.Millisecond { t.Errorf("Expected to sleep for 20ms but something went wrong: %v", sleep) } }
func TestHttpMetricUpdate(t *testing.T) { hmr := NewHttpMetricReporter() // add datapoint hm := &HttpMetric{gogrinder.Meta{"01_tc", "01_01_ts", 0, 0, gogrinder.Timestamp(time.Now()), gogrinder.Elapsed(600 * time.Millisecond), "something is wrong!"}, gogrinder.Elapsed(500 * time.Millisecond), 10240, http.StatusOK} hmr.Update(hm) // check that datapoint was reported if exp, got := 600.0, readSummaryVec(hmr.elapsed, prometheus.Labels{"teststep": "01_01_ts"})[0].GetValue(); exp != got { t.Errorf("Expected elapsed %d, got %d.", exp, got) } if exp, got := 500.0, readSummaryVec(hmr.firstByte, prometheus.Labels{"teststep": "01_01_ts"})[0].GetValue(); exp != got { t.Errorf("Expected firstByte %d, got %d.", exp, got) } if exp, got := 10.0, readSummaryVec(hmr.bytes, prometheus.Labels{"teststep": "01_01_ts"})[0].GetValue(); exp != got { t.Errorf("Expected kb %d, got %d.", exp, got) } if exp, got := 1.0, readCounterVec(hmr.error, prometheus.Labels{"teststep": "01_01_ts"}); exp != got { t.Errorf("Expected error counter %f, got %f.", exp, got) } if exp, got := 1.0, readCounterVec(hmr.code, prometheus.Labels{"teststep": "01_01_ts", "code": "200"}); exp != got { t.Errorf("Expected code counter %f, got %f.", exp, got) } }
func TestFirstByteAfterReader(t *testing.T) { time.Freeze(time.Now()) defer time.Unfreeze() tr := testReader{} fbr := newMetricReader(tr) b1 := make([]byte, 4) fbr.Read(b1) body := string(b1) if !(body == "mark") { t.Fatalf("Read buffer was expected '%s', but was: '%v'", "mark", body) } if fbr.firstByteAfter != gogrinder.Elapsed(55*time.Millisecond) { t.Fatalf("First byte was expected after 55 ms but was: %v", fbr.firstByteAfter) } // read a second time b2 := make([]byte, 4) fbr.Read(b2) body = string(b2) if body != "fink" { t.Fatalf("Read buffer was expected '%s', but was: '%v'", "fink", body) } }
func TestRouteGetConfig(t *testing.T) { // prepare time.Freeze(time.Now()) defer time.Unfreeze() file, _ := ioutil.TempFile(os.TempDir(), "gogrinder_test") defer os.Remove(file.Name()) srv := TestServer{} srv.test = NewTest() loadmodel := `{ "Scenario": "scenario1", "ThinkTimeFactor": 2.0, "ThinkTimeVariance": 0.1 }` srv.test.ReadConfigValidate(loadmodel, LoadmodelSchema) req, _ := http.NewRequest("GET", "/config", nil) rsp := httptest.NewRecorder() srv.Router().ServeHTTP(rsp, req) if rsp.Code != http.StatusOK { t.Fatalf("Status code expected: %s but was: %v", "200", rsp.Code) } config := rsp.Body.String() if config != `{"config":{"Scenario":"scenario1","ThinkTimeFactor":2,`+ `"ThinkTimeVariance":0.1},"mtime":"0001-01-01T00:00:00Z"}` { t.Errorf("Config not as expected: %s!", config) } }
//////////////////////////////// // test routes //////////////////////////////// func TestRouteGetCsv(t *testing.T) { // test with 3 measurements fake := NewTest() srv := TestServer{} srv.test = fake // put 3 measurements into the fake server done := fake.Collect() // this needs a collector to unblock update now := Timestamp(time.Now().UTC()) fake.Update(&Meta{Teststep: "sth", Elapsed: Elapsed(8 * time.Millisecond), Timestamp: now}) fake.Update(&Meta{Teststep: "sth", Elapsed: Elapsed(10 * time.Millisecond), Timestamp: now}) fake.Update(&Meta{Teststep: "sth", Elapsed: Elapsed(2 * time.Millisecond), Timestamp: now}) close(fake.measurements) <-done // invoke REST service req, _ := http.NewRequest("GET", "/csv", nil) rsp := httptest.NewRecorder() // I separated the Router() from the actual Webserver() // In this way I can test routes without running a server srv.Router().ServeHTTP(rsp, req) if rsp.Code != http.StatusOK { t.Fatalf("Status code expected: %v but was: %v", http.StatusOK, rsp.Code) } body := rsp.Body.String() if body != "teststep, avg_ms, min_ms, max_ms, count, error\n"+ "sth, 6.666666, 2.000000, 10.000000, 3, 0\n" { t.Fatalf("Response not as expected: %s!", body) } }
func TestRouteGetStatistics(t *testing.T) { // test with 3 measurements fake := NewTest() srv := TestServer{} srv.test = fake // put 3 measurements into the fake server done := fake.Collect() // this needs a collector to unblock update now := time.Now().UTC() fake.Update(&Meta{Teststep: "sth", Elapsed: Elapsed(8 * time.Millisecond), Timestamp: Timestamp(now)}) fake.Update(&Meta{Teststep: "sth", Elapsed: Elapsed(10 * time.Millisecond), Timestamp: Timestamp(now)}) fake.Update(&Meta{Teststep: "sth", Elapsed: Elapsed(2 * time.Millisecond), Timestamp: Timestamp(now)}) close(fake.measurements) <-done // invoke REST service req, _ := http.NewRequest("GET", "/statistics", nil) rsp := httptest.NewRecorder() // I separated the Router() from the actual Webserver() // In this way I can test routes without running a server srv.Router().ServeHTTP(rsp, req) if rsp.Code != http.StatusOK { t.Fatalf("Status code expected: %s but was: %v", "200", rsp.Code) } body := rsp.Body.String() if body != fmt.Sprintf(`{"results":[{"teststep":"sth","avg_ms":6.666666,"min_ms":2,`+ `"max_ms":10,"count":3,"error":0,"last":"%s"}],"running":false}`, now.Format(ISO8601)) { t.Fatalf("Response not as expected: %s", body) } }
func TestRouteSaveConfig(t *testing.T) { // prepare time.Freeze(time.Now()) defer time.Unfreeze() file, _ := ioutil.TempFile(os.TempDir(), "gogrinder_test") defer os.Remove(file.Name()) srv := TestServer{} scenario := NewTest() scenario.filename = file.Name() srv.test = scenario { config := `{"Scenario":"scenario1","ThinkTimeFactor":2,"ThinkTimeVariance":0.1}` req, _ := http.NewRequest("PUT", "/config", strings.NewReader(config)) rsp := httptest.NewRecorder() srv.Router().ServeHTTP(rsp, req) if rsp.Code != http.StatusOK { t.Fatalf("Status code expected: %s but was: %v", "200", rsp.Code) } } buf, err := ioutil.ReadFile(file.Name()) if err != nil { t.Errorf("Unexpected problem while reading from the config file %s!", file.Name()) } loadmodel := string(buf) if loadmodel != `{"Scenario":"scenario1","ThinkTimeFactor":2,"ThinkTimeVariance":0.1}` { t.Errorf("Config not as expected: %s!", loadmodel) } }
func TestBaseline2(t *testing.T) { time.Freeze(time.Now()) defer time.Unfreeze() bak := stdout stdout = new(bytes.Buffer) defer func() { stdout = bak }() // we do not need a full loadmodel to run the baseline scenario loadmodel := `{ "Scenario": "baseline", "ThinkTimeFactor": 2.0, "ThinkTimeVariance": 0.1, "PacingVariance": 0.0 }` // no Loadmodel required! ,"Loadmodel": [] // init gg.Testscenario("baseline", baseline2) // main part err := gg.ReadConfigValidate(loadmodel, LoadmodelSchema) if err != nil { t.Fatalf("Error while reading loadmodel config: %s!", err.Error()) } start := time.Now() gg.Exec() // exec the scenario that has been selected in the config file execution := time.Now().Sub(start) // verify total run time of the baseline senario // 18 * (100+100) + 90 = 3690 //if execution <= 369000*time.Millisecond { if execution <= 3690*time.Millisecond { t.Errorf("Error: execution time of scenario1 not as expected: %v\n", execution) } gg.Report(stdout) // verify Report! report := stdout.(*bytes.Buffer).String() if report != ("01_01_teststep, 50.000000, 50.000000, 50.000000, 18, 0\n" + "02_01_teststep, 100.000000, 100.000000, 100.000000, 9, 0\n" + "03_01_teststep, 150.000000, 150.000000, 150.000000, 6, 0\n") { t.Fatalf("Report output of baseline2 scenario not as expected: %s", report) } }
func TestDebug(t *testing.T) { // just run a single testcase once time.Freeze(time.Now()) defer time.Unfreeze() bak := stdout stdout = new(bytes.Buffer) defer func() { stdout = bak }() // we do not need a full loadmodel for this loadmodel := `{ "Scenario": "01_testcase", "ThinkTimeFactor": 2.0, "ThinkTimeVariance": 0.0, "PacingVariance": 0.0 }` // init gg.Reset() gg.Testscenario("baseline", baseline1) gg.Testscenario("01_testcase", tc1) // main part err := gg.ReadConfigValidate(loadmodel, LoadmodelSchema) if err != nil { t.Fatalf("Error while reading loadmodel config: %s!", err.Error()) } start := time.Now() gg.Exec() // exec the scenario that has been selected in the config file execution := time.Now().Sub(start) // verify total run time of the baseline senario // 50+2*50 =150ms if execution != 150*time.Millisecond { t.Errorf("Error: execution time of debug test not as expected: %f ms.\n", d2f(execution)) } gg.Report(stdout) // verify Report! report := stdout.(*bytes.Buffer).String() if report != "01_01_teststep, 50.000000, 50.000000, 50.000000, 1, 0\n" { t.Fatalf("Report output of debug test not as expected: %s", report) } }
// Read implements io.Reader. On the very first call it measures the
// time-to-first-byte: it peeks a single byte from the underlying buffered
// reader, records the elapsed time since fb.start in fb.firstByteAfter,
// and pushes the byte back so the caller still receives the full stream.
// It also accumulates the total number of bytes delivered in fb.bytes.
// NOTE(review): the errors returned by ReadByte/UnreadByte are ignored;
// an immediately-empty source would leave firstByteAfter at 0 and retry
// the peek on the next call — confirm this is intended.
func (fb *metricReader) Read(p []byte) (n int, err error) {
	// firstByteAfter == 0 means no byte has been observed yet
	if fb.firstByteAfter == gogrinder.Elapsed(0) {
		fb.readFrom.ReadByte()
		fb.firstByteAfter = gogrinder.Elapsed(time.Now().Sub(fb.start))
		fb.readFrom.UnreadByte()
	}
	n, err = fb.readFrom.Read(p)
	fb.bytes += n
	return
}
func TestTimestampMarshalJSON(t *testing.T) { tt := time.Now() ts := Timestamp(tt) tt_json, _ := json.Marshal(tt) ts_json, _ := json.Marshal(ts) if string(ts_json) != string(tt_json) { t.Errorf("Timstamp JSON Marshal expected: %s, but was: %s", string(tt_json), string(ts_json)) } }
func TestReader(t *testing.T) { // prepare time.Freeze(time.Now()) defer time.Unfreeze() size := 18000 latency := 500 * time.Millisecond start := time.Now() slow := NewSlowReader(size, latency) all, err := ioutil.ReadAll(slow) if err != nil { t.Fatalf("Error while reading from SlowReader: %v", err) } elapsed := time.Now().Sub(start) if elapsed != latency { t.Fatalf("Elapsed expected: %v but was: %v", latency, elapsed) } if len(all) != size { t.Fatalf("Buffer sise expected: %d but was: %d", size, len(all)) } }
func TestPaceMakerVariance(t *testing.T) { // create a fake loadmodel for testing var fake = NewTest() fake.status = Running fake.config["Scenario"] = "scenario1" fake.config["ThinkTimeFactor"] = 2.0 fake.config["ThinkTimeVariance"] = 0.1 fake.config["PacingVariance"] = 0.1 min, max, avg := 1000.0, 1000.0, 0.0 time.Freeze(time.Now()) defer time.Unfreeze() for i := 0; i < 1000; i++ { start := time.Now() fake.paceMaker(time.Duration(1*time.Second), time.Duration(0)) sleep := float64(time.Now().Sub(start)) / float64(time.Millisecond) if sleep < min { min = sleep } if max < sleep { max = sleep } avg += sleep } avg = avg / 1000 if min < 900.0 { t.Errorf("Minimum pace time %f out of defined range!\n", min) } if max >= 1100.0 { t.Errorf("Maximum pace time %f out of defined range!", max) } if avg < 990.0 || avg > 1010.0 { t.Fatalf("Average pace time %f out of defined range!", avg) } }
func TestHttpMetricUpdate(t *testing.T) { mr := NewMetricReporter() // add datapoint m := &Meta{"01_tc", "01_01_ts", 0, 0, Timestamp(time.Now()), Elapsed(600 * time.Millisecond), "something went wrong!"} mr.Update(m) // check that datapoint was reported if exp, got := 600.0, readSummaryVec(mr.elapsed, prometheus.Labels{"teststep": "01_01_ts"})[0].GetValue(); exp != got { t.Errorf("Expected elapsed %d, got %d.", exp, got) } if exp, got := 1.0, readCounterVec(mr.error, prometheus.Labels{"teststep": "01_01_ts"}); exp != got { t.Errorf("Expected error counter %f, got %f.", exp, got) } }
func TestRouteStartStop(t *testing.T) { // prepare time.Freeze(time.Now()) defer time.Unfreeze() srv := TestServer{} srv.test = NewTest() tc1 := func(meta *Meta, s Settings) { srv.test.Thinktime(0.050) } srv.test.Testscenario("fake", func() { gg.DoIterations(tc1, 500, 0, false) }) loadmodel := `{"Scenario": "fake", "ThinkTimeFactor": 2.0, "ThinkTimeVariance": 0.0 }` srv.test.ReadConfigValidate(loadmodel, LoadmodelSchema) { // startTest req, _ := http.NewRequest("POST", "/test", nil) rsp := httptest.NewRecorder() srv.Router().ServeHTTP(rsp, req) if rsp.Code != http.StatusOK { t.Fatalf("Status code expected: %s but was: %v", "200", rsp.Code) } } // another fake clock problem here! // if srv.test.status != running { // t.Fatalf("Status code expected: %v but was: %v", running, srv.test.status) // } { // stopTest req, _ := http.NewRequest("DELETE", "/test", nil) rsp := httptest.NewRecorder() srv.Router().ServeHTTP(rsp, req) if rsp.Code != http.StatusOK { t.Fatalf("Status code expected: %s but was: %v", "200", rsp.Code) } } if srv.test.Status() == Running { t.Fatalf("Status code expected not running but was: %v", srv.test.Status()) } }
func TestEventReporterUpdateWithSomeMetric(t *testing.T) { fake := NewTest() tmp, _ := ioutil.TempFile(os.TempDir(), "gogrinder_test") defer os.Remove(tmp.Name()) fake.AddReportPlugin(&EventReporter{tmp}) done := fake.Collect() // this needs a collector to unblock update now := time.Now() fake.Update(Metric(&someMetric{Meta{Teststep: "sth", Elapsed: Elapsed(8 * time.Millisecond), Timestamp: Timestamp(now)}, 100})) exp := fmt.Sprintf(`{"testcase":"","teststep":"sth","user":0,"iteration"`+ `:0,"ts":"%s","elapsed":8.000000,"status":100}`, now.Format(time.RFC3339Nano)) buf, _ := ioutil.ReadFile(tmp.Name()) last := strings.TrimSpace(string(buf)) if last != exp { t.Errorf("Entry for SomeMetric expected: %s, but got: %s", exp, last) } close(fake.measurements) <-done }
func TestIntegrationOfHttpPackage(t *testing.T) { if testing.Short() { t.Skip("skipping test in short mode.") } // init gg.Testscenario("scenario1", endurance) // main part err := gg.ReadConfigValidate(airbiscuitLoadmodel, gogrinder.LoadmodelSchema) if err != nil { t.Fatalf("Error while reading loadmodel config: %s!", err.Error()) } // start the airbiscuit server s := &airbiscuit.Stats{Sleep: 50 * time.Millisecond} r := airbiscuit.Router(s) srv := graceful.Server{ Timeout: 50 * time.Millisecond, Server: &http.Server{ Handler: r, Addr: ":3001", }, } // stop server after wait time go func() { time.Sleep(time.Duration(1050 * int(time.Millisecond))) srv.Stop(80 * time.Millisecond) fmt.Printf("Get count: %d\n", s.G) fmt.Printf("Post count: %d\n", s.P) }() go srv.ListenAndServe() // run the test start := time.Now() gg.Exec() // exec the scenario that has been selected in the config file execution := time.Now().Sub(start) // verify total run time of the endurance scenario if execution > 1100*time.Millisecond { t.Errorf("Error: execution time of scenario1 not as expected: %v\n", execution) } results := gg.Results("") // check 01_01_teststep (get requests) if results[0].Teststep != "01_01_teststep" { t.Errorf("Teststep name not as expected: %s!", results[0].Teststep) } if results[0].Count < 170 { t.Errorf("Less than 170 get requests: %v!", results[0].Count) } if results[0].Avg < 50.0 && results[0].Avg > 62.0 { t.Errorf("Average not as expected: %f!", results[0].Avg) } if results[0].Min < 50.0 && results[0].Min > 62.0 { t.Errorf("Minimum not as expected: %f!", results[0].Min) } if results[0].Max < 50.0 && results[0].Max > 62.0 { t.Errorf("Maximum not as expected: %f!", results[0].Max) } // check 02_01_teststep (post requests) if results[1].Teststep != "02_01_teststep" { t.Errorf("Teststep name not as expected: %s!", results[1].Teststep) } if results[1].Count < 170 { t.Errorf("Less than 170 get requests: %v!", results[1].Count) } if results[1].Avg < 50.0 && results[1].Avg > 
62.0 { t.Errorf("Average not as expected: %f!", results[1].Avg) } if results[1].Min < 50.0 && results[1].Min > 62.0 { t.Errorf("Minimum not as expected: %f!", results[1].Min) } if results[1].Max < 50.0 && results[1].Max > 62.0 { t.Errorf("Maximum not as expected: %f!", results[1].Max) } }
func newMetricReader(readFrom io.Reader) *metricReader { // wrap into buffered reader return &metricReader{0, time.Now(), gogrinder.Elapsed(0), bufio.NewReader(readFrom)} }
func TestReport(t *testing.T) { bak := stdout stdout = new(bytes.Buffer) defer func() { stdout = bak }() fake := NewTest() done := fake.Collect() // this needs a collector to unblock update insert := func(name string) { fake.Update(&Meta{Teststep: name, Elapsed: Elapsed(8 * time.Millisecond), Timestamp: Timestamp(time.Now())}) fake.Update(&Meta{Teststep: name, Elapsed: Elapsed(10 * time.Millisecond), Timestamp: Timestamp(time.Now())}) fake.Update(&Meta{Teststep: name, Elapsed: Elapsed(2 * time.Millisecond), Timestamp: Timestamp(time.Now())}) } insert("tc2") insert("tc1") insert("tc3") close(fake.measurements) <-done fake.Report(stdout) // run the report report := stdout.(*bytes.Buffer).String() if report != ("tc1, 6.666666, 2.000000, 10.000000, 3, 0\n" + "tc2, 6.666666, 2.000000, 10.000000, 3, 0\n" + "tc3, 6.666666, 2.000000, 10.000000, 3, 0\n") { t.Fatalf("Report output not as expected: %s", report) } }
func TestReset(t *testing.T) { fake := NewTest() done := fake.Collect() // this needs a collector to unblock update // first measurement fake.Update(&Meta{Teststep: "sth", Elapsed: Elapsed(8 * time.Millisecond), Timestamp: Timestamp(time.Now())}) close(fake.measurements) <-done if _, ok := fake.stats["sth"]; ok { fake.Reset() // now the measurement should be gone if _, ok := fake.stats["sth"]; ok { t.Error("Reset failed to clear the statistics!\n") } } else { t.Errorf("Update failed to insert values for 'sth'!") } }
func TestUpdateMultipleMeasurements(t *testing.T) { fake := NewTest() done := fake.Collect() // this needs a collector to unblock update fake.Update(&Meta{Teststep: "sth", Elapsed: Elapsed(8 * time.Millisecond), Timestamp: Timestamp(time.Now())}) fake.Update(&Meta{Teststep: "sth", Elapsed: Elapsed(10 * time.Millisecond), Timestamp: Timestamp(time.Now())}) fake.Update(&Meta{Teststep: "sth", Elapsed: Elapsed(2 * time.Millisecond), Timestamp: Timestamp(time.Now())}) close(fake.measurements) <-done if v, ok := fake.stats["sth"]; ok { if v.avg != 6666666*time.Nanosecond { t.Errorf("Statistics update avg %d not as expected 6.66ms!\n", v.avg) } if v.min != 2*time.Millisecond { t.Errorf("Statistics update min %d not as expected 2ms!\n", v.min) } if v.max != 10*time.Millisecond { t.Errorf("Statistics update max %d not as expected 10ms!\n", v.max) } } else { t.Errorf("Update failed to insert values for 'sth'!") } }
func TestCollectCallsReporterUpdate(t *testing.T) { fake := NewTest() fake.AddReportPlugin(someReporter{}) // first measurement done := fake.Collect() // this needs a collector to unblock update fake.Update(&Meta{Teststep: "sth", Elapsed: Elapsed(8 * time.Millisecond), Timestamp: Timestamp(time.Now())}) close(fake.measurements) <-done }