func bench(requests, concurrency int, images []string, args []string) {
	start := time.Now()
	timings := make([]float64, 0, requests)

	// Create a buffered channel so our display goroutine can't slow down the workers.
	completeCh := make(chan time.Duration, requests)
	doneCh := make(chan struct{})
	current := 0
	go func() {
		for timing := range completeCh {
			timings = append(timings, timing.Seconds())
			current++
			percent := float64(current) / float64(requests) * 100
			fmt.Printf("[%3.f%%] %d/%d containers started\n", percent, current, requests)
		}
		doneCh <- struct{}{}
	}()
	session(requests, concurrency, images, args, completeCh)
	close(completeCh)
	<-doneCh

	total := time.Since(start)
	mean, _ := stats.Mean(timings)
	p90th, _ := stats.Percentile(timings, 90)
	p99th, _ := stats.Percentile(timings, 99)

	meanMillis := mean * MILLIS_IN_SECOND
	p90thMillis := p90th * MILLIS_IN_SECOND
	p99thMillis := p99th * MILLIS_IN_SECOND

	fmt.Printf("\n")
	fmt.Printf("Time taken for tests: %.3fs\n", total.Seconds())
	fmt.Printf("Time per container: %.3fms [mean] | %.3fms [90th] | %.3fms [99th]\n",
		meanMillis, p90thMillis, p99thMillis)
}
func bench(requests, concurrency int, image string) {
	start := time.Now()
	timings := make([]float64, 0, requests)

	completeCh := make(chan time.Duration)
	doneCh := make(chan struct{})
	current := 0
	go func() {
		for timing := range completeCh {
			timings = append(timings, timing.Seconds())
			current++
			percent := float64(current) / float64(requests) * 100
			fmt.Printf("[%3.f%%] %d/%d containers started\n", percent, current, requests)
		}
		// Signal that every timing has been recorded before the stats are computed.
		doneCh <- struct{}{}
	}()
	session(requests, concurrency, image, completeCh)
	close(completeCh)
	<-doneCh

	total := time.Since(start)
	p50th, _ := stats.Median(timings)
	p90th, _ := stats.Percentile(timings, 90)
	p99th, _ := stats.Percentile(timings, 99)

	fmt.Println("")
	fmt.Printf("Time taken for tests: %s\n", total.String())
	fmt.Printf("Time per container: %vms [50th] | %vms [90th] | %vms [99th]\n",
		int(p50th*1000), int(p90th*1000), int(p99th*1000))
}
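Both bench variants hand the actual work off to a session helper that is not shown here. A hypothetical sketch of such a worker pool, feeding one timing per started container into completeCh, might look like the following; it assumes the "sync" and "time" imports and stubs the container start behind a placeholder startContainer, which is not part of the snippets above.

// Hypothetical sketch of a session worker pool; not the real implementation
// used by the benchmarks above.
func session(requests, concurrency int, image string, completeCh chan<- time.Duration) {
	jobs := make(chan struct{}, requests)
	for i := 0; i < requests; i++ {
		jobs <- struct{}{}
	}
	close(jobs)

	var wg sync.WaitGroup
	for w := 0; w < concurrency; w++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			for range jobs {
				start := time.Now()
				startContainer(image) // hypothetical stand-in for the real container start
				completeCh <- time.Since(start)
			}
		}()
	}
	wg.Wait()
}

// startContainer is a placeholder; a real version would call the Docker (or
// containerd) API to create and start a container from the given image.
func startContainer(image string) {
	_ = image
}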
func statString(data []float64) (string, error) {
	strs := make([]string, 3)
	for i, pct := range []float64{50, 90, 99} {
		stat, err := stats.Percentile(data, pct)
		if err != nil {
			return "", err
		}
		strs[i] = strconv.FormatFloat(stat, 'f', 2, 64)
	}
	return strings.Join(strs, "/"), nil
}
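A hypothetical call might look like this; the latency values are made up, and the exact numbers printed depend on how stats.Percentile interpolates.

// Hypothetical usage of statString with made-up latency data.
latenciesMs := []float64{8, 9, 10, 11, 12, 13, 14, 18, 19, 20}
summary, err := statString(latenciesMs)
if err != nil {
	// stats.Percentile errors on empty input, for example
	panic(err)
}
fmt.Println(summary) // "<p50>/<p90>/<p99>" with two decimals, e.g. "12.50/19.50/20.00"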
func (hist *History) PrintSummary() {
	nanos := []float64{}
	for _, duration := range hist.values {
		nanos = append(nanos, float64(duration))
	}

	fmt.Printf("%10s", hist.name)
	fmt.Printf(" %10s", time.Duration(stats.Min(nanos)))
	for _, p := range percentiles {
		nano := time.Duration(stats.Percentile(nanos, p))
		fmt.Printf(" %10s", nano)
	}
	fmt.Printf(" %10s", time.Duration(stats.Max(nanos)))
	fmt.Println()
}
func FprintSummary(out io.Writer, hists ...*History) {
	fmt.Fprintf(out, "%10s", "")
	fmt.Fprintf(out, " %10s", "MIN")
	for _, p := range percentiles {
		fmt.Fprintf(out, " %9d%%", int(p))
	}
	fmt.Fprintf(out, " %10s", "MAX")
	fmt.Fprintln(out)

	for _, hist := range hists {
		nanos := []float64{}
		for _, duration := range hist.values {
			nanos = append(nanos, float64(duration))
		}

		fmt.Fprintf(out, "%10s", hist.name)
		fmt.Fprintf(out, " %10s", time.Duration(stats.Min(nanos)))
		for _, p := range percentiles {
			fmt.Fprintf(out, " %10s", time.Duration(stats.Percentile(nanos, p)))
		}
		fmt.Fprintf(out, " %10s", time.Duration(stats.Max(nanos)))
		fmt.Fprintln(out)
	}
}
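PrintSummary and FprintSummary lean on a small History type and a package-level percentiles slice that are not shown. A minimal sketch, inferred only from how they are used here and in the last example (the real qpc package may differ), could be:

// Sketch of the History type and percentiles slice assumed above; inferred
// from usage, not taken from the qpc package.
var percentiles = []float64{25, 50, 75, 90, 95, 99}

type History struct {
	name    string
	values  []time.Duration
	started time.Time
}

func NewHistory(name string) *History { return &History{name: name} }

// Start marks the beginning of a measured section.
func (hist *History) Start() { hist.started = time.Now() }

// Stop records the elapsed time since the matching Start call.
func (hist *History) Stop() { hist.values = append(hist.values, time.Since(hist.started)) }

// Last returns the most recently recorded duration.
func (hist *History) Last() time.Duration { return hist.values[len(hist.values)-1] }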
func main() {
	d := stats.LoadRawData([]interface{}{1.1, "2", 3.0, 4, "5"})

	a, _ := stats.Min(d)
	fmt.Println(a) // 1.1

	a, _ = stats.Max(d)
	fmt.Println(a) // 5

	a, _ = stats.Sum([]float64{1.1, 2.2, 3.3})
	fmt.Println(a) // 6.6

	a, _ = stats.Mean([]float64{1, 2, 3, 4, 5})
	fmt.Println(a) // 3

	a, _ = stats.Median([]float64{1, 2, 3, 4, 5, 6, 7})
	fmt.Println(a) // 4

	m, _ := stats.Mode([]float64{5, 5, 3, 3, 4, 2, 1})
	fmt.Println(m) // [5 3]

	a, _ = stats.PopulationVariance([]float64{1, 2, 3, 4, 5})
	fmt.Println(a) // 2

	a, _ = stats.SampleVariance([]float64{1, 2, 3, 4, 5})
	fmt.Println(a) // 2.5

	a, _ = stats.MedianAbsoluteDeviationPopulation([]float64{1, 2, 3})
	fmt.Println(a) // 1

	a, _ = stats.StandardDeviationPopulation([]float64{1, 2, 3})
	fmt.Println(a) // 0.816496580927726

	a, _ = stats.StandardDeviationSample([]float64{1, 2, 3})
	fmt.Println(a) // 1

	a, _ = stats.Percentile([]float64{1, 2, 3, 4, 5}, 75)
	fmt.Println(a) // 4

	a, _ = stats.PercentileNearestRank([]float64{35, 20, 15, 40, 50}, 75)
	fmt.Println(a) // 40

	c := []stats.Coordinate{
		{1, 2.3},
		{2, 3.3},
		{3, 3.7},
		{4, 4.3},
		{5, 5.3},
	}

	r, _ := stats.LinearRegression(c)
	fmt.Println(r) // [{1 2.3800000000000026} {2 3.0800000000000014} {3 3.7800000000000002} {4 4.479999999999999} {5 5.179999999999998}]

	r, _ = stats.ExponentialRegression(c)
	fmt.Println(r) // [{1 2.5150181024736638} {2 3.032084111136781} {3 3.6554544271334493} {4 4.406984298281804} {5 5.313022222665875}]

	r, _ = stats.LogarithmicRegression(c)
	fmt.Println(r) // [{1 2.1520822363811702} {2 3.3305559222492214} {3 4.019918836568674} {4 4.509029608117273} {5 4.888413396683663}]

	s, _ := stats.Sample([]float64{0.1, 0.2, 0.3, 0.4}, 3, false)
	fmt.Println(s) // [0.2,0.4,0.3]

	s, _ = stats.Sample([]float64{0.1, 0.2, 0.3, 0.4}, 10, true)
	fmt.Println(s) // [0.2,0.2,0.4,0.1,0.2,0.4,0.3,0.2,0.2,0.1]

	q, _ := stats.Quartile([]float64{7, 15, 36, 39, 40, 41})
	fmt.Println(q) // {15 37.5 40}

	iqr, _ := stats.InterQuartileRange([]float64{102, 104, 105, 107, 108, 109, 110, 112, 115, 116, 118})
	fmt.Println(iqr) // 10

	mh, _ := stats.Midhinge([]float64{1, 3, 4, 4, 6, 6, 6, 6, 7, 7, 7, 8, 8, 9, 9, 10, 11, 12, 13})
	fmt.Println(mh) // 7.5

	tr, _ := stats.Trimean([]float64{1, 3, 4, 4, 6, 6, 6, 6, 7, 7, 7, 8, 8, 9, 9, 10, 11, 12, 13})
	fmt.Println(tr) // 7.25

	o, _ := stats.QuartileOutliers([]float64{-1000, 1, 3, 4, 4, 6, 6, 6, 6, 7, 8, 15, 18, 100})
	fmt.Printf("%+v\n", o) // {Mild:[15 18] Extreme:[-1000 100]}

	gm, _ := stats.GeometricMean([]float64{10, 51.2, 8})
	fmt.Println(gm) // 15.999999999999991

	hm, _ := stats.HarmonicMean([]float64{1, 2, 3, 4, 5})
	fmt.Println(hm) // 2.18978102189781

	a, _ = stats.Round(2.18978102189781, 3)
	fmt.Println(a) // 2.189
}
func main() {
	verbose := flag.Bool("v", false, "verbose output")
	flag.Parse()

	file, err := os.Open("delta_data.bin")
	check(err)
	defer file.Close()

	buffer := bufio.NewReader(file)

	sizes := make([]float64, 0)
	speeds := make([]float64, 0)

	encode := qpc.NewHistory("encode")
	decode := qpc.NewHistory("decode")

	server := physics.NewState(901)
	client := physics.NewState(901)

	// initialize the base state
	for i := 0; i < 6; i += 1 {
		server.ReadNext(buffer)
		client.IncFrame()
		client.Current().Assign(server.Current())
	}

	frame := 6
	for {
		err = server.ReadNext(buffer)
		if err == io.EOF {
			break
		}
		check(err)
		frame += 1

		runtime.GC()

		// Server side
		encode.Start()
		snapshot := server.Encode()
		encode.Stop()
		// ===

		runtime.GC()

		// Client side
		decode.Start()
		client.IncFrame()
		client.Decode(snapshot)
		decode.Stop()
		// ===

		size := float64(len(snapshot)*8) / 1000.0
		sizes = append(sizes, size)
		speed := size * 60.0
		speeds = append(speeds, speed)

		equal := server.Current().Equals(client.Current())
		if *verbose {
			if !equal {
				fmt.Print("! ")
			}
			fmt.Printf("%04d %8.3fkbps %10s %10s\n", frame, speed, encode.Last(), decode.Last())
		} else {
			if equal {
				fmt.Print(".")
			} else {
				fmt.Print("X")
			}
		}
	}
	fmt.Println()

	fmt.Printf("#%d %.3fkbps ±%.3fkbps\n", len(sizes), stats.Mean(speeds), stats.StdDevS(speeds))
	fmt.Println()

	fmt.Printf("MIN %10.3f kbps\n", stats.Min(speeds))
	for _, p := range []float64{5, 10, 25, 50, 75, 90, 95} {
		fmt.Printf("P%02.f %10.3f kbps\n", p, stats.Percentile(speeds, p))
	}
	fmt.Printf("MAX %10.3f kbps\n", stats.Max(speeds))
	fmt.Println()

	fmt.Printf("TOTAL %10.3f kb\n", stats.Sum(sizes))
	fmt.Printf(" AVG %10.3f kb per frame\n", stats.Mean(sizes))
	fmt.Printf(" AVG %10.3f bits per cube\n", stats.Mean(sizes)*1000/float64(len(sizes)))
	fmt.Println()

	fmt.Println("TIMING:")
	qpc.PrintSummary(encode, decode)
}