// updateComplexity records the min, mean, and max complexity of the
// population, along with the complexity of the best (highest-fitness) genome.
func updateComplexity(v *Web, pop neat.Population) {
	// Build complexity slice
	x := make([]float64, len(pop.Genomes))
	for i, g := range pop.Genomes {
		x[i] = float64(g.Complexity())
	}

	// Find the best genome by fitness
	var b neat.Genome
	max := -1.0
	for _, g := range pop.Genomes {
		if g.Fitness > max {
			b = g
			max = g.Fitness
		}
	}

	// Append the record
	min, _ := stats.Min(x)
	max, _ = stats.Max(x)
	mean, _ := stats.Mean(x)
	v.complexity = append(v.complexity, [4]float64{
		min, mean, max, float64(b.Complexity()),
	})
}
// PrintSummary prints the histogram's name followed by its minimum,
// the configured percentiles, and its maximum.
func (hist *History) PrintSummary() {
	nanos := make([]float64, 0, len(hist.values))
	for _, duration := range hist.values {
		nanos = append(nanos, float64(duration))
	}

	fmt.Printf("%10s", hist.name)
	fmt.Printf(" %10s", time.Duration(stats.Min(nanos)))
	for _, p := range percentiles {
		nano := time.Duration(stats.Percentile(nanos, p))
		fmt.Printf(" %10s", nano)
	}
	fmt.Printf(" %10s", time.Duration(stats.Max(nanos)))
	fmt.Println()
}
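// Note: PrintSummary above (and FprintSummary below) relies on a package-level
// `percentiles` variable that is not shown in these snippets, and on a stats
// package whose Min/Max/Percentile return a single float64 rather than a
// (float64, error) pair. A minimal sketch of the missing declaration; the
// exact values are an assumption, not from the source:
var percentiles = []float64{5, 10, 25, 50, 75, 90, 95}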
func updateFitness(v *Web, pop neat.Population) {
	// Build fitness slice
	x := make([]float64, len(pop.Genomes))
	for i, g := range pop.Genomes {
		x[i] = g.Fitness
	}

	// Append the record
	min, _ := stats.Min(x)
	max, _ := stats.Max(x)
	mean, _ := stats.Mean(x)
	v.fitness = append(v.fitness, [3]float64{
		min, mean, max,
	})
}
// fuzzyMap uses a word-similarity algorithm to match a value to its
// dictionary key, then assigns it the dictionary value.
func fuzzyMap(datum string, mappings []datasources.Setting) string {
	var distances []float64
	result := ""
	for _, mapping := range mappings {
		d := matchr.Levenshtein(datum, mapping.Label)
		distances = append(distances, float64(d))
	}
	minDistance, err := stats.Min(distances)
	if err != nil {
		log.Fatal("Error finding minimum: ", err)
	}
	for i, distance := range distances {
		if int(minDistance) == int(distance) {
			result = mappings[i].Value
			break
		}
	}
	return result
}
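// A minimal usage sketch for fuzzyMap. The settings below are hypothetical;
// datasources.Setting is assumed to be a struct with exported Label and Value
// fields, as implied by the snippet above:
func exampleFuzzyMap() {
	settings := []datasources.Setting{
		{Label: "color", Value: "colour"},
		{Label: "center", Value: "centre"},
	}
	// "colr" is closest to "color" by Levenshtein distance (1 edit),
	// so the mapped value "colour" is returned.
	fmt.Println(fuzzyMap("colr", settings))
}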
func FprintSummary(out io.Writer, hists ...*History) {
	fmt.Fprintf(out, "%10s", "")
	fmt.Fprintf(out, " %10s", "MIN")
	for _, p := range percentiles {
		fmt.Fprintf(out, " %9d%%", int(p))
	}
	fmt.Fprintf(out, " %10s", "MAX")
	fmt.Fprintln(out)

	for _, hist := range hists {
		nanos := make([]float64, 0, len(hist.values))
		for _, duration := range hist.values {
			nanos = append(nanos, float64(duration))
		}

		fmt.Fprintf(out, "%10s", hist.name)
		fmt.Fprintf(out, " %10s", time.Duration(stats.Min(nanos)))
		for _, p := range percentiles {
			fmt.Fprintf(out, " %10s", time.Duration(stats.Percentile(nanos, p)))
		}
		fmt.Fprintf(out, " %10s", time.Duration(stats.Max(nanos)))
		fmt.Fprintln(out)
	}
}
func main() {
	d := stats.LoadRawData([]interface{}{1.1, "2", 3.0, 4, "5"})

	a, _ := stats.Min(d)
	fmt.Println(a) // 1.1

	a, _ = stats.Max(d)
	fmt.Println(a) // 5

	a, _ = stats.Sum([]float64{1.1, 2.2, 3.3})
	fmt.Println(a) // 6.6

	a, _ = stats.Mean([]float64{1, 2, 3, 4, 5})
	fmt.Println(a) // 3

	a, _ = stats.Median([]float64{1, 2, 3, 4, 5, 6, 7})
	fmt.Println(a) // 4

	m, _ := stats.Mode([]float64{5, 5, 3, 3, 4, 2, 1})
	fmt.Println(m) // [5 3]

	a, _ = stats.PopulationVariance([]float64{1, 2, 3, 4, 5})
	fmt.Println(a) // 2

	a, _ = stats.SampleVariance([]float64{1, 2, 3, 4, 5})
	fmt.Println(a) // 2.5

	a, _ = stats.MedianAbsoluteDeviationPopulation([]float64{1, 2, 3})
	fmt.Println(a) // 1

	a, _ = stats.StandardDeviationPopulation([]float64{1, 2, 3})
	fmt.Println(a) // 0.816496580927726

	a, _ = stats.StandardDeviationSample([]float64{1, 2, 3})
	fmt.Println(a) // 1

	a, _ = stats.Percentile([]float64{1, 2, 3, 4, 5}, 75)
	fmt.Println(a) // 4

	a, _ = stats.PercentileNearestRank([]float64{35, 20, 15, 40, 50}, 75)
	fmt.Println(a) // 40

	c := []stats.Coordinate{
		{1, 2.3},
		{2, 3.3},
		{3, 3.7},
		{4, 4.3},
		{5, 5.3},
	}

	r, _ := stats.LinearRegression(c)
	fmt.Println(r) // [{1 2.3800000000000026} {2 3.0800000000000014} {3 3.7800000000000002} {4 4.479999999999999} {5 5.179999999999998}]

	r, _ = stats.ExponentialRegression(c)
	fmt.Println(r) // [{1 2.5150181024736638} {2 3.032084111136781} {3 3.6554544271334493} {4 4.406984298281804} {5 5.313022222665875}]

	r, _ = stats.LogarithmicRegression(c)
	fmt.Println(r) // [{1 2.1520822363811702} {2 3.3305559222492214} {3 4.019918836568674} {4 4.509029608117273} {5 4.888413396683663}]

	s, _ := stats.Sample([]float64{0.1, 0.2, 0.3, 0.4}, 3, false)
	fmt.Println(s) // [0.2,0.4,0.3]

	s, _ = stats.Sample([]float64{0.1, 0.2, 0.3, 0.4}, 10, true)
	fmt.Println(s) // [0.2,0.2,0.4,0.1,0.2,0.4,0.3,0.2,0.2,0.1]

	q, _ := stats.Quartile([]float64{7, 15, 36, 39, 40, 41})
	fmt.Println(q) // {15 37.5 40}

	iqr, _ := stats.InterQuartileRange([]float64{102, 104, 105, 107, 108, 109, 110, 112, 115, 116, 118})
	fmt.Println(iqr) // 10

	mh, _ := stats.Midhinge([]float64{1, 3, 4, 4, 6, 6, 6, 6, 7, 7, 7, 8, 8, 9, 9, 10, 11, 12, 13})
	fmt.Println(mh) // 7.5

	tr, _ := stats.Trimean([]float64{1, 3, 4, 4, 6, 6, 6, 6, 7, 7, 7, 8, 8, 9, 9, 10, 11, 12, 13})
	fmt.Println(tr) // 7.25

	o, _ := stats.QuartileOutliers([]float64{-1000, 1, 3, 4, 4, 6, 6, 6, 6, 7, 8, 15, 18, 100})
	fmt.Printf("%+v\n", o) // {Mild:[15 18] Extreme:[-1000 100]}

	gm, _ := stats.GeometricMean([]float64{10, 51.2, 8})
	fmt.Println(gm) // 15.999999999999991

	hm, _ := stats.HarmonicMean([]float64{1, 2, 3, 4, 5})
	fmt.Println(hm) // 2.18978102189781

	a, _ = stats.Round(2.18978102189781, 3)
	fmt.Println(a) // 2.19
}
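// Every function in the stats package toured above returns an error as its
// second value; the examples discard it for brevity. A minimal sketch of
// checking it, e.g. for empty input, where these functions fail:
func exampleCheckErr() {
	if _, err := stats.Min([]float64{}); err != nil {
		fmt.Println("stats.Min on empty input:", err)
	}
}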
func main() {
	verbose := flag.Bool("v", false, "verbose output")
	flag.Parse()

	file, err := os.Open("delta_data.bin")
	check(err)
	defer file.Close()

	buffer := bufio.NewReader(file)

	sizes := make([]float64, 0)
	speeds := make([]float64, 0)

	encode := qpc.NewHistory("encode")
	decode := qpc.NewHistory("decode")

	server := physics.NewState(901)
	client := physics.NewState(901)

	// initialize the base state
	for i := 0; i < 6; i++ {
		server.ReadNext(buffer)
		client.IncFrame()
		client.Current().Assign(server.Current())
	}

	frame := 6
	for {
		err = server.ReadNext(buffer)
		if err == io.EOF {
			break
		}
		check(err)
		frame++

		runtime.GC()

		// Server side
		encode.Start()
		snapshot := server.Encode()
		encode.Stop()
		// ===

		runtime.GC()

		// Client side
		decode.Start()
		client.IncFrame()
		client.Decode(snapshot)
		decode.Stop()
		// ===

		size := float64(len(snapshot)*8) / 1000.0
		sizes = append(sizes, size)
		speed := size * 60.0
		speeds = append(speeds, speed)

		equal := server.Current().Equals(client.Current())
		if *verbose {
			if !equal {
				fmt.Print("! ")
			}
			fmt.Printf("%04d %8.3fkbps %10s %10s\n", frame, speed, encode.Last(), decode.Last())
		} else {
			if equal {
				fmt.Print(".")
			} else {
				fmt.Print("X")
			}
		}
	}
	fmt.Println()

	fmt.Printf("#%d %.3fkbps ±%.3fkbps\n", len(sizes), stats.Mean(speeds), stats.StdDevS(speeds))
	fmt.Println()
	fmt.Printf("MIN %10.3f kbps\n", stats.Min(speeds))
	for _, p := range []float64{5, 10, 25, 50, 75, 90, 95} {
		fmt.Printf("P%02.f %10.3f kbps\n", p, stats.Percentile(speeds, p))
	}
	fmt.Printf("MAX %10.3f kbps\n", stats.Max(speeds))
	fmt.Println()
	fmt.Printf("TOTAL %10.3f kb\n", stats.Sum(sizes))
	fmt.Printf(" AVG %10.3f kb per frame\n", stats.Mean(sizes))
	fmt.Printf(" AVG %10.3f bits per cube\n", stats.Mean(sizes)*1000/float64(len(sizes)))
	fmt.Println()

	fmt.Println("TIMING:")
	qpc.PrintSummary(encode, decode)
}
func main() {
	flag.Parse()

	n := *concurrency
	m := *total / n

	fmt.Printf("concurrency: %d\nrequests per client: %d\n\n", n, m)

	args := prepareArgs()

	b, _ := proto.Marshal(args)
	fmt.Printf("message size: %d bytes\n\n", len(b))

	var wg sync.WaitGroup
	wg.Add(n * m)

	var trans uint64
	var transOK uint64

	// per-client latency samples; it contains warmup time but we can ignore it
	d := make([][]int64, n)

	totalT := time.Now().UnixNano()
	for i := 0; i < n; i++ {
		d[i] = make([]int64, 0, m)

		go func(i int) {
			conn, err := grpc.Dial(*host, grpc.WithInsecure())
			if err != nil {
				log.Fatalf("did not connect: %v", err)
			}
			c := NewHelloClient(conn)

			// warmup
			for j := 0; j < 5; j++ {
				c.Say(context.Background(), args)
			}

			for j := 0; j < m; j++ {
				t := time.Now().UnixNano()
				reply, err := c.Say(context.Background(), args)
				t = time.Now().UnixNano() - t
				d[i] = append(d[i], t)

				if err == nil && *(reply.Field1) == "OK" {
					atomic.AddUint64(&transOK, 1)
				}
				atomic.AddUint64(&trans, 1)
				wg.Done()
			}
			conn.Close()
		}(i)
	}

	wg.Wait()
	totalT = time.Now().UnixNano() - totalT
	totalT = totalT / 1000000
	fmt.Printf("took %d ms for %d requests\n", totalT, n*m)

	totalD := make([]int64, 0, n*m)
	for _, k := range d {
		totalD = append(totalD, k...)
	}
	totalD2 := make([]float64, 0, n*m)
	for _, k := range totalD {
		totalD2 = append(totalD2, float64(k))
	}

	mean, _ := stats.Mean(totalD2)
	median, _ := stats.Median(totalD2)
	max, _ := stats.Max(totalD2)
	min, _ := stats.Min(totalD2)

	fmt.Printf("sent requests : %d\n", n*m)
	fmt.Printf("received requests : %d\n", atomic.LoadUint64(&trans))
	fmt.Printf("received requests_OK : %d\n", atomic.LoadUint64(&transOK))
	fmt.Printf("throughput (TPS) : %d\n", int64(n*m)*1000/totalT)
	fmt.Printf("mean: %.f ns, median: %.f ns, max: %.f ns, min: %.f ns\n", mean, median, max, min)
	fmt.Printf("mean: %d ms, median: %d ms, max: %d ms, min: %d ms\n",
		int64(mean/1000000), int64(median/1000000), int64(max/1000000), int64(min/1000000))
}
func main() {
	flag.Parse()

	n := *concurrency
	m := *total / n

	fmt.Printf("concurrency: %d\nrequests per client: %d\n\n", n, m)

	serviceMethodName := "Hello.Say"
	args := prepareArgs()

	b := make([]byte, 1024*1024)
	i, _ := args.MarshalTo(b)
	fmt.Printf("message size: %d bytes\n\n", i)

	var wg sync.WaitGroup
	wg.Add(n * m)

	var trans uint64
	var transOK uint64

	// per-client latency samples; it contains warmup time but we can ignore it
	d := make([][]int64, n)

	totalT := time.Now().UnixNano()
	for i := 0; i < n; i++ {
		d[i] = make([]int64, 0, m)

		go func(i int) {
			s := &rpcx.DirectClientSelector{Network: "tcp", Address: *host}
			client := rpcx.NewClient(s)
			client.ClientCodecFunc = codec.NewProtobufClientCodec

			var reply BenchmarkMessage

			// warmup
			for j := 0; j < 5; j++ {
				client.Call(serviceMethodName, args, &reply)
			}

			for j := 0; j < m; j++ {
				t := time.Now().UnixNano()
				err := client.Call(serviceMethodName, args, &reply)
				t = time.Now().UnixNano() - t
				d[i] = append(d[i], t)

				if err == nil && reply.Field1 == "OK" {
					atomic.AddUint64(&transOK, 1)
				}
				atomic.AddUint64(&trans, 1)
				wg.Done()
			}
			client.Close()
		}(i)
	}

	wg.Wait()
	totalT = time.Now().UnixNano() - totalT
	totalT = totalT / 1000000
	fmt.Printf("took %d ms for %d requests\n", totalT, n*m)

	totalD := make([]int64, 0, n*m)
	for _, k := range d {
		totalD = append(totalD, k...)
	}
	totalD2 := make([]float64, 0, n*m)
	for _, k := range totalD {
		totalD2 = append(totalD2, float64(k))
	}

	mean, _ := stats.Mean(totalD2)
	median, _ := stats.Median(totalD2)
	max, _ := stats.Max(totalD2)
	min, _ := stats.Min(totalD2)

	fmt.Printf("sent requests : %d\n", n*m)
	fmt.Printf("received requests : %d\n", atomic.LoadUint64(&trans))
	fmt.Printf("received requests_OK : %d\n", atomic.LoadUint64(&transOK))
	fmt.Printf("throughput (TPS) : %d\n", int64(n*m)*1000/totalT)
	fmt.Printf("mean: %.f ns, median: %.f ns, max: %.f ns, min: %.f ns\n", mean, median, max, min)
	fmt.Printf("mean: %d ms, median: %d ms, max: %d ms, min: %d ms\n",
		int64(mean/1000000), int64(median/1000000), int64(max/1000000), int64(min/1000000))
}
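// The two benchmark clients above report only mean/median/min/max. Since both
// use a stats package whose functions return (float64, error) pairs
// (consistent with montanaflynn/stats), tail latencies could be added with
// stats.Percentile. A minimal sketch, assuming latencies holds the collected
// per-request durations in nanoseconds (e.g. totalD2 from above):
func printTailLatencies(latencies []float64) {
	for _, p := range []float64{90, 99, 99.9} {
		v, err := stats.Percentile(latencies, p)
		if err != nil {
			log.Fatalf("percentile: %v", err)
		}
		// convert nanoseconds to milliseconds for display
		fmt.Printf("P%v: %d ms\n", p, int64(v/1e6))
	}
}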
func main() {
	t := time.Now()
	fmt.Println(t.Format(time.RFC3339))

	rand.Seed(1)

	// Read in data
	readData()

	// Set one level with all row criteria;
	// this is used to start the set creation
	levelOne = fullOneLevel()
	//levels = fullTwoLevel()
	outputRowCriteria(levels)

	// experiment variables
	rand_numSets = 1000
	rand_maxSetMembers = 5
	maxExperiments = 1
	var expMin []float64
	var expMax []float64
	scoreCutoff = -0.89
	rowThreshhold = 2
	zScore = 2.58

	for experiment := 1; experiment <= maxExperiments; experiment++ {
		// experiment variables, changed per experiment
		rand_numSets += 0
		rand_maxSetMembers += 0
		scoreCutoff += -0.00
		zScore += 0.0

		// Set up experiment variables
		var scores []scoreResult
		var minScore float64 = -100
		var maxScore float64 = 0

		levels = fullFourLevel() //randLevels()

		fmt.Printf("sets count: %d, max set members: %d, level 1 count: %d, rowThreshhold: %d, scoreCutoff: %f, zScore: %f\n",
			len(levels), rand_maxSetMembers+2, len(levelOne), rowThreshhold, scoreCutoff, zScore)

		for dataSetId := 1; dataSetId <= datasets; dataSetId++ {
			s := levelEval(dataSetId)
			sort.Sort(scoreResults(s))

			// s contains a list of scores for one dataset, sorted;
			// this is where we can get some info on that data
			//outputScoreList(s)
			if len(s) > 0 {
				//var sEval = evaluateScores(s)
				// pick the top score
				var sEval = s[0]
				scores = append(scores, sEval)
				fmt.Printf("%d, %f \n", sEval.dataSetId, sEval.score)
				if minScore < sEval.score {
					minScore = sEval.score
				}
				if maxScore > sEval.score {
					maxScore = sEval.score
				}
			}

			// For all scores in this set, write out the median and standard deviation
			var set []float64
			for _, scoreItem := range s {
				if scoreItem.score < 0.0 {
					set = append(set, scoreItem.score)
				}
			}
			median, _ := stats.Median(set)
			sd, _ := stats.StandardDeviation(set)
			min, _ := stats.Min(set)
			max, _ := stats.Max(set)
			fmt.Printf("dataset: %d, median: %f, sd: %f, min: %f, max: %f, len: %d\n",
				dataSetId, median, sd, min, max, len(set))
		}

		expMin = append(expMin, minScore)
		expMax = append(expMax, maxScore)
		//scoreCutoff = (minScore * (percentRofMin / 100.0)) + minScore
		//fmt.Printf(" scoreCutoff: %f \n", scoreCutoff)

		outputScores(scores)

		// Write output file
		outputResults(scores)

		// Compare to training truth data
		// compareTrainingDataWithResults()
	}

	t = time.Now()
	fmt.Println(t.Format(time.RFC3339))

	// Output min and max scores per experiment
	for _, each := range expMin {
		fmt.Printf("min: %f, ", each)
	}
	fmt.Println()
	for _, each := range expMax {
		fmt.Printf("max: %f, ", each)
	}
}
// apply transforms an array of data according to the given transformation.
func apply(data []string, transformation templates.Transformation) ([]string, []Mapping) {
	p := transformation.Parameters
	var wg sync.WaitGroup
	var mapping []Mapping

	switch transformation.Operation {
	case "toDate":
		if len(p) != 2 {
			log.Fatal("toDate transformation requires 2 parameters: current format, new format")
		}
		oldFormat := p[0]
		newFormat := p[1]
		for i, x := range data {
			y, err := time.Parse(oldFormat, x)
			if err != nil {
				log.Print("Error parsing date with index ", i, " with format: ", oldFormat)
			} else {
				data[i] = y.Format(newFormat)
			}
		}
	case "setNull":
		for i, x := range data {
			if arrayPos(x, p) != -1 {
				data[i] = ""
			}
		}
	case "standardize":
		if len(p) != 1 {
			log.Fatal("standardize transformation requires 1 parameter: type (min-max|z-score)")
		}
		stype := p[0]
		switch stype {
		case "min-max":
			newData := strArrToFloatArr(data)
			min, err := stats.Min(newData)
			if err != nil {
				log.Fatal("Error finding minimum of data: ", err)
			}
			max, err := stats.Max(newData)
			if err != nil {
				log.Fatal("Error finding maximum of data: ", err)
			}
			srange := max - min
			for i, x := range newData {
				data[i] = floatToString((x - min) / srange)
			}
		case "z-score":
			newData := strArrToFloatArr(data)
			mean, err := stats.Mean(newData)
			if err != nil {
				log.Fatal("Error finding mean of data: ", err)
			}
			sd, err := stats.StandardDeviation(newData)
			if err != nil {
				log.Fatal("Error finding standard deviation of data: ", err)
			}
			for i, x := range newData {
				data[i] = floatToString((x - mean) / sd)
			}
		case "decimal":
			newData := strArrToFloatArr(data)
			max, err := stats.Max(newData)
			if err != nil {
				log.Fatal("Error finding maximum of data: ", err)
			}
			min, err := stats.Min(newData)
			if err != nil {
				log.Fatal("Error finding minimum of data: ", err)
			}
			var maxAbs float64
			if math.Abs(max) > math.Abs(min) {
				maxAbs = math.Abs(max)
			} else {
				maxAbs = math.Abs(min)
			}
			c := math.Ceil(math.Log10(maxAbs))
			for i, x := range newData {
				data[i] = floatToString(x / math.Pow10(int(c)))
			}
		}
	case "binPercent":
		table := NewPivotTable(data)
		intP := strArrToIntArr(p)
		sort.Ints(intP)
		ps := NewPercentileService(*table, intP)
		mapping = ps.CreateMappings()
		ps.Bin(mapping, data)
	case "fuzzyMap":
		if len(p) != 3 {
			log.Fatal("fuzzyMap transformation requires 3 parameters: datasource GUID, match, put")
		}
		dsGUID := p[0]
		ds := datasources.NewDatasourceService(database.GetDatabase())
		dsObj, err := ds.GetDatasource(dsGUID)
		if err != nil {
			log.Fatal("Error finding Datasource: ", err)
		}
		distinctValues := getDistinctValues(data)
		var mu sync.Mutex // guards mapping, which the goroutines below append to concurrently
		for i, datum := range distinctValues {
			wg.Add(1)
			go func(i int, datum string, dsObj datasources.Datasource) {
				defer wg.Done()
				result := fuzzyMap(datum, dsObj.Settings)
				fuzzyMapping := NewMapping(datum, result)
				mu.Lock()
				mapping = append(mapping, *fuzzyMapping)
				mu.Unlock()
			}(i, datum, dsObj)
		}
		wg.Wait()
		data = applyMappings(mapping, data)
	}
	return data, mapping
}
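// The fuzzyMap branch above fans work out to goroutines that all append to a
// shared slice, which needs the mutex added there. A channel-based alternative
// is sketched below; collectMappings is a hypothetical helper, not from the
// source, reusing the fuzzyMap and NewMapping functions shown earlier:
func collectMappings(values []string, settings []datasources.Setting) []Mapping {
	results := make(chan Mapping, len(values))
	for _, v := range values {
		go func(v string) {
			// each goroutine sends its result instead of mutating shared state
			results <- *NewMapping(v, fuzzyMap(v, settings))
		}(v)
	}
	mappings := make([]Mapping, 0, len(values))
	for range values {
		mappings = append(mappings, <-results)
	}
	return mappings
}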