// printKeyAnalysis reads one metric key per line from file, assigns each key
// to a node on the hash ring, and prints the per-node key counts along with
// the ideal (average) count per node and the deviation from that ideal.
func printKeyAnalysis(hr *hashing.HashRing, file string) {
	keys := make(map[string]int)
	total := 0

	data, err := ioutil.ReadFile(file)
	if err != nil {
		log.Fatalf("Error: %s", err)
	}

	for _, l := range strings.Split(string(data), "\n") {
		l = strings.TrimSpace(l)
		if l == "" {
			// Skip blank lines (including the trailing newline) so that
			// empty strings are never hashed onto the ring.
			continue
		}
		n := hr.GetNode(l)
		server := fmt.Sprintf("%s:%s", n.Server, n.Instance)
		keys[server]++
		total++
	}

	sortedKeys := make([]string, 0, len(keys))
	for k := range keys {
		sortedKeys = append(sortedKeys, k)
	}
	sort.Strings(sortedKeys)

	fmt.Printf("Keys per node:\n")
	average := float64(total) / float64(hr.Len())
	variance := float64(0)
	for _, k := range sortedKeys {
		fmt.Printf("%s\t%d\n", k, keys[k])
		variance += math.Pow(float64(keys[k])-average, 2)
	}

	fmt.Printf("\nTotal Metric Keys: %d\n", total)
	fmt.Printf("Ideal keys per node: %.2f\n", average)
	fmt.Printf("Deviation: %.4f\n", math.Sqrt(variance/float64(len(keys))))
}
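// For reference, the statistics printed above are the plain mean and
// standard deviation of the per-node key counts:
//
//	average = total / hr.Len()
//	stddev  = sqrt( sum_i (count_i - average)^2 / N )
//
// where N is the number of nodes that received at least one key, mirroring
// printKeyAnalysis. The helper below is an illustrative sketch only, not
// part of the original tool; it assumes the same math import already used
// by printKeyAnalysis and a hypothetical caller that supplies the counts.
func keyDistributionStats(counts map[string]int, ringSize int) (average, stddev float64) {
	total := 0
	for _, c := range counts {
		total += c
	}
	average = float64(total) / float64(ringSize)
	variance := float64(0)
	for _, c := range counts {
		variance += math.Pow(float64(c)-average, 2)
	}
	stddev = math.Sqrt(variance / float64(len(counts)))
	return average, stddev
}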
// restoreTarWorker consumes MetricData items from workIn, looks up the
// owning server on the hash ring, and uploads each metric to that server.
// In single-host mode, metrics that do not hash to the first entry in
// servers are skipped. The WaitGroup is signalled once workIn is closed
// and drained.
func restoreTarWorker(workIn chan *MetricData, ring *hashing.HashRing, servers []string, wg *sync.WaitGroup) {
	defer wg.Done()
	for work := range workIn {
		server := ring.GetNode(work.Name).Server
		if SingleHost && server != servers[0] {
			log.Printf("In single mode, skipping metric %s for server %s", work.Name, server)
			continue
		}
		log.Printf("Uploading %s => %s", work.Name, server)
		if err := PostMetric(server, work); err != nil {
			// Remember that at least one upload failed.
			workerErrors = true
		}
	}
}
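// A minimal sketch of how a caller might drive restoreTarWorker, assuming it
// already holds the hash ring, the server list, and a slice of parsed
// MetricData values. The function name, the fixed worker count, and the
// metrics argument are illustrative and not part of the original code.
func runRestoreWorkers(ring *hashing.HashRing, servers []string, metrics []*MetricData) {
	workIn := make(chan *MetricData)
	wg := new(sync.WaitGroup)

	// Start a small, fixed pool of upload workers.
	for i := 0; i < 4; i++ {
		wg.Add(1)
		go restoreTarWorker(workIn, ring, servers, wg)
	}

	// Feed the work queue, then close it so each worker's range loop exits.
	for _, m := range metrics {
		workIn <- m
	}
	close(workIn)

	// Wait for every in-flight upload to finish before returning.
	wg.Wait()
}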