func handleConnection(conn net.Conn, schemas []*StorageSchema, aggrs []*StorageAggregation) {
	bufconn := bufio.NewReader(conn)

	for {
		line, err := bufconn.ReadBytes('\n')
		if err != nil {
			conn.Close()
			if err != io.EOF {
				logger.Logf("read failed: %s", err.Error())
			}
			break
		}

		// expected plaintext carbon line: "<metric> <value> <timestamp>\n"
		elems := strings.Split(string(line), " ")
		if len(elems) != 3 {
			logger.Logf("invalid line: %s", string(line))
			continue
		}

		metric := elems[0]

		value, err := strconv.ParseFloat(elems[1], 64)
		if err != nil {
			logger.Logf("invalid value '%s': %s", elems[1], err.Error())
			continue
		}

		elems[2] = strings.TrimRight(elems[2], "\n")
		tsf, err := strconv.ParseFloat(elems[2], 64)
		if err != nil {
			logger.Logf("invalid timestamp '%s': %s", elems[2], err.Error())
			continue
		}
		ts := int(tsf)

		if metric == "" {
			logger.Logf("invalid line: %s", string(line))
			continue
		}

		if ts == 0 {
			logger.Logf("invalid timestamp (0): %s", string(line))
			continue
		}

		logger.Debugf("metric: %s, value: %f, ts: %d", metric, value, ts)

		// catch panics from whisper-go library
		defer func() {
			if r := recover(); r != nil {
				logger.Logf("recovering from whisper panic: %v", r)
			}
		}()

		// do what we want to do
		path := config.WhisperData + "/" + strings.Replace(metric, ".", "/", -1) + ".wsp"

		w, err := whisper.Open(path)
		if err != nil {
			// no existing whisper file: find the matching schema/aggregation and create one
			var schema *StorageSchema
			for _, s := range schemas {
				if s.pattern.MatchString(metric) {
					schema = s
					break
				}
			}
			if schema == nil {
				logger.Logf("no storage schema defined for %s", metric)
				continue
			}

			logger.Debugf("%s: found schema: %s", metric, schema.name)

			var aggr *StorageAggregation
			for _, a := range aggrs {
				if a.pattern.MatchString(metric) {
					aggr = a
					break
				}
			}

			// http://graphite.readthedocs.org/en/latest/config-carbon.html#storage-aggregation-conf
			aggrName := "(default)"
			aggrStr := "average"
			aggrType := whisper.Average
			xfilesf := float32(0.5)
			if aggr != nil {
				aggrName = aggr.name
				aggrStr = aggr.aggregationMethodStr
				aggrType = aggr.aggregationMethod
				xfilesf = float32(aggr.xFilesFactor)
			}

			logger.Logf("creating %s: %s, retention: %s (section %s), aggregationMethod: %s, xFilesFactor: %f (section %s)",
				metric, path, schema.retentionStr, schema.name, aggrStr, xfilesf, aggrName)

			// whisper.Create doesn't mkdir, so let's do it ourselves
			lastslash := strings.LastIndex(path, "/")
			if lastslash != -1 {
				os.MkdirAll(path[0:lastslash], os.ModeDir|os.ModePerm)
			}

			w, err = whisper.Create(path, schema.retentions, aggrType, xfilesf)
			if err != nil {
				logger.Logf("failed to create new whisper file %s: %s", path, err.Error())
				continue
			}
		}

		w.Update(value, ts)
		w.Close()
	}
}
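// Illustrative sketch, not part of the original source: handleConnection is
// designed to be driven by a plain TCP accept loop, one goroutine per client,
// with each client sending plaintext carbon lines ("metric.path value timestamp\n").
// The listenAndHandle helper, the listen address, and the assumption that the
// schemas/aggrs slices were loaded from storage-schemas.conf and
// storage-aggregation.conf elsewhere in the program are all hypothetical.
func listenAndHandle(addr string, schemas []*StorageSchema, aggrs []*StorageAggregation) error {
	ln, err := net.Listen("tcp", addr)
	if err != nil {
		return err
	}
	for {
		conn, err := ln.Accept()
		if err != nil {
			logger.Logf("accept failed: %s", err.Error())
			continue
		}
		// one goroutine per client connection
		go handleConnection(conn, schemas, aggrs)
	}
}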
func fetchHandler(wr http.ResponseWriter, req *http.Request) {
	// URL: /render/?target=the.metric.name&format=pickle&from=1396008021&until=1396022421

	Metrics.RenderRequests.Add(1)

	req.ParseForm()
	metric := req.FormValue("target")
	format := req.FormValue("format")
	from := req.FormValue("from")
	until := req.FormValue("until")

	t0 := time.Now()

	// Make sure we log which metric caused a panic()
	defer func() {
		if r := recover(); r != nil {
			var buf [1024]byte
			n := runtime.Stack(buf[:], false)
			logger.Logf("panic handling request: %s\n%s\n", req.RequestURI, string(buf[:n]))
		}
	}()

	if format != "json" && format != "pickle" && format != "protobuf" {
		Metrics.RenderErrors.Add(1)
		logger.Logf("dropping invalid uri (format=%s): %s", format, req.URL.RequestURI())
		http.Error(wr, "Bad request (unsupported format)", http.StatusBadRequest)
		return
	}

	files, leafs := expandGlobs(metric)

	var badTime bool

	i, err := strconv.Atoi(from)
	if err != nil {
		logger.Debugf("fromTime (%s) invalid: %s (in %s)", from, err, req.URL.RequestURI())
		badTime = true
	}
	fromTime := int(i)

	i, err = strconv.Atoi(until)
	if err != nil {
		logger.Debugf("untilTime (%s) invalid: %s (in %s)", until, err, req.URL.RequestURI())
		badTime = true
	}
	untilTime := int(i)

	if badTime {
		Metrics.RenderErrors.Add(1)
		http.Error(wr, "Bad request (invalid from/until time)", http.StatusBadRequest)
		return
	}

	var multi pb.MultiFetchResponse
	for i, metric := range files {
		if !leafs[i] {
			log.Printf("skipping directory = %q\n", metric)
			// can't fetch a directory
			continue
		}

		path := config.WhisperData + "/" + strings.Replace(metric, ".", "/", -1) + ".wsp"
		w, err := whisper.Open(path)
		if err != nil {
			// the FE/carbonzipper often requests metrics we don't have
			// We shouldn't really see this any more -- expandGlobs() should filter them out
			Metrics.NotFound.Add(1)
			log.Printf("error opening %q: %v\n", path, err)
			continue
		}

		points, err := w.Fetch(fromTime, untilTime)
		w.Close()
		if err != nil {
			Metrics.RenderErrors.Add(1)
			logger.Logf("failed to fetch points from %s: %s", path, err)
			continue
		}

		if points == nil {
			Metrics.NotFound.Add(1)
			logger.Debugf("Metric time range not found: metric=%s from=%d to=%d", metric, fromTime, untilTime)
			continue
		}

		values := points.Values()

		fromTime := int32(points.FromTime())
		untilTime := int32(points.UntilTime())
		step := int32(points.Step())

		response := pb.FetchResponse{
			Name:      proto.String(metric),
			StartTime: &fromTime,
			StopTime:  &untilTime,
			StepTime:  &step,
			Values:    make([]float64, len(values)),
			IsAbsent:  make([]bool, len(values)),
		}

		for i, p := range values {
			if math.IsNaN(p) {
				response.Values[i] = 0
				response.IsAbsent[i] = true
			} else {
				response.Values[i] = p
				response.IsAbsent[i] = false
			}
		}

		multi.Metrics = append(multi.Metrics, &response)
	}

	var b []byte
	switch format {
	case "json":
		wr.Header().Set("Content-Type", "application/json")
		b, err = json.Marshal(multi)

	case "protobuf":
		wr.Header().Set("Content-Type", "application/protobuf")
		b, err = proto.Marshal(&multi)

	case "pickle":
		// transform protobuf data into what pickle expects
		// [{'start': 1396271100, 'step': 60, 'name': 'metric',
		//   'values': [9.0, 19.0, None], 'end': 1396273140}]
		var response []map[string]interface{}

		for _, metric := range multi.GetMetrics() {
			m := make(map[string]interface{})
			m["start"] = metric.StartTime
			m["step"] = metric.StepTime
			m["end"] = metric.StopTime
			m["name"] = metric.Name

			mv := make([]interface{}, len(metric.Values))
			for i, p := range metric.Values {
				if metric.IsAbsent[i] {
					mv[i] = nil
				} else {
					mv[i] = p
				}
			}
			m["values"] = mv

			response = append(response, m)
		}

		wr.Header().Set("Content-Type", "application/pickle")
		var buf bytes.Buffer
		pEnc := pickle.NewEncoder(&buf)
		err = pEnc.Encode(response)
		b = buf.Bytes()
	}

	if err != nil {
		Metrics.RenderErrors.Add(1)
		logger.Logf("failed to create %s data for %s: %s", format, "<metric>", err)
		return
	}
	wr.Write(b)

	logger.Debugf("fetch: served %q from %d to %d in %v", metric, fromTime, untilTime, time.Since(t0))
}
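// Illustrative sketch, not part of the original source: a minimal client for
// the /render endpoint served by fetchHandler above. The host:port and metric
// name are placeholders; only the query parameters (target, format, from,
// until) and the response encodings come from the handler itself.
func exampleRenderQuery() {
	resp, err := http.Get("http://127.0.0.1:8080/render/?target=the.metric.name&format=json&from=1396008021&until=1396022421")
	if err != nil {
		log.Printf("render request failed: %v", err)
		return
	}
	defer resp.Body.Close()
	// a successful reply carries the MultiFetchResponse encoded as JSON
	log.Printf("render response: %s (%s)", resp.Status, resp.Header.Get("Content-Type"))
}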
func infoHandler(wr http.ResponseWriter, req *http.Request) {
	// URL: /info/?target=the.metric.name&format=json

	Metrics.InfoRequests.Add(1)

	req.ParseForm()
	metric := req.FormValue("target")
	format := req.FormValue("format")

	if format == "" {
		format = "json"
	}

	if format != "json" && format != "protobuf" {
		Metrics.InfoErrors.Add(1)
		logger.Logf("dropping invalid uri (format=%s): %s", format, req.URL.RequestURI())
		http.Error(wr, "Bad request (unsupported format)", http.StatusBadRequest)
		return
	}

	path := config.WhisperData + "/" + strings.Replace(metric, ".", "/", -1) + ".wsp"
	w, err := whisper.Open(path)
	if err != nil {
		Metrics.NotFound.Add(1)
		logger.Debugf("failed to open %s: %s", path, err)
		http.Error(wr, "Metric not found", http.StatusNotFound)
		return
	}
	defer w.Close()

	aggr := w.AggregationMethod()
	maxr := int32(w.MaxRetention())
	xfiles := float32(w.XFilesFactor())

	rets := make([]*pb.Retention, 0, 4)
	for _, retention := range w.Retentions() {
		spp := int32(retention.SecondsPerPoint())
		nop := int32(retention.NumberOfPoints())
		rets = append(rets, &pb.Retention{
			SecondsPerPoint: &spp,
			NumberOfPoints:  &nop,
		})
	}

	response := pb.InfoResponse{
		Name:              &metric,
		AggregationMethod: &aggr,
		MaxRetention:      &maxr,
		XFilesFactor:      &xfiles,
		Retentions:        rets,
	}

	var b []byte
	switch format {
	case "json":
		b, err = json.Marshal(response)
	case "protobuf":
		b, err = proto.Marshal(&response)
	}
	if err != nil {
		Metrics.RenderErrors.Add(1)
		logger.Logf("failed to create %s data for %s: %s", format, path, err)
		return
	}
	wr.Write(b)

	logger.Debugf("served info for %s", metric)
	return
}
func fetchHandler(wr http.ResponseWriter, req *http.Request) {
	// URL: /render/?target=the.metric.name&format=pickle&from=1396008021&until=1396022421

	Metrics.RenderRequests.Add(1)

	req.ParseForm()
	metric := req.FormValue("target")
	format := req.FormValue("format")
	from := req.FormValue("from")
	until := req.FormValue("until")

	if format != "json" && format != "pickle" && format != "protobuf" {
		Metrics.RenderErrors.Add(1)
		log.Warnf("dropping invalid uri (format=%s): %s", format, req.URL.RequestURI())
		http.Error(wr, "Bad request (unsupported format)", http.StatusBadRequest)
		return
	}

	path := config.WhisperData + "/" + strings.Replace(metric, ".", "/", -1) + ".wsp"
	w, err := whisper.Open(path)
	if err != nil {
		// the FE/carbonzipper often requests metrics we don't have
		Metrics.NotFound.Add(1)
		log.Debugf("failed to open %s: %s", path, err)
		http.Error(wr, "Metric not found", http.StatusNotFound)
		return
	}

	i, err := strconv.Atoi(from)
	if err != nil {
		log.Debugf("fromTime (%s) invalid: %s (in %s)", from, err, req.URL.RequestURI())
		if w != nil {
			w.Close()
		}
		w = nil
	}
	fromTime := int(i)

	i, err = strconv.Atoi(until)
	if err != nil {
		log.Debugf("untilTime (%s) invalid: %s (in %s)", until, err, req.URL.RequestURI())
		if w != nil {
			w.Close()
		}
		w = nil
	}
	untilTime := int(i)

	if w != nil {
		defer w.Close()
	} else {
		Metrics.RenderErrors.Add(1)
		http.Error(wr, "Bad request (invalid from/until time)", http.StatusBadRequest)
		return
	}

	points, err := w.Fetch(fromTime, untilTime)
	if err != nil {
		Metrics.RenderErrors.Add(1)
		log.Errorf("failed to fetch points from %s: %s", path, err)
		http.Error(wr, "Fetching data points failed", http.StatusInternalServerError)
		return
	}

	if points == nil {
		Metrics.NotFound.Add(1)
		log.Debugf("Metric time range not found: metric=%s from=%d to=%d", metric, fromTime, untilTime)
		http.Error(wr, "Metric time range not found", http.StatusNotFound)
		return
	}

	values := points.Values()

	if format == "json" || format == "protobuf" {
		fromTime := int32(points.FromTime())
		untilTime := int32(points.UntilTime())
		step := int32(points.Step())

		response := pb.FetchResponse{
			Name:      &metric,
			StartTime: &fromTime,
			StopTime:  &untilTime,
			StepTime:  &step,
			Values:    make([]float64, len(values)),
			IsAbsent:  make([]bool, len(values)),
		}

		for i, p := range values {
			if math.IsNaN(p) {
				response.Values[i] = 0
				response.IsAbsent[i] = true
			} else {
				response.Values[i] = p
				response.IsAbsent[i] = false
			}
		}

		var b []byte
		var err error
		switch format {
		case "json":
			b, err = json.Marshal(response)
		case "protobuf":
			b, err = proto.Marshal(&response)
		}
		if err != nil {
			Metrics.RenderErrors.Add(1)
			log.Errorf("failed to create %s data for %s: %s", format, path, err)
			return
		}
		wr.Write(b)
	} else if format == "pickle" {
		// [{'start': 1396271100, 'step': 60, 'name': 'metric',
		//   'values': [9.0, 19.0, None], 'end': 1396273140}]
		var metrics []map[string]interface{}

		m := make(map[string]interface{})
		m["start"] = points.FromTime()
		m["step"] = points.Step()
		m["end"] = points.UntilTime()
		m["name"] = metric

		mv := make([]interface{}, len(values))
		for i, p := range values {
			if math.IsNaN(p) {
				mv[i] = nil
			} else {
				mv[i] = p
			}
		}
		m["values"] = mv

		metrics = append(metrics, m)

		wr.Header().Set("Content-Type", "application/pickle")
		pEnc := pickle.NewEncoder(wr)
		pEnc.Encode(metrics)
	}

	log.Infof("served %d points for %s", len(values), metric)
	return
}
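// Illustrative sketch, not part of the original source: fetchHandler and
// infoHandler are ordinary net/http handlers, so serving them only requires
// registering them on a mux and calling ListenAndServe. The registerHandlers
// helper and the listen address are hypothetical; the URL prefixes mirror the
// comments at the top of each handler ("/render/", "/info/").
func registerHandlers(addr string) error {
	http.HandleFunc("/render/", fetchHandler)
	http.HandleFunc("/info/", infoHandler)
	return http.ListenAndServe(addr, nil)
}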