func findHandlerPB(w http.ResponseWriter, req *http.Request, responses []serverResponse) ([]*pb.GlobMatch, map[string][]string) {

	// metric -> [server1, ... ]
	paths := make(map[string][]string)
	seen := make(map[nameleaf]bool)

	var metrics []*pb.GlobMatch
	for _, r := range responses {
		var metric pb.GlobResponse
		err := metric.Unmarshal(r.response)
		if err != nil && req != nil {
			logger.Logf("error decoding protobuf response from server:%s: req:%s: err=%s", r.server, req.URL.RequestURI(), err)
			logger.Traceln("\n" + hex.Dump(r.response))
			Metrics.FindErrors.Add(1)
			continue
		}

		for _, match := range metric.Matches {
			n := nameleaf{*match.Path, *match.IsLeaf}
			_, ok := seen[n]
			if !ok {
				// we haven't seen this name yet
				// add the metric to the list of metrics to return
				metrics = append(metrics, match)
				seen[n] = true
			}
			// add the server to the list of servers that know about this metric
			p := paths[*match.Path]
			p = append(p, r.server)
			paths[*match.Path] = p
		}
	}

	return metrics, paths
}
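// renderHandler serves render requests: it parses the request parameters,
// checks the query cache, resolves each target's metrics via Zipper.Find
// (with a short-lived find cache), fetches the matching leaf series
// concurrently through Zipper.Render, evaluates the target expression, and
// writes the result in the requested format, caching the response body for
// cacheTimeout seconds.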
func renderHandler(w http.ResponseWriter, r *http.Request, stats *renderStats) {

	Metrics.Requests.Add(1)

	err := r.ParseForm()
	if err != nil {
		http.Error(w, http.StatusText(http.StatusBadRequest)+": "+err.Error(), http.StatusBadRequest)
		return
	}

	targets := r.Form["target"]
	from := r.FormValue("from")
	until := r.FormValue("until")
	format := r.FormValue("format")
	useCache := !expr.TruthyBool(r.FormValue("noCache"))

	var jsonp string

	if format == "json" {
		// TODO(dgryski): check jsonp only has valid characters
		jsonp = r.FormValue("jsonp")
	}

	if format == "" && (expr.TruthyBool(r.FormValue("rawData")) || expr.TruthyBool(r.FormValue("rawdata"))) {
		format = "raw"
	}

	if format == "" {
		format = "png"
	}

	cacheTimeout := int32(60)

	if tstr := r.FormValue("cacheTimeout"); tstr != "" {
		t, err := strconv.Atoi(tstr)
		if err != nil {
			logger.Logf("failed to parse cacheTimeout: %v: %v", tstr, err)
		} else {
			cacheTimeout = int32(t)
		}
	}

	// make sure the cache key doesn't say noCache, because it will never hit
	r.Form.Del("noCache")

	// jsonp callback names are frequently autogenerated and hurt our cache
	r.Form.Del("jsonp")

	// Strip some cache-busters. If you don't want to cache, use noCache=1
	r.Form.Del("_salt")
	r.Form.Del("_ts")
	r.Form.Del("_t") // Used by jquery.graphite.js

	cacheKey := r.Form.Encode()

	if response, ok := queryCache.get(cacheKey); useCache && ok {
		Metrics.RequestCacheHits.Add(1)
		writeResponse(w, response, format, jsonp)
		return
	}

	// normalize from and until values
	// BUG(dgryski): doesn't handle timezones the same as graphite-web
	from32 := dateParamToEpoch(from, timeNow().Add(-24*time.Hour).Unix())
	until32 := dateParamToEpoch(until, timeNow().Unix())
	if from32 == until32 {
		http.Error(w, "Invalid empty time range", http.StatusBadRequest)
		return
	}

	var results []*expr.MetricData
	var errors []string
	metricMap := make(map[expr.MetricRequest][]*expr.MetricData)

	for _, target := range targets {

		exp, e, err := expr.ParseExpr(target)
		if err != nil || e != "" {
			msg := buildParseErrorString(target, e, err)
			http.Error(w, msg, http.StatusBadRequest)
			return
		}

		for _, m := range exp.Metrics() {

			mfetch := m
			mfetch.From += from32
			mfetch.Until += until32

			if _, ok := metricMap[mfetch]; ok {
				// already fetched this metric for this request
				continue
			}

			var glob pb.GlobResponse
			var haveCacheData bool

			if response, ok := findCache.get(m.Metric); useCache && ok {
				Metrics.FindCacheHits.Add(1)
				err := glob.Unmarshal(response)
				haveCacheData = err == nil
			}

			if !haveCacheData {
				var err error
				Metrics.FindRequests.Add(1)
				stats.zipperRequests++
				glob, err = Zipper.Find(m.Metric)
				if err != nil {
					logger.Logf("Find: %v: %v", m.Metric, err)
					continue
				}
				b, err := glob.Marshal()
				if err == nil {
					findCache.set(m.Metric, b, 5*60)
				}
			}

			// For each metric returned in the Find response, query Render
			// This is a conscious decision to *not* cache render data
			rch := make(chan *expr.MetricData, len(glob.GetMatches()))
			leaves := 0
			for _, m := range glob.GetMatches() {
				if !m.GetIsLeaf() {
					continue
				}
				Metrics.RenderRequests.Add(1)
				leaves++
				Limiter.enter()
				stats.zipperRequests++
				go func(m *pb.GlobMatch, from, until int32) {
					var rptr *expr.MetricData
					r, err := Zipper.Render(m.GetPath(), from, until)
					if err == nil {
						rptr = &r
					} else {
						logger.Logf("Render: %v: %v", m.GetPath(), err)
					}
					rch <- rptr
					Limiter.leave()
				}(m, mfetch.From, mfetch.Until)
			}

			for i := 0; i < leaves; i++ {
				r := <-rch
				if r != nil {
					metricMap[mfetch] = append(metricMap[mfetch], r)
				}
			}

			expr.SortMetrics(metricMap[mfetch], mfetch)
		}

		func() {
			defer func() {
				if r := recover(); r != nil {
					var buf [1024]byte
					runtime.Stack(buf[:], false)
					logger.Logf("panic during eval: %s: %s\n%s\n", cacheKey, r, string(buf[:]))
				}
			}()
			exprs, err := expr.EvalExpr(exp, from32, until32, metricMap)
			if err != nil && err != expr.ErrSeriesDoesNotExist {
				errors = append(errors, target+": "+err.Error())
				return
			}
			results = append(results, exprs...)
		}()
	}

	if len(errors) > 0 {
		errors = append([]string{"Encountered the following errors:"}, errors...)
		http.Error(w, strings.Join(errors, "\n"), http.StatusBadRequest)
		return
	}

	var body []byte

	switch format {
	case "json":
		if maxDataPoints, _ := strconv.Atoi(r.FormValue("maxDataPoints")); maxDataPoints != 0 {
			expr.ConsolidateJSON(maxDataPoints, results)
		}
		body = expr.MarshalJSON(results)
	case "protobuf":
		body, err = expr.MarshalProtobuf(results)
		if err != nil {
			http.Error(w, err.Error(), http.StatusInternalServerError)
			return
		}
	case "raw":
		body = expr.MarshalRaw(results)
	case "csv":
		body = expr.MarshalCSV(results)
	case "pickle":
		body = expr.MarshalPickle(results)
	case "png":
		body = expr.MarshalPNG(r, results)
	case "svg":
		body = expr.MarshalSVG(r, results)
	}

	writeResponse(w, body, format, jsonp)

	if len(results) != 0 {
		queryCache.set(cacheKey, body, cacheTimeout)
	}
}