func post(in <-chan []LM) {
	for metrics := range in {
		if len(metrics) < 1 {
			fmt.Printf("at=%q\n", "post.empty.metrics")
			continue
		}
		// Every metric in a batch carries the same token; use the first one to
		// look up the Librato credentials for this request.
		token := store.Token{Id: metrics[0].Token}
		token.Get()
		payload := LP{metrics}
		j, err := json.Marshal(payload)
		if err != nil {
			utils.MeasureE("librato.json", err)
			continue
		}
		postBody := bytes.NewBuffer(j)
		req, err := http.NewRequest("POST", libratoUrl, postBody)
		if err != nil {
			continue
		}
		req.Header.Add("Content-Type", "application/json")
		req.SetBasicAuth(token.User, token.Pass)
		resp, err := http.DefaultClient.Do(req)
		if err != nil {
			utils.MeasureE("librato-post", err)
			continue
		}
		if resp.StatusCode/100 != 2 {
			b, _ := ioutil.ReadAll(resp.Body)
			fmt.Printf("status=%d post-body=%s resp-body=%s\n", resp.StatusCode, postBody, b)
		}
		utils.MeasureI("librato.post", 1)
		resp.Body.Close()
	}
}
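// The LM and LP types are not shown in this excerpt. Assuming the payload targets
// Librato's /v1/metrics gauge format, plausible shapes would look like the sketch
// below; the field names and json tags are assumptions, not the original definitions.
type LM struct {
	Token  string  `json:"-"` // only used to look up credentials, never sent to Librato
	Name   string  `json:"name"`
	Source string  `json:"source,omitempty"`
	Val    float64 `json:"value"`
	Time   int64   `json:"measure_time"`
}

type LP struct {
	Gauges []LM `json:"gauges"`
}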
func CacheSet(c Cachable) error {
	defer utils.MeasureT(time.Now(), "cache-set")
	rc := rp.Get()
	defer rc.Close()
	bs, err := json.Marshal(c)
	if err != nil {
		utils.MeasureE("cache-json-encode", err)
		return err
	}
	_, err = rc.Do("SET", c.Key(), bs)
	if err != nil {
		utils.MeasureE("cache-set", err)
		return err
	}
	return nil
}
func receiveLogs(w http.ResponseWriter, r *http.Request, ch chan<- *store.Bucket) {
	defer utils.MeasureT(time.Now(), "http-receiver")
	if r.Method != "POST" {
		http.Error(w, "Invalid Request", 400)
		return
	}
	defer r.Body.Close()
	token, err := utils.ParseToken(r)
	if err != nil {
		utils.MeasureE("http-auth", err)
		http.Error(w, "Invalid Request", 400)
		return
	}
	defer utils.MeasureT(time.Now(), token+"-http-receive")
	buckets, err := store.NewBucket(token, bufio.NewReader(r.Body))
	if err != nil {
		http.Error(w, "Invalid Request", 400)
		return
	}
	for i := range buckets {
		ch <- buckets[i]
	}
}
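// A minimal sketch of how the receiver might be wired up: because the handler takes a
// channel in addition to (w, r), it is registered through a closure. The route, the
// channel size, the listen address, and the downstream consume function are assumptions
// for illustration only.
func runReceiver() {
	inbox := make(chan *store.Bucket, 1000) // buffered so a slow consumer doesn't block the handler
	go consume(inbox)                       // assumed worker that drains the inbox into the store
	http.HandleFunc("/logs", func(w http.ResponseWriter, r *http.Request) {
		receiveLogs(w, r, inbox)
	})
	log.Fatal(http.ListenAndServe(":8080", nil))
}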
func GetMetrics(token, name string, resolution int64, min, max time.Time) ([]*Metric, error) {
	startQuery := time.Now()
	rows, err := db.PGR.Query("select * from get_metrics($1, $2, $3, $4, $5)",
		token, name, resolution, min, max)
	if err != nil {
		utils.MeasureE("get-metrics-error", err)
		return nil, err
	}
	defer rows.Close()
	utils.MeasureT(startQuery, "get-metrics.query")
	startParse := time.Now()
	var metrics []*Metric
	for rows.Next() {
		startLoop := time.Now()
		var tmp []byte
		b := new(Bucket)
		if err := rows.Scan(&b.Name, &b.Source, &b.Time, &tmp); err != nil {
			// Skip rows that fail to scan rather than aborting the whole result set.
			continue
		}
		// Skip buckets that carry no encoded values.
		if len(tmp) == 0 {
			continue
		}
		encoding.DecodeArray(tmp, &b.Vals)
		m := new(Metric)
		m.Time = b.Time
		m.Name = b.Name
		m.Source = b.Source
		m.Mean = b.Mean()
		metrics = append(metrics, m)
		utils.MeasureT(startLoop, "get-metrics.scan-struct-loop")
	}
	utils.MeasureT(startParse, "parse.get-metrics")
	return metrics, nil
}
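// A hypothetical read path on top of GetMetrics: authenticate, pick a time window, query,
// and encode the result as JSON. The query parameter, the ten minute window, and the
// 60 second resolution are illustrative assumptions, not part of the original code.
func getMetricsHandler(w http.ResponseWriter, r *http.Request) {
	token, err := utils.ParseToken(r)
	if err != nil {
		http.Error(w, "Invalid Request", 400)
		return
	}
	max := utils.RoundTime(time.Now(), time.Minute)
	min := max.Add(-10 * time.Minute)
	metrics, err := GetMetrics(token, r.URL.Query().Get("name"), 60, min, max)
	if err != nil {
		http.Error(w, "Internal Server Error", 500)
		return
	}
	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(metrics)
}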
func CacheGet(c Cachable) ([]byte, bool) {
	defer utils.MeasureT(time.Now(), "cache-get")
	rc := rp.Get()
	defer rc.Close()
	bs, err := redis.Bytes(rc.Do("GET", c.Key()))
	if err != nil {
		utils.MeasureE("redis-get", err)
		return nil, false
	}
	return bs, true
}
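// CacheGet and CacheSet only require a Key() from their argument, and values are stored
// as JSON. A hypothetical Cachable implementation plus a read-through usage pattern might
// look like the sketch below; the MetricsPage type, its key format, and the one minute
// window are assumptions, not part of the original code.
type MetricsPage struct {
	Token string    `json:"token"`
	Name  string    `json:"name"`
	Data  []*Metric `json:"data"`
}

func (p *MetricsPage) Key() string {
	return "metrics:" + p.Token + ":" + p.Name
}

func loadPage(p *MetricsPage) error {
	// Serve from Redis when possible.
	if bs, ok := CacheGet(p); ok {
		return json.Unmarshal(bs, p)
	}
	// Otherwise fall back to Postgres and repopulate the cache for the next reader.
	max := utils.RoundTime(time.Now(), time.Minute)
	data, err := GetMetrics(p.Token, p.Name, 60, max.Add(-time.Minute), max)
	if err != nil {
		return err
	}
	p.Data = data
	return CacheSet(p)
}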
// fetch kicks off the librato outlet process.
// Its responsibility is to get the ids of buckets for the current time,
// make empty Buckets, then place the buckets in an inbox to be filled
// (load the vals into the bucket) and processed.
func fetch(out chan<- *store.Bucket) {
	for range time.Tick(time.Duration(*processInterval) * time.Second) {
		go func(out chan<- *store.Bucket) {
			startPoll := time.Now()
			max := utils.RoundTime(time.Now(), time.Minute)
			min := max.Add(-time.Minute)
			ids, err := allBucketIds(min, max)
			if err != nil {
				utils.MeasureE("find-failed", err)
				return
			}
			for i := range ids {
				b := store.Bucket{Id: ids[i]}
				out <- &b
			}
			utils.MeasureT(startPoll, "librato.fetch")
		}(out)
	}
}
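// A hypothetical end-to-end wiring of the librato outlet: fetch produces empty buckets,
// a filler goroutine loads their vals and converts them into measurements, and post
// drains the outbox. The channel sizes, the Get() loader on the bucket, and the convert
// step are assumptions for illustration, not the original wiring.
func runOutlet() {
	inbox := make(chan *store.Bucket, 1000)
	outbox := make(chan []LM, 1000)

	go fetch(inbox)
	go func() {
		for b := range inbox {
			b.Get() // assumed to load the bucket's vals from the store
			outbox <- convert(b)
		}
	}()
	post(outbox)
}

// convert stands in for whatever maps a filled bucket onto the LM fields post expects;
// the real mapping is not shown in this excerpt.
func convert(b *store.Bucket) []LM {
	// ... build []LM from b.Name, b.Source, and b.Vals ...
	return nil
}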