// handle metrics received from statsd
// bucket name must be in this format:
//   appname.info1#param1#param2.info2#param1.infoX.field
// data is saved for day, hour, and 15 minute intervals
func dbHandleMetrics(m *statsd.Metric) {
	values := strings.Split(m.Bucket, ".")
	if len(values) < 3 {
		log.Error("Invalid bucket name - at least 3 dot-separated items are required: %s", m.Bucket)
		return
	}
	if strings.HasPrefix(m.Bucket, "_") {
		log.Error("Invalid bucket name - cannot start with underscore: %s", m.Bucket)
		return
	}

	// first item - app
	app := values[0]
	// last item - field name
	name := values[len(values)-1]

	// sanitization
	if !data.ValidateName(app) {
		log.Error("Invalid bucket name - app name not validated: %s", app)
		return
	}
	if !data.ValidateValueName(name) {
		log.Error("Invalid bucket name - field name not validated: %s", name)
		return
	}

	// remove first and last item
	values = values[1 : len(values)-1]

	tm := time.Now().UTC()
	// 15 minute aggregation
	minute := (tm.Minute() / 15) * 15

	var idata *bson.M
	switch m.Type {
	case statsd.COUNTER:
		idata = &bson.M{
			"$inc": bson.M{
				fmt.Sprintf("_dy.c_%s", name):                                 m.Value,
				fmt.Sprintf("_hr.h_%d.c_%s", tm.Hour(), name):                 m.Value,
				fmt.Sprintf("_hr.h_%d.mn.m_%d.c_%s", tm.Hour(), minute, name): m.Value,
			},
		}
	case statsd.TIMER:
		idata = &bson.M{
			"$inc": bson.M{
				fmt.Sprintf("_dy.t_%s", name):                                  m.Value,
				fmt.Sprintf("_dy.tc_%s", name):                                 1,
				fmt.Sprintf("_hr.h_%d.t_%s", tm.Hour(), name):                  m.Value,
				fmt.Sprintf("_hr.h_%d.tc_%s", tm.Hour(), name):                 1,
				fmt.Sprintf("_hr.h_%d.mn.m_%d.t_%s", tm.Hour(), minute, name):  m.Value,
				fmt.Sprintf("_hr.h_%d.mn.m_%d.tc_%s", tm.Hour(), minute, name): 1,
			},
		}
	case statsd.GAUGE:
		idata = &bson.M{
			"$inc": bson.M{
				fmt.Sprintf("_dy.g_%s", name):                                  m.Value,
				fmt.Sprintf("_dy.gc_%s", name):                                 1,
				fmt.Sprintf("_hr.h_%d.g_%s", tm.Hour(), name):                  m.Value,
				fmt.Sprintf("_hr.h_%d.gc_%s", tm.Hour(), name):                 1,
				fmt.Sprintf("_hr.h_%d.mn.m_%d.g_%s", tm.Hour(), minute, name):  m.Value,
				fmt.Sprintf("_hr.h_%d.mn.m_%d.gc_%s", tm.Hour(), minute, name): 1,
			},
		}
	}

	if idata != nil {
		baseq := bson.M{
			"_dt": tm.Format("2006-01-02"),
		}
		baseqapp := bson.M{
			"_dt":  tm.Format("2006-01-02"),
			"_app": app,
		}

		// all collections start with stat_
		c_base := "stat"

		// loop on info. Each can have parameters separated by #
		for _, iv := range values {
			info := strings.Split(iv, "#")
			if info[0] == "" {
				log.Error("Invalid bucket name - info cannot be blank: %s", m.Bucket)
				return
			}
			if strings.HasPrefix(info[0], "_") {
				log.Error("Invalid bucket name - info cannot start with underscore: %s", info[0])
				return
			}
			// sanitize
			if !data.ValidateName(info[0]) {
				log.Error("Invalid bucket name - info name not validated: %s", info[0])
				return
			}
			c_base = c_base + "_" + info[0]

			// separate collection for total and per-app
			c := dbdb.C(c_base)
			capp := dbdb.C(fmt.Sprintf("%s-app", c_base))

			// loop parameters
			for ridx, rv := range info[1:] {
				if rv != "" {
					var pname string
					// from second parameter on, add index to parameter name, starting from 1
					if ridx > 0 {
						pname = fmt.Sprintf("%s%d", info[0], ridx)
					} else {
						pname = info[0]
					}
					// sanitize
					if !data.ValidateName(pname) {
						log.Error("Invalid param name - name not validated: %s", pname)
						return
					}
					// add parameter to queries
					baseq[pname] = rv
					baseqapp[pname] = rv
				}
			}

			// general
			_, err := c.Upsert(baseq, *idata)
			if err != nil {
				log.Error("Error saving log record: %s", err)
			}
			// by app
			if app != "" {
				_, err = capp.Upsert(baseqapp, *idata)
				if err != nil {
					log.Error("Error saving app record: %s", err)
				}
			}
		}
	}
}
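
// Example (illustrative sketch, not part of the handler): a COUNTER metric with bucket
// "myapp.page#home#variant_a.views" is split into app "myapp", info "page" with parameters
// "home" and "variant_a", and field "views". It is upserted into collections "stat_page"
// (totals) and "stat_page-app" (per-app), keyed by day plus the parameters, with $inc keys
// such as "_dy.c_views", "_hr.h_14.c_views" and "_hr.h_14.mn.m_30.c_views".
// The statsd.Metric literal assumes the exported Bucket/Value/Type fields that
// dbHandleMetrics reads; the exact Value type in the statsd package is an assumption.
func exampleHandleCounter() {
	m := &statsd.Metric{
		Bucket: "myapp.page#home#variant_a.views",
		Value:  1,
		Type:   statsd.COUNTER,
	}
	dbHandleMetrics(m)
}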

func QueryStats(db *mgo.Database, statsquery *StatsQuery) (*StatsQueryResult, error) {
	// check parameters
	if statsquery.Process == "" || len(statsquery.Data) == 0 {
		return nil, fmt.Errorf("Required parameter not sent")
	}
	if statsquery.Groups == nil {
		statsquery.Groups = make([]string, 0)
	}

	// sanitize
	if !data.ValidateValueName(statsquery.Process) {
		return nil, fmt.Errorf("Invalid process name - name not validated: %s", statsquery.Process)
	}
	if statsquery.App != "" && statsquery.App != "@" && !data.ValidateName(statsquery.App) {
		return nil, fmt.Errorf("Invalid app name - name not validated: %s", statsquery.App)
	}
	for _, gval := range statsquery.Groups {
		if gval != "_app" && !data.ValidateName(gval) {
			return nil, fmt.Errorf("Invalid group name - name not validated: %s", gval)
		}
	}
	if statsquery.Amount < 1 {
		statsquery.Amount = 1
	}

	// find collection
	cname := fmt.Sprintf("stat_%s", statsquery.Process)
	if statsquery.App != "" {
		cname += "-app"
	}
	if !infoCollectionExists(db, cname) {
		return nil, fmt.Errorf("Process not found: %s", statsquery.Process)
	}
	c := db.C(cname)

	if statsquery.App != "" {
		c.EnsureIndex(mgo.Index{
			Key:        []string{"_dt", "_app"},
			Background: true,
			Sparse:     true,
		})
	} else {
		c.EnsureIndex(mgo.Index{
			Key:        []string{"_dt"},
			Background: true,
			Sparse:     true,
		})
	}

	// start time
	startdate := epochdate.TodayUTC() - epochdate.Date(statsquery.Amount) + 1
	enddate := epochdate.TodayUTC()
	//log.Printf("StartDate: %s - EndDate: %s", startdate.String(), enddate.String())

	// build mongodb filter
	filter := bson.M{"_dt": bson.M{"$gte": startdate.String()}}
	if statsquery.App != "" && statsquery.App != "@" {
		filter["_app"] = statsquery.App
	}
	if statsquery.Filters != nil {
		for pn, pv := range statsquery.Filters {
			// sanitize
			if !data.ValidateValueName(pn) {
				return nil, fmt.Errorf("Invalid filter name - name not validated: %s", pn)
			}
			//log.Printf("Filter: %s = %s", pn, pv)
			filter[pn] = pv
		}
	}

	querysort := []string{"_dt"}
	querysort = append(querysort, statsquery.Groups...)

	query := c.Find(filter).Sort(querysort...).Iter()

	groupcollect := make(map[string]*InfoGroupInfo)
	fdata := make(map[string]interface{})
	for query.Next(&fdata) {
		datadate, _ := epochdate.Parse(epochdate.RFC3339, fdata["_dt"].(string))
		//log.Printf("DataDate: %s", datadate.String())

		// build group string
		curgroup := ""
		for _, g := range statsquery.Groups {
			if gv, ok := fdata[g]; ok {
				curgroup = curgroup + "::" + fmt.Sprintf("%v", gv)
			} else {
				return nil, fmt.Errorf("No such field %s", g)
			}
		}

		ginfo, sdok := groupcollect[curgroup]
		if !sdok {
			//log.Printf("New data for group %s", curgroup)
			// stats collector, fills empty periods with 0
			scollect := NewSDayCollect(statsquery.Period)
			for _, ditem := range statsquery.Data {
				// add data - output name is the same as the data name
				scollect.AddImport(ditem, ditem)
			}
			scollect.Init(startdate, enddate)

			ginfo = &InfoGroupInfo{GroupId: curgroup, Groups: make(map[string]interface{}), Collect: scollect}
			groupcollect[curgroup] = ginfo

			for _, g := range statsquery.Groups {
				if gv, ok := fdata[g]; ok {
					ginfo.Groups[g] = gv
				} else {
					return nil, fmt.Errorf("No such field %s", g)
				}
			}
		}

		// fill day from data
		ginfo.Collect.ValueDay(datadate, fdata)
	}
	if err := query.Close(); err != nil {
		return nil, fmt.Errorf("Error reading data: %s", err)
	}

	var res interface{}
	if len(statsquery.Groups) > 0 {
		resgroup := &InfoResultGroup{Group: make([]*InfoResultGroupItem, 0)}
		for _, gv := range groupcollect {
			resgroup.Group = append(resgroup.Group, &InfoResultGroupItem{
				GroupId:    gv.GroupId,
				Groups:     gv.Groups,
				InfoResult: &InfoResult{List: gv.Collect.BuildResult()},
			})
		}
		res = resgroup
	} else {
		resinfo := &InfoResult{}
		if ri, ok := groupcollect[""]; ok {
			resinfo.List = ri.Collect.BuildResult()
		}
		res = resinfo
	}

	return &StatsQueryResult{
		StartDate: startdate,
		//EndDate: curdate,
		EndDate: enddate,
		Result:  res,
	}, nil
}
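
// Example (illustrative sketch): querying the last 7 days of the "page" process, grouped
// by app, collecting the daily counter field "c_views". The data field name and the
// StatsQuery field types are assumptions based on how QueryStats reads them (Process/App
// as strings, Data/Groups as string slices, Amount as an integer); adjust to the real
// struct definition and stored field names.
func exampleQueryStats(db *mgo.Database) {
	q := &StatsQuery{
		Process: "page",
		App:     "@",                 // "@" reads the per-app collection without filtering on one app
		Groups:  []string{"_app"},    // one result group per application
		Data:    []string{"c_views"}, // fields to collect from each day document (assumed name)
		Amount:  7,                   // number of days back from today
	}
	res, err := QueryStats(db, q)
	if err != nil {
		log.Error("Stats query failed: %s", err)
		return
	}
	// Because Groups is non-empty, res.Result holds an *InfoResultGroup; each item carries
	// the group id (here the app name) and that group's per-period values.
	_ = res
}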