// printTable renders each input string and its parsed time.Time value as a
// markdown table under a level-3 header.
func printTable(p parsetime.ParseTime, header string, times []string) {
	fmt.Printf("### %s", header)
	fmt.Println("")
	fmt.Println("")
	table := clitable.New([]string{"Input string", "_time.Time"})
	for _, v := range times {
		t, err := p.Parse(v)
		if err != nil {
			log.Printf("%s : %s", err, t.String())
			fmt.Println()
		}
		table.AddRow(map[string]interface{}{"Input string": v, "_time.Time": t.String()})
	}
	table.Markdown = true
	table.Print()
}
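// exampleParseTimeTable is an illustrative sketch, not part of the original
// source: it shows one plausible way to drive printTable above, based on how
// parsetime is used in main below (NewParseTime takes a location string and
// returns a ParseTime plus an error). The "UTC" location and the sample
// input strings are assumptions for illustration only.
func exampleParseTimeTable() {
	p, err := parsetime.NewParseTime("UTC")
	if err != nil {
		log.Fatal(err)
	}
	printTable(p, "RFC3339", []string{"2017-01-02T15:04:05Z", "2017-01-02 15:04:05"})
}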
func main() {
	kingpin.CommandLine.Help = "Access Log Profiler for LTSV (read from file or stdin)."
	kingpin.Version("0.3.1")
	kingpin.Parse()

	var f *os.File
	var err error
	var c Config

	if *config != "" {
		c, err = LoadYAML(*config)
		if err != nil {
			log.Fatal(err)
		}
	}

	option := Config{
		File:               *file,
		Reverse:            *reverse,
		QueryString:        *queryString,
		Tsv:                *tsv,
		ApptimeLabel:       *apptimeLabel,
		ReqtimeLabel:       *reqtimeLabel,
		StatusLabel:        *statusLabel,
		SizeLabel:          *sizeLabel,
		MethodLabel:        *methodLabel,
		UriLabel:           *uriLabel,
		TimeLabel:          *timeLabel,
		Limit:              *limit,
		IncludesStr:        *includes,
		ExcludesStr:        *excludes,
		IncludeStatusesStr: *includeStatuses,
		ExcludeStatusesStr: *excludeStatuses,
		NoHeaders:          *noHeaders,
		AggregatesStr:      *aggregates,
		StartTime:          *startTime,
		EndTime:            *endTime,
		StartTimeDuration:  *startTimeDuration,
		EndTimeDuration:    *endTimeDuration,
	}

	// Map the mutually exclusive sort flags onto a single sort key,
	// falling back to "max" when nothing is specified.
	switch {
	case *max:
		c.Sort = "max"
	case *min:
		c.Sort = "min"
	case *avg:
		c.Sort = "avg"
	case *sum:
		c.Sort = "sum"
	case *cnt:
		c.Sort = "cnt"
	case *sortUri:
		c.Sort = "uri"
	case *method:
		c.Sort = "method"
	case *maxBody:
		c.Sort = "max-body"
	case *minBody:
		c.Sort = "min-body"
	case *avgBody:
		c.Sort = "avg-body"
	case *sumBody:
		c.Sort = "sum-body"
	case *p1:
		c.Sort = "p1"
	case *p50:
		c.Sort = "p50"
	case *p99:
		c.Sort = "p99"
	case *stddev:
		c.Sort = "stddev"
	default:
		if c.Sort == "" {
			c.Sort = "max"
		}
	}

	c = SetConfig(c, option)

	// Re-sort and print previously dumped profiles, then exit.
	if *load != "" {
		accessLog, err = LoadProfiles(*load)
		if err != nil {
			log.Fatal(err)
		}
		SortProfiles(accessLog, c)
		return
	}

	// Read from stdin when it is a pipe, otherwise open the configured file.
	fileinfo, err := os.Stdin.Stat()
	if err != nil {
		log.Fatal(err)
	}

	if fileinfo.Mode()&os.ModeNamedPipe == 0 {
		f, err = os.Open(c.File)
		if err != nil {
			log.Fatal(err)
		}
		defer f.Close()
	} else {
		f = os.Stdin
	}

	accessLog = make(Profiles, 0, c.Limit)

	// Pre-compile all filter and aggregation patterns.
	var includeRegexps []*regexp.Regexp
	if len(c.Includes) > 0 {
		includeRegexps = make([]*regexp.Regexp, 0, len(c.Includes))
		for _, pattern := range c.Includes {
			re, rerr := regexp.Compile(pattern)
			if rerr != nil {
				log.Fatal(rerr)
			}
			includeRegexps = append(includeRegexps, re)
		}
	}

	var excludeRegexps []*regexp.Regexp
	if len(c.Excludes) > 0 {
		excludeRegexps = make([]*regexp.Regexp, 0, len(c.Excludes))
		for _, pattern := range c.Excludes {
			re, rerr := regexp.Compile(pattern)
			if rerr != nil {
				log.Fatal(rerr)
			}
			excludeRegexps = append(excludeRegexps, re)
		}
	}

	var includeStatusRegexps []*regexp.Regexp
	if len(c.IncludeStatuses) > 0 {
		includeStatusRegexps = make([]*regexp.Regexp, 0, len(c.IncludeStatuses))
		for _, pattern := range c.IncludeStatuses {
			re, rerr := regexp.Compile(pattern)
			if rerr != nil {
				log.Fatal(rerr)
			}
			includeStatusRegexps = append(includeStatusRegexps, re)
		}
	}

	var excludeStatusRegexps []*regexp.Regexp
	if len(c.ExcludeStatuses) > 0 {
		excludeStatusRegexps = make([]*regexp.Regexp, 0, len(c.ExcludeStatuses))
		for _, pattern := range c.ExcludeStatuses {
			re, rerr := regexp.Compile(pattern)
			if rerr != nil {
				log.Fatal(rerr)
			}
			excludeStatusRegexps = append(excludeStatusRegexps, re)
		}
	}

	var aggregateRegexps []*regexp.Regexp
	if len(c.Aggregates) > 0 {
		aggregateRegexps = make([]*regexp.Regexp, 0, len(c.Aggregates))
		for _, pattern := range c.Aggregates {
			re, rerr := regexp.Compile(pattern)
			if rerr != nil {
				log.Fatal(rerr)
			}
			aggregateRegexps = append(aggregateRegexps, re)
		}
	}

	var p parsetime.ParseTime
	p, err = parsetime.NewParseTime(*location)
	if err != nil {
		log.Fatal(err)
	}

	// Resolve the optional start/end of the time range to filter on.
	var sTimeNano int64
	if c.StartTime != "" {
		sTime, err := p.Parse(c.StartTime)
		if err != nil {
			log.Fatal(err)
		}
		sTimeNano = sTime.UnixNano()
	}
	if c.StartTimeDuration != "" {
		sTime, err := TimeDurationSub(c.StartTimeDuration)
		if err != nil {
			log.Fatal(err)
		}
		sTimeNano = sTime.UnixNano()
	}
	var eTimeNano int64
	if c.EndTime != "" {
		eTime, err := p.Parse(c.EndTime)
		if err != nil {
			log.Fatal(err)
		}
		eTimeNano = eTime.UnixNano()
	}
	if c.EndTimeDuration != "" {
		eTime, err := TimeDurationSub(c.EndTimeDuration)
		if err != nil {
			log.Fatal(err)
		}
		eTimeNano = eTime.UnixNano()
	}

	r := ltsv.NewReader(f)
Loop:
	for {
		line, err := r.Read()
		if err == io.EOF {
			break
		} else if err != nil {
			log.Fatal(err)
		}

		// Prefer the apptime label; fall back to reqtime when apptime is
		// missing or unparsable, and skip the record if neither parses.
		resTime, err := strconv.ParseFloat(line[c.ApptimeLabel], 64)
		if err != nil {
			var reqTime float64
			reqTime, err = strconv.ParseFloat(line[c.ReqtimeLabel], 64)
			if err != nil {
				continue
			}
			resTime = reqTime
		}

		bodySize, err := strconv.ParseFloat(line[c.SizeLabel], 64)
		if err != nil {
			continue
		}

		// Drop records outside the requested time range.
		if sTimeNano != 0 || eTimeNano != 0 {
			t, err := p.Parse(line[c.TimeLabel])
			if err != nil {
				continue
			}
			timeNano := t.UnixNano()
			if !TimeCmp(sTimeNano, eTimeNano, timeNano) {
				continue
			}
		}

		u, err := url.Parse(line[c.UriLabel])
		if err != nil {
			continue
		}

		if c.QueryString {
			// Mask query string values so URIs differing only in values
			// are grouped together.
			v := url.Values{}
			values := u.Query()
			for q := range values {
				v.Set(q, "xxx")
			}
			uri = fmt.Sprintf("%s?%s", u.Path, v.Encode())
			index = fmt.Sprintf("%s_%s?%s", line[c.MethodLabel], u.Path, v.Encode())
		} else {
			uri = u.Path
			index = fmt.Sprintf("%s_%s", line[c.MethodLabel], u.Path)
		}

		if len(c.Includes) > 0 {
			isnotMatched := true
			for _, re := range includeRegexps {
				if re.Match([]byte(uri)) {
					isnotMatched = false
				}
			}
			if isnotMatched {
				continue Loop
			}
		}

		if len(c.Excludes) > 0 {
			for _, re := range excludeRegexps {
				if re.Match([]byte(uri)) {
					continue Loop
				}
			}
		}

		if len(c.IncludeStatuses) > 0 {
			isnotMatched := true
			for _, re := range includeStatusRegexps {
				if re.Match([]byte(line[c.StatusLabel])) {
					isnotMatched = false
				}
			}
			if isnotMatched {
				continue Loop
			}
		}

		if len(c.ExcludeStatuses) > 0 {
			for _, re := range excludeStatusRegexps {
				if re.Match([]byte(line[c.StatusLabel])) {
					continue Loop
				}
			}
		}

		// Group URIs that match an aggregation pattern under the pattern itself.
		isMatched := false
		if len(c.Aggregates) > 0 {
			for _, re := range aggregateRegexps {
				if re.Match([]byte(uri)) {
					isMatched = true
					pattern := re.String()
					index = fmt.Sprintf("%s_%s", line[c.MethodLabel], pattern)
					uri = pattern
					SetCursor(index, uri)
				}
			}
		}
		if !isMatched {
			SetCursor(index, uri)
		}

		if len(uriHints) > c.Limit {
			log.Fatalf("Too many uri (%d or less)", c.Limit)
		}

		// Accumulate per-URI statistics.
		if accessLog[cursor].Max < resTime {
			accessLog[cursor].Max = resTime
		}
		if accessLog[cursor].Min >= resTime || accessLog[cursor].Min == 0 {
			accessLog[cursor].Min = resTime
		}
		accessLog[cursor].Cnt++
		accessLog[cursor].Sum += resTime
		accessLog[cursor].Method = line[c.MethodLabel]
		accessLog[cursor].Percentails = append(accessLog[cursor].Percentails, Percentail{RequestTime: resTime})

		if accessLog[cursor].MaxBody < bodySize {
			accessLog[cursor].MaxBody = bodySize
		}
		if accessLog[cursor].MinBody >= bodySize || accessLog[cursor].MinBody == 0 {
			accessLog[cursor].MinBody = bodySize
		}
		accessLog[cursor].SumBody += bodySize
	}
	// Finalize per-URI statistics: sort response times, then derive
	// averages, percentiles, and standard deviation.
	for i := range accessLog {
		sort.Sort(ByRequestTime{accessLog[i].Percentails})
		accessLog[i].Avg = accessLog[i].Sum / float64(accessLog[i].Cnt)
		accessLog[i].AvgBody = accessLog[i].SumBody / float64(accessLog[i].Cnt)

		p1Len := LenPercentail(len(accessLog[i].Percentails), 1)
		accessLog[i].P1 = accessLog[i].Percentails[p1Len].RequestTime
		p50Len := LenPercentail(len(accessLog[i].Percentails), 50)
		accessLog[i].P50 = accessLog[i].Percentails[p50Len].RequestTime
		p99Len := LenPercentail(len(accessLog[i].Percentails), 99)
		accessLog[i].P99 = accessLog[i].Percentails[p99Len].RequestTime

		accessLog[i].Stddev = RequestTimeStddev(accessLog[i].Percentails, accessLog[i].Sum, accessLog[i].Avg)
	}

	SortProfiles(accessLog, c)

	if *dump != "" {
		err = DumpProfiles(*dump, accessLog)
		if err != nil {
			log.Fatal(err)
		}
	}
}
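// examplePercentileIndex is an illustrative sketch only, not the helper used
// above: LenPercentail is defined elsewhere in this repository and its exact
// rounding behavior is not shown here. This sketch merely demonstrates the
// kind of calculation assumed, i.e. mapping a percentile to an index into
// the sorted Percentails slice before the P1/P50/P99 lookups.
func examplePercentileIndex(n, percentile int) int {
	if n == 0 {
		return 0
	}
	idx := n * percentile / 100
	if idx >= n {
		idx = n - 1
	}
	return idx
}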