func NewSlowLogParser(file *os.File, stopChan <-chan bool, opt Options) *SlowLogParser {
	// Seek to the offset, if any.
	// @todo error if start off > file size
	if opt.StartOffset > 0 {
		// @todo handle error
		file.Seek(int64(opt.StartOffset), os.SEEK_SET)
	}

	if opt.Debug {
		l.SetFlags(l.Ltime | l.Lmicroseconds)
		fmt.Println()
		l.Println("parsing " + file.Name())
	}

	p := &SlowLogParser{
		stopChan:    stopChan,
		opt:         opt,
		file:        file,
		EventChan:   make(chan *log.Event),
		inHeader:    false,
		inQuery:     false,
		headerLines: 0,
		queryLines:  0,
		bytesRead:   opt.StartOffset,
		lineOffset:  0,
		event:       log.NewEvent(),
	}
	return p
}
func (p *SlowLogParser) sendEvent(inHeader bool, inQuery bool) {
	if p.opt.Debug {
		l.Println("send event")
	}

	// Make a new event and reset our metadata.
	defer func() {
		p.event = log.NewEvent()
		p.headerLines = 0
		p.queryLines = 0
		p.inHeader = inHeader
		p.inQuery = inQuery
	}()

	if _, ok := p.event.TimeMetrics["Query_time"]; !ok {
		if p.headerLines == 0 {
			l.Panicf("No Query_time in event at %d: %#v", p.lineOffset, p.event)
		}
		// Started parsing in header after Query_time. Throw away event.
		return
	}

	// Clean up the event.
	p.event.Db = strings.TrimSuffix(p.event.Db, ";\n")
	p.event.Query = strings.TrimSuffix(p.event.Query, ";")

	// Send the event. This will block.
	select {
	case p.EventChan <- p.event:
	case <-p.stopChan:
		p.stopped = true
	}
}
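// ExampleConsume is a minimal, hypothetical sketch of driving the parser shown
// above. It assumes a Start() method (not part of this excerpt) that runs the
// read loop, calls sendEvent() as events complete, and closes EventChan when
// it reaches EOF or is told to stop. The file name and the empty Options are
// illustrative only.
func ExampleConsume() {
	file, err := os.Open("slow.log") // hypothetical input file
	if err != nil {
		l.Fatal(err)
	}
	defer file.Close()

	stopChan := make(chan bool)
	p := NewSlowLogParser(file, stopChan, Options{})

	// Assumed entry point: produces events on p.EventChan in a goroutine.
	go p.Start()

	// Consume events as sendEvent() delivers them; the loop ends when the
	// assumed Start() closes EventChan.
	for event := range p.EventChan {
		fmt.Println(event.Query)
	}
}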