func (this *SkyOutput) Run(r engine.OutputRunner, h engine.PluginHelper) error { var ( ok = true pack *engine.PipelinePack inChan = r.InChan() globals = engine.Globals() project = h.Project(this.project) ) LOOP: for ok { select { case pack, ok = <-inChan: if !ok { break LOOP } if globals.Debug { globals.Println(*pack) } this.feedSky(project, pack) pack.Recycle() } } return nil }
// handlePack routes a pack to the alarm worker registered under its
// project and camel-cased logfile name. Packs with no matching worker
// are silently ignored.
func (this *AlarmOutput) handlePack(pack *engine.PipelinePack, h engine.PluginHelper) {
	workerName := pack.Logfile.CamelCaseName()
	worker, found := this.projects[pack.Project].workers[workerName]
	if !found {
		// No worker configured for this logfile: drop the pack.
		return
	}

	worker.inject(pack.Message, h.Project(pack.Project))
}
func (this *EsFilter) Run(r engine.FilterRunner, h engine.PluginHelper) error { var ( globals = engine.Globals() pack *engine.PipelinePack ok = true count = 0 inChan = r.InChan() ) LOOP: for ok { select { case pack, ok = <-inChan: if !ok { break LOOP } if globals.Debug { globals.Println(*pack) } if this.handlePack(pack, h.Project(pack.Project)) { count += 1 r.Inject(pack) } else { pack.Recycle() } } } globals.Printf("[%s]Total filtered: %d", r.Name(), count) return nil }
// Run is the AlarmOutput main loop. It starts one alarm-sending watchdog
// goroutine per project and one goroutine per alarm worker, then consumes
// packs from the input channel until it is closed, and finally tears the
// workers down in order: stop signal, per-worker cleanup, email channels.
func (this *AlarmOutput) Run(r engine.OutputRunner, h engine.PluginHelper) error {
	var (
		pack       *engine.PipelinePack
		reloadChan = make(chan interface{})
		ok         = true
		inChan     = r.InChan()
	)

	// One watchdog goroutine per project to deliver queued alarms.
	for name, project := range this.projects {
		go this.runSendAlarmsWatchdog(h.Project(name), project)
	}

	// start all the workers
	goAhead := make(chan bool)
	for _, project := range this.projects {
		for _, worker := range project.workers {
			go worker.run(h, goAhead)
			// Wait for each worker to signal readiness before starting the
			// next one.
			<-goAhead // in case of race condition with worker.inject
		}
	}

	// Listen for engine-wide reload events (currently unhandled below).
	observer.Subscribe(engine.RELOAD, reloadChan)

LOOP:
	for ok {
		select {
		case <-reloadChan:
			// TODO
		case pack, ok = <-inChan:
			if !ok {
				break LOOP
			}

			this.handlePack(pack, h)
			pack.Recycle()
		}
	}

	// Broadcast shutdown to all workers (they select on this.stopChan).
	close(this.stopChan)

	// all the workers cleanup
	for _, project := range this.projects {
		for _, worker := range project.workers {
			worker.cleanup()
		}
	}

	// NOTE(review): closing emailChan presumably lets the per-project
	// watchdog goroutines drain and exit — confirm against
	// runSendAlarmsWatchdog.
	for _, project := range this.projects {
		close(project.emailChan)
	}

	return nil
}
func (this *esBufferWorker) flush(r engine.FilterRunner, h engine.PluginHelper) { if this.summary.N == 0 { return } // generate new pack pack := h.PipelinePack(0) switch this.expression { case "count": pack.Message.SetField(this.esField, this.summary.N) case "mean": pack.Message.SetField(this.esField, this.summary.Mean) case "max": pack.Message.SetField(this.esField, this.summary.Max) case "min": pack.Message.SetField(this.esField, this.summary.Min) case "sd": pack.Message.SetField(this.esField, this.summary.Sd()) case "sum": pack.Message.SetField(this.esField, this.summary.Sum) default: panic("invalid expression: " + this.expression) } pack.Message.Timestamp = this.timestamp pack.Ident = this.ident pack.EsIndex = indexName(h.Project(this.projectName), this.indexPattern, time.Unix(int64(this.timestamp), 0)) pack.EsType = this.esType pack.Project = this.projectName globals := engine.Globals() if globals.Debug { globals.Println(*pack) } r.Inject(pack) this.summary.Reset() }
// for each inbound pack, this filter will generate several new pack // the original pack will be recycled immediately func (this *CardinalityFilter) handlePack(r engine.FilterRunner, h engine.PluginHelper, pack *engine.PipelinePack) { globals := engine.Globals() for _, c := range this.converters { if !pack.Logfile.MatchPrefix(c.logPrefix) || pack.Project != c.project { continue } for _, f := range c.fields { val, err := pack.Message.FieldValue(f.key, f.typ) if err != nil { if globals.Verbose { h.Project(c.project).Println(err) } return } for _, interval := range f.intervals { // generate new pack p := h.PipelinePack(pack.MsgLoopCount) if p == nil { globals.Println("can't get pack in filter") continue } p.Ident = this.ident p.Project = c.project p.CardinalityKey = fmt.Sprintf("%s.%s.%s", pack.Project, f.key, interval) p.CardinalityData = val p.CardinalityInterval = interval r.Inject(p) } } } }
func (this *alarmWorker) run(h engine.PluginHelper, goAhead chan bool) { var ( globals = engine.Globals() summary = stats.Summary{} beep bool ever = true ) // lazy assignment this.project = h.Project(this.projName) if globals.DryRun || this._instantAlarmOnly { goAhead <- true return } this.createDB() this.prepareInsertStmt() this.prepareStatsStmt() goAhead <- true for ever { select { case <-time.After(this.conf.windowSize): this.Lock() windowHead, windowTail, err := this.getWindowBorder() if err != nil { this.Unlock() continue } if this.conf.showSummary { summary.Reset() } rows, _ := this.statsStmt.Query(windowTail) cols, _ := rows.Columns() colsN := len(cols) values := make([]interface{}, colsN) valuePtrs := make([]interface{}, colsN) rowSeverity := 0 abnormal := false this.workersMutex.Lock() this.printWindowTitle(windowHead, windowTail, this.conf.title) for rows.Next() { beep = false for i, _ := range cols { valuePtrs[i] = &values[i] } rows.Scan(valuePtrs...) // 1st column always being aggregated quantile var amount = values[0].(int64) if amount == 0 { break } if this.conf.showSummary { summary.Add(float64(amount)) } // beep and feed alarmMail if this.conf.beepThreshold > 0 && int(amount) >= this.conf.beepThreshold { beep = true } rowSeverity = this.conf.severity * int(amount) // abnormal change? blink if this.isAbnormalChange(amount, this.historyKey(this.conf.printFormat, values)) { this.blinkColorPrintfLn(this.conf.printFormat, values...) abnormal = true // multiply factor rowSeverity *= this.conf.abnormalSeverityFactor } this.colorPrintfLn(beep, this.conf.printFormat, values...) this.feedAlarmMail(abnormal, rowSeverity, this.conf.printFormat, values...) } // show summary if this.conf.showSummary && summary.N > 0 { this.colorPrintfLn(false, "Total: %.1f, Mean: %.1f", summary.Sum, summary.Mean) } this.workersMutex.Unlock() rows.Close() this.moveWindowForward(windowTail) this.Unlock() case <-this.stopChan: ever = false } } }
// Run is the EsOutput main loop. It starts the bulk indexer, then
// multiplexes: inbound packs are fed to ES, a ticker reports periodic
// stats, a flush timer flushes the bulk buffer, and stopChan/closed
// input ends the loop. On shutdown it flushes once more and signals the
// indexer to stop.
func (this *EsOutput) Run(r engine.OutputRunner, h engine.PluginHelper) error {
	var (
		pack         *engine.PipelinePack
		reloadChan   = make(chan interface{})
		ok           = true
		globals      = engine.Globals()
		inChan       = r.InChan()
		reportTicker = time.NewTicker(this.reportInterval)
	)

	this.indexer = core.NewBulkIndexer(this.bulkMaxConn)
	this.indexer.BulkMaxDocs = this.bulkMaxDocs
	this.indexer.BulkMaxBuffer = this.bulkMaxBuffer

	// start the bulk indexer
	// NOTE(review): stopChan is shared between this loop (receive, below)
	// and the indexer — confirm both sides' send/receive pairing.
	this.indexer.Run(this.stopChan)

	defer reportTicker.Stop()

	observer.Subscribe(engine.RELOAD, reloadChan)

LOOP:
	for ok {
		select {
		case <-this.stopChan:
			ok = false
		case <-reportTicker.C:
			this.showPeriodicalStats()
		case <-reloadChan:
			// TODO
		case <-time.After(this.flushInterval):
			// NOTE(review): time.After is re-armed on every loop iteration,
			// so this flush only fires after flushInterval of total
			// inactivity on all cases — confirm that idle-flush is the
			// intended semantics (a Ticker would flush at a fixed rate).
			this.indexer.Flush()
		case pack, ok = <-inChan:
			if !ok {
				break LOOP
			}

			if globals.Debug {
				globals.Println(*pack)
			}

			this.feedEs(h.Project(pack.Project), pack)
			pack.Recycle()
		}
	}

	engine.Globals().Printf("[%s]Total output to ES: %d", r.Name(), this.totalN)

	// before shutdown, flush again
	if globals.Verbose {
		engine.Globals().Println("Waiting for ES flush...")
	}
	this.indexer.Flush()
	if globals.Verbose {
		engine.Globals().Println("ES flushed")
	}

	// let indexer stop
	this.stopChan <- true
	return nil
}