func (input *NsqInput) Run(runner pipeline.InputRunner, helper pipeline.PluginHelper) (err error) { var ( dRunner pipeline.DecoderRunner ok bool ) if input.DecoderName != "" { if dRunner, ok = helper.DecoderRunner(input.DecoderName, fmt.Sprintf("%s-%s", runner.Name(), input.DecoderName)); !ok { return fmt.Errorf("Decoder not found: %s", input.DecoderName) } input.decoderChan = dRunner.InChan() } input.runner = runner input.packSupply = runner.InChan() input.consumer.AddHandler(input) err = input.consumer.ConnectToNSQDs(input.NsqdAddrs) if err != nil { return err } err = input.consumer.ConnectToNSQLookupds(input.LookupdAddrs) if err != nil { return err } <-input.consumer.StoppedChan() return nil }
func (rpsi *RedisPubSubInput) Run(ir pipeline.InputRunner, h pipeline.PluginHelper) error { var ( dRunner pipeline.DecoderRunner decoder pipeline.Decoder pack *pipeline.PipelinePack e error ok bool ) // Get the InputRunner's chan to receive empty PipelinePacks packSupply := ir.InChan() if rpsi.conf.DecoderName != "" { if dRunner, ok = h.DecoderRunner(rpsi.conf.DecoderName, fmt.Sprintf("%s-%s", ir.Name(), rpsi.conf.DecoderName)); !ok { return fmt.Errorf("Decoder not found: %s", rpsi.conf.DecoderName) } decoder = dRunner.Decoder() } //Connect to the channel psc := redis.PubSubConn{Conn: rpsi.conn} psc.PSubscribe(rpsi.conf.Channel) for { switch n := psc.Receive().(type) { case redis.PMessage: // Grab an empty PipelinePack from the InputRunner pack = <-packSupply pack.Message.SetType("redis_pub_sub") pack.Message.SetLogger(n.Channel) pack.Message.SetPayload(string(n.Data)) pack.Message.SetTimestamp(time.Now().UnixNano()) var packs []*pipeline.PipelinePack if decoder == nil { packs = []*pipeline.PipelinePack{pack} } else { packs, e = decoder.Decode(pack) } if packs != nil { for _, p := range packs { ir.Inject(p) } } else { if e != nil { ir.LogError(fmt.Errorf("Couldn't parse Redis message: %s", n.Data)) } pack.Recycle(nil) } case redis.Subscription: ir.LogMessage(fmt.Sprintf("Subscription: %s %s %d\n", n.Kind, n.Channel, n.Count)) if n.Count == 0 { return errors.New("No channel to subscribe") } case error: fmt.Printf("error: %v\n", n) return n } } return nil }
func (lsi *LogstreamInput) Run(ir p.InputRunner, h p.PluginHelper, stopChan chan chan bool, dRunner p.DecoderRunner) { var ( parser func(ir p.InputRunner, deliver Deliver, stop chan chan bool) error err error ) if lsi.parseFunction == "payload" { parser = lsi.payloadParser } else if lsi.parseFunction == "messageProto" { parser = lsi.messageProtoParser } // Setup our pack delivery function appropriately for the configuration deliver := func(pack *p.PipelinePack) { if dRunner == nil { ir.Inject(pack) } else { dRunner.InChan() <- pack } } // Check for more data interval interval, _ := time.ParseDuration("250ms") tick := time.Tick(interval) ok := true for ok { // Clear our error err = nil // Attempt to read as many as we can err = parser(ir, deliver, stopChan) // Save our location after reading as much as we can lsi.stream.SavePosition() lsi.recordCount = 0 if err != nil && err != io.EOF { ir.LogError(err) } // Did our parser func get stopped? if lsi.stopped != nil { ok = false continue } // Wait for our next interval, stop if needed select { case lsi.stopped = <-stopChan: ok = false case <-tick: continue } } close(lsi.stopped) }
func (input *FilePollingInput) Run(runner pipeline.InputRunner, helper pipeline.PluginHelper) error { var ( data []byte pack *pipeline.PipelinePack dRunner pipeline.DecoderRunner ok bool err error ) if input.DecoderName != "" { if dRunner, ok = helper.DecoderRunner(input.DecoderName, fmt.Sprintf("%s-%s", runner.Name(), input.DecoderName)); !ok { return fmt.Errorf("Decoder not found: %s", input.DecoderName) } input.decoderChan = dRunner.InChan() } input.runner = runner hostname := helper.PipelineConfig().Hostname() packSupply := runner.InChan() tickChan := runner.Ticker() for { select { case <-input.stop: return nil case <-tickChan: } data, err = ioutil.ReadFile(input.FilePath) if err != nil { runner.LogError(fmt.Errorf("Error reading file: %s", err)) continue } pack = <-packSupply pack.Message.SetUuid(uuid.NewRandom()) pack.Message.SetTimestamp(time.Now().UnixNano()) pack.Message.SetType("heka.file.polling") pack.Message.SetHostname(hostname) pack.Message.SetPayload(string(data)) if field, err := message.NewField("TickerInterval", int(input.TickerInterval), ""); err != nil { runner.LogError(err) } else { pack.Message.AddField(field) } if field, err := message.NewField("FilePath", input.FilePath, ""); err != nil { runner.LogError(err) } else { pack.Message.AddField(field) } input.sendPack(pack) } return nil }
func (rli *RedisInput) Run(ir pipeline.InputRunner, h pipeline.PluginHelper) error { fmt.Println("Addr", rli.conf.Address) fmt.Println("key", rli.conf.Key) fmt.Println("batch_count:", rli.conf.Batch_count) fmt.Println("decoder:", rli.conf.Decoder) var ( dRunner pipeline.DecoderRunner decoder pipeline.Decoder ok bool e error reply interface{} vals []string msg string ) if rli.conf.Decoder != "" { if dRunner, ok = h.DecoderRunner(rli.conf.Decoder, fmt.Sprintf("%s-%s", ir.Name(), rli.conf.Decoder)); !ok { return fmt.Errorf("Decoder not found: %s", rli.conf.Decoder) } decoder = dRunner.Decoder() } for { reply, e = rli.conn.Do("BLPOP", rli.conf.Key, "0") if e == nil { vals, e = redis.Strings(reply, nil) msg = vals[1] if e == nil { rli.InsertMessage(ir, decoder, msg) } } reply, e = rli.batchlpop.Do(rli.conn, rli.conf.Key, rli.conf.Batch_count) if e == nil { vals, e = redis.Strings(reply, nil) if e == nil { for _, msg = range vals { rli.InsertMessage(ir, decoder, msg) } } else { fmt.Printf("err: %v\n", e) } } else { fmt.Printf("type: %T, error: %v\n", reply, e) return e } } return nil }
// SetDecoderRunner lazily creates and initializes the Lua sandbox for this
// decoder (restoring preserved state from disk when configured) and installs
// the InjectMessage callback that converts sandbox output into pipeline
// packs. A second call on an already-initialized decoder is a no-op. On any
// setup error the sandbox is destroyed and a global shutdown is requested.
func (s *SandboxDecoder) SetDecoderRunner(dr pipeline.DecoderRunner) {
	if s.sb != nil {
		return // no-op already initialized
	}
	s.dRunner = dr
	// Header values of the message currently being decoded; used as a
	// fallback when later injections leave standard headers unset.
	var original *message.Message
	switch s.sbc.ScriptType {
	case "lua":
		s.sb, s.err = lua.CreateLuaSandbox(s.sbc)
	default:
		s.err = fmt.Errorf("unsupported script type: %s", s.sbc.ScriptType)
	}
	if s.err == nil {
		// Per-runner preservation file so separate decoder instances don't
		// clobber each other's saved state.
		s.preservationFile = filepath.Join(s.pConfig.Globals.PrependBaseDir(DATA_DIR), dr.Name()+DATA_EXT)
		if s.sbc.PreserveData && fileExists(s.preservationFile) {
			s.err = s.sb.Init(s.preservationFile, "decoder")
		} else {
			s.err = s.sb.Init("", "decoder")
		}
	}
	if s.err != nil {
		dr.LogError(s.err)
		if s.sb != nil {
			s.sb.Destroy("")
			s.sb = nil
		}
		// NOTE(review): a decoder setup failure shuts down the whole Heka
		// process — presumably intentional fail-fast; confirm before reuse.
		s.pConfig.Globals.ShutDown()
		return
	}
	// Callback invoked by the sandbox each time the Lua script injects a
	// message. Returns 0 on success, 1 if a protobuf payload fails to parse.
	s.sb.InjectMessage(func(payload, payload_type, payload_name string) int {
		if s.pack == nil {
			s.pack = dr.NewPack()
			if original == nil && len(s.packs) > 0 {
				original = s.packs[0].Message // payload injections have the original header data in the first pack
			}
		} else {
			original = nil // processing a new message, clear the old message
		}
		if len(payload_type) == 0 { // heka protobuf message
			if original == nil {
				original = new(message.Message)
				copyMessageHeaders(original, s.pack.Message) // save off the header values since unmarshal will wipe them out
			}
			if nil != proto.Unmarshal([]byte(payload), s.pack.Message) {
				return 1
			}
			if s.tz != time.UTC {
				// Sandbox timestamps are parsed as if UTC; re-interpret the
				// wall-clock value in the configured timezone instead.
				const layout = "2006-01-02T15:04:05.999999999" // remove the incorrect UTC tz info
				t := time.Unix(0, s.pack.Message.GetTimestamp())
				t = t.In(time.UTC)
				ct, _ := time.ParseInLocation(layout, t.Format(layout), s.tz)
				s.pack.Message.SetTimestamp(ct.UnixNano())
			}
		} else {
			// Non-protobuf injection: store the raw payload and tag it with
			// its type/name so downstream consumers can identify it.
			s.pack.Message.SetPayload(payload)
			ptype, _ := message.NewField("payload_type", payload_type, "file-extension")
			s.pack.Message.AddField(ptype)
			pname, _ := message.NewField("payload_name", payload_name, "")
			s.pack.Message.AddField(pname)
		}
		if original != nil {
			// if future injections fail to set the standard headers, use the values
			// from the original message.
			if s.pack.Message.Uuid == nil {
				s.pack.Message.SetUuid(original.GetUuid())
			}
			if s.pack.Message.Timestamp == nil {
				s.pack.Message.SetTimestamp(original.GetTimestamp())
			}
			if s.pack.Message.Type == nil {
				s.pack.Message.SetType(original.GetType())
			}
			if s.pack.Message.Hostname == nil {
				s.pack.Message.SetHostname(original.GetHostname())
			}
			if s.pack.Message.Logger == nil {
				s.pack.Message.SetLogger(original.GetLogger())
			}
			if s.pack.Message.Severity == nil {
				s.pack.Message.SetSeverity(original.GetSeverity())
			}
			if s.pack.Message.Pid == nil {
				s.pack.Message.SetPid(original.GetPid())
			}
		}
		s.packs = append(s.packs, s.pack)
		s.pack = nil
		return 0
	})
}
func (ni *NsqInput) Run(ir pipeline.InputRunner, h pipeline.PluginHelper) error { // Get the InputRunner's chan to receive empty PipelinePacks var pack *pipeline.PipelinePack var err error var dRunner pipeline.DecoderRunner var decoder pipeline.Decoder var ok bool var e error //pos := 0 //output := make([]*Message, 2) packSupply := ir.InChan() if ni.conf.Decoder != "" { if dRunner, ok = h.DecoderRunner(ni.conf.Decoder); !ok { return fmt.Errorf("Decoder not found: %s", ni.conf.Decoder) } decoder = dRunner.Decoder() } err = ni.nsqReader.ConnectToLookupd(ni.conf.Address) if err != nil { ir.LogError(errors.New("ConnectToLookupd failed.")) } header := &message.Header{} stopped := false //readLoop: for !stopped { //stopped = true select { case <-ni.stopChan: ir.LogError(errors.New("get ni.stopChan, set stopped=true")) stopped = true default: pack = <-packSupply m, ok1 := <-ni.handler.logChan if !ok1 { stopped = true break } if ni.conf.Serialize { if dRunner == nil { pack.Recycle() ir.LogError(errors.New("Serialize messages require a decoder.")) } //header := &message.Header{} _, msgOk := findMessage(m.msg.Body, header, &(pack.MsgBytes)) if msgOk { dRunner.InChan() <- pack } else { pack.Recycle() ir.LogError(errors.New("Can't find Heka message.")) } header.Reset() } else { //ir.LogError(fmt.Errorf("message body: %s", m.msg.Body)) pack.Message.SetType("nsq") pack.Message.SetPayload(string(m.msg.Body)) pack.Message.SetTimestamp(time.Now().UnixNano()) var packs []*pipeline.PipelinePack if decoder == nil { packs = []*pipeline.PipelinePack{pack} } else { packs, e = decoder.Decode(pack) } if packs != nil { for _, p := range packs { ir.Inject(p) } } else { if e != nil { ir.LogError(fmt.Errorf("Couldn't parse Nsq message: %s", m.msg.Body)) } pack.Recycle() } } m.returnChannel <- &nsq.FinishedMessage{m.msg.Id, 0, true} /* output[pos] = m pos++ if pos == 2 { for pos > 0 { pos-- m1 := output[pos] m1.returnChannel <- &nsq.FinishedMessage{m1.msg.Id, 0, true} output[pos] = nil } } */ } 
} return nil }