Code example #1
File: input.go Project: swan-go/heka-redis
func (rpsi *RedisPubSubInput) Run(ir pipeline.InputRunner, h pipeline.PluginHelper) error {
	var (
		dRunner pipeline.DecoderRunner
		decoder pipeline.Decoder
		pack    *pipeline.PipelinePack
		e       error
		ok      bool
	)
	// Get the InputRunner's chan to receive empty PipelinePacks
	packSupply := ir.InChan()

	if rpsi.conf.DecoderName != "" {
		if dRunner, ok = h.DecoderRunner(rpsi.conf.DecoderName, fmt.Sprintf("%s-%s", ir.Name(), rpsi.conf.DecoderName)); !ok {
			return fmt.Errorf("Decoder not found: %s", rpsi.conf.DecoderName)
		}
		decoder = dRunner.Decoder()
	}

	// Subscribe to the configured channel pattern
	psc := redis.PubSubConn{Conn: rpsi.conn}
	psc.PSubscribe(rpsi.conf.Channel)

	for {
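		// Block on Receive and handle each type of pub/sub event.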
		switch n := psc.Receive().(type) {
		case redis.PMessage:
			// Grab an empty PipelinePack from the InputRunner
			pack = <-packSupply
			pack.Message.SetType("redis_pub_sub")
			pack.Message.SetLogger(n.Channel)
			pack.Message.SetPayload(string(n.Data))
			pack.Message.SetTimestamp(time.Now().UnixNano())
			var packs []*pipeline.PipelinePack
			if decoder == nil {
				packs = []*pipeline.PipelinePack{pack}
			} else {
				packs, e = decoder.Decode(pack)
			}
			if packs != nil {
				for _, p := range packs {
					ir.Inject(p)
				}
			} else {
				if e != nil {
					ir.LogError(fmt.Errorf("Couldn't parse Redis message: %s", n.Data))
				}
				pack.Recycle(nil)
			}
		case redis.Subscription:
			ir.LogMessage(fmt.Sprintf("Subscription: %s %s %d\n", n.Kind, n.Channel, n.Count))
			if n.Count == 0 {
				return errors.New("No channel to subscribe to")
			}
		case error:
			fmt.Printf("error: %v\n", n)
			return n
		}
	}

	return nil
}
Code example #2
File: input.go Project: imgix/heka-nsq
func (input *NsqInput) Run(runner pipeline.InputRunner,
	helper pipeline.PluginHelper) (err error) {
	var (
		dRunner pipeline.DecoderRunner
		ok      bool
	)

	if input.DecoderName != "" {
		if dRunner, ok = helper.DecoderRunner(input.DecoderName,
			fmt.Sprintf("%s-%s", runner.Name(), input.DecoderName)); !ok {
			return fmt.Errorf("Decoder not found: %s", input.DecoderName)
		}
		input.decoderChan = dRunner.InChan()
	}

	input.runner = runner
	input.packSupply = runner.InChan()

	input.consumer.AddHandler(input)

	err = input.consumer.ConnectToNSQDs(input.NsqdAddrs)
	if err != nil {
		return err
	}
	err = input.consumer.ConnectToNSQLookupds(input.LookupdAddrs)
	if err != nil {
		return err
	}

	// Block until the consumer has been stopped.
	<-input.consumer.StoppedChan()

	return nil
}
Code example #3
File: file_polling_input.go Project: salekseev/heka
func (input *FilePollingInput) Run(runner pipeline.InputRunner,
	helper pipeline.PluginHelper) error {

	var (
		data    []byte
		pack    *pipeline.PipelinePack
		dRunner pipeline.DecoderRunner
		ok      bool
		err     error
	)

	if input.DecoderName != "" {
		if dRunner, ok = helper.DecoderRunner(input.DecoderName,
			fmt.Sprintf("%s-%s", runner.Name(), input.DecoderName)); !ok {
			return fmt.Errorf("Decoder not found: %s", input.DecoderName)
		}
		input.decoderChan = dRunner.InChan()
	}
	input.runner = runner

	hostname := helper.PipelineConfig().Hostname()
	packSupply := runner.InChan()
	tickChan := runner.Ticker()

	for {
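		// Wait for the next tick (or a stop signal), then read the file and publish its contents.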
		select {
		case <-input.stop:
			return nil
		case <-tickChan:
		}

		data, err = ioutil.ReadFile(input.FilePath)
		if err != nil {
			runner.LogError(fmt.Errorf("Error reading file: %s", err))
			continue
		}

		pack = <-packSupply
		pack.Message.SetUuid(uuid.NewRandom())
		pack.Message.SetTimestamp(time.Now().UnixNano())
		pack.Message.SetType("heka.file.polling")
		pack.Message.SetHostname(hostname)
		pack.Message.SetPayload(string(data))
		if field, err := message.NewField("TickerInterval", int(input.TickerInterval), ""); err != nil {
			runner.LogError(err)
		} else {
			pack.Message.AddField(field)
		}
		if field, err := message.NewField("FilePath", input.FilePath, ""); err != nil {
			runner.LogError(err)
		} else {
			pack.Message.AddField(field)
		}
		input.sendPack(pack)
	}

	return nil
}
Code example #4
File: logstreamer_input.go Project: salekseev/heka
// startLogstreamInput creates a DecoderRunner and stop channel, then starts
// the provided LogstreamInput plugin.
func (li *LogstreamerInput) startLogstreamInput(logstream *LogstreamInput, i int,
	ir p.InputRunner, h p.PluginHelper) {

	fullName := fmt.Sprintf("%s-%s-%d", li.pluginName, li.decoderName, i)
	dRunner, _ := h.DecoderRunner(li.decoderName, fullName)
	stop := make(chan chan bool, 1)
	li.stopLogstreamChans = append(li.stopLogstreamChans, stop)
	go logstream.Run(ir, h, stop, dRunner)
}
Code example #5
File: redis_input.go Project: swan-go/heka-redis
func (rli *RedisInput) Run(ir pipeline.InputRunner, h pipeline.PluginHelper) error {
	fmt.Println("Addr", rli.conf.Address)
	fmt.Println("key", rli.conf.Key)
	fmt.Println("batch_count:", rli.conf.Batch_count)
	fmt.Println("decoder:", rli.conf.Decoder)
	var (
		dRunner pipeline.DecoderRunner
		decoder pipeline.Decoder
		ok      bool
		e       error
		reply   interface{}
		vals    []string
		msg     string
	)

	if rli.conf.Decoder != "" {
		if dRunner, ok = h.DecoderRunner(rli.conf.Decoder, fmt.Sprintf("%s-%s", ir.Name(), rli.conf.Decoder)); !ok {
			return fmt.Errorf("Decoder not found: %s", rli.conf.Decoder)
		}
		decoder = dRunner.Decoder()
	}

	for {
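		// Pop a single message with BLPOP, then drain a batch via the batch-lpop script.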
		reply, e = rli.conn.Do("BLPOP", rli.conf.Key, "0")
		if e == nil {
			vals, e = redis.Strings(reply, nil)
			if e == nil {
				// BLPOP returns [key, value]; the payload is the second element.
				msg = vals[1]
				rli.InsertMessage(ir, decoder, msg)
			}
		}
		reply, e = rli.batchlpop.Do(rli.conn, rli.conf.Key, rli.conf.Batch_count)
		if e == nil {
			vals, e = redis.Strings(reply, nil)
			if e == nil {
				for _, msg = range vals {
					rli.InsertMessage(ir, decoder, msg)
				}
			} else {
				fmt.Printf("err: %v\n", e)
			}
		} else {
			fmt.Printf("type: %T, error: %v\n", reply, e)
			return e
		}
	}

	return nil
}
Code example #6
File: redis_Input.go Project: jbli/jbli_svn
func (ri *RedisMQInput) Run(ir pipeline.InputRunner, h pipeline.PluginHelper) error {
	// Get the InputRunner's chan to receive empty PipelinePacks
	packs := ir.InChan()

	var decoding chan<- *pipeline.PipelinePack
	if ri.conf.Decoder != "" {
		// Fetch specified decoder
		decoder, ok := h.DecoderRunner(ri.conf.Decoder)
		if !ok {
			err := fmt.Errorf("Could not find decoder: %s", ri.conf.Decoder)
			return err
		}

		// Get the decoder's receiving chan
		decoding = decoder.InChan()
	}

	var pack *pipeline.PipelinePack
	//var p []*redismq.Package
	var p *redismq.Package
	var count int
	var b []byte
	var err error

	for {
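		// Fetch the next message from the redismq consumer, ack it, and hand it to the decoder or pipeline.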
		p, err = ri.rdconsumer.Get()
		if err != nil {
			ir.LogError(err)
			continue
		}
		err = p.Ack()
		if err != nil {
			ir.LogError(err)
		}
		b = []byte(p.Payload)
		// Grab an empty PipelinePack from the InputRunner
		pack = <-packs

		// Trim the excess empty bytes
		count = len(b)
		pack.MsgBytes = pack.MsgBytes[:count]

		// Copy the message bytes into the pack's buffer
		copy(pack.MsgBytes, b)

		if decoding != nil {
			// Send pack onto decoder
			decoding <- pack
		} else {
			// Send pack into Heka pipeline
			ir.Inject(pack)
		}
	}
	/*
		checkStat := time.Tick(ri.statInterval)
		ok := true
		for ok {
			select {
			case _, ok = <-ri.stopChan:
				break
			case <-checkStat:
				p, err = ri.rdconsumer.MultiGet(500)
				if err != nil {
					ir.LogError(err)
					continue
				}
				err = p[len(p)-1].MultiAck()
				if err != nil {
					ir.LogError(err)
				}
				for _, v := range p {
					b = []byte(v.Payload)
					// Grab an empty PipelinePack from the InputRunner
					pack = <-packs

					// Trim the excess empty bytes
					count = len(b)
					pack.MsgBytes = pack.MsgBytes[:count]

					// Copy ws bytes into pack's bytes
					copy(pack.MsgBytes, b)

					if decoding != nil {
						// Send pack onto decoder
						decoding <- pack
					} else {
						// Send pack into Heka pipeline
						ir.Inject(pack)
					}
				}
			}
		}
	*/
	return nil
}
Code example #7
// Run is the main Logstreamer input runner. It kicks off all the other
// logstream inputs and handles rescanning for updates to the filesystem
// that might affect file visibility for the logstream inputs.
func (li *LogstreamerInput) Run(ir p.InputRunner, h p.PluginHelper) (err error) {
	var (
		ok         bool
		dRunner    p.DecoderRunner
		errs       *ls.MultipleError
		newstreams []string
	)

	// Setup the decoder runner that will be used
	if li.decoderName != "" {
		if dRunner, ok = h.DecoderRunner(li.decoderName, fmt.Sprintf("%s-%s",
			li.pluginName, li.decoderName)); !ok {

			return fmt.Errorf("Decoder not found: %s", li.decoderName)
		}
	}

	// Kick off all the current logstreams we know of
	for _, logstream := range li.plugins {
		stop := make(chan chan bool, 1)
		go logstream.Run(ir, h, stop, dRunner)
		li.stopLogstreamChans = append(li.stopLogstreamChans, stop)
	}

	ok = true
	rescan := time.Tick(li.rescanInterval)
	// Our main rescan loop that handles shutting down
	for ok {
		select {
		case <-li.stopChan:
			ok = false
			returnChans := make([]chan bool, len(li.stopLogstreamChans))
			// Send out all the stop signals
			for i, ch := range li.stopLogstreamChans {
				ret := make(chan bool)
				ch <- ret
				returnChans[i] = ret
			}

			// Wait for all the stops
			for _, ch := range returnChans {
				<-ch
			}

			// Close our own stopChan to indicate we shut down
			close(li.stopChan)
		case <-rescan:
			li.logstreamSetLock.Lock()
			newstreams, errs = li.logstreamSet.ScanForLogstreams()
			if errs.IsError() {
				ir.LogError(errs)
			}
			for _, name := range newstreams {
				stream, ok := li.logstreamSet.GetLogstream(name)
				if !ok {
					ir.LogError(fmt.Errorf("Found new logstream: %s, but couldn't fetch it.", name))
					continue
				}

				// Setup a new logstream input for this logstream and start it running
				stParser, parserFunc, _ := CreateParser(li.parser, li.delimiter,
					li.delimiterLocation, li.decoderName)

				lsi := NewLogstreamInput(stream, stParser, parserFunc, name,
					li.hostName)
				li.plugins[name] = lsi
				stop := make(chan chan bool, 1)
				go lsi.Run(ir, h, stop, dRunner)
				li.stopLogstreamChans = append(li.stopLogstreamChans, stop)
			}
			li.logstreamSetLock.Unlock()
		}
	}
	err = nil
	return
}
Code example #8
File: nsq_input.go Project: jbli/jbli_svn
func (ni *NsqInput) Run(ir pipeline.InputRunner, h pipeline.PluginHelper) error {
	// Get the InputRunner's chan to receive empty PipelinePacks
	var pack *pipeline.PipelinePack
	var err error
	var dRunner pipeline.DecoderRunner
	var decoder pipeline.Decoder
	var ok bool
	var e error

	//pos := 0
	//output := make([]*Message, 2)
	packSupply := ir.InChan()

	if ni.conf.Decoder != "" {
		if dRunner, ok = h.DecoderRunner(ni.conf.Decoder); !ok {
			return fmt.Errorf("Decoder not found: %s", ni.conf.Decoder)
		}
		decoder = dRunner.Decoder()
	}

	err = ni.nsqReader.ConnectToLookupd(ni.conf.Address)
	if err != nil {
		ir.LogError(errors.New("ConnectToLookupd failed."))
	}

	header := &message.Header{}

	stopped := false
	//readLoop:
	for !stopped {
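		// Pair an empty pack with the next NSQ message until a stop signal arrives.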
		//stopped = true
		select {
		case <-ni.stopChan:
			ir.LogError(errors.New("get ni.stopChan, set stopped=true"))
			stopped = true
		default:
			pack = <-packSupply
			m, ok1 := <-ni.handler.logChan
			if !ok1 {
				// The log channel was closed; return the unused pack and stop.
				pack.Recycle()
				stopped = true
				break
			}

			if ni.conf.Serialize {
				if dRunner == nil {
					pack.Recycle()
					ir.LogError(errors.New("Serialized messages require a decoder."))
					continue
				}
				//header := &message.Header{}
				_, msgOk := findMessage(m.msg.Body, header, &(pack.MsgBytes))
				if msgOk {
					dRunner.InChan() <- pack
				} else {
					pack.Recycle()
					ir.LogError(errors.New("Can't find Heka message."))
				}
				header.Reset()
			} else {
				//ir.LogError(fmt.Errorf("message body: %s", m.msg.Body))
				pack.Message.SetType("nsq")
				pack.Message.SetPayload(string(m.msg.Body))
				pack.Message.SetTimestamp(time.Now().UnixNano())
				var packs []*pipeline.PipelinePack
				if decoder == nil {
					packs = []*pipeline.PipelinePack{pack}
				} else {
					packs, e = decoder.Decode(pack)
				}
				if packs != nil {
					for _, p := range packs {
						ir.Inject(p)
					}
				} else {
					if e != nil {
						ir.LogError(fmt.Errorf("Couldn't parse Nsq message: %s", m.msg.Body))
					}
					pack.Recycle()
				}
			}
			m.returnChannel <- &nsq.FinishedMessage{m.msg.Id, 0, true}
			/*
			   output[pos] = m
			   pos++
			   if pos == 2 {
			           for pos > 0 {
			                   pos--
			                   m1 := output[pos]
			                   m1.returnChannel <- &nsq.FinishedMessage{m1.msg.Id, 0, true}
			                   output[pos] = nil
			           }
			   }
			*/
		}
	}
	return nil
}