// Main Logstreamer Input runner. This runner kicks off all the other
// logstream inputs, and handles rescanning for updates to the filesystem that
// might affect file visibility for the logstream inputs.
//
// NOTE(review): a second, newer-looking Run definition exists later in this
// file (it threads a DecoderRunner through to each logstream). Two methods
// named Run on the same receiver cannot coexist — confirm which version is
// current and remove the other.
func (li *LogstreamerInput) Run(ir p.InputRunner, h p.PluginHelper) (err error) {
	var (
		ok         bool
		errs       *ls.MultipleError
		newstreams []string
	)

	// Kick off all the current logstreams we know of.
	// i is a running ordinal passed to each logstream input; presumably used
	// for naming/identification inside startLogstreamInput — confirm there.
	i := 0
	for _, logstream := range li.plugins {
		i++
		li.startLogstreamInput(logstream, i, ir, h)
	}

	ok = true
	// NOTE(review): time.Tick never releases its ticker; acceptable only if
	// Run lives for the whole process lifetime — confirm.
	rescan := time.Tick(li.rescanInterval)

	// Our main rescan loop that handles shutting down
	for ok {
		select {
		case <-li.stopChan:
			// Shutdown requested: fan out a stop signal to every logstream
			// goroutine, then wait for each to acknowledge before exiting.
			ok = false
			returnChans := make([]chan bool, len(li.stopLogstreamChans))
			// Send out all the stop signals
			for i, ch := range li.stopLogstreamChans {
				ret := make(chan bool)
				ch <- ret
				returnChans[i] = ret
			}
			// Wait for all the stops
			for _, ch := range returnChans {
				<-ch
			}
			// Close our own stopChan to indicate we shut down
			close(li.stopChan)
		case <-rescan:
			// Periodic filesystem rescan: pick up any logstreams that have
			// appeared since the last scan and start an input for each.
			// The lock guards li.logstreamSet against concurrent access.
			li.logstreamSetLock.Lock()
			newstreams, errs = li.logstreamSet.ScanForLogstreams()
			if errs.IsError() {
				ir.LogError(errs)
			}
			for _, name := range newstreams {
				stream, ok := li.logstreamSet.GetLogstream(name)
				if !ok {
					ir.LogError(fmt.Errorf("Found new logstream: %s, but couldn't fetch it.", name))
					continue
				}
				// NOTE(review): NewLogstreamInput is called here with 3 args
				// but with 5 (parser + parser func) in Init — one of the two
				// call sites is stale; confirm the current signature.
				lsi := NewLogstreamInput(stream, name, li.hostName)
				li.plugins[name] = lsi
				i++
				li.startLogstreamInput(lsi, i, ir, h)
			}
			li.logstreamSetLock.Unlock()
		}
	}
	return nil
}
// Init configures the LogstreamerInput from its TOML config: it prepares the
// journal directory, parses durations, validates the translation maps, builds
// the logstream set, runs an initial scan, and creates one LogstreamInput per
// discovered logstream. Returns a non-nil error on any invalid configuration.
func (li *LogstreamerInput) Init(config interface{}) (err error) {
	var (
		errs    *ls.MultipleError
		oldest  time.Duration
		plugins []string
	)
	conf := config.(*LogstreamerInputConfig)

	// Setup the journal dir
	if err = os.MkdirAll(conf.JournalDirectory, 0744); err != nil {
		return err
	}

	// Anchor the file-match regex at the end so it matches whole filenames.
	if len(conf.FileMatch) > 0 && conf.FileMatch[len(conf.FileMatch)-1:] != "$" {
		conf.FileMatch += "$"
	}

	li.decoderName = conf.Decoder
	li.parser = conf.ParserType
	li.delimiter = conf.Delimiter
	li.delimiterLocation = conf.DelimiterLocation
	li.plugins = make(map[string]*LogstreamInput)

	// Setup the rescan interval
	if li.rescanInterval, err = time.ParseDuration(conf.RescanInterval); err != nil {
		return
	}

	// Parse the oldest duration
	if oldest, err = time.ParseDuration(conf.OldestDuration); err != nil {
		return
	}

	// If no differentiator is present then we use the plugin name.
	if len(conf.Differentiator) == 0 {
		conf.Differentiator = []string{li.pluginName}
	}

	// A single-entry translation map is only meaningful if that entry is the
	// "missing" fallback key; reject anything else early.
	for name, submap := range conf.Translation {
		if len(submap) == 1 {
			if _, ok := submap["missing"]; !ok {
				err = fmt.Errorf("A translation map with one entry ('%s') must be "+
					"specifying a 'missing' key.", name)
				return
			}
		}
	}

	// Create the main sort pattern
	sp := &ls.SortPattern{
		FileMatch:      conf.FileMatch,
		Translation:    conf.Translation,
		Priority:       conf.Priority,
		Differentiator: conf.Differentiator,
	}

	// Create the main logstream set; hold the lock for the remainder of Init
	// since li.logstreamSet is also touched by Run's rescan loop.
	li.logstreamSetLock.Lock()
	defer li.logstreamSetLock.Unlock()
	li.logstreamSet, err = ls.NewLogstreamSet(sp, oldest, conf.LogDirectory, conf.JournalDirectory)
	if err != nil {
		return
	}

	// Initial scan for logstreams
	plugins, errs = li.logstreamSet.ScanForLogstreams()
	if errs.IsError() {
		return errs
	}

	// Verify we can make a parser; the same arguments are reused below, so
	// this single check covers the per-logstream CreateParser calls too.
	if _, _, err = CreateParser(li.parser, li.delimiter, li.delimiterLocation, li.decoderName); err != nil {
		return
	}

	// Declare our hostname
	if conf.Hostname == "" {
		li.hostName, err = os.Hostname()
		if err != nil {
			return
		}
	} else {
		li.hostName = conf.Hostname
	}

	// Create all our initial logstream plugins for the logstreams found.
	// The CreateParser error is deliberately ignored: identical arguments
	// were validated just above.
	for _, name := range plugins {
		stream, ok := li.logstreamSet.GetLogstream(name)
		if !ok {
			continue
		}
		stParser, parserFunc, _ := CreateParser(li.parser, li.delimiter, li.delimiterLocation, li.decoderName)
		li.plugins[name] = NewLogstreamInput(stream, stParser, parserFunc, name, li.hostName)
	}

	// One stop channel per logstream goroutine; Run fills this slice as it
	// launches them.
	li.stopLogstreamChans = make([]chan chan bool, 0, len(plugins))
	li.stopChan = make(chan bool)
	return
}
// Main Logstreamer Input runner // This runner kicks off all the other logstream inputs, and handles rescanning for // updates to the filesystem that might affect file visibility for the logstream // inputs func (li *LogstreamerInput) Run(ir p.InputRunner, h p.PluginHelper) (err error) { var ( ok bool dRunner p.DecoderRunner errs *ls.MultipleError newstreams []string ) // Setup the decoder runner that will be used if li.decoderName != "" { if dRunner, ok = h.DecoderRunner(li.decoderName, fmt.Sprintf("%s-%s", li.pluginName, li.decoderName)); !ok { return fmt.Errorf("Decoder not found: %s", li.decoderName) } } // Kick off all the current logstreams we know of for _, logstream := range li.plugins { stop := make(chan chan bool, 1) go logstream.Run(ir, h, stop, dRunner) li.stopLogstreamChans = append(li.stopLogstreamChans, stop) } ok = true rescan := time.Tick(li.rescanInterval) // Our main rescan loop that handles shutting down for ok { select { case <-li.stopChan: ok = false returnChans := make([]chan bool, len(li.stopLogstreamChans)) // Send out all the stop signals for i, ch := range li.stopLogstreamChans { ret := make(chan bool) ch <- ret returnChans[i] = ret } // Wait for all the stops for _, ch := range returnChans { <-ch } // Close our own stopChan to indicate we shut down close(li.stopChan) case <-rescan: li.logstreamSetLock.Lock() newstreams, errs = li.logstreamSet.ScanForLogstreams() if errs.IsError() { ir.LogError(errs) } for _, name := range newstreams { stream, ok := li.logstreamSet.GetLogstream(name) if !ok { ir.LogError(fmt.Errorf("Found new logstream: %s, but couldn't fetch it.", name)) continue } // Setup a new logstream input for this logstream and start it running stParser, parserFunc, _ := CreateParser(li.parser, li.delimiter, li.delimiterLocation, li.decoderName) lsi := NewLogstreamInput(stream, stParser, parserFunc, name, li.hostName) li.plugins[name] = lsi stop := make(chan chan bool, 1) go lsi.Run(ir, h, stop, dRunner) li.stopLogstreamChans = 
append(li.stopLogstreamChans, stop) } li.logstreamSetLock.Unlock() } } err = nil return }