// Gather collects stats for every configured file glob and reports
// existence, size, and (optionally) an md5 checksum per matched file.
func (f *FileStat) Gather(acc telegraf.Accumulator) error {
	var errS string
	var err error

	for _, filepath := range f.Files {
		// Get the compiled glob object for this filepath
		g, ok := f.globs[filepath]
		if !ok {
			if g, err = globpath.Compile(filepath); err != nil {
				errS += err.Error() + " "
				continue
			}
			f.globs[filepath] = g
		}

		files := g.Match()
		if len(files) == 0 {
			// No match: emit a single point marking the pattern as absent.
			acc.AddFields("filestat",
				map[string]interface{}{"exists": int64(0)},
				map[string]string{"file": filepath})
			continue
		}

		for fileName, fileInfo := range files {
			tags := map[string]string{
				"file": fileName,
			}
			fields := map[string]interface{}{
				"exists":     int64(1),
				"size_bytes": fileInfo.Size(),
			}

			if f.Md5 {
				md5, err := getMd5(fileName)
				if err != nil {
					errS += err.Error() + " "
				} else {
					fields["md5_sum"] = md5
				}
			}

			acc.AddFields("filestat", fields, tags)
		}
	}

	if errS != "" {
		// errS is not a format string, so avoid fmt.Errorf here.
		return errors.New(errS)
	}
	return nil
}
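// The getMd5 helper called above is not part of this listing. A minimal
// sketch of what it could look like, assuming it streams the file through
// crypto/md5 and returns the hex-encoded digest; the name matches the call
// site, but the body is illustrative, not the plugin's confirmed
// implementation. Relies on the "crypto/md5", "fmt", "io", and "os" packages.
func getMd5(file string) (string, error) {
	of, err := os.Open(file)
	if err != nil {
		return "", err
	}
	defer of.Close()

	hash := md5.New()
	if _, err := io.Copy(hash, of); err != nil {
		// Reading the file failed partway through; surface the error.
		return "", err
	}
	return fmt.Sprintf("%x", hash.Sum(nil)), nil
}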
// Start records the accumulator and spawns one tailer (plus a receiver
// goroutine) for every file matched by the configured globs.
func (t *Tail) Start(acc telegraf.Accumulator) error {
	t.Lock()
	defer t.Unlock()

	t.acc = acc

	var seek tail.SeekInfo
	if !t.FromBeginning {
		// Seek to the end of each file so only newly appended lines are tailed.
		seek.Whence = 2
		seek.Offset = 0
	}

	var errS string
	// Create a "tailer" for each file
	for _, filepath := range t.Files {
		g, err := globpath.Compile(filepath)
		if err != nil {
			log.Printf("E! Error Glob %s failed to compile, %s", filepath, err)
			// Skip this pattern; g is nil and must not be used below.
			continue
		}
		for file := range g.Match() {
			tailer, err := tail.TailFile(file,
				tail.Config{
					ReOpen:    true,
					Follow:    true,
					Location:  &seek,
					MustExist: true,
				})
			if err != nil {
				errS += err.Error() + " "
				continue
			}
			// create a goroutine for each "tailer"
			t.wg.Add(1)
			go t.receiver(tailer)
			t.tailers = append(t.tailers, tailer)
		}
	}

	if errS != "" {
		return errors.New(errS)
	}
	return nil
}
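// The receiver goroutine launched above is not part of this excerpt. A
// minimal sketch of what it could do, assuming the plugin holds a line
// parser in a t.parser field with a ParseLine(string) (telegraf.Metric, error)
// method; those names are assumptions for illustration, not confirmed by the
// listing. Relies on the "log" package already used above.
func (t *Tail) receiver(tailer *tail.Tail) {
	defer t.wg.Done()

	// tailer.Lines delivers one *tail.Line per line appended to the file.
	for line := range tailer.Lines {
		if line.Err != nil {
			log.Printf("E! Error tailing file %s, Error: %s",
				tailer.Filename, line.Err)
			continue
		}
		m, err := t.parser.ParseLine(line.Text)
		if err != nil {
			log.Printf("E! Malformed log line in %s: [%s], Error: %s",
				tailer.Filename, line.Text, err)
			continue
		}
		t.acc.AddFields(m.Name(), m.Fields(), m.Tags(), m.Time())
	}
}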
// Start discovers the configured LogParser implementations via reflection,
// compiles their patterns, and starts one tailer per matched file plus a
// single parser goroutine that consumes the tailed lines.
func (l *LogParserPlugin) Start(acc telegraf.Accumulator) error {
	l.Lock()
	defer l.Unlock()

	l.acc = acc
	l.lines = make(chan string, 1000)
	l.done = make(chan struct{})

	// Look for fields which implement the LogParser interface
	l.parsers = []LogParser{}
	s := reflect.ValueOf(l).Elem()
	for i := 0; i < s.NumField(); i++ {
		f := s.Field(i)

		if !f.CanInterface() {
			continue
		}

		if lpPlugin, ok := f.Interface().(LogParser); ok {
			// The field satisfies the interface but may still be a nil pointer.
			if reflect.ValueOf(lpPlugin).IsNil() {
				continue
			}
			l.parsers = append(l.parsers, lpPlugin)
		}
	}

	if len(l.parsers) == 0 {
		return fmt.Errorf("logparser input plugin: no parser defined")
	}

	// compile log parser patterns
	for _, parser := range l.parsers {
		if err := parser.Compile(); err != nil {
			return err
		}
	}

	var seek tail.SeekInfo
	if !l.FromBeginning {
		// Seek to the end of each file so only newly appended lines are parsed.
		seek.Whence = 2
		seek.Offset = 0
	}

	l.wg.Add(1)
	go l.parser()

	var errS string
	// Create a "tailer" for each file
	for _, filepath := range l.Files {
		g, err := globpath.Compile(filepath)
		if err != nil {
			log.Printf("ERROR Glob %s failed to compile, %s", filepath, err)
			// Skip this pattern; g is nil and must not be used below.
			continue
		}
		for file := range g.Match() {
			tailer, err := tail.TailFile(file,
				tail.Config{
					ReOpen:   true,
					Follow:   true,
					Location: &seek,
				})
			if err != nil {
				errS += err.Error() + " "
				continue
			}
			// create a goroutine for each "tailer"
			l.wg.Add(1)
			go l.receiver(tailer)
			l.tailers = append(l.tailers, tailer)
		}
	}

	if errS != "" {
		return errors.New(errS)
	}
	return nil
}
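// Neither the receiver nor the parser goroutine appears in this excerpt. A
// minimal sketch of the parser loop started above, assuming it drains
// l.lines until l.done is closed and offers each line to every configured
// LogParser; the ParseLine signature and nil-metric convention for
// non-matching lines are assumptions for illustration. Relies on the "log"
// package already used above.
func (l *LogParserPlugin) parser() {
	defer l.wg.Done()

	for {
		select {
		case <-l.done:
			// Stop was requested; exit the goroutine.
			return
		case line := <-l.lines:
			for _, parser := range l.parsers {
				m, err := parser.ParseLine(line)
				if err != nil {
					log.Printf("ERROR parsing log line: %s", err)
					continue
				}
				// Assumed convention: a nil metric means the line did not
				// match this parser's pattern.
				if m != nil {
					l.acc.AddFields(m.Name(), m.Fields(), m.Tags(), m.Time())
				}
			}
		}
	}
}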