// assembleConfigFiles reads the main YAML config file, then merges in any
// *.yaml files found in the adjacent conf.d directory.
func assembleConfigFiles(configFile string) (map[string]interface{}, error) {
	logs.Info("Reading config file: %s", configFile)
	doc, err := yaml.ReadFile(configFile)
	if err != nil {
		return nil, err
	}
	mapping := YamlUnmarshal(doc.Root).(map[string]interface{})

	confDPath := path.Dir(configFile) + "/conf.d/*.yaml"
	logs.Info("Reading config folder: %s", confDPath)
	entries, err := filepath.Glob(confDPath)
	if err != nil {
		logs.Warn("Can't read relevant conf.d: %s", err)
	} else {
		for _, p := range entries {
			logs.Info("Reading config file: %s", p)
			doc, err := yaml.ReadFile(p)
			if err != nil {
				logs.Warn("Can't read relevant conf.d: %s", err)
			} else {
				inner := YamlUnmarshal(doc.Root).(map[string]interface{})
				RecursiveMergeNoConflict(mapping, inner, "")
			}
		}
	}
	return mapping, nil
}

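// RecursiveMergeNoConflict is referenced above but not defined in this
// excerpt. Below is a minimal sketch of the merge it plausibly performs,
// recursing into nested maps and warning instead of overwriting on
// conflicts. The name and behavior here are assumptions; the real
// implementation may differ.
func recursiveMergeNoConflictSketch(dst, src map[string]interface{}, prefix string) {
	for k, v := range src {
		key := prefix + "." + k
		existing, present := dst[k]
		if !present {
			dst[k] = v
			continue
		}
		dstMap, dstOk := existing.(map[string]interface{})
		srcMap, srcOk := v.(map[string]interface{})
		if dstOk && srcOk {
			// Both sides are maps: merge recursively.
			recursiveMergeNoConflictSketch(dstMap, srcMap, key)
		} else {
			// Conflict on a non-map value: keep the original.
			logs.Warn("conflicting key %s in conf.d, keeping original", key)
		}
	}
}
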
// Poll reads whatever is currently available from the tailed file, feeding
// each read to the parser and persisting the offset as it goes. On EOF it
// checks for truncation (file shrank below our offset) and rewinds.
func (tail *Tail) Poll() {
	size := 16384
	buffer := make([]byte, size)
	for {
		n, err := tail.handle.Read(buffer)
		if err == io.EOF {
			fi, err := tail.Stat()
			if err != nil {
				logs.Warn("Can't stat: %s", err)
			} else if fi.Size() < tail.Offset() {
				logs.Warn("File truncated, resetting...")
				tail.SetOffset(0)
				tail.WriteOffset()
				tail.seek()
			}
			return
		} else if err != nil {
			logs.Debug("read error: %s", err)
			return
		} else {
			tail.Parser.Consume(buffer[0:n], &tail.offset)
			tail.WriteOffset()
		}
	}
}

// CreateDestinations instantiates a Destination for each configured
// destination, skipping (with a warning) any that fail to load.
func (config *Config) CreateDestinations() Destinations {
	dests := NewDestinations()
	for _, subConfig := range config.Destinations {
		dest, err := NewDestination(subConfig)
		if err != nil {
			logs.Warn("Can't load destination, continuing... (%s)", err)
			continue
		}
		dests = append(dests, dest)
	}
	return dests
}

// Consume drains records from ch, encoding each one to every destination.
// A nil record (e.g. from a closed channel) ends consumption; finished is
// signaled on exit.
func (dests *Destinations) Consume(ch chan Record, finished chan bool) {
	if len(*dests) == 0 {
		logs.Warn("No destinations specified")
	}
	for {
		rec := <-ch
		if rec == nil {
			break
		}
		for _, dest := range *dests {
			dest.Encoder.Encode(rec, dest.RW)
		}
	}
	logs.Info("Finished consuming log records")
	finished <- true
}

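// A minimal usage sketch for Consume (illustrative; it mirrors the wiring in
// main below): records flow in on a buffered channel, and closing the channel
// yields a nil Record that ends the loop.
//
//	ch := make(chan Record, 100)
//	finished := make(chan bool)
//	go dests.Consume(ch, finished)
//	ch <- rec    // rec is a hypothetical Record value
//	close(ch)    // the resulting nil receive breaks the loop
//	<-finished   // wait for the consumer to drain and exit
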
// Consume appends bytes to the parser's buffer and emits a Record for every
// complete match of the line pattern, advancing the byte counter past each
// consumed match. The buffer is truncated from the front so a single
// unbounded line can't grow it past maxLineSize.
func (parser *RegexpParser) Consume(bytes []byte, counter *int64) {
	parser.buffer = append(parser.buffer, bytes...)
	logs.Debug("consuming %d bytes of %s", len(bytes), parser.file)

	// Enforce the line-size cap by dropping bytes off the front.
	l := len(parser.buffer)
	if l > parser.maxLineSize {
		off := l - parser.maxLineSize
		logs.Debug("chopping %d bytes off buffer (was: %d, max: %d)", off, l, parser.maxLineSize)
		atomic.AddInt64(counter, int64(off))
		parser.buffer = parser.buffer[off:]
	}

	for {
		m := parser.compiled.FindSubmatchIndex(parser.buffer)
		if m == nil {
			logs.Debug("done with %s", parser.file)
			return
		}

		hasher := sha1.New()
		out := make(map[string]Column)

		// Built-in metadata columns.
		out["_offset"] = Column{Integer, Simple, atomic.LoadInt64(counter)}
		out["_file"] = Column{String, Simple, parser.file}
		out["_time"] = Column{Timestamp, Simple, StandardTimeProvider.Now().Unix()}
		out["_group"] = Column{String, Simple, parser.group}
		out["_hostname"] = Column{String, Simple, parser.hostname}

		for _, spec := range parser.fields {
			g := spec.Group
			// m holds index pairs for the whole match plus each group, so
			// valid group numbers are 0..len(m)/2-1.
			if g < 0 || g >= len(m)/2 {
				logs.Error("spec group out of range: alias: %s, name: %s, g: %d", spec.Alias, spec.Name, g)
				panic(-1)
			}
			if m[g*2] == -1 {
				// Optional group that didn't participate in this match.
				continue
			}
			s := string(parser.buffer[m[g*2]:m[g*2+1]])

			switch spec.Type {
			case Timestamp:
				t, err := time.Parse(spec.Format, s)
				if err != nil {
					logs.Warn("date parse error: %s", err)
				} else {
					if t.Year() == 0 {
						// Formats without a year parse as year 0: assume the
						// current year, or last year if that lands in the future.
						now := StandardTimeProvider.Now()
						adjusted := time.Date(now.Year(), t.Month(), t.Day(), t.Hour(), t.Minute(), t.Second(), t.Nanosecond(), t.Location())
						if adjusted.After(now) {
							adjusted = time.Date(now.Year()-1, t.Month(), t.Day(), t.Hour(), t.Minute(), t.Second(), t.Nanosecond(), t.Location())
						}
						t = adjusted
					}
					out[spec.Alias] = Column{Timestamp, spec.Treatment, t.Unix()}
				}
			case String:
				if spec.Treatment == Tokens {
					out[spec.Alias] = Column{Tokens, spec.Treatment, spec.Pattern.FindAllString(s, -1)}
				} else if spec.Treatment == Hash {
					// Salted SHA-1, base64-encoded.
					hasher.Reset()
					hasher.Write([]byte(spec.Salt))
					hasher.Write([]byte(s))
					sha := base64.URLEncoding.EncodeToString(hasher.Sum(nil))
					out[spec.Alias] = Column{Tokens, spec.Treatment, sha}
				} else {
					out[spec.Alias] = Column{String, spec.Treatment, s}
				}
			case Integer:
				n, err := strconv.ParseInt(s, 10, 64)
				if err == nil {
					out[spec.Alias] = Column{spec.Type, spec.Treatment, n}
				}
			case Double:
				n, err := strconv.ParseFloat(s, 64)
				if err == nil {
					out[spec.Alias] = Column{spec.Type, spec.Treatment, n}
				}
			default:
				panic("unknown field type")
			}
		}

		parser.output <- out
		// Advance past the consumed match.
		atomic.AddInt64(counter, int64(m[1]))
		parser.buffer = parser.buffer[m[1]:]
	}
}

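// Column and Record are not defined in this excerpt. Judging from how they
// are constructed above, plausible shapes (assumptions, not the repo's
// actual definitions) are:
//
//	type Record map[string]Column
//
//	type Column struct {
//		Type      FieldType   // Timestamp, String, Integer, Double, Tokens
//		Treatment Treatment   // Simple, Tokens, Hash
//		Value     interface{} // the parsed value
//	}
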
// configFromMapping builds a Config from the merged YAML mapping, validating
// the global, sources, and destinations sections. Invalid sources, fields,
// and destinations are skipped with a warning; missing required sections are
// errors.
func configFromMapping(mapping map[string]interface{}, hostname string) (*Config, error) {
	b, _ := json.Marshal(mapping)
	logs.Debug("mapping: %s", string(b))

	config := new(Config)
	config.Sources = make([]SourceConfig, 0)
	config.Destinations = make([]DestinationConfig, 0)

	global, err := getMap(mapping, "global")
	if err != nil {
		return nil, fmt.Errorf("no global section in the config file")
	}

	config.OffsetDir, err = getString(global, "offset_dir")
	if err != nil {
		logs.Warn("no offset_dir specified")
	}
	config.MaxBackfillBytes, err = getInt64(global, "max_backfill_bytes")
	if err != nil {
		logs.Warn("no max_backfill_bytes, continuing with unlimited")
		config.MaxBackfillBytes = -1
	}
	config.MaxLineSizeBytes, err = getInt64(global, "max_linesize_bytes")
	if err != nil {
		logs.Warn("no max_linesize_bytes, continuing with 32768")
		config.MaxLineSizeBytes = 32768
	}

	sources, err := getMap(mapping, "sources")
	if err != nil {
		return nil, fmt.Errorf("no sources section in the config file")
	}
	for name := range sources {
		src, err := getMap(sources, name)
		if err != nil {
			logs.Warn("Invalid source: %s, continuing...", name)
			continue
		}

		var source SourceConfig
		source.Hostname = hostname
		source.Fields = make([]FieldConfig, 0)
		source.OffsetDir = config.OffsetDir
		source.MaxBackfillBytes = config.MaxBackfillBytes
		source.MaxLineSizeBytes = config.MaxLineSizeBytes
		source.Name = name

		source.Glob, err = getString(src, "glob")
		if err != nil {
			return nil, err
		}
		source.Pattern, err = getString(src, "pattern")
		if err != nil {
			source.Pattern = DefaultPattern
		}
		_, err = regexp.Compile(source.Pattern)
		if err != nil {
			logs.Warn("%s is not a valid regexp, continuing... (%s)", source.Pattern, err)
			continue
		}

		fields, err := getMap(src, "fields")
		if err != nil {
			logs.Warn("no fields specified for source: %s", name)
		}
		for fieldName := range fields {
			fld, err := getMap(fields, fieldName)
			if err != nil {
				logs.Warn("%s is not a map, continuing... (%s)", fieldName, err)
				continue
			}

			var field FieldConfig
			field.Alias = fieldName
			field.Name, err = getString(fld, "name")
			if err != nil {
				field.Name = field.Alias
			}
			field.Group, err = getInt(fld, "group") // defaults to 0 if absent

			s, err := getString(fld, "type")
			if err != nil {
				field.Type = String
			} else {
				field.Type, err = parseFieldType(s)
				if err != nil {
					logs.Warn("Invalid field type: %s, continuing... (error was %s)", s, err)
					continue
				}
			}
			logs.Info("found type %s", field.Type)

			s, err = getString(fld, "treatment")
			if err != nil {
				field.Treatment = Simple
			} else {
				field.Treatment, err = parseFieldTreatment(s)
				if err != nil {
					logs.Warn("Invalid field treatment: %s, continuing... (error was %s)", s, err)
					continue
				}
			}
			logs.Info("found treatment %s", field.Treatment)

			field.Salt, err = getString(fld, "salt")     // optional
			field.Format, err = getString(fld, "format") // optional
			s, err = getString(fld, "pattern")           // optional; "" compiles
			field.Pattern, err = regexp.Compile(s)
			if err != nil {
				logs.Warn("Invalid regex: %s, continuing... (error was %s)", s, err)
				continue
			}
			source.Fields = append(source.Fields, field)
		}
		config.Sources = append(config.Sources, source)
	}

	destinations, err := getMap(mapping, "destinations")
	if err != nil {
		return nil, fmt.Errorf("no destinations section in the config file")
	}
	for name := range destinations {
		var dest DestinationConfig
		urlString, err := getString(destinations, name)
		if err != nil {
			logs.Warn("Invalid destination: %s, continuing... (%s)", name, err)
			continue
		}
		u, err := url.Parse(urlString)
		if err != nil {
			logs.Warn("Invalid URL: %s, continuing... (error was %s)", urlString, err)
			continue
		}
		logs.Info("Found destination: %s", urlString)
		dest.Name = name
		dest.Url = u
		config.Destinations = append(config.Destinations, dest)
	}
	return config, nil
}

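// A YAML sketch of the layout configFromMapping expects. Section and key
// names come from the lookups above; the values are illustrative only:
//
//	global:
//	  offset_dir: /var/lib/dendrite
//	  max_backfill_bytes: 1048576
//	  max_linesize_bytes: 32768
//	sources:
//	  nginx:
//	    glob: /var/log/nginx/access.log
//	    pattern: '(?P<ip>\S+) .*'
//	    fields:
//	      ip:
//	        group: 1
//	        type: string
//	destinations:
//	  primary: tcp://localhost:9200
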
func main() {
	flag.Parse()
	runtime.GOMAXPROCS(*cpus)

	// Set the logger path.
	handle, err := os.OpenFile(*logFile, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0666)
	if err != nil {
		logs.Warn("Unable to open log file %s, using stderr: %s", *logFile, err)
	} else {
		logs.Logger = log.New(handle, "", log.LstdFlags|log.Lshortfile)
	}

	// Check whether we're in debug mode.
	if *debug {
		logs.SetLevel(logs.DEBUG)
		logs.Debug("logging at DEBUG")
	} else {
		logs.SetLevel(logs.INFO)
	}

	if *name == "unknown" {
		*name, err = os.Hostname()
		if err != nil {
			logs.Warn("Unable to determine hostname: %s", err)
		}
	}

	// Read the config files.
	config, err := dendrite.NewConfig(*configFile, *name)
	if err != nil {
		logs.Fatal("Can't read configuration: %s", err)
	}

	// Link up all of the objects.
	ch := make(chan dendrite.Record, 100)
	logs.Debug("record channel: %v", ch)
	dests := config.CreateDestinations()
	groups := config.CreateAllTailGroups(ch)

	// If any of our destinations talk back, log it.
	go func() {
		reader := bufio.NewReader(dests.Reader())
		for {
			str, err := reader.ReadString('\n')
			if err == io.EOF {
				logs.Debug("eof")
				time.Sleep(1 * time.Second)
			} else if err != nil {
				logs.Error("error reading: %s", err)
			} else {
				logs.Info("received: %s", str)
			}
		}
	}()

	// Run the event loop.
	finished := make(chan bool)
	go dests.Consume(ch, finished)
	if *quitAfter >= 0 {
		start := time.Now()
		logs.Debug("starting the poll")
		i := 0
		for {
			i++
			// Rescan the globs for new files every tenth iteration.
			if i%10 == 0 {
				groups.Refresh()
			}
			groups.Poll()
			if time.Since(start) >= time.Duration(*quitAfter*float64(time.Second)) {
				break
			}
		}
	} else {
		logs.Debug("starting the loop")
		groups.Loop()
	}

	logs.Info("Closing...")
	close(ch)
	<-finished
	logs.Info("Goodbye!")
}