// main wires up a minimal loges pipeline in three stages:
// output sink, formatter, then a blocking stdin input.
//
// NOTE(review): a second main() appears later in this file — this looks
// like two versions of the program concatenated; confirm and remove one.
func main() {
	flag.Parse()

	// 1. Start Output
	// update the logstash index occasionally
	go loges.UpdateLogstashIndex()
	// start an elasticsearch writer worker, for sending to elasticsearch;
	// the index name "golog" is hard-coded here
	go loges.ToElasticSearch(msgChan, "golog", cleanEsHost(hostname))

	// 2. Format/Filter
	// create our custom formatter for parsing/filtering/manipulating line entries
	// (200 presumably caps some per-entry size or count — TODO confirm in CustomFormatter)
	loges.FormatterSet(CustomFormatter(200))

	// 3. Input: Start our Input and block.
	// NOTE(review): "StdinPruducer" is a typo in the loges package API itself;
	// renaming it would require a change upstream.
	loges.StdinPruducer(msgChan)
}
func main() { flag.Parse() u.SetupLogging(logLevel) u.SetColorIfTerminal() // this doesn't work if reading stdin if colorize { u.SetColorOutput() } done := make(chan bool) esHostName = cleanEsHost(esHostName) // if we have note specified tail files, then assume stdin if len(flag.Args()) == 0 && source == "tail" { source = "stdin" } u.Debugf("LOGES: filters=%s es=%s argct=:%d source=%v ll=%s args=%v", filters, esHostName, len(flag.Args()), source, logLevel, flag.Args()) // Setup output first, to ensure its ready when Source starts // TODO: suuport multiple outputs? switch output { case "elasticsearch": // update the Logstash date for the index occasionally go loges.UpdateLogstashIndex() // start an elasticsearch bulk worker, for sending to elasticsearch go loges.ToElasticSearch(msgChan, "golog", esHostName, ttl, exitIfNoMsgsDur, metricsToEs) case "stdout": u.Debug("setting output to stdout ", colorize) go loges.ToStdout(msgChan, colorize) default: Usage() os.Exit(1) } // TODO: implement metrics out for _, metOut := range strings.Split(metricsOut, ",") { switch metOut { case "influxdb": // todo case "graphite": u.Infof("Registering Graphite Transform: host=%s prefix=%s", graphiteHost, graphitePrefix) loges.TransformRegister(loges.GraphiteTransform(logType, graphiteHost, graphitePrefix, true)) } } // now set up the transforms/filters for _, filter := range strings.Split(filters, ",") { switch filter { case "stdfiles": loges.TransformRegister(loges.FileFormatter(logType, nil)) case "fluentd": loges.TransformRegister(loges.FluentdFormatter(logType, nil)) case "kafka": // TODO, finish conversion to sarama //loges.TransformRegister(kafka.KafkaFormatter) } } for _, sourceInput := range strings.Split(source, ",") { u.Warnf("source = %v", sourceInput) switch sourceInput { case "tail": for _, filename := range flag.Args() { tailDone := make(chan bool) go loges.TailFile(filename, tail.Config{Follow: true, ReOpen: true}, tailDone, msgChan) } case "http": go 
loges.HttpRun(httpPort, msgChan) //case "kafka": // go kafka.RunKafkaConsumer(msgChan, partitionstr, topic, kafkaHost, offset, maxMsgCt, maxSize) case "stdin": go loges.StdinPruducer(msgChan) default: u.Error(sourceInput) println("No input set, required") Usage() os.Exit(1) } } u.Warn("end of main startup, until done") <-done }