Example #1
func (group *TailGroup) Refresh() {
	d, _ := os.Getwd()
	logs.Debug("pwd:", d)
	matches, err := filepath.Glob(group.Glob)
	if err != nil {
		logs.Debug("Error in glob: ", err)
	} else if matches == nil {
		logs.Debug("Glob matched zero files: ", group.Glob)
	} else {
		logs.Debug("Glob matched %d files: %s", len(matches), group.Glob)
		for _, match := range matches {
			info, err := os.Stat(match)
			if err != nil {
				logs.Debug("Can't stat: ", err)
			} else if info.IsDir() {
				logs.Debug("Ignoring directory: ", match)
			} else {
				if time.Since(info.ModTime()).Hours() >= 1 {
					logs.Debug("Ignoring idle file: ", match)
					group.deactivate(match)
				} else {
					logs.Debug("Tailing: ", match)
					group.activate(match)
				}
			}
		}
	}
}
Example #2
File: tail.go Project: srid/dendrite
func (tail *Tail) Poll() {
	size := 16384
	buffer := make([]byte, size)
	for {
		n, err := tail.handle.Read(buffer)
		if err == io.EOF {
			fi, err := tail.Stat()
			if err != nil {
				logs.Warn("Can't stat %s", err)
			} else if fi.Size() < tail.Offset() {
				logs.Warn("File truncated, resetting...")
				tail.SetOffset(0)
				tail.WriteOffset()
				tail.seek()
			}
			return
		} else if err != nil {
			logs.Debug("read error: ", err)
			return
		} else {
			tail.Parser.Consume(buffer[0:n], &tail.offset)
			tail.WriteOffset()
		}
	}
}
Example #3
File: tail.go Project: willf/dendrite
func NewTail(parser Parser, maxBackfill int64, path string, offsetPath string, offset int64) *Tail {
	tail := new(Tail)
	tail.Path = path
	tail.offset = offset
	tail.OffsetPath = offsetPath
	tail.Parser = parser
	tracker := watch.NewInotifyTracker()
	w, err := tracker.NewWatcher()
	if err != nil {
		return nil
	}
	tail.Watcher = watch.NewInotifyFileWatcher(path, w)
	tail.LoadOffset()
	tail.maxBackfill = maxBackfill

	handle, err := os.Open(path)
	if err != nil {
		logs.Debug("Can't open file: ", path)
		return nil
	} else {
		tail.handle = handle
	}
	tail.seek()
	return tail
}
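
Usage note: NewTail wires a Parser to one log file, restores any saved offset, and returns nil if the watcher or the file cannot be opened. The fragment below is a usage sketch, not part of the project source: it assumes parser is an existing Parser value (for example the result of NewRegexpParser in Example #5), the file and offset paths are hypothetical placeholders, and Poll is the method shown in Example #2.

// Usage sketch (hypothetical paths; parser assumed to exist).
tail := NewTail(parser, 1048576, "/var/log/app.log", "/var/lib/dendrite/app.offset", 0)
if tail == nil {
	logs.Fatal("could not start tailing /var/log/app.log")
}
for {
	tail.Poll()
	time.Sleep(500 * time.Millisecond)
}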
Example #4
File: tail.go Project: srid/dendrite
func (tail *Tail) WriteOffset() {
	tmpPath := path.Join(os.TempDir(), path.Base(tail.OffsetPath))
	temp, err := os.Create(tmpPath)
	if err != nil {
		logs.Debug("Can't create tempfile:", err)
	} else {
		_, err := fmt.Fprintf(temp, "%d\n", tail.Offset())
		if err != nil {
			logs.Debug("Can't write to tempfile:", err)
			temp.Close()
		} else {
			temp.Close()
			err := os.Rename(tmpPath, tail.OffsetPath)
			if err != nil {
				logs.Debug("Rename failed:", err)
			}
		}
	}
}
Example #5
File: parser.go Project: samacs/dendrite
func NewRegexpParser(hostname string, group string, file string, output chan Record, pattern string, fields []FieldConfig, maxLineSize int64) Parser {
	parser := new(RegexpParser)
	parser.maxLineSize = int(maxLineSize)
	parser.hostname = hostname
	parser.file = file
	parser.group = group
	parser.output = output
	parser.buffer = make([]byte, 0)
	re, err := regexp.Compile(pattern)
	if err != nil {
		panic(err)
	} else {
		parser.compiled = re
		for i, name := range re.SubexpNames() {
			if name != "" {
				found := false
				for n, spec := range fields {
					if spec.Name == "" {
						spec.Name = spec.Alias
					}
					if name == spec.Name {
						found = true
						fields[n].Group = i
						logs.Debug("setting group alias: %s, name: %s, group: %d", spec.Alias, spec.Name, spec.Group)
					}
				}
				if !found {
					var spec FieldConfig
					spec.Group = i
					spec.Alias = name
					spec.Type = String
					fields = append(fields, spec)
				}
			}
		}
	}
	parser.fields = fields
	for _, f := range parser.fields {
		logs.Debug("p.f: alias: %s, name: %s, group: %d, type: %d", f.Alias, f.Name, f.Group, f.Type)
	}
	return parser
}
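
Usage note: NewRegexpParser matches the regexp's named capture groups against the configured fields and appends a plain String field for any named group that is not declared. The fragment below is only an illustrative sketch: the hostname, group, file label, pattern and input line are hypothetical, and it assumes Record is the map type sent on the output channel in Example #7 and that auto-added fields take the plain string path in Consume.

// Illustrative sketch: build a parser from a named-group pattern and feed it one line.
output := make(chan Record, 1)
parser := NewRegexpParser("web-01", "nginx", "access.log", output,
	`(?P<verb>\w+) (?P<path>\S+)`, []FieldConfig{}, 32768)

var offset int64
parser.Consume([]byte("GET /index.html\n"), &offset)
record := <-output
logs.Debug("parsed record: %v (offset now %d)", record, offset)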
Example #6
File: tail.go Project: srid/dendrite
func (tail *Tail) LoadOffset() {
	file, err := os.Open(tail.OffsetPath)
	if err != nil {
		tail.WriteOffset()
	} else {
		reader := bufio.NewReader(file)
		str, err := reader.ReadString('\n')
		if err != nil {
			logs.Debug("Malformed offset file: ", err)
		} else {
			out, err := strconv.ParseInt(strings.TrimSpace(str), 10, 64)
			if err != nil {
				logs.Debug("Malformed offset file: ", err)
			} else {
				logs.Debug("Found offset: %d", out)
				tail.SetOffset(out)
			}
		}
		file.Close()
	}
}
Example #7
File: parser.go Project: samacs/dendrite
func (parser *RegexpParser) Consume(bytes []byte, counter *int64) {
	parser.buffer = append(parser.buffer, bytes...)
	logs.Debug("consuming %d bytes of %s", len(bytes), parser.file)
	l := len(parser.buffer)
	if l > parser.maxLineSize {
		off := l - parser.maxLineSize
		logs.Debug("chopping %d bytes off buffer (was: %d, max: %d)", off, l, parser.maxLineSize)
		atomic.AddInt64(counter, int64(off))
		parser.buffer = parser.buffer[off:]
	}
	for {
		m := parser.compiled.FindSubmatchIndex(parser.buffer)
		if m == nil {
			return
		}

		hasher := sha1.New()

		out := make(map[string]Column)
		out["_offset"] = Column{Integer, Simple, atomic.LoadInt64(counter)}
		out["_file"] = Column{String, Simple, parser.file}
		out["_time"] = Column{Timestamp, Simple, StandardTimeProvider.Now().Unix()}
		out["_group"] = Column{String, Simple, parser.group}
		out["_hostname"] = Column{String, Simple, parser.hostname}
		for _, spec := range parser.fields {
			g := spec.Group
			if g < 0 || g >= len(m)/2 {
				logs.Error("spec group out of range: alias: %s, name: %s, g: %d", spec.Alias, spec.Name, g)
				panic(-1)
			}
			if m[g*2] == -1 {
				continue
			}
			s := string(parser.buffer[m[g*2]:m[g*2+1]])
			switch spec.Type {
			case Timestamp:
				t, err := time.Parse(spec.Format, s)
				if err != nil {
					logs.Warn("date parse error: %s", err)
				} else {
					if t.Year() == 0 {
						now := StandardTimeProvider.Now()
						adjusted := time.Date(now.Year(), t.Month(), t.Day(), t.Hour(), t.Minute(), t.Second(), t.Nanosecond(), t.Location())
						if adjusted.After(now) {
							adjusted = time.Date(now.Year()-1, t.Month(), t.Day(), t.Hour(), t.Minute(), t.Second(), t.Nanosecond(), t.Location())
						}
						t = adjusted
					}
					out[spec.Alias] = Column{Timestamp, spec.Treatment, t.Unix()}
				}
			case String:
				if spec.Treatment == Tokens {
					out[spec.Alias] = Column{Tokens, spec.Treatment, spec.Pattern.FindAllString(s, -1)}
				} else if spec.Treatment == Hash {
					hasher.Reset()
					hasher.Write([]byte(spec.Salt))
					hasher.Write([]byte(s))
					sha := base64.URLEncoding.EncodeToString(hasher.Sum(nil))
					out[spec.Alias] = Column{Tokens, spec.Treatment, sha}
				} else {
					out[spec.Alias] = Column{String, spec.Treatment, s}
				}
			case Integer:
				n, err := strconv.ParseInt(s, 10, 64)
				if err == nil {
					out[spec.Alias] = Column{spec.Type, spec.Treatment, n}
				}
			case Double:
				n, err := strconv.ParseFloat(s, 64)
				if err == nil {
					out[spec.Alias] = Column{spec.Type, spec.Treatment, n}
				}

			default:
				panic(nil)
			}
		}
		parser.output <- out
		atomic.AddInt64(counter, int64(m[1]))

		parser.buffer = parser.buffer[m[1]:]
	}
	logs.Debug("done with %s", parser.file)
}
Example #8
File: config.go Project: Benoss/dendrite
func configFromMapping(mapping map[string]interface{}, hostname string) (*Config, error) {
	b, _ := json.Marshal(mapping)
	logs.Debug("mapping: %s", string(b))
	var err error
	config := new(Config)
	config.Sources = make([]SourceConfig, 0)
	config.Destinations = make([]DestinationConfig, 0)

	global, err := getMap(mapping, "global")
	if err != nil {
		return nil, fmt.Errorf("no global section in the config file")
	}

	config.OffsetDir, err = getString(global, "offset_dir")
	if err != nil {
		logs.Warn("no offset_dir specified")
		config.MaxBackfillBytes = -1
	}

	config.MaxBackfillBytes, err = getInt64(global, "max_backfill_bytes")
	if err != nil {
		logs.Warn("no max_backfill_bytes, continuing with unlimited")
		config.MaxBackfillBytes = -1
	}

	config.MaxLineSizeBytes, err = getInt64(global, "max_linesize_bytes")
	if err != nil {
		logs.Warn("no max_linesize_bytes, continuing with 32768")
		config.MaxLineSizeBytes = 32768
	}

	sources, err := getMap(mapping, "sources")
	if err != nil {
		return nil, fmt.Errorf("no sources section in the config file")
	}

	for name := range sources {
		src, err := getMap(sources, name)
		if err != nil {
			logs.Warn("Invalid source: %s, continuing...", name)
			continue
		}

		var source SourceConfig
		source.Hostname = hostname
		source.Fields = make([]FieldConfig, 0)
		source.OffsetDir = config.OffsetDir
		source.MaxBackfillBytes = config.MaxBackfillBytes
		source.MaxLineSizeBytes = config.MaxLineSizeBytes
		source.Name = name
		source.Glob, err = getString(src, "glob")
		if err != nil {
			return nil, err
		}
		source.Pattern, err = getString(src, "pattern")
		if err != nil {
			source.Pattern = DefaultPattern
		}

		_, err = regexp.Compile(source.Pattern)
		if err != nil {
			logs.Warn("%s is not a valid regexp, continuing... (%s)", source.Pattern, err)
			continue
		}

		fields, err := getMap(src, "fields")
		for name := range fields {
			fld, err := getMap(fields, name)
			if err != nil {
				logs.Warn("%s is not a map, continuing... (%s)", name, err)
				continue
			}

			var field FieldConfig
			field.Alias = name

			field.Name, err = getString(fld, "name")
			if err != nil {
				field.Name = field.Alias
			}

			field.Group, err = getInt(fld, "group")

			s, err := getString(fld, "type")
			if err != nil {
				field.Type = String
			} else {
				field.Type, err = parseFieldType(s)
				if err != nil {
					logs.Warn("Invalid field type: %s, continuing... (error was %s)", s, err)
					continue
				}
			}
			logs.Info("found type %s", field.Type)

			s, err = getString(fld, "treatment")
			if err != nil {
				field.Treatment = String
			} else {
				field.Treatment, err = parseFieldTreatment(s)
				if err != nil {
					logs.Warn("Invalid field treatment: %s, continuing... (error was %s)", s, err)
					continue
				}
			}
			logs.Info("found treatment %s", field.Treatment)

			field.Salt, err = getString(fld, "salt")

			field.Format, err = getString(fld, "format")

			s, err = getString(fld, "pattern")
			field.Pattern, err = regexp.Compile(s)
			if err != nil {
				logs.Warn("Invalid regex: %s, continuing... (error was %s)", s, err)
				continue
			}
			source.Fields = append(source.Fields, field)
		}
		config.Sources = append(config.Sources, source)
	}

	destinations, err := getMap(mapping, "destinations")
	if err != nil {
		return nil, fmt.Errorf("no destinations section in the config file")
	}

	for name := range destinations {
		var dest DestinationConfig
		urlString, err := getString(destinations, name)
		u, err := url.Parse(urlString)
		if err != nil {
			logs.Warn("Invalid URL: %s, continuing... (error was %s)", urlString, err)
			continue
		}
		logs.Info("Found destination: %s", urlString)
		dest.Name = name
		dest.Url = u
		config.Destinations = append(config.Destinations, dest)
	}

	return config, nil
}
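
Usage note: configFromMapping only insists on the global, sources and destinations sections, and each source only needs a glob; pattern falls back to DefaultPattern and the byte limits fall back to their defaults. The mapping below is a hypothetical minimal example, assuming the decoded configuration arrives as nested map[string]interface{} values; the paths, source name and destination URL are placeholders.

// Hypothetical minimal mapping; key names follow the lookups in configFromMapping.
mapping := map[string]interface{}{
	"global": map[string]interface{}{
		"offset_dir": "/var/lib/dendrite/offsets",
	},
	"sources": map[string]interface{}{
		"app": map[string]interface{}{
			"glob": "/var/log/app/*.log",
		},
	},
	"destinations": map[string]interface{}{
		"primary": "tcp://localhost:5000",
	},
}
config, err := configFromMapping(mapping, "web-01")
if err != nil {
	logs.Fatal("config error: %s", err)
}
logs.Info("loaded %d sources", len(config.Sources))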
Example #9
func main() {
	flag.Parse()
	runtime.GOMAXPROCS(*cpus)

	// set the logger path
	handle, err := os.OpenFile(*logFile, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0666)
	if err != nil {
		logs.Warn("Unable to open log file %s, using stderr: %s", *logFile, err)
	} else {
		logs.Logger = log.New(handle, "", log.LstdFlags|log.Lshortfile)
	}

	// Check whether we're in debug mode
	if *debug {
		logs.SetLevel(logs.DEBUG)
		logs.Debug("logging at DEBUG")
	} else {
		logs.SetLevel(logs.INFO)
	}

	if *name == "unknown" {
		*name, err = os.Hostname()
		if err != nil {
			logs.Warn("Unable to determine hostname: %s", err)
		}
	}

	// Read the config files
	config, err := dendrite.NewConfig(*configFile, *name)
	if err != nil {
		logs.Fatal("Can't read configuration: %s", err)
	}

	// Link up all of the objects
	ch := make(chan dendrite.Record, 100)
	logs.Debug("original %s", ch)
	dests := config.CreateDestinations()
	groups := config.CreateAllTailGroups(ch)

	// If any of our destinations talk back, log it.
	go func() {
		reader := bufio.NewReader(dests.Reader())
		for {
			str, err := reader.ReadString('\n')
			if err == io.EOF {
				logs.Debug("eof")
				time.Sleep(1 * time.Second)
			} else if err != nil {
				logs.Error("error reading: %s", err)
			} else {
				logs.Info("received: %s", str)
			}
		}
	}()

	// Do the event loop
	finished := make(chan bool, 0)
	go dests.Consume(ch, finished)
	if *quitAfter >= 0 {
		start := time.Now()
		logs.Debug("starting the poll")
		i := 0
		for {
			i++
			if i%10 == 0 {
				groups.Refresh()
			}
			groups.Poll()
			if time.Since(start) >= time.Duration((*quitAfter)*float64(time.Second)) {
				break
			}
		}
	} else {
		logs.Debug("starting the loop")
		groups.Loop()
	}
	logs.Info("Closing...")
	close(ch)
	<-finished
	logs.Info("Goodbye!")
}