// convertMessageToValues reads a Heka Message and returns a slice of field values
func (po *PostgresOutput) convertMessageToValues(m *message.Message, insertFields []string) (fieldValues []interface{}, err error) {
	fieldValues = []interface{}{}
	missingFields := []string{}
	for _, field := range insertFields {
		// Special case: get "Timestamp" from Heka message
		if field == "Timestamp" {
			// Convert Heka time (Unix timestamp in nanoseconds) to Golang time
			v := time.Unix(0, m.GetTimestamp())
			fieldValues = append(fieldValues, v)
		} else {
			v, ok := m.GetFieldValue(field)
			if !ok {
				// If configured to do so, write NULL when a FieldValue isn't found in the Heka message
				if po.allowMissingMessageFields {
					v = nil
				} else {
					missingFields = append(missingFields, field)
					continue
				}
			}
			fieldValues = append(fieldValues, v)
		}
	}

	if len(missingFields) > 0 {
		return []interface{}{}, fmt.Errorf("message is missing expected fields: %s", strings.Join(missingFields, ", "))
	}

	return fieldValues, nil
}
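The values returned by convertMessageToValues line up one-to-one with insertFields, so they can be handed straight to a parameterized INSERT. Below is a minimal sketch of that usage with database/sql; the table name, PostgreSQL-style placeholders, and the buildInsertQuery and insertMessage helpers are illustrative assumptions, not part of the plugin shown above.

import (
	"database/sql"
	"fmt"
	"strings"
)

// buildInsertQuery is a hypothetical helper: it renders an INSERT statement
// with one PostgreSQL-style placeholder ($1, $2, ...) per configured field.
func buildInsertQuery(table string, insertFields []string) string {
	placeholders := make([]string, len(insertFields))
	for i := range insertFields {
		placeholders[i] = fmt.Sprintf("$%d", i+1)
	}
	return fmt.Sprintf("INSERT INTO %s (%s) VALUES (%s)",
		table, strings.Join(insertFields, ", "), strings.Join(placeholders, ", "))
}

// insertMessage shows how the converted field values might be written out;
// db is an already-opened *sql.DB whose schema is assumed to match insertFields.
func insertMessage(db *sql.DB, table string, insertFields []string, fieldValues []interface{}) error {
	_, err := db.Exec(buildInsertQuery(table, insertFields), fieldValues...)
	return err
}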
Example #2
func (self *LogOutput) Run(or OutputRunner, h PluginHelper) (err error) {
	inChan := or.InChan()

	var (
		pack *PipelinePack
		msg  *message.Message
	)
	for plc := range inChan {
		pack = plc.Pack
		msg = pack.Message
		if self.payloadOnly {
			log.Printf("%s", msg.GetPayload())
		} else {
			log.Printf("<\n\tTimestamp: %s\n"+
				"\tType: %s\n"+
				"\tHostname: %s\n"+
				"\tPid: %d\n"+
				"\tUUID: %s\n"+
				"\tLogger: %s\n"+
				"\tPayload: %s\n"+
				"\tEnvVersion: %s\n"+
				"\tSeverity: %d\n"+
				"\tFields: %+v\n"+
				"\tCaptures: %v\n>\n",
				time.Unix(0, msg.GetTimestamp()), msg.GetType(),
				msg.GetHostname(), msg.GetPid(), msg.GetUuidString(),
				msg.GetLogger(), msg.GetPayload(), msg.GetEnvVersion(),
				msg.GetSeverity(), msg.Fields, plc.Captures)
		}
		pack.Recycle()
	}
	return
}
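Both examples above rely on Heka timestamps being Unix nanoseconds, so time.Unix(0, ns) recovers a time.Time. A standalone sketch (the timestamp value is illustrative):

package main

import (
	"fmt"
	"time"
)

func main() {
	var ns int64 = 1410823460 * 1e9 // nanoseconds since the epoch (example value)
	t := time.Unix(0, ns)           // local time; use .UTC() for a stable representation
	fmt.Println(t.UTC().Format("2006-01-02T15:04:05.000Z"))
}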
Example #3
func (c *CleanMessageFormatter) Format(m *message.Message) (doc []byte, err error) {
	buf := bytes.Buffer{}
	buf.WriteString(`{`)
	// Iterate over the fields configured for clean formatting
	for _, f := range c.fields {
		switch strings.ToLower(f) {
		case "uuid":
			writeField(&buf, f, strconv.Quote(m.GetUuidString()))
		case "timestamp":
			t := time.Unix(0, m.GetTimestamp()).UTC()
			writeField(&buf, f, strconv.Quote(t.Format(c.timestampFormat)))
		case "type":
			writeField(&buf, f, strconv.Quote(m.GetType()))
		case "logger":
			writeField(&buf, f, strconv.Quote(m.GetLogger()))
		case "severity":
			writeField(&buf, f, strconv.Itoa(int(m.GetSeverity())))
		case "payload":
			if utf8.ValidString(m.GetPayload()) {
				writeField(&buf, f, strconv.Quote(m.GetPayload()))
			}
		case "envversion":
			writeField(&buf, f, strconv.Quote(m.GetEnvVersion()))
		case "pid":
			writeField(&buf, f, strconv.Itoa(int(m.GetPid())))
		case "hostname":
			writeField(&buf, f, strconv.Quote(m.GetHostname()))
		case "fields":
			for _, field := range m.Fields {
				switch field.GetValueType() {
				case message.Field_STRING:
					writeField(&buf, *field.Name, strconv.Quote(field.GetValue().(string)))
				case message.Field_BYTES:
					data := field.GetValue().([]byte)[:]
					writeField(&buf, *field.Name, strconv.Quote(base64.StdEncoding.EncodeToString(data)))
				case message.Field_INTEGER:
					writeField(&buf, *field.Name, strconv.FormatInt(field.GetValue().(int64), 10))
				case message.Field_DOUBLE:
					writeField(&buf, *field.Name, strconv.FormatFloat(field.GetValue().(float64),
						'g', -1, 64))
				case message.Field_BOOL:
					writeField(&buf, *field.Name, strconv.FormatBool(field.GetValue().(bool)))
				}
			}
		default:
			// The requested field is not a recognized message attribute.
			err = fmt.Errorf("Unable to find field: %s", f)
			return
		}
	}
	buf.WriteString(`}`)
	doc = buf.Bytes()
	return
}
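The writeField helper is not included in this listing. A minimal sketch consistent with how the formatter above calls it, assuming the helper handles comma separation itself and that the value argument is already JSON-encoded by the callers:

import (
	"bytes"
	"strconv"
)

// writeField appends a "name":value pair to the buffer, prefixing it with a
// comma whenever the buffer already holds more than the opening brace.
func writeField(b *bytes.Buffer, name, value string) {
	if b.Len() > 1 { // something beyond "{" has been written already
		b.WriteString(`,`)
	}
	b.WriteString(strconv.Quote(name))
	b.WriteString(`:`)
	b.WriteString(value)
}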
Example #4
func (self *DashboardOutput) Run(or OutputRunner, h PluginHelper) (err error) {
	inChan := or.InChan()
	ticker := or.Ticker()

	var (
		ok   = true
		plc  *PipelineCapture
		pack *PipelinePack
		msg  *message.Message
	)
	for ok {
		select {
		case plc, ok = <-inChan:
			if !ok {
				break
			}
			pack = plc.Pack
			msg = pack.Message
			switch msg.GetType() {
			case "heka.all-report":
				fn := path.Join(self.workingDirectory, "heka_report.json")
				overwriteFile(fn, msg.GetPayload())
			case "heka.sandbox-output":
				tmp, ok := msg.GetFieldValue("payload_type")
				if ok {
					if pt, ok := tmp.(string); ok && pt == "cbuf" {
						html := path.Join(self.workingDirectory, msg.GetLogger()+".html")
						_, err := os.Stat(html)
						if err != nil {
							overwriteFile(html, fmt.Sprintf(getCbufTemplate(), msg.GetLogger(), msg.GetLogger()))
						}
						fn := path.Join(self.workingDirectory, msg.GetLogger()+"."+pt)
						overwriteFile(fn, msg.GetPayload())
					}
				}
			case "heka.sandbox-terminated":
				fn := path.Join(self.workingDirectory, self.terminationFile)
				if file, err := os.OpenFile(fn, os.O_WRONLY|os.O_APPEND|os.O_CREATE, 0644); err == nil {
					line := fmt.Sprintf("%d\t%s\t%v\n", msg.GetTimestamp()/1e9, msg.GetLogger(), msg.GetPayload())
					file.WriteString(line)
					file.Close()
				}
			}
			plc.Pack.Recycle()
		case <-ticker:
			go h.PipelineConfig().allReportsMsg()
		}
	}
	return
}
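overwriteFile appears throughout the dashboard examples but is not shown here. A minimal sketch, assuming it simply truncates (or creates) the target file and writes the payload:

import "os"

// overwriteFile replaces the contents of filename with data, creating the
// file if necessary. Error handling is deliberately minimal in this sketch.
func overwriteFile(filename, data string) (err error) {
	var file *os.File
	if file, err = os.OpenFile(filename, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0644); err == nil {
		file.WriteString(data)
		file.Close()
	}
	return
}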
Example #5
func (c *KibanaFormatter) Format(m *message.Message) (doc []byte, err error) {
	buf := bytes.Buffer{}
	buf.WriteString(`{`)

	writeStringField(true, &buf, `@uuid`, m.GetUuidString())
	t := time.Unix(0, m.GetTimestamp()) // time.Unix gives local time back
	writeStringField(false, &buf, `@timestamp`, t.UTC().Format("2006-01-02T15:04:05.000Z"))
	writeStringField(false, &buf, `@type`, m.GetType())
	writeStringField(false, &buf, `@logger`, m.GetLogger())
	writeRawField(false, &buf, `@severity`, strconv.Itoa(int(m.GetSeverity())))
	writeStringField(false, &buf, `@message`, m.GetPayload())
	writeRawField(false, &buf, `@envversion`, strconv.Quote(m.GetEnvVersion()))
	writeRawField(false, &buf, `@pid`, strconv.Itoa(int(m.GetPid())))
	writeStringField(false, &buf, `@source_host`, m.GetHostname())

	buf.WriteString(`,"@fields":{`)
	first := true
	for _, field := range m.Fields {
		switch field.GetValueType() {
		case message.Field_STRING:
			writeStringField(first, &buf, *field.Name, field.GetValue().(string))
			first = false
		case message.Field_BYTES:
			data := field.GetValue().([]byte)[:]
			writeStringField(first, &buf, *field.Name, base64.StdEncoding.EncodeToString(data))
			first = false
		case message.Field_INTEGER:
			writeRawField(first, &buf, *field.Name, strconv.FormatInt(field.GetValue().(int64), 10))
			first = false
		case message.Field_DOUBLE:
			writeRawField(first, &buf, *field.Name, strconv.FormatFloat(field.GetValue().(float64),
				'g', -1, 64))
			first = false
		case message.Field_BOOL:
			writeRawField(first, &buf, *field.Name, strconv.FormatBool(field.GetValue().(bool)))
			first = false
		}
	}
	buf.WriteString(`}`) // end of fields
	buf.WriteString(`}`)
	doc = buf.Bytes()
	return
}
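writeStringField and writeRawField are likewise omitted from the listing. A sketch consistent with the call sites above, where the boolean marks the first field (so no leading comma) and the "raw" variant writes the value verbatim; the actual Heka helpers may escape strings differently:

import (
	"bytes"
	"strconv"
)

// writeRawField appends "name":value with the value emitted as-is.
func writeRawField(first bool, b *bytes.Buffer, name, value string) {
	if !first {
		b.WriteString(`,`)
	}
	b.WriteString(strconv.Quote(name))
	b.WriteString(`:`)
	b.WriteString(value)
}

// writeStringField is the same, but quotes the value first.
func writeStringField(first bool, b *bytes.Buffer, name, value string) {
	writeRawField(first, b, name, strconv.Quote(value))
}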
Example #6
func (self *DashboardOutput) Run(or OutputRunner, h PluginHelper) (err error) {
	inChan := or.InChan()
	ticker := or.Ticker()
	go self.starterFunc(self)

	var (
		ok   = true
		pack *PipelinePack
		msg  *message.Message
	)

	// Maps sandbox names to plugin list items used to generate the
	// sandboxes.json file.
	sandboxes := make(map[string]*DashPluginListItem)
	sbxsLock := new(sync.Mutex)
	reNotWord, _ := regexp.Compile("\\W")
	for ok {
		select {
		case pack, ok = <-inChan:
			if !ok {
				break
			}
			msg = pack.Message
			switch msg.GetType() {
			case "heka.all-report":
				fn := filepath.Join(self.dataDirectory, "heka_report.json")
				overwriteFile(fn, msg.GetPayload())
				sbxsLock.Lock()
				if err := overwritePluginListFile(self.dataDirectory, sandboxes); err != nil {
					or.LogError(fmt.Errorf("Can't write plugin list file to '%s': %s",
						self.dataDirectory, err))
				}
				sbxsLock.Unlock()
			case "heka.sandbox-output":
				tmp, _ := msg.GetFieldValue("payload_type")
				if payloadType, ok := tmp.(string); ok {
					var payloadName, nameExt string
					tmp, _ := msg.GetFieldValue("payload_name")
					if payloadName, ok = tmp.(string); ok {
						nameExt = reNotWord.ReplaceAllString(payloadName, "")
					}
					if len(nameExt) > 64 {
						nameExt = nameExt[:64]
					}
					nameExt = "." + nameExt

					payloadType = reNotWord.ReplaceAllString(payloadType, "")
					filterName := msg.GetLogger()
					fn := filterName + nameExt + "." + payloadType
					ofn := filepath.Join(self.dataDirectory, fn)
					relPath := path.Join(self.relDataPath, fn) // Used for generating HTTP URLs.
					overwriteFile(ofn, msg.GetPayload())
					sbxsLock.Lock()
					if listItem, ok := sandboxes[filterName]; !ok {
						// First time we've seen this sandbox, add it to the set.
						output := &DashPluginOutput{
							Name:     payloadName,
							Filename: relPath,
						}
						sandboxes[filterName] = &DashPluginListItem{
							Name:    filterName,
							Outputs: []*DashPluginOutput{output},
						}
					} else {
						// We've seen the sandbox, see if we already have this output.
						found := false
						for _, output := range listItem.Outputs {
							if output.Name == payloadName {
								found = true
								break
							}
						}
						if !found {
							output := &DashPluginOutput{
								Name:     payloadName,
								Filename: relPath,
							}
							listItem.Outputs = append(listItem.Outputs, output)
						}
					}
					sbxsLock.Unlock()
				}
			case "heka.sandbox-terminated":
				fn := filepath.Join(self.dataDirectory, "heka_sandbox_termination.tsv")
				filterName := msg.GetLogger()
				if file, err := os.OpenFile(fn, os.O_WRONLY|os.O_APPEND|os.O_CREATE, 0644); err == nil {
					var line string
					if _, ok := msg.GetFieldValue("ProcessMessageCount"); !ok {
						line = fmt.Sprintf("%d\t%s\t%v\n", msg.GetTimestamp()/1e9,
							msg.GetLogger(), msg.GetPayload())
					} else {
						pmc, _ := msg.GetFieldValue("ProcessMessageCount")
						pms, _ := msg.GetFieldValue("ProcessMessageSamples")
						pmd, _ := msg.GetFieldValue("ProcessMessageAvgDuration")
						mad, _ := msg.GetFieldValue("MatchAvgDuration")
						fcl, _ := msg.GetFieldValue("FilterChanLength")
						mcl, _ := msg.GetFieldValue("MatchChanLength")
						rcl, _ := msg.GetFieldValue("RouterChanLength")
						line = fmt.Sprintf("%d\t%s\t%v"+
							" ProcessMessageCount:%v"+
							" ProcessMessageSamples:%v"+
							" ProcessMessageAvgDuration:%v"+
							" MatchAvgDuration:%v"+
							" FilterChanLength:%v"+
							" MatchChanLength:%v"+
							" RouterChanLength:%v\n",
							msg.GetTimestamp()/1e9,
							filterName, msg.GetPayload(), pmc, pms, pmd,
							mad, fcl, mcl, rcl)
					}
					file.WriteString(line)
					file.Close()
				}
				sbxsLock.Lock()
				delete(sandboxes, filterName)
				sbxsLock.Unlock()
			}
			pack.Recycle()
		case <-ticker:
			go h.PipelineConfig().AllReportsMsg()
		}
	}
	return
}
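The DashPluginListItem and DashPluginOutput types and the overwritePluginListFile helper are not included above. A hedged sketch of what they might look like, assuming the sandboxes.json file mentioned in the comment is just the plugin list serialized as JSON (the field names and file layout are guesses):

import (
	"encoding/json"
	"io/ioutil"
	"path/filepath"
)

// DashPluginOutput describes one output file produced by a sandbox filter.
type DashPluginOutput struct {
	Name     string
	Filename string
}

// DashPluginListItem groups the outputs belonging to a single sandbox.
type DashPluginListItem struct {
	Name    string
	Outputs []*DashPluginOutput
}

// overwritePluginListFile serializes the sandbox map to sandboxes.json in dir.
func overwritePluginListFile(dir string, sandboxes map[string]*DashPluginListItem) error {
	data, err := json.Marshal(sandboxes)
	if err != nil {
		return err
	}
	return ioutil.WriteFile(filepath.Join(dir, "sandboxes.json"), data, 0644)
}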
Example #7
func (s *SandboxDecoder) SetDecoderRunner(dr pipeline.DecoderRunner) {
	if s.sb != nil {
		return // no-op, already initialized
	}

	s.dRunner = dr
	var original *message.Message

	switch s.sbc.ScriptType {
	case "lua":
		s.sb, s.err = lua.CreateLuaSandbox(s.sbc)
	default:
		s.err = fmt.Errorf("unsupported script type: %s", s.sbc.ScriptType)
	}

	if s.err == nil {
		s.preservationFile = filepath.Join(s.pConfig.Globals.PrependBaseDir(DATA_DIR),
			dr.Name()+DATA_EXT)
		if s.sbc.PreserveData && fileExists(s.preservationFile) {
			s.err = s.sb.Init(s.preservationFile, "decoder")
		} else {
			s.err = s.sb.Init("", "decoder")
		}
	}
	if s.err != nil {
		dr.LogError(s.err)
		if s.sb != nil {
			s.sb.Destroy("")
			s.sb = nil
		}
		s.pConfig.Globals.ShutDown()
		return
	}

	s.sb.InjectMessage(func(payload, payload_type, payload_name string) int {
		if s.pack == nil {
			s.pack = dr.NewPack()
			if original == nil && len(s.packs) > 0 {
				original = s.packs[0].Message // payload injections have the original header data in the first pack
			}
		} else {
			original = nil // processing a new message, clear the old message
		}
		if len(payload_type) == 0 { // heka protobuf message
			if original == nil {
				original = new(message.Message)
				copyMessageHeaders(original, s.pack.Message) // save off the header values since unmarshal will wipe them out
			}
			if nil != proto.Unmarshal([]byte(payload), s.pack.Message) {
				return 1
			}
			if s.tz != time.UTC {
				const layout = "2006-01-02T15:04:05.999999999" // remove the incorrect UTC tz info
				t := time.Unix(0, s.pack.Message.GetTimestamp())
				t = t.In(time.UTC)
				ct, _ := time.ParseInLocation(layout, t.Format(layout), s.tz)
				s.pack.Message.SetTimestamp(ct.UnixNano())
			}
		} else {
			s.pack.Message.SetPayload(payload)
			ptype, _ := message.NewField("payload_type", payload_type, "file-extension")
			s.pack.Message.AddField(ptype)
			pname, _ := message.NewField("payload_name", payload_name, "")
			s.pack.Message.AddField(pname)
		}
		if original != nil {
			// if future injections fail to set the standard headers, use the values
			// from the original message.
			if s.pack.Message.Uuid == nil {
				s.pack.Message.SetUuid(original.GetUuid())
			}
			if s.pack.Message.Timestamp == nil {
				s.pack.Message.SetTimestamp(original.GetTimestamp())
			}
			if s.pack.Message.Type == nil {
				s.pack.Message.SetType(original.GetType())
			}
			if s.pack.Message.Hostname == nil {
				s.pack.Message.SetHostname(original.GetHostname())
			}
			if s.pack.Message.Logger == nil {
				s.pack.Message.SetLogger(original.GetLogger())
			}
			if s.pack.Message.Severity == nil {
				s.pack.Message.SetSeverity(original.GetSeverity())
			}
			if s.pack.Message.Pid == nil {
				s.pack.Message.SetPid(original.GetPid())
			}
		}
		s.packs = append(s.packs, s.pack)
		s.pack = nil
		return 0
	})
}
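copyMessageHeaders is referenced but not shown. A minimal sketch (assuming the same message package import as the decoder above), copying only the standard header fields that the injection callback later restores one by one:

// copyMessageHeaders saves the standard header values from src into dst so
// they survive the proto.Unmarshal that overwrites s.pack.Message.
func copyMessageHeaders(dst, src *message.Message) {
	if dst == nil || src == nil {
		return
	}
	dst.SetUuid(src.GetUuid())
	dst.SetTimestamp(src.GetTimestamp())
	dst.SetType(src.GetType())
	dst.SetHostname(src.GetHostname())
	dst.SetLogger(src.GetLogger())
	dst.SetSeverity(src.GetSeverity())
	dst.SetPid(src.GetPid())
}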
Example #8
func main() {
	flagMatch := flag.String("match", "TRUE", "message_matcher filter expression")
	flagFormat := flag.String("format", "txt", "output format [txt|json|heka|count]")
	flagOutput := flag.String("output", "", "output filename, defaults to stdout")
	flagTail := flag.Bool("tail", false, "don't exit on EOF")
	flagOffset := flag.Int64("offset", 0, "starting offset for the input file in bytes")
	flagMaxMessageSize := flag.Uint64("max-message-size", 4*1024*1024, "maximum message size in bytes")
	flag.Parse()

	if flag.NArg() != 1 {
		flag.PrintDefaults()
		os.Exit(1)
	}

	if *flagMaxMessageSize < math.MaxUint32 {
		maxSize := uint32(*flagMaxMessageSize)
		message.SetMaxMessageSize(maxSize)
	} else {
		fmt.Printf("Message size is too large: %d\n", flagMaxMessageSize)
		os.Exit(8)
	}

	var err error
	var match *message.MatcherSpecification
	if match, err = message.CreateMatcherSpecification(*flagMatch); err != nil {
		fmt.Printf("Match specification - %s\n", err)
		os.Exit(2)
	}

	var file *os.File
	if file, err = os.Open(flag.Arg(0)); err != nil {
		fmt.Printf("%s\n", err)
		os.Exit(3)
	}
	defer file.Close()

	var out *os.File
	if "" == *flagOutput {
		out = os.Stdout
	} else {
		if out, err = os.OpenFile(*flagOutput, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0644); err != nil {
			fmt.Printf("%s\n", err)
			os.Exit(4)
		}
		defer out.Close()
	}

	var offset int64
	if offset, err = file.Seek(*flagOffset, 0); err != nil {
		fmt.Printf("%s\n", err)
		os.Exit(5)
	}

	sRunner, err := makeSplitterRunner()
	if err != nil {
		fmt.Println(err)
		os.Exit(7)
	}
	msg := new(message.Message)
	var processed, matched int64

	fmt.Printf("Input:%s  Offset:%d  Match:%s  Format:%s  Tail:%t  Output:%s\n",
		flag.Arg(0), *flagOffset, *flagMatch, *flagFormat, *flagTail, *flagOutput)
	for {
		n, record, err := sRunner.GetRecordFromStream(file)
		if n > 0 && n != len(record) {
			fmt.Printf("Corruption detected at offset: %d bytes: %d\n", offset, n-len(record))
		}
		if err != nil {
			if err == io.EOF {
				if !*flagTail || "count" == *flagFormat {
					break
				}
				time.Sleep(time.Duration(500) * time.Millisecond)
			} else {
				break
			}
		} else {
			if len(record) > 0 {
				processed += 1
				headerLen := int(record[1]) + message.HEADER_FRAMING_SIZE
				if err = proto.Unmarshal(record[headerLen:], msg); err != nil {
					fmt.Printf("Error unmarshalling message at offset: %d error: %s\n", offset, err)
					continue
				}

				if !match.Match(msg) {
					continue
				}
				matched += 1

				switch *flagFormat {
				case "count":
					// no op
				case "json":
					contents, _ := json.Marshal(msg)
					fmt.Fprintf(out, "%s\n", contents)
				case "heka":
					fmt.Fprintf(out, "%s", record)
				default:
					fmt.Fprintf(out, "Timestamp: %s\n"+
						"Type: %s\n"+
						"Hostname: %s\n"+
						"Pid: %d\n"+
						"UUID: %s\n"+
						"Logger: %s\n"+
						"Payload: %s\n"+
						"EnvVersion: %s\n"+
						"Severity: %d\n"+
						"Fields: %+v\n\n",
						time.Unix(0, msg.GetTimestamp()), msg.GetType(),
						msg.GetHostname(), msg.GetPid(), msg.GetUuidString(),
						msg.GetLogger(), msg.GetPayload(), msg.GetEnvVersion(),
						msg.GetSeverity(), msg.Fields)
				}
			}
		}
		offset += int64(n)
	}
	fmt.Printf("Processed: %d, matched: %d messages\n", processed, matched)
	if err != nil {
		fmt.Printf("%s\n", err)
		os.Exit(6)
	}
}
Example #9
func EncoderSpec(c gs.Context) {
	t := new(ts.SimpleT)
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()

	// NewPipelineConfig sets up the Globals needed by the
	// pipeline.Prepend*Dir functions so they don't fail during plugin Init().
	_ = pipeline.NewPipelineConfig(nil)

	c.Specify("A SandboxEncoder", func() {

		encoder := new(SandboxEncoder)
		conf := encoder.ConfigStruct().(*SandboxEncoderConfig)
		supply := make(chan *pipeline.PipelinePack, 1)
		pack := pipeline.NewPipelinePack(supply)
		pack.Message.SetPayload("original")
		pack.Message.SetType("my_type")
		pack.Message.SetPid(12345)
		pack.Message.SetSeverity(4)
		pack.Message.SetHostname("hostname")
		pack.Message.SetTimestamp(54321)
		pack.Message.SetUuid(uuid.NewRandom())
		var (
			result []byte
			err    error
		)

		c.Specify("emits JSON correctly", func() {
			conf.ScriptFilename = "../lua/testsupport/encoder_json.lua"
			err = encoder.Init(conf)
			c.Expect(err, gs.IsNil)

			result, err = encoder.Encode(pack)
			c.Expect(err, gs.IsNil)
			msg := new(message.Message)
			err = json.Unmarshal(result, msg)
			c.Expect(err, gs.IsNil)
			c.Expect(msg.GetTimestamp(), gs.Equals, int64(54321))
			c.Expect(msg.GetPid(), gs.Equals, int32(12345))
			c.Expect(msg.GetSeverity(), gs.Equals, int32(4))
			c.Expect(msg.GetHostname(), gs.Equals, "hostname")
			c.Expect(msg.GetPayload(), gs.Equals, "original")
			c.Expect(msg.GetType(), gs.Equals, "my_type")
		})

		c.Specify("emits text correctly", func() {
			conf.ScriptFilename = "../lua/testsupport/encoder_text.lua"
			err = encoder.Init(conf)
			c.Expect(err, gs.IsNil)

			result, err = encoder.Encode(pack)
			c.Expect(err, gs.IsNil)
			c.Expect(string(result), gs.Equals, "Prefixed original")
		})

		c.Specify("emits protobuf correctly", func() {

			c.Specify("when inject_message is used", func() {
				conf.ScriptFilename = "../lua/testsupport/encoder_protobuf.lua"
				err = encoder.Init(conf)
				c.Expect(err, gs.IsNil)

				result, err = encoder.Encode(pack)
				c.Expect(err, gs.IsNil)

				msg := new(message.Message)
				err = proto.Unmarshal(result, msg)
				c.Expect(err, gs.IsNil)
				c.Expect(msg.GetTimestamp(), gs.Equals, int64(54321))
				c.Expect(msg.GetPid(), gs.Equals, int32(12345))
				c.Expect(msg.GetSeverity(), gs.Equals, int32(4))
				c.Expect(msg.GetHostname(), gs.Equals, "hostname")
				c.Expect(msg.GetPayload(), gs.Equals, "mutated")
				c.Expect(msg.GetType(), gs.Equals, "after")
			})

			c.Specify("when `write_message` is used", func() {
				conf.ScriptFilename = "../lua/testsupport/encoder_writemessage.lua"
				err = encoder.Init(conf)
				c.Expect(err, gs.IsNil)

				result, err = encoder.Encode(pack)
				c.Expect(err, gs.IsNil)

				msg := new(message.Message)
				err = proto.Unmarshal(result, msg)
				c.Expect(err, gs.IsNil)
				c.Expect(msg.GetPayload(), gs.Equals, "mutated payload")
				c.Expect(pack.Message.GetPayload(), gs.Equals, "original")
			})
		})

	})
}
Example #10
func EncoderSpec(c gs.Context) {
	t := new(ts.SimpleT)
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()

	// NewPipelineConfig sets up the Globals needed by the
	// pipeline.Prepend*Dir functions so they don't fail during plugin Init().
	pConfig := pipeline.NewPipelineConfig(nil)

	c.Specify("A SandboxEncoder", func() {

		encoder := new(SandboxEncoder)
		encoder.SetPipelineConfig(pConfig)
		conf := encoder.ConfigStruct().(*SandboxEncoderConfig)
		supply := make(chan *pipeline.PipelinePack, 1)
		pack := pipeline.NewPipelinePack(supply)
		pack.Message.SetPayload("original")
		pack.Message.SetType("my_type")
		pack.Message.SetPid(12345)
		pack.Message.SetSeverity(4)
		pack.Message.SetHostname("hostname")
		pack.Message.SetTimestamp(54321)
		pack.Message.SetUuid(uuid.NewRandom())
		var (
			result []byte
			err    error
		)

		c.Specify("emits JSON correctly", func() {
			conf.ScriptFilename = "../lua/testsupport/encoder_json.lua"
			err = encoder.Init(conf)
			c.Expect(err, gs.IsNil)

			result, err = encoder.Encode(pack)
			c.Expect(err, gs.IsNil)
			msg := new(message.Message)
			err = json.Unmarshal(result, msg)
			c.Expect(err, gs.IsNil)
			c.Expect(msg.GetTimestamp(), gs.Equals, int64(54321))
			c.Expect(msg.GetPid(), gs.Equals, int32(12345))
			c.Expect(msg.GetSeverity(), gs.Equals, int32(4))
			c.Expect(msg.GetHostname(), gs.Equals, "hostname")
			c.Expect(msg.GetPayload(), gs.Equals, "original")
			c.Expect(msg.GetType(), gs.Equals, "my_type")
		})

		c.Specify("emits text correctly", func() {
			conf.ScriptFilename = "../lua/testsupport/encoder_text.lua"
			err = encoder.Init(conf)
			c.Expect(err, gs.IsNil)

			result, err = encoder.Encode(pack)
			c.Expect(err, gs.IsNil)
			c.Expect(string(result), gs.Equals, "Prefixed original")
		})

		c.Specify("emits protobuf correctly", func() {

			c.Specify("when inject_message is used", func() {
				conf.ScriptFilename = "../lua/testsupport/encoder_protobuf.lua"
				err = encoder.Init(conf)
				c.Expect(err, gs.IsNil)

				result, err = encoder.Encode(pack)
				c.Expect(err, gs.IsNil)

				msg := new(message.Message)
				err = proto.Unmarshal(result, msg)
				c.Expect(err, gs.IsNil)
				c.Expect(msg.GetTimestamp(), gs.Equals, int64(54321))
				c.Expect(msg.GetPid(), gs.Equals, int32(12345))
				c.Expect(msg.GetSeverity(), gs.Equals, int32(4))
				c.Expect(msg.GetHostname(), gs.Equals, "hostname")
				c.Expect(msg.GetPayload(), gs.Equals, "mutated")
				c.Expect(msg.GetType(), gs.Equals, "after")
			})

			c.Specify("when `write_message` is used", func() {
				conf.ScriptFilename = "../lua/testsupport/encoder_writemessage.lua"
				err = encoder.Init(conf)
				c.Expect(err, gs.IsNil)

				result, err = encoder.Encode(pack)
				c.Expect(err, gs.IsNil)

				msg := new(message.Message)
				err = proto.Unmarshal(result, msg)
				c.Expect(err, gs.IsNil)
				c.Expect(msg.GetPayload(), gs.Equals, "mutated payload")
				c.Expect(pack.Message.GetPayload(), gs.Equals, "original")
			})
		})
	})

	c.Specify("cbuf librato encoder", func() {
		encoder := new(SandboxEncoder)
		encoder.SetPipelineConfig(pConfig)
		conf := encoder.ConfigStruct().(*SandboxEncoderConfig)
		supply := make(chan *pipeline.PipelinePack, 1)
		pack := pipeline.NewPipelinePack(supply)
		pack.Message.SetType("my_type")
		pack.Message.SetPid(12345)
		pack.Message.SetSeverity(4)
		pack.Message.SetHostname("hostname")
		pack.Message.SetTimestamp(54321)
		pack.Message.SetUuid(uuid.NewRandom())
		var (
			result []byte
			err    error
		)
		conf.ScriptFilename = "../lua/encoders/cbuf_librato.lua"
		conf.ModuleDirectory = "../../../../../../modules"
		conf.Config = make(map[string]interface{})
		err = encoder.Init(conf)
		c.Assume(err, gs.IsNil)

		c.Specify("encodes cbuf data", func() {
			payload := `{"time":1410823460,"rows":5,"columns":5,"seconds_per_row":5,"column_info":[{"name":"HTTP_200","unit":"count","aggregation":"sum"},{"name":"HTTP_300","unit":"count","aggregation":"sum"},{"name":"HTTP_400","unit":"count","aggregation":"sum"},{"name":"HTTP_500","unit":"count","aggregation":"sum"},{"name":"HTTP_UNKNOWN","unit":"count","aggregation":"sum"}]}
1	2	3	4	5
6	7	8	9	10
11	12	13	14	15
16	17	18	19	20
21	22	23	24	25
`
			pack.Message.SetPayload(payload)
			result, err = encoder.Encode(pack)
			c.Expect(err, gs.IsNil)
			expected := `{"gauges":[{"value":1,"measure_time":1410823460,"name":"HTTP_200","source":"hostname"},{"value":2,"measure_time":1410823460,"name":"HTTP_300","source":"hostname"},{"value":3,"measure_time":1410823460,"name":"HTTP_400","source":"hostname"},{"value":4,"measure_time":1410823460,"name":"HTTP_500","source":"hostname"},{"value":5,"measure_time":1410823460,"name":"HTTP_UNKNOWN","source":"hostname"},{"value":6,"measure_time":1410823465,"name":"HTTP_200","source":"hostname"},{"value":7,"measure_time":1410823465,"name":"HTTP_300","source":"hostname"},{"value":8,"measure_time":1410823465,"name":"HTTP_400","source":"hostname"},{"value":9,"measure_time":1410823465,"name":"HTTP_500","source":"hostname"},{"value":10,"measure_time":1410823465,"name":"HTTP_UNKNOWN","source":"hostname"},{"value":11,"measure_time":1410823470,"name":"HTTP_200","source":"hostname"},{"value":12,"measure_time":1410823470,"name":"HTTP_300","source":"hostname"},{"value":13,"measure_time":1410823470,"name":"HTTP_400","source":"hostname"},{"value":14,"measure_time":1410823470,"name":"HTTP_500","source":"hostname"},{"value":15,"measure_time":1410823470,"name":"HTTP_UNKNOWN","source":"hostname"},{"value":16,"measure_time":1410823475,"name":"HTTP_200","source":"hostname"},{"value":17,"measure_time":1410823475,"name":"HTTP_300","source":"hostname"},{"value":18,"measure_time":1410823475,"name":"HTTP_400","source":"hostname"},{"value":19,"measure_time":1410823475,"name":"HTTP_500","source":"hostname"},{"value":20,"measure_time":1410823475,"name":"HTTP_UNKNOWN","source":"hostname"}]}`
			c.Expect(string(result), gs.Equals, expected)

			c.Specify("and correctly advances", func() {
				payload := `{"time":1410823475,"rows":5,"columns":5,"seconds_per_row":5,"column_info":[{"name":"HTTP_200","unit":"count","aggregation":"sum"},{"name":"HTTP_300","unit":"count","aggregation":"sum"},{"name":"HTTP_400","unit":"count","aggregation":"sum"},{"name":"HTTP_500","unit":"count","aggregation":"sum"},{"name":"HTTP_UNKNOWN","unit":"count","aggregation":"sum"}]}
16	17	18	19	20
21	22	23	24	25
1	2	3	4	5
6	nan	8	nan	10
5	4	3	2	1
`
				pack.Message.SetPayload(payload)
				result, err = encoder.Encode(pack)
				c.Expect(err, gs.IsNil)
				expected := `{"gauges":[{"value":21,"measure_time":1410823480,"name":"HTTP_200","source":"hostname"},{"value":22,"measure_time":1410823480,"name":"HTTP_300","source":"hostname"},{"value":23,"measure_time":1410823480,"name":"HTTP_400","source":"hostname"},{"value":24,"measure_time":1410823480,"name":"HTTP_500","source":"hostname"},{"value":25,"measure_time":1410823480,"name":"HTTP_UNKNOWN","source":"hostname"},{"value":1,"measure_time":1410823485,"name":"HTTP_200","source":"hostname"},{"value":2,"measure_time":1410823485,"name":"HTTP_300","source":"hostname"},{"value":3,"measure_time":1410823485,"name":"HTTP_400","source":"hostname"},{"value":4,"measure_time":1410823485,"name":"HTTP_500","source":"hostname"},{"value":5,"measure_time":1410823485,"name":"HTTP_UNKNOWN","source":"hostname"},{"value":6,"measure_time":1410823490,"name":"HTTP_200","source":"hostname"},{"value":8,"measure_time":1410823490,"name":"HTTP_400","source":"hostname"},{"value":10,"measure_time":1410823490,"name":"HTTP_UNKNOWN","source":"hostname"}]}`
				c.Expect(string(result), gs.Equals, expected)
			})
		})
	})

	c.Specify("schema influx encoder", func() {
		encoder := new(SandboxEncoder)
		encoder.SetPipelineConfig(pConfig)
		conf := encoder.ConfigStruct().(*SandboxEncoderConfig)
		supply := make(chan *pipeline.PipelinePack, 1)
		pack := pipeline.NewPipelinePack(supply)
		pack.Message.SetType("my_type")
		pack.Message.SetPid(12345)
		pack.Message.SetSeverity(4)
		pack.Message.SetHostname("hostname")
		pack.Message.SetTimestamp(54321 * 1e9)
		pack.Message.SetLogger("Logger")
		pack.Message.SetPayload("Payload value lorem ipsum")

		f, err := message.NewField("intField", 123, "")
		c.Assume(err, gs.IsNil)
		err = f.AddValue(456)
		c.Assume(err, gs.IsNil)
		pack.Message.AddField(f)

		f, err = message.NewField("strField", "0_first", "")
		c.Assume(err, gs.IsNil)
		err = f.AddValue("0_second")
		c.Assume(err, gs.IsNil)
		pack.Message.AddField(f)

		f, err = message.NewField("strField", "1_first", "")
		c.Assume(err, gs.IsNil)
		err = f.AddValue("1_second")
		c.Assume(err, gs.IsNil)
		pack.Message.AddField(f)

		f, err = message.NewField("byteField", []byte("first"), "")
		c.Assume(err, gs.IsNil)
		err = f.AddValue([]byte("second"))
		c.Assume(err, gs.IsNil)
		pack.Message.AddField(f)

		conf.ScriptFilename = "../lua/encoders/schema_influx.lua"
		conf.ModuleDirectory = "../../../../../../modules"
		conf.Config = make(map[string]interface{})

		c.Specify("encodes a basic message", func() {
			err = encoder.Init(conf)
			c.Assume(err, gs.IsNil)
			result, err := encoder.Encode(pack)
			c.Expect(err, gs.IsNil)
			expected := `[{"points":[[54321000,"my_type","Payload value lorem ipsum","hostname",12345,"Logger",4,"",[123,456],["0_first","0_second"],["1_first","1_second"]]],"name":"series","columns":["time","Type","Payload","Hostname","Pid","Logger","Severity","EnvVersion","intField","strField","strField2"]}]`
			c.Expect(string(result), gs.Equals, expected)
		})

		c.Specify("interpolates series name correctly", func() {
			conf.Config["series"] = "series.%{Pid}.%{Type}.%{strField}.%{intField}"
			err = encoder.Init(conf)
			c.Assume(err, gs.IsNil)
			result, err := encoder.Encode(pack)
			c.Expect(err, gs.IsNil)
			expected := `[{"points":[[54321000,"my_type","Payload value lorem ipsum","hostname",12345,"Logger",4,"",[123,456],["0_first","0_second"],["1_first","1_second"]]],"name":"series.12345.my_type.0_first.123","columns":["time","Type","Payload","Hostname","Pid","Logger","Severity","EnvVersion","intField","strField","strField2"]}]`
			c.Expect(string(result), gs.Equals, expected)
		})

		c.Specify("skips specified correctly", func() {
			conf.Config["skip_fields"] = "Payload strField Type"
			err = encoder.Init(conf)
			c.Assume(err, gs.IsNil)
			result, err := encoder.Encode(pack)
			c.Expect(err, gs.IsNil)
			expected := `[{"points":[[54321000,"hostname",12345,"Logger",4,"",[123,456]]],"name":"series","columns":["time","Hostname","Pid","Logger","Severity","EnvVersion","intField"]}]`
			c.Expect(string(result), gs.Equals, expected)
		})
	})
}
Example #11
func TestInjectMessage(t *testing.T) {
	var sbc SandboxConfig
	tests := []string{
		"lua types",
		"cloudwatch metric",
		"external reference",
		"array only",
		"private keys",
		"special characters",
		"message field all types",
		"internal reference",
	}
	outputs := []string{
		`{"value":1}1.2 string nil true false`,
		`{"StatisticValues":[{"Minimum":0,"SampleCount":0,"Sum":0,"Maximum":0},{"Minimum":0,"SampleCount":0,"Sum":0,"Maximum":0}],"Dimensions":[{"Name":"d1","Value":"v1"},{"Name":"d2","Value":"v2"}],"MetricName":"example","Timestamp":0,"Value":0,"Unit":"s"}`,
		`{"a":{"y":2,"x":1}}`,
		`[1,2,3]`,
		`{"x":1,"_m":1,"_private":[1,2]}`,
		`{"special\tcharacters":"\"\t\r\n\b\f\\\/"}`,
		"\x10\x80\x94\xeb\xdc\x03\x52\x13\x0a\x06\x6e\x75\x6d\x62\x65\x72\x10\x03\x39\x00\x00\x00\x00\x00\x00\xf0\x3f\x52\x2c\x0a\x07\x6e\x75\x6d\x62\x65\x72\x73\x10\x03\x1a\x05\x63\x6f\x75\x6e\x74\x3a\x18\x00\x00\x00\x00\x00\x00\xf0\x3f\x00\x00\x00\x00\x00\x00\x00\x40\x00\x00\x00\x00\x00\x00\x08\x40\x52\x0e\x0a\x05\x62\x6f\x6f\x6c\x73\x10\x04\x42\x03\x01\x00\x00\x52\x0a\x0a\x04\x62\x6f\x6f\x6c\x10\x04\x40\x01\x52\x10\x0a\x06\x73\x74\x72\x69\x6e\x67\x22\x06\x73\x74\x72\x69\x6e\x67\x52\x15\x0a\x07\x73\x74\x72\x69\x6e\x67\x73\x22\x02\x73\x31\x22\x02\x73\x32\x22\x02\x73\x33",
		`{"y":[2],"x":[1,2,3],"ir":[1,2,3]}`,
	}
	if false { // lua jit values
		outputs[1] = `{"Timestamp":0,"Value":0,"StatisticValues":[{"SampleCount":0,"Sum":0,"Maximum":0,"Minimum":0},{"SampleCount":0,"Sum":0,"Maximum":0,"Minimum":0}],"Unit":"s","MetricName":"example","Dimensions":[{"Name":"d1","Value":"v1"},{"Name":"d2","Value":"v2"}]}`
	}

	sbc.ScriptFilename = "./testsupport/inject_message.lua"
	sbc.MemoryLimit = 100000
	sbc.InstructionLimit = 1000
	sbc.OutputLimit = 8000
	pack := getTestPack()
	sb, err := lua.CreateLuaSandbox(&sbc)
	if err != nil {
		t.Errorf("%s", err)
	}
	err = sb.Init("", "")
	if err != nil {
		t.Errorf("%s", err)
	}
	cnt := 0
	sb.InjectMessage(func(p, pt, pn string) int {
		if len(pt) == 0 { // an empty payload type means a Heka protobuf message
			if p[18:] != outputs[cnt] { // skip the leading 18-byte UUID field, which differs per message
				t.Errorf("Output is incorrect, expected: \"%x\" received: \"%x\"", outputs[cnt], p[18:])
			}
		} else {
			if p != outputs[cnt] {
				t.Errorf("Output is incorrect, expected: \"%s\" received: \"%s\"", outputs[cnt], p)
			}
		}
		if cnt == 6 {
			msg := new(message.Message)
			err := proto.Unmarshal([]byte(p), msg)
			if err != nil {
				t.Errorf("%s", err)
			}
			if msg.GetTimestamp() != 1e9 {
				t.Errorf("Timestamp expected %d received %d", int(1e9), pack.Message.GetTimestamp())
			}
			if field := msg.FindFirstField("numbers"); field != nil {
				if field.GetRepresentation() != "count" {
					t.Errorf("'numbers' representation expected count received %s", 1e9, field.GetRepresentation())
				}
			} else {
				t.Errorf("'numbers' field not found")
			}
			tests := []string{
				"Timestamp == 1000000000",
				"Fields[number] == 1",
				"Fields[numbers][0][0] == 1 && Fields[numbers][0][1] == 2 && Fields[numbers][0][2] == 3",
				"Fields[string] == 'string'",
				"Fields[strings][0][0] == 's1' && Fields[strings][0][1] == 's2' && Fields[strings][0][2] == 's3'",
				"Fields[bool] == TRUE",
				"Fields[bools][0][0] == TRUE && Fields[bools][0][1] == FALSE && Fields[bools][0][2] == FALSE",
			}
			for _, v := range tests {
				ms, _ := message.CreateMatcherSpecification(v)
				match := ms.Match(msg)
				if !match {
					t.Errorf("Test failed %s", v)
				}
			}
		}
		cnt++
		return 0
	})

	for _, v := range tests {
		pack.Message.SetPayload(v)
		r := sb.ProcessMessage(pack)
		if r != 0 {
			t.Errorf("ProcessMessage should return 0, received %d %s", r, sb.LastError())
		}
	}
	sb.Destroy("")
	if cnt != len(tests) {
		t.Errorf("InjectMessage was called %d times, expected %d", cnt, len(tests))
	}
}
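getTestPack is defined elsewhere in the test support code. A plausible sketch, modeled on the pack construction used in the encoder specs below (the supply channel size is an assumption):

func getTestPack() *pipeline.PipelinePack {
	supply := make(chan *pipeline.PipelinePack, 1)
	pack := pipeline.NewPipelinePack(supply)
	return pack
}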
Example #12
func (self *DashboardOutput) Run(or OutputRunner, h PluginHelper) (err error) {
	inChan := or.InChan()
	ticker := or.Ticker()

	var (
		ok   = true
		pack *PipelinePack
		msg  *message.Message
	)

	reNotWord, _ := regexp.Compile("\\W")
	for ok {
		select {
		case pack, ok = <-inChan:
			if !ok {
				break
			}
			msg = pack.Message
			switch msg.GetType() {
			case "heka.all-report":
				fn := path.Join(self.workingDirectory, "heka_report.json")
				createPluginPages(self.workingDirectory, msg.GetPayload())
				overwriteFile(fn, msg.GetPayload())
			case "heka.sandbox-output":
				tmp, _ := msg.GetFieldValue("payload_type")
				if payloadType, ok := tmp.(string); ok {
					var payloadName, nameExt string
					tmp, _ := msg.GetFieldValue("payload_name")
					if payloadName, ok = tmp.(string); ok {
						nameExt = reNotWord.ReplaceAllString(payloadName, "")
					}
					if len(nameExt) > 64 {
						nameExt = nameExt[:64]
					}
					nameExt = "." + nameExt

					payloadType = reNotWord.ReplaceAllString(payloadType, "")
					fn := msg.GetLogger() + nameExt + "." + payloadType
					ofn := path.Join(self.workingDirectory, fn)
					if payloadType == "cbuf" {
						html := msg.GetLogger() + nameExt + ".html"
						ohtml := path.Join(self.workingDirectory, html)
						_, err := os.Stat(ohtml)
						if err != nil {
							overwriteFile(ohtml, fmt.Sprintf(getCbufTemplate(),
								msg.GetLogger(),
								payloadName,
								fn))
						}
						overwriteFile(ofn, msg.GetPayload())
						updatePluginMetadata(self.workingDirectory, msg.GetLogger(), html, payloadName)
					} else {
						overwriteFile(ofn, msg.GetPayload())
						updatePluginMetadata(self.workingDirectory, msg.GetLogger(), fn, payloadName)
					}
				}
			case "heka.sandbox-terminated":
				fn := path.Join(self.workingDirectory, "heka_sandbox_termination.tsv")
				if file, err := os.OpenFile(fn, os.O_WRONLY|os.O_APPEND|os.O_CREATE, 0644); err == nil {
					var line string
					if _, ok := msg.GetFieldValue("ProcessMessageCount"); !ok {
						line = fmt.Sprintf("%d\t%s\t%v\n", msg.GetTimestamp()/1e9, msg.GetLogger(), msg.GetPayload())
					} else {
						pmc, _ := msg.GetFieldValue("ProcessMessageCount")
						pms, _ := msg.GetFieldValue("ProcessMessageSamples")
						pmd, _ := msg.GetFieldValue("ProcessMessageAvgDuration")
						ms, _ := msg.GetFieldValue("MatchSamples")
						mad, _ := msg.GetFieldValue("MatchAvgDuration")
						fcl, _ := msg.GetFieldValue("FilterChanLength")
						mcl, _ := msg.GetFieldValue("MatchChanLength")
						rcl, _ := msg.GetFieldValue("RouterChanLength")
						line = fmt.Sprintf("%d\t%s\t%v"+
							" ProcessMessageCount:%v"+
							" ProcessMessageSamples:%v"+
							" ProcessMessageAvgDuration:%v"+
							" MatchSamples:%v"+
							" MatchAvgDuration:%v"+
							" FilterChanLength:%v"+
							" MatchChanLength:%v"+
							" RouterChanLength:%v\n",
							msg.GetTimestamp()/1e9,
							msg.GetLogger(), msg.GetPayload(), pmc, pms, pmd,
							ms, mad, fcl, mcl, rcl)
					}
					file.WriteString(line)
					file.Close()
				}
			}
			pack.Recycle()
		case <-ticker:
			go h.PipelineConfig().allReportsMsg()
		}
	}
	return
}
Example #13
// Save matching client records locally to the given output file in the given
// format.
func save(recordChannel <-chan s3splitfile.S3Record, match *message.MatcherSpecification, format string, out *os.File, done chan<- int) {
	processed := 0
	matched := 0
	bytes := 0
	msg := new(message.Message)
	for {
		r, ok := <-recordChannel
		if !ok {
			// Channel is closed
			done <- bytes
			break
		}

		bytes += len(r.Record)

		processed += 1
		headerLen := int(r.Record[1]) + message.HEADER_FRAMING_SIZE
		messageBytes := r.Record[headerLen:]
		unsnappy, decodeErr := snappy.Decode(nil, messageBytes)
		if decodeErr == nil {
			messageBytes = unsnappy
		}
		if err := proto.Unmarshal(messageBytes, msg); err != nil {
			fmt.Fprintf(os.Stderr, "Error unmarshalling message %d in %s, error: %s\n", processed, r.Key, err)
			continue
		}

		if !match.Match(msg) {
			continue
		}

		matched += 1

		switch format {
		case "count":
			// no op
		case "json":
			contents, _ := json.Marshal(msg)
			fmt.Fprintf(out, "%s\n", contents)
		case "heka":
			fmt.Fprintf(out, "%s", r.Record)
		case "offsets":
			// Use offsets mode for indexing the S3 files by clientId
			clientId, ok := msg.GetFieldValue("clientId")
			recordLength := len(r.Record) - headerLen
			if ok {
				fmt.Fprintf(out, "%s\t%s\t%d\t%d\n", r.Key, clientId, (r.Offset + uint64(headerLen)), recordLength)
			} else {
				fmt.Fprintf(os.Stderr, "Missing client id in %s @ %d+%d\n", r.Key, r.Offset, recordLength)
			}
		default:
			fmt.Fprintf(out, "Timestamp: %s\n"+
				"Type: %s\n"+
				"Hostname: %s\n"+
				"Pid: %d\n"+
				"UUID: %s\n"+
				"Logger: %s\n"+
				"Payload: %s\n"+
				"EnvVersion: %s\n"+
				"Severity: %d\n"+
				"Fields: %+v\n\n",
				time.Unix(0, msg.GetTimestamp()), msg.GetType(),
				msg.GetHostname(), msg.GetPid(), msg.GetUuidString(),
				msg.GetLogger(), msg.GetPayload(), msg.GetEnvVersion(),
				msg.GetSeverity(), msg.Fields)
		}
	}
	fmt.Fprintf(os.Stderr, "Processed: %d, matched: %d messages (%.2f MB)\n", processed, matched, (float64(bytes) / 1024.0 / 1024.0))
}
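A hedged sketch of how save might be wired up; the runSave driver, channel sizes, matcher expression, and output destination are illustrative, not taken from the original tool (it assumes the same imports as the function above):

func runSave() {
	recordChannel := make(chan s3splitfile.S3Record, 1000)
	done := make(chan int, 1)
	match, err := message.CreateMatcherSpecification("TRUE")
	if err != nil {
		fmt.Fprintf(os.Stderr, "%s\n", err)
		os.Exit(1)
	}
	go save(recordChannel, match, "count", os.Stdout, done)
	// ... feed S3 records into recordChannel here, then close it so save can
	// report how many bytes of records it consumed.
	close(recordChannel)
	fmt.Fprintf(os.Stderr, "Processed %d bytes of records\n", <-done)
}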