func InsensitiveDecodeSpec(c gs.Context) {
	tme, err := time.Parse(time.RFC3339, time.RFC3339[:len(time.RFC3339)-5])
	if err != nil {
		panic(err)
	}
	expected := Insensitive{
		TopString: "string",
		TopInt:    1,
		TopFloat:  1.1,
		TopBool:   true,
		TopDate:   tme,
		TopArray:  []string{"array"},
		MatcH:     "i should be in MatcH only",
		Match:     "i should be in Match only",
		Field:     "neat", // encoding/json would store "messy" here
		Once:      "just once",
		OncE:      "just once", // wait, what?
		Nest: InsensitiveNest{
			Ed: InsensitiveEd{NestedString: "another string"},
		},
	}
	var got Insensitive
	_, err = Decode(caseToml, &got)
	c.Assume(err, gs.IsNil)
	c.Assume(reflect.DeepEqual(expected, got), gs.IsTrue)
}
func StatsdInputSpec(c gs.Context) {
	t := &pipeline_ts.SimpleT{}
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()

	pConfig := NewPipelineConfig(nil)
	ith := new(plugins_ts.InputTestHelper)
	ith.Msg = pipeline_ts.GetTestMessage()
	ith.Pack = NewPipelinePack(pConfig.InputRecycleChan())
	ith.PackSupply = make(chan *PipelinePack, 1)

	// Specify localhost, but we're not really going to use the network.
	ith.AddrStr = "localhost:55565"
	ith.ResolvedAddrStr = "127.0.0.1:55565"

	// Set up mock helper, input runner, and stat accumulator.
	ith.MockHelper = NewMockPluginHelper(ctrl)
	ith.MockInputRunner = NewMockInputRunner(ctrl)
	mockStatAccum := NewMockStatAccumulator(ctrl)

	c.Specify("A StatsdInput", func() {
		statsdInput := StatsdInput{}
		config := statsdInput.ConfigStruct().(*StatsdInputConfig)
		config.Address = ith.AddrStr
		err := statsdInput.Init(config)
		c.Assume(err, gs.IsNil)
		realListener := statsdInput.listener
		c.Expect(realListener.LocalAddr().String(), gs.Equals, ith.ResolvedAddrStr)
		realListener.Close()
		mockListener := pipeline_ts.NewMockConn(ctrl)
		statsdInput.listener = mockListener

		ith.MockHelper.EXPECT().StatAccumulator("StatAccumInput").Return(mockStatAccum, nil)
		mockListener.EXPECT().Close()
		mockListener.EXPECT().SetReadDeadline(gomock.Any())

		c.Specify("sends a Stat to the StatAccumulator", func() {
			statName := "sample.count"
			statVal := 303
			msg := fmt.Sprintf("%s:%d|c\n", statName, statVal)
			expected := Stat{statName, strconv.Itoa(statVal), "c", float32(1)}
			mockStatAccum.EXPECT().DropStat(expected).Return(true)
			readCall := mockListener.EXPECT().Read(make([]byte, 512))
			readCall.Return(len(msg), nil)
			readCall.Do(func(msgBytes []byte) {
				copy(msgBytes, []byte(msg))
				statsdInput.Stop()
			})

			var wg sync.WaitGroup
			wg.Add(1)
			go func() {
				err = statsdInput.Run(ith.MockInputRunner, ith.MockHelper)
				c.Expect(err, gs.IsNil)
				wg.Done()
			}()
			wg.Wait()
		})
	})
}
func TcpInputSpecFailure(c gs.Context) {
	tcpInput := TcpInput{}
	err := tcpInput.Init(&TcpInputConfig{
		Net:        "udp",
		Address:    "localhost:55565",
		Decoder:    "ProtobufDecoder",
		ParserType: "message.proto",
	})
	c.Assume(err, gs.Not(gs.IsNil))
	c.Assume(err.Error(), gs.Equals, "ListenTCP failed: unknown network udp\n")
}
func UdpInputSpecFailure(c gs.Context) {
	udpInput := UdpInput{}
	err := udpInput.Init(&UdpInputConfig{
		Net:        "tcp",
		Address:    "localhost:55565",
		Decoder:    "ProtobufDecoder",
		ParserType: "message.proto",
	})
	c.Assume(err, gs.Not(gs.IsNil))
	c.Assume(err.Error(), gs.Equals, "ResolveUDPAddr failed: unknown network tcp\n")
}
func UdpInputSpecFailure(c gs.Context) {
	udpInput := UdpInput{}
	err := udpInput.Init(&UdpInputConfig{
		Net:     "tcp",
		Address: "localhost:55565",
	})
	c.Assume(err, gs.Not(gs.IsNil))
	c.Assume(err.Error(), gs.Equals, "ResolveUDPAddr failed: unknown network tcp\n")
}
func TcpInputSpecFailure(c gs.Context) {
	tcpInput := TcpInput{}
	err := tcpInput.Init(&TcpInputConfig{
		Net:     "udp",
		Address: "localhost:55565",
	})
	c.Assume(err, gs.Not(gs.IsNil))
	c.Assume(err.Error(), gs.Equals, "ResolveTCPAddress failed: unknown network udp\n")
}
func GeoIpDecoderSpec(c gs.Context) {
	t := &ts.SimpleT{}
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()

	pConfig := NewPipelineConfig(nil)
	pConfig.Globals.ShareDir = "/foo/bar/baz"

	c.Specify("A GeoIpDecoder", func() {
		decoder := new(GeoIpDecoder)
		decoder.SetPipelineConfig(pConfig)
		rec := new(geoip.GeoIPRecord)
		conf := decoder.ConfigStruct().(*GeoIpDecoderConfig)
		c.Expect(conf.DatabaseFile, gs.Equals, "/foo/bar/baz/GeoLiteCity.dat")

		supply := make(chan *PipelinePack, 1)
		pack := NewPipelinePack(supply)

		nf, _ := message.NewField("remote_host", "74.125.142.147", "")
		pack.Message.AddField(nf)

		decoder.SourceIpField = "remote_host"
		conf.SourceIpField = "remote_host"
		decoder.Init(conf)

		rec.CountryCode = "US"
		rec.CountryCode3 = "USA"
		rec.CountryName = "United States"
		rec.Region = "CA"
		rec.City = "Mountain View"
		rec.PostalCode = "94043"
		rec.Latitude = 37.4192
		rec.Longitude = -122.0574
		rec.AreaCode = 650
		rec.CharSet = 1
		rec.ContinentCode = "NA"

		c.Specify("Test GeoIpDecoder Output", func() {
			buf := decoder.GeoBuff(rec)
			nf, _ = message.NewField("geoip", buf.Bytes(), "")
			pack.Message.AddField(nf)
			b, ok := pack.Message.GetFieldValue("geoip")
			c.Expect(ok, gs.IsTrue)
			c.Expect(string(b.([]byte)), gs.Equals, `{"latitude":37.4192008972168,"longitude":-122.0574035644531,"location":[-122.0574035644531,37.4192008972168],"coordinates":["-122.0574035644531","37.4192008972168"],"countrycode":"US","countrycode3":"USA","countryname":"United States","region":"CA","city":"Mountain View","postalcode":"94043","areacode":650,"charset":1,"continentcode":"NA"}`)
		})
	})
}
func DecodeSpec(c gs.Context) {
	var val simple
	md, err := Decode(testSimple, &val)
	c.Assume(err, gs.IsNil)
	c.Assume(md.IsDefined("Annoying", "Cats", "plato"), gs.IsTrue)
	c.Assume(md.IsDefined("Cats", "Stinky"), gs.IsFalse)

	var colors = [][]string{
		{"red", "green", "blue"},
		{"cyan", "magenta", "yellow", "black"},
	}
	for ridx, row := range colors {
		for cidx := range row {
			c.Assume(val.Colors[ridx][cidx], gs.Equals, colors[ridx][cidx])
		}
	}
	c.Assume(val, gs.Not(gs.IsNil))
}
func check(c gs.Context, in, out string) (err error) {
	tmpl := fmt.Sprintf("<%d>%%s %%s syslog_test[%%d]: %s\n", syslog.LOG_USER+syslog.LOG_INFO, in)
	if hostname, err := os.Hostname(); err != nil {
		return errors.New("Error retrieving hostname")
	} else {
		var parsedHostname, timestamp string
		var pid int
		// The stdlib tests that hostname matches parsedHostname; we don't bother.
		if n, err := fmt.Sscanf(out, tmpl, &timestamp, &parsedHostname, &pid); n != 3 || err != nil ||
			hostname != parsedHostname {
			return errors.New("Error extracting timestamp, parsedHostname, pid")
		}
		computed_in := fmt.Sprintf(tmpl, timestamp, parsedHostname, pid)
		c.Expect(computed_in, gs.Equals, out)
	}
	return nil
}
func InputRunnerSpec(c gs.Context) {
	t := &ts.SimpleT{}
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()

	globals := &GlobalConfigStruct{
		PluginChanSize: 5,
	}
	NewPipelineConfig(globals)

	mockHelper := NewMockPluginHelper(ctrl)

	c.Specify("Runner restarts a plugin on the first time only", func() {
		var pluginGlobals PluginGlobals
		pluginGlobals.Retries = RetryOptions{
			MaxDelay:   "1us",
			Delay:      "1us",
			MaxJitter:  "1us",
			MaxRetries: 1,
		}
		pc := new(PipelineConfig)
		pc.inputWrappers = make(map[string]*PluginWrapper)
		pw := &PluginWrapper{
			Name:          "stopping",
			ConfigCreator: func() interface{} { return nil },
			PluginCreator: func() interface{} { return new(StoppingInput) },
		}
		pc.inputWrappers["stopping"] = pw

		input := new(StoppingInput)
		iRunner := NewInputRunner("stopping", input, &pluginGlobals, false)
		var wg sync.WaitGroup
		cfgCall := mockHelper.EXPECT().PipelineConfig().Times(7)
		cfgCall.Return(pc)
		wg.Add(1)
		iRunner.Start(mockHelper, &wg)
		wg.Wait()
		c.Expect(stopinputTimes, gs.Equals, 2)
	})
}
func CheckTypeSpec(c gs.Context) {
	var err error

	var tomlBlob = `
ranking = ["Springsteen", "J Geils"]

[bands.Springsteen]
type = "ignore_this"
started = 1973
albums = ["Greetings", "WIESS", "Born to Run", "Darkness"]
not_albums = ["Greetings", "WIESS", "Born to Run", "Darkness"]

[bands.J Geils]
started = 1970
albums = ["The J. Geils Band", "Full House", "Blow Your Face Out"]
`

	type classics struct {
		Ranking []string
		Bands   map[string]Primitive
	}

	c.Specify("check mapping", func() {
		// Do the initial decode. Reflection is delayed on Primitive values.
		var music classics
		var md MetaData
		md, err = Decode(tomlBlob, &music)
		c.Assume(err, gs.IsNil)
		empty_ignore := map[string]interface{}{}
		err = CheckType(md.mapping, music, empty_ignore)
		c.Assume(err, gs.IsNil)
	})
}
// decodeMessageAndVerifyOutput takes a decoder conf, a message payload, and a fn;
// the fn is a set of assertions verifying that the message after decoding is as
// expected.
func decodeMessageAndVerifyOutput(c gs.Context, conf *JsonDecoderConfig, payload string, fn packVerifier) {
	t := &pipeline_ts.SimpleT{}
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()

	// 1. Initialize test decoder
	decoder := new(JsonDecoder)
	err := decoder.Init(conf)
	c.Assume(err, gs.IsNil)
	dRunner := pipelinemock.NewMockDecoderRunner(ctrl)
	decoder.SetDecoderRunner(dRunner)

	// 2. Set payload to be tested, and decode it
	supply := make(chan *PipelinePack, 1)
	pack := NewPipelinePack(supply)
	pack.Message.SetPayload(payload)
	_, err = decoder.Decode(pack)

	// 3. Assert outcome of decoding
	fn(c, pack)
	pack.Zero()
}
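// Usage sketch (not taken from the original test suite): packVerifier is inferred
// from the fn(c, pack) call above to be func(gs.Context, *PipelinePack). The field
// name and JSON payload below are hypothetical example values.
//
//	verify := func(c gs.Context, pack *PipelinePack) {
//		value, ok := pack.Message.GetFieldValue("name")
//		c.Expect(ok, gs.IsTrue)
//		c.Expect(value, gs.Equals, "test")
//	}
//	conf := new(JsonDecoderConfig)
//	decodeMessageAndVerifyOutput(c, conf, `{"name": "test"}`, verify)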
func DashboardOutputSpec(c gs.Context) {
	t := new(ts.SimpleT)
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()

	c.Specify("A DashboardOutput", func() {
		dashboardOutput := new(DashboardOutput)
		config := dashboardOutput.ConfigStruct().(*DashboardOutputConfig)

		c.Specify("Init halts if basedirectory is not writable", func() {
			tmpdir := filepath.Join(os.TempDir(), "tmpdir")
			err := os.MkdirAll(tmpdir, 0400)
			c.Assume(err, gs.IsNil)
			config.WorkingDirectory = tmpdir
			err = dashboardOutput.Init(config)
			if runtime.GOOS == "windows" {
				c.Assume(err, gs.IsNil)
			} else {
				c.Assume(err, gs.Not(gs.IsNil))
			}
		})
	})
}
func DashboardOutputSpec(c gs.Context) {
	t := new(pipeline_ts.SimpleT)
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()
	NewPipelineConfig(nil) // Needed for side effect of setting up Globals :P

	if runtime.GOOS != "windows" {
		c.Specify("A DashboardOutput", func() {
			dashboardOutput := new(DashboardOutput)
			config := dashboardOutput.ConfigStruct().(*DashboardOutputConfig)

			c.Specify("Init halts if basedirectory is not writable", func() {
				tmpdir := filepath.Join(os.TempDir(), "tmpdir")
				err := os.MkdirAll(tmpdir, 0400)
				c.Assume(err, gs.IsNil)
				config.WorkingDirectory = tmpdir
				err = dashboardOutput.Init(config)
				c.Assume(err, gs.Not(gs.IsNil))
			})
		})
	}
}
func UdpInputSpec(c gs.Context) {
	t := &pipeline_ts.SimpleT{}
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()

	config := NewPipelineConfig(nil)
	ith := new(plugins_ts.InputTestHelper)
	ith.Msg = pipeline_ts.GetTestMessage()
	ith.Pack = NewPipelinePack(config.InputRecycleChan())
	ith.AddrStr = "localhost:55565"
	ith.ResolvedAddrStr = "127.0.0.1:55565"

	// Set up mock helper, decoder set, and packSupply channel.
	ith.MockHelper = pipelinemock.NewMockPluginHelper(ctrl)
	ith.MockInputRunner = pipelinemock.NewMockInputRunner(ctrl)
	ith.Decoder = pipelinemock.NewMockDecoderRunner(ctrl)
	ith.PackSupply = make(chan *PipelinePack, 1)
	ith.DecodeChan = make(chan *PipelinePack)

	c.Specify("A UdpInput", func() {
		udpInput := UdpInput{}
		err := udpInput.Init(&UdpInputConfig{
			Net:        "udp",
			Address:    ith.AddrStr,
			Decoder:    "ProtobufDecoder",
			ParserType: "message.proto",
		})
		c.Assume(err, gs.IsNil)
		realListener := (udpInput.listener).(*net.UDPConn)
		c.Expect(realListener.LocalAddr().String(), gs.Equals, ith.ResolvedAddrStr)

		mbytes, _ := proto.Marshal(ith.Msg)
		header := &message.Header{}
		header.SetMessageLength(uint32(len(mbytes)))

		mockDecoderRunner := ith.Decoder.(*pipelinemock.MockDecoderRunner)
		mockDecoderRunner.EXPECT().InChan().Return(ith.DecodeChan)
		ith.MockInputRunner.EXPECT().InChan().Return(ith.PackSupply)
		ith.MockInputRunner.EXPECT().Name().Return("UdpInput")
		encCall := ith.MockHelper.EXPECT().DecoderRunner("ProtobufDecoder", "UdpInput-ProtobufDecoder")
		encCall.Return(ith.Decoder, true)

		c.Specify("reads a message from the connection and passes it to the decoder", func() {
			hbytes, _ := proto.Marshal(header)
			go func() {
				udpInput.Run(ith.MockInputRunner, ith.MockHelper)
			}()
			// A mock connection will not work here since the mock read cannot block.
			conn, err := net.Dial("udp", ith.AddrStr)
			c.Assume(err, gs.IsNil)
			buf := encodeMessage(hbytes, mbytes)
			_, err = conn.Write(buf)
			c.Assume(err, gs.IsNil)
			ith.PackSupply <- ith.Pack
			packRef := <-ith.DecodeChan
			udpInput.Stop()
			c.Expect(ith.Pack, gs.Equals, packRef)
			c.Expect(string(ith.Pack.MsgBytes), gs.Equals, string(mbytes))
			c.Expect(ith.Pack.Decoded, gs.IsFalse)
		})
	})

	c.Specify("A UdpInput Multiline input", func() {
		ith.AddrStr = "localhost:55566"
		ith.ResolvedAddrStr = "127.0.0.1:55566"
		udpInput := UdpInput{}
		err := udpInput.Init(&UdpInputConfig{
			Net:        "udp",
			Address:    ith.AddrStr,
			Decoder:    "test",
			ParserType: "token",
		})
		c.Assume(err, gs.IsNil)
		realListener := (udpInput.listener).(*net.UDPConn)
		c.Expect(realListener.LocalAddr().String(), gs.Equals, ith.ResolvedAddrStr)

		mockDecoderRunner := ith.Decoder.(*pipelinemock.MockDecoderRunner)
		mockDecoderRunner.EXPECT().InChan().Return(ith.DecodeChan).Times(2)
		ith.MockInputRunner.EXPECT().InChan().Return(ith.PackSupply).Times(2)
		ith.MockInputRunner.EXPECT().Name().Return("UdpInput").AnyTimes()
		encCall := ith.MockHelper.EXPECT().DecoderRunner("test", "UdpInput-test")
		encCall.Return(ith.Decoder, true)

		c.Specify("reads two messages from a packet and passes them to the decoder", func() {
			go func() {
				udpInput.Run(ith.MockInputRunner, ith.MockHelper)
			}()
			// A mock connection will not work here since the mock read cannot block.
			conn, err := net.Dial("udp", ith.AddrStr)
			c.Assume(err, gs.IsNil)
			_, err = conn.Write([]byte("message1\nmessage2\n"))
			c.Assume(err, gs.IsNil)
			ith.PackSupply <- ith.Pack
			packRef := <-ith.DecodeChan
			c.Expect(string(packRef.Message.GetPayload()), gs.Equals, "message1\n")
			ith.PackSupply <- ith.Pack
			packRef = <-ith.DecodeChan
			c.Expect(string(packRef.Message.GetPayload()), gs.Equals, "message2\n")
			udpInput.Stop()
		})
	})
}
func HttpInputSpec(c gs.Context) { t := &ts.SimpleT{} ctrl := gomock.NewController(t) defer ctrl.Finish() pConfig := pipeline.NewPipelineConfig(nil) jsonPost := `{"uuid": "xxBI3zyeXU+spG8Uiveumw==", "timestamp": 1372966886023588, "hostname": "Victors-MacBook-Air.local", "pid": 40183, "fields": [{"representation": "", "value_type": "STRING", "name": "cef_meta.syslog_priority", "value_string": [""]}, {"representation": "", "value_type": "STRING", "name": "cef_meta.syslog_ident", "value_string": [""]}, {"representation": "", "value_type": "STRING", "name": "cef_meta.syslog_facility", "value_string": [""]}, {"representation": "", "value_type": "STRING", "name": "cef_meta.syslog_options", "value_string": [""]}], "logger": "", "env_version": "0.8", "type": "cef", "payload": "Jul 04 15:41:26 Victors-MacBook-Air.local CEF:0|mozilla|weave|3|xx\\\\|x|xx\\\\|x|5|cs1Label=requestClientApplication cs1=MySuperBrowser requestMethod=GET request=/ src=127.0.0.1 dest=127.0.0.1 suser=none", "severity": 6}'` c.Specify("A HttpInput", func() { httpInput := pipeline.HttpInput{} c.Specify("honors time ticker to flush", func() { ith := new(pipeline.InputTestHelper) ith.MockHelper = NewMockPluginHelper(ctrl) ith.MockInputRunner = NewMockInputRunner(ctrl) startInput := func() { go func() { err := httpInput.Run(ith.MockInputRunner, ith.MockHelper) c.Expect(err, gs.IsNil) }() } ith.Pack = NewPipelinePack(pConfig.inputRecycleChan) ith.PackSupply = make(chan *PipelinePack, 1) ith.PackSupply <- ith.Pack ith.Decoders = make([]DecoderRunner, int(message.Header_JSON+1)) ith.Decoders[message.Header_JSON] = NewMockDecoderRunner(ctrl) ith.MockDecoderSet = NewMockDecoderSet(ctrl) // Spin up a http server server, err := ts.NewOneHttpServer(jsonPost, "localhost", 9876) c.Expect(err, gs.IsNil) go server.Start("/") time.Sleep(10 * time.Millisecond) config := httpInput.ConfigStruct().(*HttpInputConfig) config.DecoderName = "JsonDecoder" config.Url = "http://localhost:9876/" tickChan := make(chan time.Time) ith.MockInputRunner.EXPECT().LogMessage(gomock.Any()).Times(2) ith.MockHelper.EXPECT().DecoderSet().Return(ith.MockDecoderSet) ith.MockHelper.EXPECT().PipelineConfig().Return(pConfig) ith.MockInputRunner.EXPECT().InChan().Return(ith.PackSupply) ith.MockInputRunner.EXPECT().Ticker().Return(tickChan) mockDecoderRunner := ith.Decoders[message.Header_JSON].(*MockDecoderRunner) // Stub out the DecoderRunner input channel so that we can // inspect bytes later on dRunnerInChan := make(chan *PipelinePack, 1) mockDecoderRunner.EXPECT().InChan().Return(dRunnerInChan) dset := ith.MockDecoderSet.EXPECT().ByName("JsonDecoder") dset.Return(ith.Decoders[message.Header_JSON], true) err = httpInput.Init(config) c.Assume(err, gs.IsNil) startInput() tickChan <- time.Now() // We need for the pipeline to finish up time.Sleep(50 * time.Millisecond) }) c.Specify("short circuits packs into the router", func() { ith := new(InputTestHelper) ith.MockHelper = NewMockPluginHelper(ctrl) ith.MockInputRunner = NewMockInputRunner(ctrl) startInput := func() { go func() { err := httpInput.Run(ith.MockInputRunner, ith.MockHelper) c.Expect(err, gs.IsNil) }() } ith.Pack = NewPipelinePack(pConfig.inputRecycleChan) ith.PackSupply = make(chan *PipelinePack, 1) ith.PackSupply <- ith.Pack ith.Decoders = make([]DecoderRunner, int(message.Header_JSON+1)) ith.Decoders[message.Header_JSON] = NewMockDecoderRunner(ctrl) ith.MockDecoderSet = NewMockDecoderSet(ctrl) config := httpInput.ConfigStruct().(*HttpInputConfig) config.Url = "http://localhost:9876/" tickChan := make(chan 
time.Time) ith.MockInputRunner.EXPECT().LogMessage(gomock.Any()).Times(2) ith.MockHelper.EXPECT().DecoderSet().Return(ith.MockDecoderSet) ith.MockHelper.EXPECT().PipelineConfig().Return(pConfig) ith.MockInputRunner.EXPECT().InChan().Return(ith.PackSupply) ith.MockInputRunner.EXPECT().Ticker().Return(tickChan) err := httpInput.Init(config) c.Assume(err, gs.IsNil) startInput() tickChan <- time.Now() // We need for the pipeline to finish up time.Sleep(50 * time.Millisecond) }) }) }
func LoadFromConfigSpec(c gs.Context) { origGlobals := Globals origAvailablePlugins := make(map[string]func() interface{}) for k, v := range AvailablePlugins { origAvailablePlugins[k] = v } pipeConfig := NewPipelineConfig(nil) defer func() { Globals = origGlobals AvailablePlugins = origAvailablePlugins }() c.Assume(pipeConfig, gs.Not(gs.IsNil)) c.Specify("Config file loading", func() { c.Specify("works w/ good config file", func() { err := pipeConfig.LoadFromConfigFile("./testsupport/config_test.toml") c.Assume(err, gs.IsNil) // We use a set of Expect's rather than c.Specify because the // pipeConfig can't be re-loaded per child as gospec will do // since each one needs to bind to the same address // and the inputs section loads properly with a custom name udp, ok := pipeConfig.InputRunners["UdpInput"] c.Expect(ok, gs.Equals, true) // and the decoders sections load _, ok = pipeConfig.DecoderWrappers["JsonDecoder"] c.Expect(ok, gs.Equals, false) _, ok = pipeConfig.DecoderWrappers["ProtobufDecoder"] c.Expect(ok, gs.Equals, true) // and the outputs section loads _, ok = pipeConfig.OutputRunners["LogOutput"] c.Expect(ok, gs.Equals, true) // and the filters sections loads _, ok = pipeConfig.FilterRunners["sample"] c.Expect(ok, gs.Equals, true) // Shut down UdpInput to free up the port for future tests. udp.Input().Stop() }) c.Specify("works w/ decoder defaults", func() { err := pipeConfig.LoadFromConfigFile("./testsupport/config_test_defaults.toml") c.Assume(err, gs.Not(gs.IsNil)) // Only the ProtobufDecoder is loaded c.Expect(len(pipeConfig.DecoderWrappers), gs.Equals, 1) }) c.Specify("works w/ MultiDecoder", func() { err := pipeConfig.LoadFromConfigFile("./testsupport/config_test_multidecoder.toml") c.Assume(err, gs.IsNil) hasSyncDecoder := false // ProtobufDecoder will always be loaded c.Assume(len(pipeConfig.DecoderWrappers), gs.Equals, 2) // Check that the MultiDecoder actually loaded for k, _ := range pipeConfig.DecoderWrappers { if k == "syncdecoder" { hasSyncDecoder = true break } } c.Assume(hasSyncDecoder, gs.IsTrue) }) c.Specify("explodes w/ bad config file", func() { err := pipeConfig.LoadFromConfigFile("./testsupport/config_bad_test.toml") c.Assume(err, gs.Not(gs.IsNil)) c.Expect(err.Error(), ts.StringContains, "2 errors loading plugins") c.Expect(pipeConfig.LogMsgs, gs.ContainsAny, gs.Values("No such plugin: CounterOutput")) }) c.Specify("handles missing config file correctly", func() { err := pipeConfig.LoadFromConfigFile("no_such_file.toml") c.Assume(err, gs.Not(gs.IsNil)) if runtime.GOOS == "windows" { c.Expect(err.Error(), ts.StringContains, "open no_such_file.toml: The system cannot find the file specified.") } else { c.Expect(err.Error(), ts.StringContains, "open no_such_file.toml: no such file or directory") } }) c.Specify("errors correctly w/ bad outputs config", func() { err := pipeConfig.LoadFromConfigFile("./testsupport/config_bad_outputs.toml") c.Assume(err, gs.Not(gs.IsNil)) c.Expect(err.Error(), ts.StringContains, "1 errors loading plugins") msg := pipeConfig.LogMsgs[0] c.Expect(msg, ts.StringContains, "No such plugin") }) c.Specify("for a DefaultsTestOutput", func() { RegisterPlugin("DefaultsTestOutput", func() interface{} { return new(DefaultsTestOutput) }) err := pipeConfig.LoadFromConfigFile("./testsupport/config_test_defaults2.toml") c.Expect(err, gs.IsNil) runner, ok := pipeConfig.OutputRunners["DefaultsTestOutput"] c.Expect(ok, gs.IsTrue) ticker := runner.Ticker() c.Expect(ticker, gs.Not(gs.IsNil)) matcher := runner.MatchRunner().MatcherSpecification().String() 
c.Expect(matcher, gs.Equals, messageMatchStr) }) c.Specify("can render JSON reports as pipe delimited data", func() { RegisterPlugin("DefaultsTestOutput", func() interface{} { return new(DefaultsTestOutput) }) err := pipeConfig.LoadFromConfigFile("./testsupport/config_test_defaults2.toml") c.Expect(err, gs.IsNil) data := `{"globals":[{"Name":"inputRecycleChan","InChanCapacity":{"value":"100", "representation":"count"},"InChanLength":{"value":"99", "representation":"count"}},{"Name":"injectRecycleChan","InChanCapacity":{"value":"100", "representation":"count"},"InChanLength":{"value":"98", "representation":"count"}},{"Name":"Router","InChanCapacity":{"value":"50", "representation":"count"},"InChanLength":{"value":"0", "representation":"count"},"ProcessMessageCount":{"value":"26", "representation":"count"}}], "inputs": [{"Name": "TcpInput"}], "decoders": [{"Name":"ProtobufDecoder","InChanCapacity":{"value":"50", "representation":"count"},"InChanLength":{"value":"0", "representation":"count"}}], "filters": [{"Name":"OpsSandboxManager","RunningFilters":{"value":"0", "representation":"count"},"ProcessMessageCount":{"value":"0", "representation":"count"},"InChanCapacity":{"value":"50", "representation":"count"},"InChanLength":{"value":"0", "representation":"count"},"MatchChanCapacity":{"value":"50", "representation":"count"},"MatchChanLength":{"value":"0", "representation":"count"},"MatchAvgDuration":{"value":"0", "representation":"ns"}},{"Name":"hekabench_counter","Memory":{"value":"20644", "representation":"B"},"MaxMemory":{"value":"20644", "representation":"B"},"MaxInstructions":{"value":"18", "representation":"count"},"MaxOutput":{"value":"0", "representation":"B"},"ProcessMessageCount":{"value":"0", "representation":"count"},"InjectMessageCount":{"value":"0", "representation":"count"},"ProcessMessageAvgDuration":{"value":"0", "representation":"ns"},"TimerEventAvgDuration":{"value":"78532", "representation":"ns"},"InChanCapacity":{"value":"50", "representation":"count"},"InChanLength":{"value":"0", "representation":"count"},"MatchChanCapacity":{"value":"50", "representation":"count"},"MatchChanLength":{"value":"0", "representation":"count"},"MatchAvgDuration":{"value":"445", "representation":"ns"}}], "outputs": [{"Name":"LogOutput","InChanCapacity":{"value":"50", "representation":"count"},"InChanLength":{"value":"0", "representation":"count"},"MatchChanCapacity":{"value":"50", "representation":"count"},"MatchChanLength":{"value":"0", "representation":"count"},"MatchAvgDuration":{"value":"406", "representation":"ns"}},{"Name":"DashboardOutput","InChanCapacity":{"value":"50", "representation":"count"},"InChanLength":{"value":"0", "representation":"count"},"MatchChanCapacity":{"value":"50", "representation":"count"},"MatchChanLength":{"value":"0", "representation":"count"},"MatchAvgDuration":{"value":"336", "representation":"ns"}}]} ` report := pipeConfig.FormatTextReport("heka.all-report", data) expected := `========[heka.all-report]======== ====Globals==== inputRecycleChan: InChanCapacity: 100 InChanLength: 99 injectRecycleChan: InChanCapacity: 100 InChanLength: 98 Router: InChanCapacity: 50 InChanLength: 0 ProcessMessageCount: 26 ====Inputs==== TcpInput: ====Decoders==== ProtobufDecoder: InChanCapacity: 50 InChanLength: 0 ====Filters==== OpsSandboxManager: InChanCapacity: 50 InChanLength: 0 MatchChanCapacity: 50 MatchChanLength: 0 MatchAvgDuration: 0 ProcessMessageCount: 0 hekabench_counter: InChanCapacity: 50 InChanLength: 0 MatchChanCapacity: 50 MatchChanLength: 0 MatchAvgDuration: 445 
ProcessMessageCount: 0 InjectMessageCount: 0 Memory: 20644 MaxMemory: 20644 MaxInstructions: 18 MaxOutput: 0 ProcessMessageAvgDuration: 0 TimerEventAvgDuration: 78532 ====Outputs==== LogOutput: InChanCapacity: 50 InChanLength: 0 MatchChanCapacity: 50 MatchChanLength: 0 MatchAvgDuration: 406 DashboardOutput: InChanCapacity: 50 InChanLength: 0 MatchChanCapacity: 50 MatchChanLength: 0 MatchAvgDuration: 336 ======== ` c.Expect(report, gs.Equals, expected) }) c.Specify("works w/ bad param config file", func() { err := pipeConfig.LoadFromConfigFile("./testsupport/config_bad_params.toml") c.Assume(err, gs.Not(gs.IsNil)) }) c.Specify("works w/ common parameters that are not part of the struct", func() { err := pipeConfig.LoadFromConfigFile("./testsupport/config_test_common.toml") c.Assume(err, gs.IsNil) }) }) c.Specify("Config directory helpers", func() { Globals().BaseDir = "/base/dir" Globals().ShareDir = "/share/dir" c.Specify("PrependBaseDir", func() { c.Specify("prepends for relative paths", func() { dir := filepath.FromSlash("relative/path") result := PrependBaseDir(dir) c.Expect(result, gs.Equals, filepath.FromSlash("/base/dir/relative/path")) }) c.Specify("doesn't prepend for absolute paths", func() { dir := filepath.FromSlash("/absolute/path") result := PrependBaseDir(dir) c.Expect(result, gs.Equals, dir) }) }) c.Specify("PrependShareDir", func() { c.Specify("prepends for relative paths", func() { dir := filepath.FromSlash("relative/path") result := PrependShareDir(dir) c.Expect(result, gs.Equals, filepath.FromSlash("/share/dir/relative/path")) }) c.Specify("doesn't prepend for absolute paths", func() { dir := filepath.FromSlash("/absolute/path") result := PrependShareDir(dir) c.Expect(result, gs.Equals, dir) }) }) }) c.Specify("PluginHelper", func() { c.Specify("starts and stops DecoderRunners appropriately", func() { err := pipeConfig.LoadFromConfigFile("./testsupport/config_test.toml") c.Assume(err, gs.IsNil) // Start two DecoderRunners. dr1, ok := pipeConfig.DecoderRunner("ProtobufDecoder", "ProtobufDecoder_1") c.Expect(ok, gs.IsTrue) dr2, ok := pipeConfig.DecoderRunner("ProtobufDecoder", "ProtobufDecoder_2") c.Expect(ok, gs.IsTrue) // Stop the second one. ok = pipeConfig.StopDecoderRunner(dr2) c.Expect(ok, gs.IsTrue) // Verify that it's stopped, i.e. InChan is closed. _, ok = <-dr2.InChan() c.Expect(ok, gs.IsFalse) // Verify that dr1 is *not* stopped, i.e. InChan is still open. rChan := make(chan *PipelinePack, 1) pack := NewPipelinePack(rChan) dr1.InChan() <- pack // <-- Failure case means this will panic. // Try to stop dr2 again. Shouldn't fail, but ok should be false. ok = pipeConfig.StopDecoderRunner(dr2) c.Expect(ok, gs.IsFalse) // Clean up our UdpInput. udp, _ := pipeConfig.InputRunners["UdpInput"] udp.Input().Stop() }) }) }
func MatcherSpecificationSpec(c gospec.Context) { msg := getTestMessage() uuidStr := msg.GetUuidString() data := []byte("data") date := "Mon Jan 02 15:04:05 -0700 2006" field1, _ := NewField("bytes", data, "") field2, _ := NewField("int", int64(999), "") field2.AddValue(int64(1024)) field3, _ := NewField("double", float64(99.9), "") field4, _ := NewField("bool", true, "") field5, _ := NewField("foo", "alternate", "") field6, _ := NewField("Payload", "name=test;type=web;", "") field7, _ := NewField("Timestamp", date, "date-time") field8, _ := NewField("zero", int64(0), "") field9, _ := NewField("string", "43", "") msg.AddField(field1) msg.AddField(field2) msg.AddField(field3) msg.AddField(field4) msg.AddField(field5) msg.AddField(field6) msg.AddField(field7) msg.AddField(field8) msg.AddField(field9) c.Specify("A MatcherSpecification", func() { malformed := []string{ "", "bogus", "Type = 'test'", // invalid operator "Pid == 'test='", // Pid is not a string "Type == 'test' && (Severity==7 || Payload == 'Test Payload'", // missing paren "Invalid == 'bogus'", // unknown variable name "Fields[]", // empty name key "Fields[test][]", // empty field index "Fields[test][a]", // non numeric field index "Fields[test][0][]", // empty array index "Fields[test][0][a]", // non numeric array index "Fields[test][0][0][]", // extra index dimension "Fields[test][xxxx", // unmatched bracket "Pid =~ /6/", // regex not allowed on numeric "Pid !~ /6/", // regex not allowed on numeric "Type =~ /test", // unmatched slash "Type == /test/", // incorrect operator "Type =~ 'test'", // string instead of regexp "Type =~ /\\ytest/", // invalid escape character "Type != 'test\"", // mis matched quote types "Pid =~ 6", // number instead of regexp "NIL", // invalid use of constant "Type == NIL", // existence check only works on fields "Fields[test] > NIL", // existence check only works with equals and not equals } negative := []string{ "FALSE", "Type == 'test'&&(Severity==7||Payload=='Test Payload')", "EnvVersion == '0.9'", "EnvVersion != '0.8'", "EnvVersion > '0.9'", "EnvVersion >= '0.9'", "EnvVersion < '0.8'", "EnvVersion <= '0.7'", "Severity == 5", "Severity != 6", "Severity < 6", "Severity <= 5", "Severity > 6", "Severity >= 7", "Fields[foo] == 'ba'", "Fields[foo][1] == 'bar'", "Fields[foo][0][1] == 'bar'", "Fields[bool] == FALSE", "Type =~ /Test/", "Type !~ /TEST/", "Payload =~ /^Payload/", "Type == \"te'st\"", "Type == 'te\"st'", "Fields[int] =~ /999/", "Fields[zero] == \"0\"", "Fields[string] == 43", "Fields[int] == NIL", "Fields[int][0][1] == NIL", "Fields[missing] != NIL", "Type =~ /^te/", "Type =~ /st$/", "Type !~ /^TE/", "Type !~ /ST$/", "Logger =~ /./ && Type =~ /^anything/", } positive := []string{ "TRUE", "(Severity == 7 || Payload == 'Test Payload') && Type == 'TEST'", "EnvVersion == \"0.8\"", "EnvVersion == '0.8'", "EnvVersion != '0.9'", "EnvVersion > '0.7'", "EnvVersion >= '0.8'", "EnvVersion < '0.9'", "EnvVersion <= '0.8'", "Hostname != ''", "Logger == 'GoSpec'", "Pid != 0", "Severity != 5", "Severity < 7", "Severity <= 6", "Severity == 6", "Severity > 5", "Severity >= 6", "Timestamp > 0", "Type != 'test'", "Type == 'TEST' && Severity == 6", "Type == 'test' && Severity == 7 || Payload == 'Test Payload'", "Type == 'TEST'", "Type == 'foo' || Type == 'bar' || Type == 'TEST'", fmt.Sprintf("Uuid == '%s'", uuidStr), "Fields[foo] == 'bar'", "Fields[foo][0] == 'bar'", "Fields[foo][0][0] == 'bar'", "Fields[foo][1] == 'alternate'", "Fields[foo][1][0] == 'alternate'", "Fields[foo] == 'bar'", "Fields[bytes] == 'data'", 
"Fields[int] == 999", "Fields[int][0][1] == 1024", "Fields[double] == 99.9", "Fields[bool] == TRUE", "Type =~ /TEST/", "Type !~ /bogus/", "Type =~ /TEST/ && Payload =~ /Payload/", "Fields[foo][1] =~ /alt/", "Fields[Payload] =~ /name=\\w+/", "Type =~ /(ST)/", "Fields[int] != NIL", "Fields[int][0][1] != NIL", "Fields[int][0][2] == NIL", "Fields[missing] == NIL", "Type =~ /^TE/", "Type =~ /ST$/", "Type !~ /^te/", "Type !~ /st$/", } c.Specify("malformed matcher tests", func() { for _, v := range malformed { _, err := CreateMatcherSpecification(v) c.Expect(err, gs.Not(gs.IsNil)) } }) c.Specify("negative matcher tests", func() { for _, v := range negative { ms, err := CreateMatcherSpecification(v) c.Expect(err, gs.IsNil) match := ms.Match(msg) c.Expect(match, gs.IsFalse) } }) c.Specify("positive matcher tests", func() { for _, v := range positive { ms, err := CreateMatcherSpecification(v) c.Expect(err, gs.IsNil) match := ms.Match(msg) c.Expect(match, gs.IsTrue) } }) }) }
func compareCaptures(c gospec.Context, m1, m2 map[string]string) {
	for k, v := range m1 {
		v1 := m2[k]
		c.Expect(v, gs.Equals, v1)
	}
}
func DecoderSpec(c gs.Context) { t := new(ts.SimpleT) ctrl := gomock.NewController(t) defer ctrl.Finish() pConfig := pipeline.NewPipelineConfig(nil) c.Specify("A SandboxDecoder", func() { decoder := new(SandboxDecoder) decoder.SetPipelineConfig(pConfig) conf := decoder.ConfigStruct().(*sandbox.SandboxConfig) supply := make(chan *pipeline.PipelinePack, 1) pack := pipeline.NewPipelinePack(supply) dRunner := pm.NewMockDecoderRunner(ctrl) c.Specify("that uses lpeg and inject_message", func() { dRunner.EXPECT().Name().Return("serialize") conf.ScriptFilename = "../lua/testsupport/decoder.lua" err := decoder.Init(conf) c.Assume(err, gs.IsNil) c.Specify("decodes simple messages", func() { data := "1376389920 debug id=2321 url=example.com item=1" decoder.SetDecoderRunner(dRunner) pack.Message.SetPayload(data) _, err = decoder.Decode(pack) c.Assume(err, gs.IsNil) c.Expect(pack.Message.GetTimestamp(), gs.Equals, int64(1376389920000000000)) c.Expect(pack.Message.GetSeverity(), gs.Equals, int32(7)) var ok bool var value interface{} value, ok = pack.Message.GetFieldValue("id") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, "2321") value, ok = pack.Message.GetFieldValue("url") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, "example.com") value, ok = pack.Message.GetFieldValue("item") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, "1") decoder.Shutdown() }) c.Specify("decodes an invalid messages", func() { data := "1376389920 bogus id=2321 url=example.com item=1" decoder.SetDecoderRunner(dRunner) pack.Message.SetPayload(data) packs, err := decoder.Decode(pack) c.Expect(len(packs), gs.Equals, 0) c.Expect(err.Error(), gs.Equals, "Failed parsing: "+data) c.Expect(decoder.processMessageFailures, gs.Equals, int64(1)) decoder.Shutdown() }) c.Specify("Preserves data", func() { conf.ScriptFilename = "../lua/testsupport/serialize.lua" conf.PreserveData = true err := decoder.Init(conf) c.Assume(err, gs.IsNil) decoder.SetDecoderRunner(dRunner) decoder.Shutdown() _, err = os.Stat("sandbox_preservation/serialize.data") c.Expect(err, gs.IsNil) err = os.Remove("sandbox_preservation/serialize.data") c.Expect(err, gs.IsNil) }) }) c.Specify("that only uses write_message", func() { conf.ScriptFilename = "../lua/testsupport/write_message_decoder.lua" dRunner.EXPECT().Name().Return("write_message") err := decoder.Init(conf) decoder.SetDecoderRunner(dRunner) c.Assume(err, gs.IsNil) c.Specify("adds a string field to the message", func() { data := "string field scribble" pack.Message.SetPayload(data) packs, err := decoder.Decode(pack) c.Expect(err, gs.IsNil) c.Expect(len(packs), gs.Equals, 1) c.Expect(packs[0], gs.Equals, pack) value, ok := pack.Message.GetFieldValue("scribble") c.Expect(ok, gs.IsTrue) c.Expect(value.(string), gs.Equals, "foo") }) c.Specify("adds a numeric field to the message", func() { data := "num field scribble" pack.Message.SetPayload(data) packs, err := decoder.Decode(pack) c.Expect(err, gs.IsNil) c.Expect(len(packs), gs.Equals, 1) c.Expect(packs[0], gs.Equals, pack) value, ok := pack.Message.GetFieldValue("scribble") c.Expect(ok, gs.IsTrue) c.Expect(value.(float64), gs.Equals, float64(1)) }) c.Specify("adds a boolean field to the message", func() { data := "bool field scribble" pack.Message.SetPayload(data) packs, err := decoder.Decode(pack) c.Expect(err, gs.IsNil) c.Expect(len(packs), gs.Equals, 1) c.Expect(packs[0], gs.Equals, pack) value, ok := pack.Message.GetFieldValue("scribble") c.Expect(ok, gs.IsTrue) c.Expect(value.(bool), gs.Equals, true) }) c.Specify("sets type and 
payload", func() { data := "set type and payload" pack.Message.SetPayload(data) packs, err := decoder.Decode(pack) c.Expect(err, gs.IsNil) c.Expect(len(packs), gs.Equals, 1) c.Expect(packs[0], gs.Equals, pack) c.Expect(pack.Message.GetType(), gs.Equals, "my_type") c.Expect(pack.Message.GetPayload(), gs.Equals, "my_payload") }) c.Specify("sets field value with representation", func() { data := "set field value with representation" pack.Message.SetPayload(data) packs, err := decoder.Decode(pack) c.Expect(err, gs.IsNil) c.Expect(len(packs), gs.Equals, 1) c.Expect(packs[0], gs.Equals, pack) fields := pack.Message.FindAllFields("rep") c.Expect(len(fields), gs.Equals, 1) field := fields[0] values := field.GetValueString() c.Expect(len(values), gs.Equals, 1) c.Expect(values[0], gs.Equals, "foo") c.Expect(field.GetRepresentation(), gs.Equals, "representation") }) c.Specify("sets multiple field string values", func() { data := "set multiple field string values" pack.Message.SetPayload(data) packs, err := decoder.Decode(pack) c.Expect(err, gs.IsNil) c.Expect(len(packs), gs.Equals, 1) c.Expect(packs[0], gs.Equals, pack) fields := pack.Message.FindAllFields("multi") c.Expect(len(fields), gs.Equals, 2) values := fields[0].GetValueString() c.Expect(len(values), gs.Equals, 1) c.Expect(values[0], gs.Equals, "first") values = fields[1].GetValueString() c.Expect(len(values), gs.Equals, 1) c.Expect(values[0], gs.Equals, "second") }) c.Specify("sets field string array value", func() { data := "set field string array value" pack.Message.SetPayload(data) packs, err := decoder.Decode(pack) c.Expect(err, gs.IsNil) c.Expect(len(packs), gs.Equals, 1) c.Expect(packs[0], gs.Equals, pack) fields := pack.Message.FindAllFields("array") c.Expect(len(fields), gs.Equals, 1) values := fields[0].GetValueString() c.Expect(len(values), gs.Equals, 2) c.Expect(values[0], gs.Equals, "first") c.Expect(values[1], gs.Equals, "second") }) }) }) c.Specify("A Multipack SandboxDecoder", func() { decoder := new(SandboxDecoder) decoder.SetPipelineConfig(pConfig) conf := decoder.ConfigStruct().(*sandbox.SandboxConfig) conf.ScriptFilename = "../lua/testsupport/multipack_decoder.lua" supply := make(chan *pipeline.PipelinePack, 3) pack := pipeline.NewPipelinePack(supply) pack.Message = getTestMessage() pack1 := pipeline.NewPipelinePack(supply) pack2 := pipeline.NewPipelinePack(supply) dRunner := pm.NewMockDecoderRunner(ctrl) dRunner.EXPECT().Name().Return("SandboxDecoder") c.Specify("decodes into multiple packs", func() { err := decoder.Init(conf) c.Assume(err, gs.IsNil) decoder.SetDecoderRunner(dRunner) gomock.InOrder( dRunner.EXPECT().NewPack().Return(pack1), dRunner.EXPECT().NewPack().Return(pack2), ) packs, err := decoder.Decode(pack) c.Expect(len(packs), gs.Equals, 3) c.Expect(packs[0].Message.GetPayload(), gs.Equals, "message one") c.Expect(packs[1].Message.GetPayload(), gs.Equals, "message two") c.Expect(packs[2].Message.GetPayload(), gs.Equals, "message three") for i := 0; i < 1; i++ { c.Expect(packs[i].Message.GetType(), gs.Equals, "TEST") c.Expect(packs[i].Message.GetHostname(), gs.Equals, "my.host.name") c.Expect(packs[i].Message.GetLogger(), gs.Equals, "GoSpec") c.Expect(packs[i].Message.GetSeverity(), gs.Equals, int32(6)) } decoder.Shutdown() }) }) c.Specify("Linux Cpu Stats decoder", func() { decoder := new(SandboxDecoder) decoder.SetPipelineConfig(pConfig) conf := decoder.ConfigStruct().(*sandbox.SandboxConfig) conf.ScriptFilename = "../lua/decoders/linux_loadavg.lua" conf.ModuleDirectory = "../../../../../../modules" 
conf.MemoryLimit = 8e6 conf.Config = make(map[string]interface{}) supply := make(chan *pipeline.PipelinePack, 1) pack := pipeline.NewPipelinePack(supply) dRunner := pm.NewMockDecoderRunner(ctrl) dRunner.EXPECT().Name().Return("SandboxDecoder") err := decoder.Init(conf) c.Assume(err, gs.IsNil) decoder.SetDecoderRunner(dRunner) c.Specify("decodes a message", func() { payload := "0.00 0.01 0.05 3/153 660\n" pack.Message.SetPayload(payload) f, err := message.NewField("FilePath", "/proc/loadavg", "") c.Assume(err, gs.IsNil) pack.Message.AddField(f) _, err = decoder.Decode(pack) c.Assume(err, gs.IsNil) c.Expect(pack.Message.GetSeverity(), gs.Equals, int32(7)) var ok bool var value interface{} value, ok = pack.Message.GetFieldValue("1MinAvg") c.Expect(ok, gs.IsTrue) c.Expect(value, gs.Equals, 0.00) value, ok = pack.Message.GetFieldValue("5MinAvg") c.Expect(ok, gs.IsTrue) c.Expect(value, gs.Equals, 0.01) value, ok = pack.Message.GetFieldValue("15MinAvg") c.Expect(ok, gs.IsTrue) c.Expect(value, gs.Equals, 0.05) value, ok = pack.Message.GetFieldValue("NumProcesses") c.Expect(ok, gs.IsTrue) c.Expect(value, gs.Equals, float64(3)) value, ok = pack.Message.GetFieldValue("FilePath") c.Expect(ok, gs.IsTrue) c.Expect(value, gs.Equals, "/proc/loadavg") }) c.Specify("decodes an invalid message", func() { data := "bogus message" pack.Message.SetPayload(data) packs, err := decoder.Decode(pack) c.Expect(len(packs), gs.Equals, 0) c.Expect(err.Error(), gs.Equals, "Failed parsing: "+data) c.Expect(decoder.processMessageFailures, gs.Equals, int64(1)) decoder.Shutdown() }) }) c.Specify("Linux Mem Stats decoder", func() { decoder := new(SandboxDecoder) decoder.SetPipelineConfig(pConfig) conf := decoder.ConfigStruct().(*sandbox.SandboxConfig) conf.ScriptFilename = "../lua/decoders/linux_memstats.lua" conf.ModuleDirectory = "../../../../../../modules" conf.MemoryLimit = 8e6 conf.Config = make(map[string]interface{}) supply := make(chan *pipeline.PipelinePack, 1) pack := pipeline.NewPipelinePack(supply) dRunner := pm.NewMockDecoderRunner(ctrl) dRunner.EXPECT().Name().Return("SandboxDecoder") err := decoder.Init(conf) c.Assume(err, gs.IsNil) decoder.SetDecoderRunner(dRunner) c.Specify("decodes a message", func() { payload := `MemTotal: 4047616 kB MemFree: 3135780 kB HugePages_Free: 0 ` pack.Message.SetPayload(payload) f, err := message.NewField("FilePath", "/proc/meminfo", "") c.Assume(err, gs.IsNil) pack.Message.AddField(f) _, err = decoder.Decode(pack) c.Assume(err, gs.IsNil) c.Expect(pack.Message.GetSeverity(), gs.Equals, int32(7)) var ok bool var value interface{} value, ok = pack.Message.GetFieldValue("MemTotal") c.Expect(ok, gs.IsTrue) c.Expect(value, gs.Equals, 4.047616e+06) value, ok = pack.Message.GetFieldValue("MemFree") c.Expect(ok, gs.IsTrue) c.Expect(value, gs.Equals, 3.13578e+06) value, ok = pack.Message.GetFieldValue("HugePages_Free") c.Expect(ok, gs.IsTrue) c.Expect(value, gs.Equals, float64(0)) value, ok = pack.Message.GetFieldValue("FilePath") c.Expect(ok, gs.IsTrue) c.Expect(value, gs.Equals, "/proc/meminfo") }) c.Specify("decodes an invalid message", func() { data := "bogus message" pack.Message.SetPayload(data) packs, err := decoder.Decode(pack) c.Expect(len(packs), gs.Equals, 0) c.Expect(err.Error(), gs.Equals, "Failed parsing: "+data) c.Expect(decoder.processMessageFailures, gs.Equals, int64(1)) decoder.Shutdown() }) }) c.Specify("Linux Disk Stats decoder", func() { decoder := new(SandboxDecoder) decoder.SetPipelineConfig(pConfig) conf := decoder.ConfigStruct().(*sandbox.SandboxConfig) 
conf.ScriptFilename = "../lua/decoders/linux_diskstats.lua" conf.ModuleDirectory = "../../../../../../modules" conf.MemoryLimit = 8e6 conf.Config = make(map[string]interface{}) supply := make(chan *pipeline.PipelinePack, 1) pack := pipeline.NewPipelinePack(supply) dRunner := pm.NewMockDecoderRunner(ctrl) dRunner.EXPECT().Name().Return("SandboxDecoder") err := decoder.Init(conf) c.Assume(err, gs.IsNil) decoder.SetDecoderRunner(dRunner) c.Specify("decodes a message", func() { payload := " 13903 11393 969224 49444 10780 10161 1511920 4104 0 5064 53468\n" pack.Message.SetPayload(payload) f, err := message.NewField("FilePath", "/sys/block/sda/stat", "") c.Assume(err, gs.IsNil) pack.Message.AddField(f) f, err = message.NewField("TickerInterval", int64(2), "") c.Assume(err, gs.IsNil) pack.Message.AddField(f) _, err = decoder.Decode(pack) c.Assume(err, gs.IsNil) c.Expect(pack.Message.GetSeverity(), gs.Equals, int32(7)) var ok bool var value interface{} // These are in the same order the payload should be value, ok = pack.Message.GetFieldValue("ReadsCompleted") c.Expect(ok, gs.IsTrue) c.Expect(value, gs.Equals, float64(13903)) value, ok = pack.Message.GetFieldValue("ReadsMerged") c.Expect(ok, gs.IsTrue) c.Expect(value, gs.Equals, float64(11393)) value, ok = pack.Message.GetFieldValue("SectorsRead") c.Expect(ok, gs.IsTrue) c.Expect(value, gs.Equals, float64(969224)) value, ok = pack.Message.GetFieldValue("TimeReading") c.Expect(ok, gs.IsTrue) c.Expect(value, gs.Equals, float64(49444)) value, ok = pack.Message.GetFieldValue("WritesCompleted") c.Expect(ok, gs.IsTrue) c.Expect(value, gs.Equals, float64(10780)) value, ok = pack.Message.GetFieldValue("WritesMerged") c.Expect(ok, gs.IsTrue) c.Expect(value, gs.Equals, float64(10161)) value, ok = pack.Message.GetFieldValue("SectorsWritten") c.Expect(ok, gs.IsTrue) c.Expect(value, gs.Equals, float64(1511920)) value, ok = pack.Message.GetFieldValue("TimeWriting") c.Expect(ok, gs.IsTrue) c.Expect(value, gs.Equals, float64(4104)) value, ok = pack.Message.GetFieldValue("NumIOInProgress") c.Expect(ok, gs.IsTrue) c.Expect(value, gs.Equals, float64(0)) value, ok = pack.Message.GetFieldValue("TimeDoingIO") c.Expect(ok, gs.IsTrue) c.Expect(value, gs.Equals, float64(5064)) value, ok = pack.Message.GetFieldValue("WeightedTimeDoingIO") c.Expect(ok, gs.IsTrue) c.Expect(value, gs.Equals, float64(53468)) value, ok = pack.Message.GetFieldValue("TickerInterval") c.Expect(ok, gs.IsTrue) c.Expect(value, gs.Equals, float64(2)) value, ok = pack.Message.GetFieldValue("FilePath") c.Expect(ok, gs.IsTrue) c.Expect(value, gs.Equals, "/sys/block/sda/stat") }) c.Specify("decodes a message with no leading space", func() { payload := "19092852 0 510563170 15817012 46452019 0 1546950712 262535124 0 23823976 278362684\n" pack.Message.SetPayload(payload) _, err = decoder.Decode(pack) c.Assume(err, gs.IsNil) value, ok := pack.Message.GetFieldValue("ReadsCompleted") c.Expect(ok, gs.IsTrue) c.Expect(value, gs.Equals, float64(19092852)) }) c.Specify("decodes an invalid message", func() { data := "bogus message" pack.Message.SetPayload(data) packs, err := decoder.Decode(pack) c.Expect(len(packs), gs.Equals, 0) c.Expect(err.Error(), gs.Equals, "Failed parsing: "+data) c.Expect(decoder.processMessageFailures, gs.Equals, int64(1)) decoder.Shutdown() }) }) c.Specify("Nginx access log decoder", func() { decoder := new(SandboxDecoder) decoder.SetPipelineConfig(pConfig) conf := decoder.ConfigStruct().(*sandbox.SandboxConfig) conf.ScriptFilename = "../lua/decoders/nginx_access.lua" conf.ModuleDirectory 
= "../../../../../../modules" conf.MemoryLimit = 8e6 conf.Config = make(map[string]interface{}) conf.Config["log_format"] = "$remote_addr - $remote_user [$time_local] \"$request\" $status $body_bytes_sent \"$http_referer\" \"$http_user_agent\"" conf.Config["user_agent_transform"] = true supply := make(chan *pipeline.PipelinePack, 1) pack := pipeline.NewPipelinePack(supply) dRunner := pm.NewMockDecoderRunner(ctrl) dRunner.EXPECT().Name().Return("SandboxDecoder") err := decoder.Init(conf) c.Assume(err, gs.IsNil) decoder.SetDecoderRunner(dRunner) c.Specify("decodes simple messages", func() { data := "127.0.0.1 - - [10/Feb/2014:08:46:41 -0800] \"GET / HTTP/1.1\" 304 0 \"-\" \"Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:26.0) Gecko/20100101 Firefox/26.0\"" pack.Message.SetPayload(data) _, err = decoder.Decode(pack) c.Assume(err, gs.IsNil) c.Expect(pack.Message.GetTimestamp(), gs.Equals, int64(1392050801000000000)) c.Expect(pack.Message.GetSeverity(), gs.Equals, int32(7)) var ok bool var value interface{} value, ok = pack.Message.GetFieldValue("remote_addr") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, "127.0.0.1") value, ok = pack.Message.GetFieldValue("user_agent_browser") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, "Firefox") value, ok = pack.Message.GetFieldValue("user_agent_version") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, float64(26)) value, ok = pack.Message.GetFieldValue("user_agent_os") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, "Linux") _, ok = pack.Message.GetFieldValue("http_user_agent") c.Expect(ok, gs.Equals, false) value, ok = pack.Message.GetFieldValue("body_bytes_sent") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, float64(0)) value, ok = pack.Message.GetFieldValue("status") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, float64(304)) decoder.Shutdown() }) c.Specify("decodes an invalid messages", func() { data := "bogus message" pack.Message.SetPayload(data) packs, err := decoder.Decode(pack) c.Expect(len(packs), gs.Equals, 0) c.Expect(err.Error(), gs.Equals, "Failed parsing: "+data) c.Expect(decoder.processMessageFailures, gs.Equals, int64(1)) decoder.Shutdown() }) }) c.Specify("Apache access log decoder", func() { decoder := new(SandboxDecoder) decoder.SetPipelineConfig(pConfig) conf := decoder.ConfigStruct().(*sandbox.SandboxConfig) conf.ScriptFilename = "../lua/decoders/apache_access.lua" conf.ModuleDirectory = "../../../../../../modules" conf.MemoryLimit = 8e6 conf.Config = make(map[string]interface{}) conf.Config["log_format"] = "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\"" conf.Config["user_agent_transform"] = true supply := make(chan *pipeline.PipelinePack, 1) pack := pipeline.NewPipelinePack(supply) dRunner := pm.NewMockDecoderRunner(ctrl) dRunner.EXPECT().Name().Return("SandboxDecoder") err := decoder.Init(conf) c.Assume(err, gs.IsNil) decoder.SetDecoderRunner(dRunner) c.Specify("decodes simple messages", func() { data := "127.0.0.1 - - [10/Feb/2014:08:46:41 -0800] \"GET / HTTP/1.1\" 304 0 \"-\" \"Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:26.0) Gecko/20100101 Firefox/26.0\"" pack.Message.SetPayload(data) _, err = decoder.Decode(pack) c.Assume(err, gs.IsNil) c.Expect(pack.Message.GetTimestamp(), gs.Equals, int64(1392050801000000000)) c.Expect(pack.Message.GetSeverity(), gs.Equals, int32(7)) var ok bool var value interface{} value, ok = pack.Message.GetFieldValue("remote_addr") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, "127.0.0.1") value, ok = 
pack.Message.GetFieldValue("user_agent_browser") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, "Firefox") value, ok = pack.Message.GetFieldValue("user_agent_version") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, float64(26)) value, ok = pack.Message.GetFieldValue("user_agent_os") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, "Linux") _, ok = pack.Message.GetFieldValue("http_user_agent") c.Expect(ok, gs.Equals, false) value, ok = pack.Message.GetFieldValue("body_bytes_sent") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, float64(0)) value, ok = pack.Message.GetFieldValue("status") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, float64(304)) decoder.Shutdown() }) c.Specify("decodes an invalid messages", func() { data := "bogus message" pack.Message.SetPayload(data) packs, err := decoder.Decode(pack) c.Expect(len(packs), gs.Equals, 0) c.Expect(err.Error(), gs.Equals, "Failed parsing: "+data) c.Expect(decoder.processMessageFailures, gs.Equals, int64(1)) decoder.Shutdown() }) }) c.Specify("rsyslog decoder", func() { decoder := new(SandboxDecoder) decoder.SetPipelineConfig(pConfig) conf := decoder.ConfigStruct().(*sandbox.SandboxConfig) conf.ScriptFilename = "../lua/decoders/rsyslog.lua" conf.ModuleDirectory = "../../../../../../modules" conf.MemoryLimit = 8e6 conf.Config = make(map[string]interface{}) conf.Config["type"] = "MyTestFormat" conf.Config["template"] = "%pri% %TIMESTAMP% %TIMEGENERATED:::date-rfc3339% %HOSTNAME% %syslogtag%%msg:::sp-if-no-1st-sp%%msg:::drop-last-lf%\n" conf.Config["tz"] = "America/Los_Angeles" supply := make(chan *pipeline.PipelinePack, 1) pack := pipeline.NewPipelinePack(supply) dRunner := pm.NewMockDecoderRunner(ctrl) dRunner.EXPECT().Name().Return("SandboxDecoder") err := decoder.Init(conf) c.Assume(err, gs.IsNil) decoder.SetDecoderRunner(dRunner) c.Specify("decodes simple messages", func() { data := "28 Feb 10 12:58:58 2014-02-10T12:58:59-08:00 testhost widget[4322]: test message.\n" pack.Message.SetPayload(data) _, err = decoder.Decode(pack) c.Assume(err, gs.IsNil) // Syslog timestamp doesn't support year, so we have to calculate // it for the current year or else this test will fail every // January. 
year := time.Now().Year() tStr := fmt.Sprintf("%d Feb 10 12:58:58 -0800", year) t, err := time.Parse("2006 Jan 02 15:04:05 -0700", tStr) c.Assume(err, gs.IsNil) unixT := t.UnixNano() c.Expect(pack.Message.GetTimestamp(), gs.Equals, unixT) c.Expect(pack.Message.GetSeverity(), gs.Equals, int32(4)) c.Expect(pack.Message.GetHostname(), gs.Equals, "testhost") c.Expect(pack.Message.GetPid(), gs.Equals, int32(4322)) c.Expect(pack.Message.GetPayload(), gs.Equals, "test message.") c.Expect(pack.Message.GetType(), gs.Equals, conf.Config["type"]) var ok bool var value interface{} value, ok = pack.Message.GetFieldValue("programname") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, "widget") value, ok = pack.Message.GetFieldValue("syslogfacility") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, float64(3)) value, ok = pack.Message.GetFieldValue("timegenerated") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, float64(1392065939000000000)) decoder.Shutdown() }) c.Specify("decodes an invalid messages", func() { data := "bogus message" pack.Message.SetPayload(data) packs, err := decoder.Decode(pack) c.Expect(len(packs), gs.Equals, 0) c.Expect(err.Error(), gs.Equals, "Failed parsing: "+data) c.Expect(decoder.processMessageFailures, gs.Equals, int64(1)) decoder.Shutdown() }) }) c.Specify("mysql decoder", func() { decoder := new(SandboxDecoder) decoder.SetPipelineConfig(pConfig) conf := decoder.ConfigStruct().(*sandbox.SandboxConfig) conf.ScriptFilename = "../lua/decoders/mysql_slow_query.lua" conf.ModuleDirectory = "../../../../../../modules" conf.MemoryLimit = 8e6 conf.Config = make(map[string]interface{}) conf.Config["truncate_sql"] = int64(5) supply := make(chan *pipeline.PipelinePack, 1) pack := pipeline.NewPipelinePack(supply) dRunner := pm.NewMockDecoderRunner(ctrl) dRunner.EXPECT().Name().Return("SandboxDecoder") err := decoder.Init(conf) c.Assume(err, gs.IsNil) decoder.SetDecoderRunner(dRunner) c.Specify("decode standard slow query log", func() { data := `# User@Host: syncrw[syncrw] @ [127.0.0.1] # Query_time: 2.964652 Lock_time: 0.000050 Rows_sent: 251 Rows_examined: 9773 use widget; SET last_insert_id=999,insert_id=1000,timestamp=1399500744; # administrator command: do something /* [queryName=FIND_ITEMS] */ SELECT * FROM widget WHERE id = 10;` pack.Message.SetPayload(data) _, err = decoder.Decode(pack) c.Assume(err, gs.IsNil) c.Expect(pack.Message.GetTimestamp(), gs.Equals, int64(1399500744000000000)) c.Expect(pack.Message.GetPayload(), gs.Equals, "/* [q...") c.Expect(pack.Message.GetType(), gs.Equals, "mysql.slow-query") decoder.Shutdown() }) }) c.Specify("mariadb decoder", func() { decoder := new(SandboxDecoder) decoder.SetPipelineConfig(pConfig) conf := decoder.ConfigStruct().(*sandbox.SandboxConfig) conf.ScriptFilename = "../lua/decoders/mariadb_slow_query.lua" conf.ModuleDirectory = "../../../../../../modules" conf.MemoryLimit = 8e6 conf.Config = make(map[string]interface{}) conf.Config["truncate_sql"] = int64(5) supply := make(chan *pipeline.PipelinePack, 1) pack := pipeline.NewPipelinePack(supply) dRunner := pm.NewMockDecoderRunner(ctrl) dRunner.EXPECT().Name().Return("SandboxDecoder") err := decoder.Init(conf) c.Assume(err, gs.IsNil) decoder.SetDecoderRunner(dRunner) c.Specify("decode standard slow query log", func() { data := `# User@Host: syncrw[syncrw] @ [127.0.0.1] # Thread_id: 110804 Schema: weave0 QC_hit: No # Query_time: 1.178108 Lock_time: 0.000053 Rows_sent: 198 Rows_examined: 198 SET timestamp=1399500744; /* [queryName=FIND_ITEMS] */ SELECT * FROM widget 
WHERE id = 10;` pack.Message.SetPayload(data) _, err = decoder.Decode(pack) c.Assume(err, gs.IsNil) c.Expect(pack.Message.GetTimestamp(), gs.Equals, int64(1399500744000000000)) c.Expect(pack.Message.GetPayload(), gs.Equals, "/* [q...") c.Expect(pack.Message.GetType(), gs.Equals, "mariadb.slow-query") decoder.Shutdown() }) }) }
func LogstreamerInputSpec(c gs.Context) { t := &pipeline_ts.SimpleT{} ctrl := gomock.NewController(t) defer ctrl.Finish() here, _ := os.Getwd() dirPath := filepath.Join(here, "../../logstreamer", "testdir", "filehandling/subdir") tmpDir, tmpErr := ioutil.TempDir("", "hekad-tests") c.Expect(tmpErr, gs.Equals, nil) defer func() { tmpErr = os.RemoveAll(tmpDir) c.Expect(tmpErr, gs.IsNil) }() globals := DefaultGlobals() globals.BaseDir = tmpDir config := NewPipelineConfig(globals) ith := new(plugins_ts.InputTestHelper) ith.Msg = pipeline_ts.GetTestMessage() ith.Pack = NewPipelinePack(config.InputRecycleChan()) // Specify localhost, but we're not really going to use the network ith.AddrStr = "localhost:55565" ith.ResolvedAddrStr = "127.0.0.1:55565" // set up mock helper, decoder set, and packSupply channel ith.MockHelper = pipelinemock.NewMockPluginHelper(ctrl) ith.MockInputRunner = pipelinemock.NewMockInputRunner(ctrl) ith.Decoder = pipelinemock.NewMockDecoderRunner(ctrl) ith.PackSupply = make(chan *PipelinePack, 1) ith.DecodeChan = make(chan *PipelinePack) c.Specify("A LogstreamerInput", func() { lsInput := &LogstreamerInput{pConfig: config} lsiConfig := lsInput.ConfigStruct().(*LogstreamerInputConfig) lsiConfig.LogDirectory = dirPath lsiConfig.FileMatch = `file.log(\.?)(?P<Seq>\d+)?` lsiConfig.Differentiator = []string{"logfile"} lsiConfig.Priority = []string{"^Seq"} lsiConfig.Decoder = "decoder-name" c.Specify("w/ no translation map", func() { err := lsInput.Init(lsiConfig) c.Expect(err, gs.IsNil) c.Expect(len(lsInput.plugins), gs.Equals, 1) mockDecoderRunner := pipelinemock.NewMockDecoderRunner(ctrl) // Create pool of packs. numLines := 5 // # of lines in the log file we're parsing. packs := make([]*PipelinePack, numLines) ith.PackSupply = make(chan *PipelinePack, numLines) for i := 0; i < numLines; i++ { packs[i] = NewPipelinePack(ith.PackSupply) ith.PackSupply <- packs[i] } c.Specify("reads a log file", func() { // Expect InputRunner calls to get InChan and inject outgoing msgs ith.MockInputRunner.EXPECT().LogError(gomock.Any()).AnyTimes() ith.MockInputRunner.EXPECT().LogMessage(gomock.Any()).AnyTimes() ith.MockInputRunner.EXPECT().InChan().Return(ith.PackSupply).Times(numLines) // Expect calls to get decoder and decode each message. Since the // decoding is a no-op, the message payload will be the log file // line, unchanged. pbcall := ith.MockHelper.EXPECT().DecoderRunner(lsiConfig.Decoder, "-"+lsiConfig.Decoder) pbcall.Return(mockDecoderRunner, true) decodeCall := mockDecoderRunner.EXPECT().InChan().Times(numLines) decodeCall.Return(ith.DecodeChan) runOutChan := make(chan error, 1) go func() { err = lsInput.Run(ith.MockInputRunner, ith.MockHelper) runOutChan <- err }() d, _ := time.ParseDuration("5s") timeout := time.After(d) timed := false for x := 0; x < numLines; x++ { select { case <-ith.DecodeChan: case <-timeout: timed = true x += numLines } // Free up the scheduler while we wait for the log file lines // to be processed. 
runtime.Gosched() } lsInput.Stop() c.Expect(timed, gs.Equals, false) c.Expect(<-runOutChan, gs.Equals, nil) }) }) c.Specify("with a translation map", func() { lsiConfig.Translation = make(ls.SubmatchTranslationMap) lsiConfig.Translation["Seq"] = make(ls.MatchTranslationMap) c.Specify("allows len 1 translation map for 'missing'", func() { lsiConfig.Translation["Seq"]["missing"] = 9999 err := lsInput.Init(lsiConfig) c.Expect(err, gs.IsNil) }) c.Specify("doesn't allow len 1 map for other keys", func() { lsiConfig.Translation["Seq"]["missin"] = 9999 err := lsInput.Init(lsiConfig) c.Expect(err, gs.Not(gs.IsNil)) c.Expect(err.Error(), gs.Equals, "A translation map with one entry ('Seq') must be specifying a "+ "'missing' key.") }) }) }) }
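// Illustrative sketch only: shows what the FileMatch pattern used by the
// LogstreamerInput spec above captures. The "Seq" submatch is what the
// Priority ("^Seq") ordering and the Translation map's "missing" entry operate
// on; a file without a numeric suffix leaves Seq empty. The function name is
// hypothetical. imports: "fmt", "regexp".
func exampleFileMatchSeq() {
	re := regexp.MustCompile(`file.log(\.?)(?P<Seq>\d+)?`)
	for _, name := range []string{"file.log", "file.log.1", "file.log.2"} {
		m := re.FindStringSubmatch(name)
		seq := ""
		for i, sub := range re.SubexpNames() {
			if sub == "Seq" && i < len(m) {
				seq = m[i]
			}
		}
		fmt.Printf("%s -> Seq=%q\n", name, seq) // Seq="" for the bare file.log
	}
}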
func OutputsSpec(c gs.Context) { t := new(ts.SimpleT) ctrl := gomock.NewController(t) defer ctrl.Finish() c.Specify("A FileWriter", func() { fileWriter := new(FileWriter) tmpFileName := fmt.Sprintf("fileoutput-test-%d", time.Now().UnixNano()) tmpFilePath := fmt.Sprint(os.TempDir(), string(os.PathSeparator), tmpFileName) config := fileWriter.ConfigStruct().(*FileWriterConfig) config.Path = tmpFilePath msg := getTestMessage() pipelinePack := getTestPipelinePack() pipelinePack.Message = msg pipelinePack.Decoded = true stopAndDelete := func() { os.Remove(tmpFilePath) fileWriter.Event(STOP) } toString := func(outData interface{}) string { return string(*(outData.(*[]byte))) } c.Specify("makes a pointer to a byte slice", func() { outData := fileWriter.MakeOutData() _, ok := outData.(*[]byte) c.Expect(ok, gs.IsTrue) }) c.Specify("zeroes a byte slice", func() { outBytes := make([]byte, 0, 100) str := "This is a test" outBytes = append(outBytes, []byte(str)...) c.Expect(len(outBytes), gs.Equals, len(str)) fileWriter.ZeroOutData(&outBytes) c.Expect(len(outBytes), gs.Equals, 0) }) c.Specify("correctly formats text output", func() { _, err := fileWriter.Init(config) defer stopAndDelete() c.Assume(err, gs.IsNil) outData := fileWriter.MakeOutData() c.Specify("by default", func() { fileWriter.PrepOutData(pipelinePack, outData, nil) c.Expect(toString(outData), gs.Equals, msg.Payload+"\n") }) c.Specify("w/ a prepended timestamp when specified", func() { fileWriter.prefix_ts = true fileWriter.PrepOutData(pipelinePack, outData, nil) // Test will fail if date flips btn PrepOutData and todayStr // calculation... should be extremely rare. todayStr := time.Now().Format("[2006/Jan/02:") strContents := toString(outData) c.Expect(strContents, ts.StringContains, msg.Payload) c.Expect(strContents, ts.StringStartsWith, todayStr) }) }) c.Specify("correctly formats JSON output", func() { config.Format = "json" _, err := fileWriter.Init(config) defer stopAndDelete() c.Assume(err, gs.IsNil) outData := fileWriter.MakeOutData() c.Specify("when specified", func() { fileWriter.PrepOutData(pipelinePack, outData, nil) msgJson, err := json.Marshal(pipelinePack.Message) c.Assume(err, gs.IsNil) c.Expect(toString(outData), gs.Equals, string(msgJson)+"\n") }) c.Specify("and with a timestamp", func() { fileWriter.prefix_ts = true fileWriter.PrepOutData(pipelinePack, outData, nil) // Test will fail if date flips btn PrepOutData and todayStr // calculation... should be extremely rare. todayStr := time.Now().Format("[2006/Jan/02:") strContents := toString(outData) msgJson, err := json.Marshal(pipelinePack.Message) c.Assume(err, gs.IsNil) c.Expect(strContents, ts.StringContains, string(msgJson)+"\n") c.Expect(strContents, ts.StringStartsWith, todayStr) }) }) c.Specify("writes out to a file", func() { outData := fileWriter.MakeOutData() outBytes := outData.(*[]byte) outStr := "Write me out to the log file" *outBytes = append(*outBytes, []byte(outStr)...) 
c.Specify("with default settings", func() { _, err := fileWriter.Init(config) defer stopAndDelete() c.Assume(err, gs.IsNil) err = fileWriter.Batch(outData) c.Expect(err, gs.IsNil) err = fileWriter.Commit() c.Expect(err, gs.IsNil) tmpFile, err := os.Open(tmpFilePath) defer tmpFile.Close() c.Assume(err, gs.IsNil) contents, err := ioutil.ReadAll(tmpFile) c.Assume(err, gs.IsNil) c.Expect(string(contents), gs.Equals, outStr) }) c.Specify("honors different Perm settings", func() { config.Perm = 0600 _, err := fileWriter.Init(config) defer stopAndDelete() c.Assume(err, gs.IsNil) err = fileWriter.Batch(outData) c.Expect(err, gs.IsNil) err = fileWriter.Commit() c.Expect(err, gs.IsNil) tmpFile, err := os.Open(tmpFilePath) defer tmpFile.Close() c.Assume(err, gs.IsNil) fileInfo, err := tmpFile.Stat() c.Assume(err, gs.IsNil) fileMode := fileInfo.Mode() // 7 consecutive dashes implies no perms for group or other c.Expect(fileMode.String(), ts.StringContains, "-------") }) }) }) }
func DecoderSpec(c gs.Context) { t := new(ts.SimpleT) ctrl := gomock.NewController(t) defer ctrl.Finish() // NewPipelineConfig sets up Globals which is needed for the // pipeline.Prepend*Dir functions to not die during plugin Init(). _ = pipeline.NewPipelineConfig(nil) c.Specify("A SandboxDecoder", func() { decoder := new(SandboxDecoder) conf := decoder.ConfigStruct().(*sandbox.SandboxConfig) supply := make(chan *pipeline.PipelinePack, 1) pack := pipeline.NewPipelinePack(supply) dRunner := pm.NewMockDecoderRunner(ctrl) conf.ScriptType = "lua" c.Specify("that uses lpeg and inject_message", func() { dRunner.EXPECT().Name().Return("serialize") conf.ScriptFilename = "../lua/testsupport/decoder.lua" err := decoder.Init(conf) c.Assume(err, gs.IsNil) c.Specify("decodes simple messages", func() { data := "1376389920 debug id=2321 url=example.com item=1" decoder.SetDecoderRunner(dRunner) pack.Message.SetPayload(data) _, err = decoder.Decode(pack) c.Assume(err, gs.IsNil) c.Expect(pack.Message.GetTimestamp(), gs.Equals, int64(1376389920000000000)) c.Expect(pack.Message.GetSeverity(), gs.Equals, int32(7)) var ok bool var value interface{} value, ok = pack.Message.GetFieldValue("id") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, "2321") value, ok = pack.Message.GetFieldValue("url") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, "example.com") value, ok = pack.Message.GetFieldValue("item") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, "1") decoder.Shutdown() }) c.Specify("decodes an invalid messages", func() { data := "1376389920 bogus id=2321 url=example.com item=1" decoder.SetDecoderRunner(dRunner) pack.Message.SetPayload(data) packs, err := decoder.Decode(pack) c.Expect(len(packs), gs.Equals, 0) c.Expect(err.Error(), gs.Equals, "Failed parsing: "+data) c.Expect(decoder.processMessageFailures, gs.Equals, int64(1)) decoder.Shutdown() }) c.Specify("Preserves data", func() { conf.ScriptFilename = "../lua/testsupport/serialize.lua" conf.PreserveData = true err := decoder.Init(conf) c.Assume(err, gs.IsNil) decoder.SetDecoderRunner(dRunner) decoder.Shutdown() _, err = os.Stat("sandbox_preservation/serialize.data") c.Expect(err, gs.IsNil) err = os.Remove("sandbox_preservation/serialize.data") c.Expect(err, gs.IsNil) }) }) c.Specify("that only uses write_message", func() { conf.ScriptFilename = "../lua/testsupport/write_message_decoder.lua" dRunner.EXPECT().Name().Return("write_message") err := decoder.Init(conf) decoder.SetDecoderRunner(dRunner) c.Assume(err, gs.IsNil) c.Specify("adds a string field to the message", func() { data := "string field scribble" pack.Message.SetPayload(data) packs, err := decoder.Decode(pack) c.Expect(err, gs.IsNil) c.Expect(len(packs), gs.Equals, 1) c.Expect(packs[0], gs.Equals, pack) value, ok := pack.Message.GetFieldValue("scribble") c.Expect(ok, gs.IsTrue) c.Expect(value.(string), gs.Equals, "foo") }) c.Specify("adds a numeric field to the message", func() { data := "num field scribble" pack.Message.SetPayload(data) packs, err := decoder.Decode(pack) c.Expect(err, gs.IsNil) c.Expect(len(packs), gs.Equals, 1) c.Expect(packs[0], gs.Equals, pack) value, ok := pack.Message.GetFieldValue("scribble") c.Expect(ok, gs.IsTrue) c.Expect(value.(float64), gs.Equals, float64(1)) }) c.Specify("adds a boolean field to the message", func() { data := "bool field scribble" pack.Message.SetPayload(data) packs, err := decoder.Decode(pack) c.Expect(err, gs.IsNil) c.Expect(len(packs), gs.Equals, 1) c.Expect(packs[0], gs.Equals, pack) value, ok := 
pack.Message.GetFieldValue("scribble") c.Expect(ok, gs.IsTrue) c.Expect(value.(bool), gs.Equals, true) }) c.Specify("sets type and payload", func() { data := "set type and payload" pack.Message.SetPayload(data) packs, err := decoder.Decode(pack) c.Expect(err, gs.IsNil) c.Expect(len(packs), gs.Equals, 1) c.Expect(packs[0], gs.Equals, pack) c.Expect(pack.Message.GetType(), gs.Equals, "my_type") c.Expect(pack.Message.GetPayload(), gs.Equals, "my_payload") }) c.Specify("sets field value with representation", func() { data := "set field value with representation" pack.Message.SetPayload(data) packs, err := decoder.Decode(pack) c.Expect(err, gs.IsNil) c.Expect(len(packs), gs.Equals, 1) c.Expect(packs[0], gs.Equals, pack) fields := pack.Message.FindAllFields("rep") c.Expect(len(fields), gs.Equals, 1) field := fields[0] values := field.GetValueString() c.Expect(len(values), gs.Equals, 1) c.Expect(values[0], gs.Equals, "foo") c.Expect(field.GetRepresentation(), gs.Equals, "representation") }) c.Specify("sets multiple field string values", func() { data := "set multiple field string values" pack.Message.SetPayload(data) packs, err := decoder.Decode(pack) c.Expect(err, gs.IsNil) c.Expect(len(packs), gs.Equals, 1) c.Expect(packs[0], gs.Equals, pack) fields := pack.Message.FindAllFields("multi") c.Expect(len(fields), gs.Equals, 2) values := fields[0].GetValueString() c.Expect(len(values), gs.Equals, 1) c.Expect(values[0], gs.Equals, "first") values = fields[1].GetValueString() c.Expect(len(values), gs.Equals, 1) c.Expect(values[0], gs.Equals, "second") }) c.Specify("sets field string array value", func() { data := "set field string array value" pack.Message.SetPayload(data) packs, err := decoder.Decode(pack) c.Expect(err, gs.IsNil) c.Expect(len(packs), gs.Equals, 1) c.Expect(packs[0], gs.Equals, pack) fields := pack.Message.FindAllFields("array") c.Expect(len(fields), gs.Equals, 1) values := fields[0].GetValueString() c.Expect(len(values), gs.Equals, 2) c.Expect(values[0], gs.Equals, "first") c.Expect(values[1], gs.Equals, "second") }) }) }) c.Specify("A Multipack SandboxDecoder", func() { decoder := new(SandboxDecoder) conf := decoder.ConfigStruct().(*sandbox.SandboxConfig) conf.ScriptFilename = "../lua/testsupport/multipack_decoder.lua" conf.ScriptType = "lua" supply := make(chan *pipeline.PipelinePack, 3) pack := pipeline.NewPipelinePack(supply) pack.Message = getTestMessage() pack1 := pipeline.NewPipelinePack(supply) pack2 := pipeline.NewPipelinePack(supply) dRunner := pm.NewMockDecoderRunner(ctrl) dRunner.EXPECT().Name().Return("SandboxDecoder") c.Specify("decodes into multiple packs", func() { err := decoder.Init(conf) c.Assume(err, gs.IsNil) decoder.SetDecoderRunner(dRunner) gomock.InOrder( dRunner.EXPECT().NewPack().Return(pack1), dRunner.EXPECT().NewPack().Return(pack2), ) packs, err := decoder.Decode(pack) c.Expect(len(packs), gs.Equals, 3) c.Expect(packs[0].Message.GetPayload(), gs.Equals, "message one") c.Expect(packs[1].Message.GetPayload(), gs.Equals, "message two") c.Expect(packs[2].Message.GetPayload(), gs.Equals, "message three") for i := 0; i < 1; i++ { c.Expect(packs[i].Message.GetType(), gs.Equals, "TEST") c.Expect(packs[i].Message.GetHostname(), gs.Equals, "my.host.name") c.Expect(packs[i].Message.GetLogger(), gs.Equals, "GoSpec") c.Expect(packs[i].Message.GetSeverity(), gs.Equals, int32(6)) } decoder.Shutdown() }) }) c.Specify("Nginx access log decoder", func() { decoder := new(SandboxDecoder) conf := decoder.ConfigStruct().(*sandbox.SandboxConfig) conf.ScriptFilename = 
"../lua/decoders/nginx_access.lua" conf.ModuleDirectory = "../../../../../../modules" conf.MemoryLimit = 8e6 conf.ScriptType = "lua" conf.Config = make(map[string]interface{}) conf.Config["log_format"] = "$remote_addr - $remote_user [$time_local] \"$request\" $status $body_bytes_sent \"$http_referer\" \"$http_user_agent\"" conf.Config["user_agent_transform"] = true supply := make(chan *pipeline.PipelinePack, 1) pack := pipeline.NewPipelinePack(supply) dRunner := pm.NewMockDecoderRunner(ctrl) dRunner.EXPECT().Name().Return("SandboxDecoder") err := decoder.Init(conf) c.Assume(err, gs.IsNil) decoder.SetDecoderRunner(dRunner) c.Specify("decodes simple messages", func() { data := "127.0.0.1 - - [10/Feb/2014:08:46:41 -0800] \"GET / HTTP/1.1\" 304 0 \"-\" \"Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:26.0) Gecko/20100101 Firefox/26.0\"" pack.Message.SetPayload(data) _, err = decoder.Decode(pack) c.Assume(err, gs.IsNil) c.Expect(pack.Message.GetTimestamp(), gs.Equals, int64(1392050801000000000)) c.Expect(pack.Message.GetSeverity(), gs.Equals, int32(7)) var ok bool var value interface{} value, ok = pack.Message.GetFieldValue("remote_addr") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, "127.0.0.1") value, ok = pack.Message.GetFieldValue("user_agent_browser") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, "Firefox") value, ok = pack.Message.GetFieldValue("user_agent_version") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, float64(26)) value, ok = pack.Message.GetFieldValue("user_agent_os") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, "Linux") _, ok = pack.Message.GetFieldValue("http_user_agent") c.Expect(ok, gs.Equals, false) value, ok = pack.Message.GetFieldValue("body_bytes_sent") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, float64(0)) value, ok = pack.Message.GetFieldValue("status") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, float64(304)) decoder.Shutdown() }) c.Specify("decodes an invalid messages", func() { data := "bogus message" pack.Message.SetPayload(data) packs, err := decoder.Decode(pack) c.Expect(len(packs), gs.Equals, 0) c.Expect(err.Error(), gs.Equals, "Failed parsing: "+data) c.Expect(decoder.processMessageFailures, gs.Equals, int64(1)) decoder.Shutdown() }) }) c.Specify("rsyslog decoder", func() { decoder := new(SandboxDecoder) conf := decoder.ConfigStruct().(*sandbox.SandboxConfig) conf.ScriptFilename = "../lua/decoders/rsyslog.lua" conf.ModuleDirectory = "../../../../../../modules" conf.MemoryLimit = 8e6 conf.ScriptType = "lua" conf.Config = make(map[string]interface{}) conf.Config["template"] = "%pri% %TIMESTAMP% %TIMEGENERATED:::date-rfc3339% %HOSTNAME% %syslogtag%%msg:::sp-if-no-1st-sp%%msg:::drop-last-lf%\n" conf.Config["tz"] = "America/Los_Angeles" supply := make(chan *pipeline.PipelinePack, 1) pack := pipeline.NewPipelinePack(supply) dRunner := pm.NewMockDecoderRunner(ctrl) dRunner.EXPECT().Name().Return("SandboxDecoder") err := decoder.Init(conf) c.Assume(err, gs.IsNil) decoder.SetDecoderRunner(dRunner) c.Specify("decodes simple messages", func() { data := "28 Feb 10 12:58:58 2014-02-10T12:58:59-08:00 testhost kernel: imklog 5.8.6, log source = /proc/kmsg started.\n" pack.Message.SetPayload(data) _, err = decoder.Decode(pack) c.Assume(err, gs.IsNil) c.Expect(pack.Message.GetTimestamp(), gs.Equals, int64(1392065938000000000)) c.Expect(pack.Message.GetSeverity(), gs.Equals, int32(4)) c.Expect(pack.Message.GetHostname(), gs.Equals, "testhost") c.Expect(pack.Message.GetPayload(), gs.Equals, "imklog 5.8.6, log 
source = /proc/kmsg started.") var ok bool var value interface{} value, ok = pack.Message.GetFieldValue("syslogtag") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, "kernel:") value, ok = pack.Message.GetFieldValue("syslogfacility") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, float64(3)) value, ok = pack.Message.GetFieldValue("timegenerated") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, float64(1392065939000000000)) decoder.Shutdown() }) c.Specify("decodes an invalid messages", func() { data := "bogus message" pack.Message.SetPayload(data) packs, err := decoder.Decode(pack) c.Expect(len(packs), gs.Equals, 0) c.Expect(err.Error(), gs.Equals, "Failed parsing: "+data) c.Expect(decoder.processMessageFailures, gs.Equals, int64(1)) decoder.Shutdown() }) }) }
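// Illustrative sketch only: the decoder specs above assert timestamps as
// int64 nanoseconds since the epoch. Parsing the nginx test line's time field
// with the usual access-log layout reproduces the asserted value exactly. The
// function name is hypothetical. imports: "fmt", "time".
func exampleNginxTimestamp() {
	t, err := time.Parse("02/Jan/2006:15:04:05 -0700", "10/Feb/2014:08:46:41 -0800")
	if err != nil {
		panic(err)
	}
	fmt.Println(t.UnixNano()) // 1392050801000000000, as expected by the test
}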
func DecodersSpec(c gospec.Context) { t := &ts.SimpleT{} ctrl := gomock.NewController(t) defer ctrl.Finish() msg := getTestMessage() config := NewPipelineConfig(nil) c.Specify("A ProtobufDecoder", func() { encoded, err := proto.Marshal(msg) c.Assume(err, gs.IsNil) pack := NewPipelinePack(config.inputRecycleChan) decoder := new(ProtobufDecoder) c.Specify("decodes a protobuf message", func() { pack.MsgBytes = encoded err := decoder.Decode(pack) c.Expect(err, gs.IsNil) c.Expect(pack.Message, gs.Equals, msg) v, ok := pack.Message.GetFieldValue("foo") c.Expect(ok, gs.IsTrue) c.Expect(v, gs.Equals, "bar") }) c.Specify("returns an error for bunk encoding", func() { bunk := append([]byte{0, 0, 0}, encoded...) pack.MsgBytes = bunk err := decoder.Decode(pack) c.Expect(err, gs.Not(gs.IsNil)) }) }) c.Specify("A MultiDecoder", func() { decoder := new(MultiDecoder) conf := decoder.ConfigStruct().(*MultiDecoderConfig) supply := make(chan *PipelinePack, 1) pack := NewPipelinePack(supply) conf.Name = "MyMultiDecoder" conf.Subs = make(map[string]interface{}, 0) conf.Subs["StartsWithM"] = make(map[string]interface{}, 0) withM := conf.Subs["StartsWithM"].(map[string]interface{}) withM["type"] = "PayloadRegexDecoder" withM["match_regex"] = "^(?P<TheData>m.*)" withMFields := make(map[string]interface{}, 0) withMFields["StartsWithM"] = "%TheData%" withM["message_fields"] = withMFields conf.Order = []string{"StartsWithM"} errMsg := "Unable to decode message with any contained decoder." dRunner := NewMockDecoderRunner(ctrl) // An error will be spit out b/c there's no real *dRunner in there; // doesn't impact the tests. dRunner.EXPECT().LogError(gomock.Any()) c.Specify("decodes simple messages", func() { err := decoder.Init(conf) c.Assume(err, gs.IsNil) decoder.SetDecoderRunner(dRunner) regex_data := "matching text" pack.Message.SetPayload(regex_data) err = decoder.Decode(pack) c.Assume(err, gs.IsNil) c.Expect(pack.Message.GetType(), gs.Equals, "heka.MyMultiDecoder") value, ok := pack.Message.GetFieldValue("StartsWithM") c.Assume(ok, gs.IsTrue) c.Expect(value, gs.Equals, regex_data) }) c.Specify("returns an error if all decoders fail", func() { err := decoder.Init(conf) c.Assume(err, gs.IsNil) decoder.SetDecoderRunner(dRunner) regex_data := "non-matching text" pack.Message.SetPayload(regex_data) err = decoder.Decode(pack) c.Expect(err.Error(), gs.Equals, errMsg) }) c.Specify("logs subdecoder failures when configured to do so", func() { conf.LogSubErrors = true err := decoder.Init(conf) c.Assume(err, gs.IsNil) decoder.SetDecoderRunner(dRunner) regex_data := "non-matching text" pack.Message.SetPayload(regex_data) // Expect that we log an error for undecoded message. dRunner.EXPECT().LogError(fmt.Errorf("Subdecoder 'StartsWithM' decode error: No match")) err = decoder.Decode(pack) c.Expect(err.Error(), gs.Equals, errMsg) }) c.Specify("sets subdecoder runner correctly", func() { err := decoder.Init(conf) c.Assume(err, gs.IsNil) // Call LogError to appease the angry gomock gods. dRunner.LogError(errors.New("foo")) // Now create a real *dRunner, pass it in, make sure a wrapper // gets handed to the subdecoder. 
dr := NewDecoderRunner(decoder.Name, decoder, new(PluginGlobals)) decoder.SetDecoderRunner(dr) sub := decoder.Decoders["StartsWithM"] subRunner := sub.(*PayloadRegexDecoder).dRunner subRunner, ok := subRunner.(*mDRunner) c.Expect(ok, gs.IsTrue) c.Expect(subRunner.Name(), gs.Equals, fmt.Sprintf("%s-StartsWithM", decoder.Name)) c.Expect(subRunner.Decoder(), gs.Equals, sub) }) c.Specify("with multiple registered decoders", func() { conf.Subs["StartsWithS"] = make(map[string]interface{}, 0) withS := conf.Subs["StartsWithS"].(map[string]interface{}) withS["type"] = "PayloadRegexDecoder" withS["match_regex"] = "^(?P<TheData>s.*)" withSFields := make(map[string]interface{}, 0) withSFields["StartsWithS"] = "%TheData%" withS["message_fields"] = withSFields conf.Subs["StartsWithM2"] = make(map[string]interface{}, 0) withM2 := conf.Subs["StartsWithM2"].(map[string]interface{}) withM2["type"] = "PayloadRegexDecoder" withM2["match_regex"] = "^(?P<TheData>m.*)" withM2Fields := make(map[string]interface{}, 0) withM2Fields["StartsWithM2"] = "%TheData%" withM2["message_fields"] = withM2Fields conf.Order = append(conf.Order, "StartsWithS", "StartsWithM2") var ok bool // Two more subdecoders means two more LogError calls. dRunner.EXPECT().LogError(gomock.Any()).Times(2) c.Specify("defaults to `first-wins` cascading", func() { err := decoder.Init(conf) c.Assume(err, gs.IsNil) decoder.SetDecoderRunner(dRunner) c.Specify("on a first match condition", func() { regexData := "match first" pack.Message.SetPayload(regexData) err = decoder.Decode(pack) c.Expect(err, gs.IsNil) _, ok = pack.Message.GetFieldValue("StartsWithM") c.Expect(ok, gs.IsTrue) _, ok = pack.Message.GetFieldValue("StartsWithS") c.Expect(ok, gs.IsFalse) _, ok = pack.Message.GetFieldValue("StartsWithM2") c.Expect(ok, gs.IsFalse) }) c.Specify("and a second match condition", func() { regexData := "second match" pack.Message.SetPayload(regexData) err = decoder.Decode(pack) c.Expect(err, gs.IsNil) _, ok = pack.Message.GetFieldValue("StartsWithM") c.Expect(ok, gs.IsFalse) _, ok = pack.Message.GetFieldValue("StartsWithS") c.Expect(ok, gs.IsTrue) _, ok = pack.Message.GetFieldValue("StartsWithM2") c.Expect(ok, gs.IsFalse) }) c.Specify("returning an error if they all fail", func() { regexData := "won't match" pack.Message.SetPayload(regexData) err = decoder.Decode(pack) c.Expect(err.Error(), gs.Equals, errMsg) _, ok = pack.Message.GetFieldValue("StartsWithM") c.Expect(ok, gs.IsFalse) _, ok = pack.Message.GetFieldValue("StartsWithS") c.Expect(ok, gs.IsFalse) _, ok = pack.Message.GetFieldValue("StartsWithM2") c.Expect(ok, gs.IsFalse) }) }) c.Specify("and using `all` cascading", func() { conf.CascadeStrategy = "all" err := decoder.Init(conf) c.Assume(err, gs.IsNil) decoder.SetDecoderRunner(dRunner) c.Specify("matches multiples when appropriate", func() { regexData := "matches twice" pack.Message.SetPayload(regexData) err = decoder.Decode(pack) c.Expect(err, gs.IsNil) _, ok = pack.Message.GetFieldValue("StartsWithM") c.Expect(ok, gs.IsTrue) _, ok = pack.Message.GetFieldValue("StartsWithS") c.Expect(ok, gs.IsFalse) _, ok = pack.Message.GetFieldValue("StartsWithM2") c.Expect(ok, gs.IsTrue) }) c.Specify("matches singles when appropriate", func() { regexData := "second match" pack.Message.SetPayload(regexData) err = decoder.Decode(pack) c.Expect(err, gs.IsNil) _, ok = pack.Message.GetFieldValue("StartsWithM") c.Expect(ok, gs.IsFalse) _, ok = pack.Message.GetFieldValue("StartsWithS") c.Expect(ok, gs.IsTrue) _, ok = pack.Message.GetFieldValue("StartsWithM2") 
c.Expect(ok, gs.IsFalse) }) c.Specify("returns an error if they all fail", func() { regexData := "won't match" pack.Message.SetPayload(regexData) err = decoder.Decode(pack) c.Expect(err.Error(), gs.Equals, errMsg) _, ok = pack.Message.GetFieldValue("StartsWithM") c.Expect(ok, gs.IsFalse) _, ok = pack.Message.GetFieldValue("StartsWithS") c.Expect(ok, gs.IsFalse) _, ok = pack.Message.GetFieldValue("StartsWithM2") c.Expect(ok, gs.IsFalse) }) }) }) }) c.Specify("A PayloadJsonDecoder", func() { decoder := new(PayloadJsonDecoder) conf := decoder.ConfigStruct().(*PayloadJsonDecoderConfig) supply := make(chan *PipelinePack, 1) pack := NewPipelinePack(supply) c.Specify("decodes simple messages", func() { json_data := `{"statsd": {"count": 1, "name": "some.counter"}, "pid": 532, "timestamp": "2013-08-13T10:32:00.000Z"}` conf.JsonMap = map[string]string{"Count": "$.statsd.count", "Name": "$.statsd.name", "Pid": "$.pid", "Timestamp": "$.timestamp", } conf.MessageFields = MessageTemplate{ "Pid": "%Pid%", "StatCount": "%Count%", "StatName": "%Name%", "Timestamp": "%Timestamp%", } err := decoder.Init(conf) c.Assume(err, gs.IsNil) dRunner := NewMockDecoderRunner(ctrl) decoder.SetDecoderRunner(dRunner) pack.Message.SetPayload(json_data) err = decoder.Decode(pack) c.Assume(err, gs.IsNil) c.Expect(pack.Message.GetPid(), gs.Equals, int32(532)) c.Expect(pack.Message.GetTimestamp(), gs.Equals, int64(1376389920000000000)) var ok bool var name, count interface{} count, ok = pack.Message.GetFieldValue("StatCount") c.Expect(ok, gs.Equals, true) c.Expect(count, gs.Equals, "1") name, ok = pack.Message.GetFieldValue("StatName") c.Expect(ok, gs.Equals, true) c.Expect(name, gs.Equals, "some.counter") }) }) c.Specify("A PayloadXmlDecoder", func() { decoder := new(PayloadXmlDecoder) conf := decoder.ConfigStruct().(*PayloadXmlDecoderConfig) supply := make(chan *PipelinePack, 1) pack := NewPipelinePack(supply) c.Specify("decodes simple messages", func() { xml_data := `<library> <!-- Great book. 
--> <book id="b0836217462" available="true"> <isbn>0836217462</isbn> <title lang="en">Being a Dog Is a Full-Time Job</title> <quote>I'd dog paddle the deepest ocean.</quote> <author id="CMS"> <?echo "go rocks"?> <name>Charles M Schulz</name> <born>1922-11-26</born> <dead>2000-02-12</dead> </author> <character id="PP"> <name>Peppermint Patty</name> <born>1966-08-22</born> <qualificati>bold, brash and tomboyish</qualificati> </character> <character id="Snoopy"> <name>Snoopy</name> <born>1950-10-04</born> <qualificati>extroverted beagle</qualificati> </character> </book> </library>` conf.XPathMapConfig = map[string]string{"Isbn": "library/*/isbn", "Name": "/library/book/character[born='1950-10-04']/name", "Patty": "/library/book//node()[@id='PP']/name", "Title": "//book[author/@id='CMS']/title", "Comment": "/library/book/preceding::comment()", } conf.MessageFields = MessageTemplate{ "Isbn": "%Isbn%", "Name": "%Name%", "Patty": "%Patty%", "Title": "%Title%", "Comment": "%Comment%", } err := decoder.Init(conf) c.Assume(err, gs.IsNil) dRunner := NewMockDecoderRunner(ctrl) decoder.SetDecoderRunner(dRunner) pack.Message.SetPayload(xml_data) err = decoder.Decode(pack) c.Assume(err, gs.IsNil) var isbn, name, patty, title, comment interface{} var ok bool isbn, ok = pack.Message.GetFieldValue("Isbn") c.Expect(ok, gs.Equals, true) name, ok = pack.Message.GetFieldValue("Name") c.Expect(ok, gs.Equals, true) patty, ok = pack.Message.GetFieldValue("Patty") c.Expect(ok, gs.Equals, true) title, ok = pack.Message.GetFieldValue("Title") c.Expect(ok, gs.Equals, true) comment, ok = pack.Message.GetFieldValue("Comment") c.Expect(ok, gs.Equals, true) c.Expect(isbn, gs.Equals, "0836217462") c.Expect(name, gs.Equals, "Snoopy") c.Expect(patty, gs.Equals, "Peppermint Patty") c.Expect(title, gs.Equals, "Being a Dog Is a Full-Time Job") c.Expect(comment, gs.Equals, " Great book. 
") }) }) c.Specify("A PayloadRegexDecoder", func() { decoder := new(PayloadRegexDecoder) conf := decoder.ConfigStruct().(*PayloadRegexDecoderConfig) supply := make(chan *PipelinePack, 1) pack := NewPipelinePack(supply) conf.TimestampLayout = "02/Jan/2006:15:04:05 -0700" c.Specify("non capture regex", func() { conf.MatchRegex = `\d+` err := decoder.Init(conf) c.Expect(err, gs.Not(gs.IsNil)) c.Expect(err.Error(), gs.Equals, "PayloadRegexDecoder regex must contain capture groups") }) c.Specify("invalid regex", func() { conf.MatchRegex = `\mtest` err := decoder.Init(conf) c.Expect(err, gs.Not(gs.IsNil)) c.Expect(err.Error(), gs.Equals, "PayloadRegexDecoder: error parsing regexp: invalid escape sequence: `\\m`") }) c.Specify("reading an apache timestamp", func() { conf.MatchRegex = `\[(?P<Timestamp>[^\]]+)\]` err := decoder.Init(conf) c.Assume(err, gs.IsNil) dRunner := NewMockDecoderRunner(ctrl) decoder.SetDecoderRunner(dRunner) pack.Message.SetPayload("[18/Apr/2013:14:00:28 -0700]") err = decoder.Decode(pack) c.Expect(pack.Message.GetTimestamp(), gs.Equals, int64(1366318828000000000)) pack.Zero() }) c.Specify("uses kitchen timestamp", func() { conf.MatchRegex = `\[(?P<Timestamp>[^\]]+)\]` err := decoder.Init(conf) c.Assume(err, gs.IsNil) dRunner := NewMockDecoderRunner(ctrl) decoder.SetDecoderRunner(dRunner) pack.Message.SetPayload("[5:16PM]") now := time.Now() cur_date := time.Date(now.Year(), now.Month(), now.Day(), 17, 16, 0, 0, time.UTC) err = decoder.Decode(pack) c.Expect(pack.Message.GetTimestamp(), gs.Equals, cur_date.UnixNano()) pack.Zero() }) c.Specify("adjusts timestamps as specified", func() { conf.MatchRegex = `\[(?P<Timestamp>[^\]]+)\]` conf.TimestampLayout = "02/Jan/2006:15:04:05" conf.TimestampLocation = "America/Los_Angeles" timeStr := "18/Apr/2013:14:00:28" loc, err := time.LoadLocation(conf.TimestampLocation) c.Assume(err, gs.IsNil) expectedLocal, err := time.ParseInLocation(conf.TimestampLayout, timeStr, loc) c.Assume(err, gs.IsNil) err = decoder.Init(conf) c.Assume(err, gs.IsNil) dRunner := NewMockDecoderRunner(ctrl) decoder.SetDecoderRunner(dRunner) pack.Message.SetPayload("[" + timeStr + "]") err = decoder.Decode(pack) c.Expect(pack.Message.GetTimestamp(), gs.Equals, expectedLocal.UnixNano()) pack.Zero() }) c.Specify("apply representation metadata to a captured field", func() { value := "0.23" payload := "header" conf.MatchRegex = `(?P<ResponseTime>\d+\.\d+)` conf.MessageFields = MessageTemplate{ "ResponseTime|s": "%ResponseTime%", "Payload|s": "%ResponseTime%", "Payload": payload, } err := decoder.Init(conf) c.Assume(err, gs.IsNil) dRunner := NewMockDecoderRunner(ctrl) decoder.SetDecoderRunner(dRunner) pack.Message.SetPayload(value) err = decoder.Decode(pack) f := pack.Message.FindFirstField("ResponseTime") c.Expect(f, gs.Not(gs.IsNil)) c.Expect(f.GetValue(), gs.Equals, value) c.Expect(f.GetRepresentation(), gs.Equals, "s") f = pack.Message.FindFirstField("Payload") c.Expect(f, gs.Not(gs.IsNil)) c.Expect(f.GetValue(), gs.Equals, value) c.Expect(f.GetRepresentation(), gs.Equals, "s") c.Expect(pack.Message.GetPayload(), gs.Equals, payload) pack.Zero() }) c.Specify("reading test-zeus.log", func() { conf.MatchRegex = `(?P<Ip>([0-9]{1,3}\.){3}[0-9]{1,3}) (?P<Hostname>(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]*[A-Za-z0-9])) (?P<User>\w+) \[(?P<Timestamp>[^\]]+)\] \"(?P<Verb>[A-X]+) (?P<Request>\/\S*) HTTP\/(?P<Httpversion>\d\.\d)\" (?P<Response>\d{3}) (?P<Bytes>\d+)` conf.MessageFields = MessageTemplate{ "hostname": 
"%Hostname%", "ip": "%Ip%", "response": "%Response%", } err := decoder.Init(conf) c.Assume(err, gs.IsNil) filePath := "../testsupport/test-zeus.log" fileBytes, err := ioutil.ReadFile(filePath) c.Assume(err, gs.IsNil) fileStr := string(fileBytes) lines := strings.Split(fileStr, "\n") containsFieldValue := func(str, fieldName string, msg *message.Message) bool { raw, ok := msg.GetFieldValue(fieldName) if !ok { return false } value := raw.(string) return strings.Contains(str, value) } c.Specify("extracts capture data and puts it in the message fields", func() { var misses int for _, line := range lines { if strings.TrimSpace(line) == "" { continue } pack.Message.SetPayload(line) err = decoder.Decode(pack) if err != nil { misses++ continue } c.Expect(containsFieldValue(line, "hostname", pack.Message), gs.IsTrue) c.Expect(containsFieldValue(line, "ip", pack.Message), gs.IsTrue) c.Expect(containsFieldValue(line, "response", pack.Message), gs.IsTrue) pack.Zero() } c.Expect(misses, gs.Equals, 3) }) }) c.Specify("reading test-severity.log", func() { conf.MatchRegex = `severity: (?P<Severity>[a-zA-Z]+)` conf.SeverityMap = map[string]int32{ "emergency": 0, "alert": 1, "critical": 2, "error": 3, "warning": 4, "notice": 5, "info": 6, "debug": 7, } reverseMap := make(map[int32]string) for str, i := range conf.SeverityMap { reverseMap[i] = str } err := decoder.Init(conf) c.Assume(err, gs.IsNil) dRunner := NewMockDecoderRunner(ctrl) decoder.SetDecoderRunner(dRunner) filePath := "../testsupport/test-severity.log" fileBytes, err := ioutil.ReadFile(filePath) c.Assume(err, gs.IsNil) fileStr := string(fileBytes) lines := strings.Split(fileStr, "\n") c.Specify("sets message severity based on SeverityMap", func() { err := errors.New("Don't recognize severity: 'BOGUS'") dRunner.EXPECT().LogError(err) for _, line := range lines { if strings.TrimSpace(line) == "" { continue } pack.Message.SetPayload(line) err = decoder.Decode(pack) if err != nil { fmt.Println(line) } c.Expect(err, gs.IsNil) if strings.Contains(line, "BOGUS") { continue } strVal := reverseMap[pack.Message.GetSeverity()] c.Expect(strings.Contains(line, strVal), gs.IsTrue) } }) }) }) c.Specify("A SandboxDecoder", func() { decoder := new(SandboxDecoder) conf := decoder.ConfigStruct().(*sandbox.SandboxConfig) conf.ScriptFilename = "../sandbox/lua/testsupport/decoder.lua" conf.ScriptType = "lua" supply := make(chan *PipelinePack, 1) pack := NewPipelinePack(supply) c.Specify("decodes simple messages", func() { data := "1376389920 debug id=2321 url=example.com item=1" err := decoder.Init(conf) c.Assume(err, gs.IsNil) dRunner := NewMockDecoderRunner(ctrl) decoder.SetDecoderRunner(dRunner) pack.Message.SetPayload(data) err = decoder.Decode(pack) c.Assume(err, gs.IsNil) c.Expect(pack.Message.GetTimestamp(), gs.Equals, int64(1376389920000000000)) c.Expect(pack.Message.GetSeverity(), gs.Equals, int32(7)) var ok bool var value interface{} value, ok = pack.Message.GetFieldValue("id") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, "2321") value, ok = pack.Message.GetFieldValue("url") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, "example.com") value, ok = pack.Message.GetFieldValue("item") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, "1") }) c.Specify("decodes an invalid messages", func() { data := "1376389920 bogus id=2321 url=example.com item=1" err := decoder.Init(conf) c.Assume(err, gs.IsNil) dRunner := NewMockDecoderRunner(ctrl) decoder.SetDecoderRunner(dRunner) pack.Message.SetPayload(data) err = decoder.Decode(pack) 
c.Expect(err.Error(), gs.Equals, "Failed parsing: "+data) c.Expect(decoder.processMessageFailures, gs.Equals, int64(1)) }) }) c.Specify("A StatsToFieldsDecoder", func() { decoder := new(StatsToFieldsDecoder) router := NewMessageRouter() router.inChan = make(chan *PipelinePack, 5) dRunner := NewMockDecoderRunner(ctrl) decoder.runner = dRunner dRunner.EXPECT().Router().Return(router) pack := NewPipelinePack(config.inputRecycleChan) mergeStats := func(stats [][]string) string { lines := make([]string, len(stats)) for i, line := range stats { lines[i] = strings.Join(line, " ") } return strings.Join(lines, "\n") } c.Specify("correctly converts stats to fields", func() { stats := [][]string{ {"stat.one", "1", "1380047333"}, {"stat.two", "2", "1380047333"}, {"stat.three", "3", "1380047333"}, {"stat.four", "4", "1380047333"}, {"stat.five", "5", "1380047333"}, } pack.Message.SetPayload(mergeStats(stats)) err := decoder.Decode(pack) c.Expect(err, gs.IsNil) for i, stats := range stats { value, ok := pack.Message.GetFieldValue(stats[0]) c.Expect(ok, gs.IsTrue) expected := float64(i + 1) c.Expect(value.(float64), gs.Equals, expected) } value, ok := pack.Message.GetFieldValue("timestamp") c.Expect(ok, gs.IsTrue) expected, err := strconv.ParseInt(stats[0][2], 0, 32) c.Assume(err, gs.IsNil) c.Expect(value.(int64), gs.Equals, expected) }) c.Specify("generates multiple messages for multiple timestamps", func() { stats := [][]string{ {"stat.one", "1", "1380047333"}, {"stat.two", "2", "1380047333"}, {"stat.three", "3", "1380047331"}, {"stat.four", "4", "1380047333"}, {"stat.five", "5", "1380047332"}, } // Prime the pack supply w/ two new packs. dRunner.EXPECT().NewPack().Return(NewPipelinePack(nil)) dRunner.EXPECT().NewPack().Return(NewPipelinePack(nil)) // Decode and check the main pack. pack.Message.SetPayload(mergeStats(stats)) err := decoder.Decode(pack) c.Expect(err, gs.IsNil) value, ok := pack.Message.GetFieldValue("timestamp") c.Expect(ok, gs.IsTrue) expected, err := strconv.ParseInt(stats[0][2], 0, 32) c.Assume(err, gs.IsNil) c.Expect(value.(int64), gs.Equals, expected) // Check the first extra. pack = <-router.inChan value, ok = pack.Message.GetFieldValue("timestamp") c.Expect(ok, gs.IsTrue) expected, err = strconv.ParseInt(stats[2][2], 0, 32) c.Assume(err, gs.IsNil) c.Expect(value.(int64), gs.Equals, expected) // Check the second extra. pack = <-router.inChan value, ok = pack.Message.GetFieldValue("timestamp") c.Expect(ok, gs.IsTrue) expected, err = strconv.ParseInt(stats[4][2], 0, 32) c.Assume(err, gs.IsNil) c.Expect(value.(int64), gs.Equals, expected) }) c.Specify("fails w/ invalid timestamp", func() { stats := [][]string{ {"stat.one", "1", "1380047333"}, {"stat.two", "2", "1380047333"}, {"stat.three", "3", "1380047333c"}, {"stat.four", "4", "1380047333"}, {"stat.five", "5", "1380047332"}, } pack.Message.SetPayload(mergeStats(stats)) err := decoder.Decode(pack) c.Expect(err, gs.Not(gs.IsNil)) expected := fmt.Sprintf("invalid timestamp: '%s'", strings.Join(stats[2], " ")) c.Expect(err.Error(), gs.Equals, expected) }) c.Specify("fails w/ invalid value", func() { stats := [][]string{ {"stat.one", "1", "1380047333"}, {"stat.two", "2", "1380047333"}, {"stat.three", "3", "1380047333"}, {"stat.four", "4d", "1380047333"}, {"stat.five", "5", "1380047332"}, } pack.Message.SetPayload(mergeStats(stats)) err := decoder.Decode(pack) c.Expect(err, gs.Not(gs.IsNil)) expected := fmt.Sprintf("invalid value: '%s'", strings.Join(stats[3], " ")) c.Expect(err.Error(), gs.Equals, expected) }) }) }
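// Illustrative sketch only (not the StatsToFieldsDecoder's actual code): the
// payloads built by mergeStats above are graphite-style "name value timestamp"
// lines. Each line splits into exactly three fields, the value must parse as a
// float and the timestamp as an integer, which is why "1380047333c" and "4d"
// make the decoder return an error. The function name is hypothetical.
// imports: "fmt", "strconv", "strings".
func exampleParseStatLine(line string) error {
	fields := strings.Fields(line)
	if len(fields) != 3 {
		return fmt.Errorf("invalid stat line: '%s'", line)
	}
	value, err := strconv.ParseFloat(fields[1], 64)
	if err != nil {
		return fmt.Errorf("invalid value: '%s'", line)
	}
	timestamp, err := strconv.ParseInt(fields[2], 0, 32)
	if err != nil {
		return fmt.Errorf("invalid timestamp: '%s'", line)
	}
	fmt.Println(fields[0], value, timestamp)
	return nil
}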
func OutputsSpec(c gs.Context) { t := new(ts.SimpleT) ctrl := gomock.NewController(t) defer ctrl.Finish() oth := NewOutputTestHelper(ctrl) var wg sync.WaitGroup inChan := make(chan *PipelinePack, 1) pConfig := NewPipelineConfig(nil) c.Specify("A FileOutput", func() { fileOutput := new(FileOutput) tmpFileName := fmt.Sprintf("fileoutput-test-%d", time.Now().UnixNano()) tmpFilePath := fmt.Sprint(os.TempDir(), string(os.PathSeparator), tmpFileName) config := fileOutput.ConfigStruct().(*FileOutputConfig) config.Path = tmpFilePath msg := getTestMessage() pack := NewPipelinePack(pConfig.inputRecycleChan) pack.Message = msg pack.Decoded = true toString := func(outData interface{}) string { return string(*(outData.(*[]byte))) } c.Specify("correctly formats text output", func() { err := fileOutput.Init(config) defer os.Remove(tmpFilePath) c.Assume(err, gs.IsNil) outData := make([]byte, 0, 20) c.Specify("by default", func() { fileOutput.handleMessage(pack, &outData) c.Expect(toString(&outData), gs.Equals, *msg.Payload+"\n") }) c.Specify("w/ a prepended timestamp when specified", func() { fileOutput.prefix_ts = true fileOutput.handleMessage(pack, &outData) // Test will fail if date flips btn handleMessage call and // todayStr calculation... should be extremely rare. todayStr := time.Now().Format("[2006/Jan/02:") strContents := toString(&outData) payload := *msg.Payload c.Expect(strContents, ts.StringContains, payload) c.Expect(strContents, ts.StringStartsWith, todayStr) }) }) c.Specify("correctly formats JSON output", func() { config.Format = "json" err := fileOutput.Init(config) defer os.Remove(tmpFilePath) c.Assume(err, gs.IsNil) outData := make([]byte, 0, 200) c.Specify("when specified", func() { fileOutput.handleMessage(pack, &outData) msgJson, err := json.Marshal(pack.Message) c.Assume(err, gs.IsNil) c.Expect(toString(&outData), gs.Equals, string(msgJson)+"\n") }) c.Specify("and with a timestamp", func() { fileOutput.prefix_ts = true fileOutput.handleMessage(pack, &outData) // Test will fail if date flips btn handleMessage call and // todayStr calculation... should be extremely rare. todayStr := time.Now().Format("[2006/Jan/02:") strContents := toString(&outData) msgJson, err := json.Marshal(pack.Message) c.Assume(err, gs.IsNil) c.Expect(strContents, ts.StringContains, string(msgJson)+"\n") c.Expect(strContents, ts.StringStartsWith, todayStr) }) }) c.Specify("correctly formats protocol buffer stream output", func() { config.Format = "protobufstream" err := fileOutput.Init(config) defer os.Remove(tmpFilePath) c.Assume(err, gs.IsNil) outData := make([]byte, 0, 200) c.Specify("when specified and timestamp ignored", func() { fileOutput.prefix_ts = true err := fileOutput.handleMessage(pack, &outData) c.Expect(err, gs.IsNil) b := []byte{30, 2, 8, uint8(proto.Size(pack.Message)), 31, 10, 16} // sanity check the header and the start of the protocol buffer c.Expect(bytes.Equal(b, outData[:len(b)]), gs.IsTrue) }) }) c.Specify("processes incoming messages", func() { err := fileOutput.Init(config) defer os.Remove(tmpFilePath) c.Assume(err, gs.IsNil) // Save for comparison. 
payload := fmt.Sprintf("%s\n", pack.Message.GetPayload()) oth.MockOutputRunner.EXPECT().InChan().Return(inChan) wg.Add(1) go fileOutput.receiver(oth.MockOutputRunner, &wg) inChan <- pack close(inChan) outBatch := <-fileOutput.batchChan wg.Wait() c.Expect(string(outBatch), gs.Equals, payload) }) c.Specify("Init halts if basedirectory is not writable", func() { tmpdir := filepath.Join(os.TempDir(), "tmpdir") err := os.MkdirAll(tmpdir, 0400) c.Assume(err, gs.IsNil) config.Path = tmpdir err = fileOutput.Init(config) c.Assume(err, gs.Not(gs.IsNil)) }) c.Specify("commits to a file", func() { outStr := "Write me out to the log file" outBytes := []byte(outStr) c.Specify("with default settings", func() { err := fileOutput.Init(config) defer os.Remove(tmpFilePath) c.Assume(err, gs.IsNil) // Start committer loop wg.Add(1) go fileOutput.committer(oth.MockOutputRunner, &wg) // Feed and close the batchChan go func() { fileOutput.batchChan <- outBytes _ = <-fileOutput.backChan // clear backChan to prevent blocking close(fileOutput.batchChan) }() wg.Wait() // Wait for the file close operation to happen. //for ; err == nil; _, err = fileOutput.file.Stat() { //} tmpFile, err := os.Open(tmpFilePath) defer tmpFile.Close() c.Assume(err, gs.IsNil) contents, err := ioutil.ReadAll(tmpFile) c.Assume(err, gs.IsNil) c.Expect(string(contents), gs.Equals, outStr) }) c.Specify("with different Perm settings", func() { config.Perm = "600" err := fileOutput.Init(config) defer os.Remove(tmpFilePath) c.Assume(err, gs.IsNil) // Start committer loop wg.Add(1) go fileOutput.committer(oth.MockOutputRunner, &wg) // Feed and close the batchChan go func() { fileOutput.batchChan <- outBytes _ = <-fileOutput.backChan // clear backChan to prevent blocking close(fileOutput.batchChan) }() wg.Wait() // Wait for the file close operation to happen. 
//for ; err == nil; _, err = fileOutput.file.Stat() { //} tmpFile, err := os.Open(tmpFilePath) defer tmpFile.Close() c.Assume(err, gs.IsNil) fileInfo, err := tmpFile.Stat() c.Assume(err, gs.IsNil) fileMode := fileInfo.Mode() if runtime.GOOS == "windows" { c.Expect(fileMode.String(), ts.StringContains, "-rw-rw-rw-") } else { // 7 consecutive dashes implies no perms for group or other c.Expect(fileMode.String(), ts.StringContains, "-------") } }) }) }) c.Specify("A TcpOutput", func() { tcpOutput := new(TcpOutput) config := tcpOutput.ConfigStruct().(*TcpOutputConfig) tcpOutput.connection = ts.NewMockConn(ctrl) msg := getTestMessage() pack := NewPipelinePack(pConfig.inputRecycleChan) pack.Message = msg pack.Decoded = true c.Specify("correctly formats protocol buffer stream output", func() { outBytes := make([]byte, 0, 200) err := createProtobufStream(pack, &outBytes) c.Expect(err, gs.IsNil) b := []byte{30, 2, 8, uint8(proto.Size(pack.Message)), 31, 10, 16} // sanity check the header and the start of the protocol buffer c.Expect(bytes.Equal(b, (outBytes)[:len(b)]), gs.IsTrue) }) c.Specify("writes out to the network", func() { inChanCall := oth.MockOutputRunner.EXPECT().InChan().AnyTimes() inChanCall.Return(inChan) collectData := func(ch chan string) { ln, err := net.Listen("tcp", "localhost:9125") if err != nil { ch <- err.Error() } ch <- "ready" conn, err := ln.Accept() if err != nil { ch <- err.Error() } b := make([]byte, 1000) n, _ := conn.Read(b) ch <- string(b[0:n]) } ch := make(chan string, 1) // don't block on put go collectData(ch) result := <-ch // wait for server err := tcpOutput.Init(config) c.Assume(err, gs.IsNil) outStr := "Write me out to the network" pack.Message.SetPayload(outStr) go func() { wg.Add(1) tcpOutput.Run(oth.MockOutputRunner, oth.MockHelper) wg.Done() }() inChan <- pack close(inChan) wg.Wait() // wait for close to finish, prevents intermittent test failures matchBytes := make([]byte, 0, 1000) err = createProtobufStream(pack, &matchBytes) c.Expect(err, gs.IsNil) result = <-ch c.Expect(result, gs.Equals, string(matchBytes)) }) }) c.Specify("Runner restarts a plugin on the first time only", func() { pc := new(PipelineConfig) var pluginGlobals PluginGlobals pluginGlobals.Retries = RetryOptions{ MaxDelay: "1us", Delay: "1us", MaxJitter: "1us", MaxRetries: 1, } pw := &PluginWrapper{ name: "stoppingOutput", configCreator: func() interface{} { return nil }, pluginCreator: func() interface{} { return new(StoppingOutput) }, } output := new(StoppingOutput) pc.outputWrappers = make(map[string]*PluginWrapper) pc.outputWrappers["stoppingOutput"] = pw oRunner := NewFORunner("stoppingOutput", output, &pluginGlobals) var wg sync.WaitGroup cfgCall := oth.MockHelper.EXPECT().PipelineConfig() cfgCall.Return(pc) wg.Add(1) oRunner.Start(oth.MockHelper, &wg) // no panic => success wg.Wait() c.Expect(stopoutputTimes, gs.Equals, 2) }) c.Specify("Runner restarts plugin and resumes feeding it", func() { pc := new(PipelineConfig) var pluginGlobals PluginGlobals pluginGlobals.Retries = RetryOptions{ MaxDelay: "1us", Delay: "1us", MaxJitter: "1us", MaxRetries: 4, } pw := &PluginWrapper{ name: "stoppingresumeOutput", configCreator: func() interface{} { return nil }, pluginCreator: func() interface{} { return new(StopResumeOutput) }, } output := new(StopResumeOutput) pc.outputWrappers = make(map[string]*PluginWrapper) pc.outputWrappers["stoppingresumeOutput"] = pw oRunner := NewFORunner("stoppingresumeOutput", output, &pluginGlobals) var wg sync.WaitGroup cfgCall := 
oth.MockHelper.EXPECT().PipelineConfig() cfgCall.Return(pc) wg.Add(1) oRunner.Start(oth.MockHelper, &wg) // no panic => success wg.Wait() c.Expect(stopresumerunTimes, gs.Equals, 3) c.Expect(len(stopresumeHolder), gs.Equals, 2) c.Expect(stopresumeHolder[1], gs.Equals, "woot") c.Expect(oRunner.retainPack, gs.IsNil) }) }
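// Illustrative sketch only: both protobuf-stream checks above compare the
// output prefix against {30, 2, 8, size, 31, 10, 16}. Read as a framed Heka
// stream, those bytes appear to be: a 0x1E record separator, a 2-byte header,
// the header itself (protobuf field 1 varint = encoded message length), a
// 0x1F unit separator, then the message, which starts with its 16-byte UUID
// field (tag 0x0A, length 0x10). This helper just rebuilds that prefix for a
// given message size; it is an interpretation of the test bytes, not framing
// code taken from Heka itself, and only holds while the size fits in a single
// varint byte, as it does in these tests.
func expectedStreamPrefix(msgSize int) []byte {
	return []byte{
		30,                // record separator
		2,                 // header length
		8, uint8(msgSize), // header: field 1 (varint) = encoded message size
		31,     // unit separator
		10, 16, // start of message: field 1 (uuid), 16 bytes long
	}
}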
func KeenOutputSpec(c gs.Context) { ctrl := gomock.NewController(universalT) defer ctrl.Finish() c.Specify("A KeenOutput", func() { successTests := []SuccessfulTestCase{ SuccessfulTestCase{ "successfully records a valid job-finished message", "{\"JobType\":\"nyc_aris\",\"SystemId\":\"1234567890abcdefghijklmn\",\"TimeCreated\":\"2014-07-03T23:35:24.000Z\",\"Duration\":38900,\"Success\":true,\"Message\":\"\"}", func(eventData map[string]interface{}) bool { return eventData["JobType"] == "nyc_aris" && eventData["SystemId"] == "1234567890abcdefghijklmn" && eventData["TimeCreated"] == "2014-07-03T23:35:24.000Z" && eventData["Duration"] == float64(38900) && eventData["Success"] == true && eventData["Message"] == "" }, }, } errorTests := []ErrorTestCase{ ErrorTestCase{ "logs an error but does not crash when the message payload is not valid JSON", "not json", "*json.SyntaxError", "invalid character 'o' in literal null (expecting 'u')", }, } for _, test := range successTests { oth := NewOutputTestHelper(universalT, ctrl) output := new(KeenOutput) output.Init(&KeenOutputConfig{Collection: "job-finished"}) mockClient := MockKeenClient{mock.Mock{}} output.client = &mockClient inChan := make(chan *pipeline.PipelinePack, 1) oth.MockOutputRunner.EXPECT().On("InChan").Return(inChan) mockClient.EXPECT().On("AddEvent", "job-finished", mock.Anything).Return(nil) pack := getEmptyKeenOutputPack() pack.Message.SetPayload(test.MessagePayload) inChan <- pack close(inChan) output.Run(oth.MockOutputRunner, oth.MockHelper) ExpectCall(universalT, &mockClient.mock, "AddEvent with expected JSON", "AddEvent", func(args []interface{}) bool { if len(args) != 2 { return false } eventData, ok := args[1].(map[string]interface{}) return ok && test.IsEventDataCorrect(eventData) }) } for _, test := range errorTests { oth := NewOutputTestHelper(universalT, ctrl) output := new(KeenOutput) output.Init(&KeenOutputConfig{}) mockClient := MockKeenClient{mock.Mock{}} output.client = &mockClient inChan := make(chan *pipeline.PipelinePack, 1) oth.MockOutputRunner.EXPECT().On("InChan").Return(inChan) oth.MockOutputRunner.EXPECT().On("LogError", mock.AnythingOfType(test.ExpectedErrorType)).Return() pack := getEmptyKeenOutputPack() pack.Message.SetPayload(test.MessagePayload) inChan <- pack close(inChan) output.Run(oth.MockOutputRunner, oth.MockHelper) ExpectCall(universalT, &oth.MockOutputRunner.mock, "Log correct error", "LogError", func(args []interface{}) bool { if len(args) != 1 { return false } err, ok := args[0].(error) return ok && err.Error() == test.ExpectedErrorMessage }) oth.MockOutputRunner.EXPECT().AssertExpectations(universalT) } }) }
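// Illustrative sketch only: the success case above compares Duration against
// float64(38900) even though the JSON literal is an integer. That is standard
// encoding/json behavior: numbers decoded into a map[string]interface{} come
// out as float64. The function name is hypothetical. imports: "encoding/json",
// "fmt".
func exampleJsonNumbersAreFloat64() {
	var event map[string]interface{}
	if err := json.Unmarshal([]byte(`{"Duration":38900,"Success":true}`), &event); err != nil {
		panic(err)
	}
	fmt.Printf("%T %v\n", event["Duration"], event["Duration"] == float64(38900)) // float64 true
}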
func FilterSpec(c gs.Context) { t := new(ts.SimpleT) ctrl := gomock.NewController(t) defer ctrl.Finish() fth := NewFilterTestHelper(ctrl) inChan := make(chan *pipeline.PipelinePack, 1) pConfig := pipeline.NewPipelineConfig(nil) c.Specify("A SandboxFilter", func() { sbFilter := new(SandboxFilter) config := sbFilter.ConfigStruct().(*sandbox.SandboxConfig) config.ScriptType = "lua" config.MemoryLimit = 32000 config.InstructionLimit = 1000 config.OutputLimit = 1024 msg := getTestMessage() pack := pipeline.NewPipelinePack(pConfig.InjectRecycleChan()) pack.Message = msg pack.Decoded = true c.Specify("Over inject messages from ProcessMessage", func() { var timer <-chan time.Time fth.MockFilterRunner.EXPECT().Ticker().Return(timer) fth.MockFilterRunner.EXPECT().InChan().Return(inChan) fth.MockFilterRunner.EXPECT().Name().Return("processinject").Times(3) fth.MockFilterRunner.EXPECT().Inject(pack).Return(true).Times(2) fth.MockHelper.EXPECT().PipelineConfig().Return(pConfig) fth.MockHelper.EXPECT().PipelinePack(uint(0)).Return(pack).Times(2) fth.MockFilterRunner.EXPECT().LogError(fmt.Errorf("exceeded InjectMessage count")) config.ScriptFilename = "../lua/testsupport/processinject.lua" err := sbFilter.Init(config) c.Assume(err, gs.IsNil) inChan <- pack close(inChan) sbFilter.Run(fth.MockFilterRunner, fth.MockHelper) }) c.Specify("Over inject messages from TimerEvent", func() { var timer <-chan time.Time timer = time.Tick(time.Duration(1) * time.Millisecond) fth.MockFilterRunner.EXPECT().Ticker().Return(timer) fth.MockFilterRunner.EXPECT().InChan().Return(inChan) fth.MockFilterRunner.EXPECT().Name().Return("timerinject").Times(12) fth.MockFilterRunner.EXPECT().Inject(pack).Return(true).Times(11) fth.MockHelper.EXPECT().PipelineConfig().Return(pConfig) fth.MockHelper.EXPECT().PipelinePack(uint(0)).Return(pack).Times(11) fth.MockFilterRunner.EXPECT().LogError(fmt.Errorf("exceeded InjectMessage count")) config.ScriptFilename = "../lua/testsupport/timerinject.lua" err := sbFilter.Init(config) c.Assume(err, gs.IsNil) go func() { time.Sleep(time.Duration(250) * time.Millisecond) close(inChan) }() sbFilter.Run(fth.MockFilterRunner, fth.MockHelper) }) c.Specify("Preserves data", func() { var timer <-chan time.Time fth.MockFilterRunner.EXPECT().Ticker().Return(timer) fth.MockFilterRunner.EXPECT().InChan().Return(inChan) config.ScriptFilename = "../lua/testsupport/serialize.lua" config.PreserveData = true sbFilter.SetName("serialize") err := sbFilter.Init(config) c.Assume(err, gs.IsNil) close(inChan) sbFilter.Run(fth.MockFilterRunner, fth.MockHelper) _, err = os.Stat("sandbox_preservation/serialize.data") c.Expect(err, gs.IsNil) err = os.Remove("sandbox_preservation/serialize.data") c.Expect(err, gs.IsNil) }) }) c.Specify("A SandboxManagerFilter", func() { sbmFilter := new(SandboxManagerFilter) config := sbmFilter.ConfigStruct().(*SandboxManagerFilterConfig) config.MaxFilters = 1 origBaseDir := pipeline.Globals().BaseDir pipeline.Globals().BaseDir = os.TempDir() sbxMgrsDir := filepath.Join(pipeline.Globals().BaseDir, "sbxmgrs") defer func() { pipeline.Globals().BaseDir = origBaseDir tmpErr := os.RemoveAll(sbxMgrsDir) c.Expect(tmpErr, gs.IsNil) }() msg := getTestMessage() pack := pipeline.NewPipelinePack(pConfig.InputRecycleChan()) pack.Message = msg pack.Decoded = true c.Specify("Control message in the past", func() { sbmFilter.Init(config) pack.Message.SetTimestamp(time.Now().UnixNano() - 5e9) fth.MockFilterRunner.EXPECT().InChan().Return(inChan) 
fth.MockFilterRunner.EXPECT().Name().Return("SandboxManagerFilter") fth.MockFilterRunner.EXPECT().LogError(fmt.Errorf("Discarded control message: 5 seconds skew")) inChan <- pack close(inChan) sbmFilter.Run(fth.MockFilterRunner, fth.MockHelper) }) c.Specify("Control message in the future", func() { sbmFilter.Init(config) pack.Message.SetTimestamp(time.Now().UnixNano() + 5.9e9) fth.MockFilterRunner.EXPECT().InChan().Return(inChan) fth.MockFilterRunner.EXPECT().Name().Return("SandboxManagerFilter") fth.MockFilterRunner.EXPECT().LogError(fmt.Errorf("Discarded control message: -5 seconds skew")) inChan <- pack close(inChan) sbmFilter.Run(fth.MockFilterRunner, fth.MockHelper) }) c.Specify("Generates the right default working directory", func() { sbmFilter.Init(config) fth.MockFilterRunner.EXPECT().InChan().Return(inChan) name := "SandboxManagerFilter" fth.MockFilterRunner.EXPECT().Name().Return(name) close(inChan) sbmFilter.Run(fth.MockFilterRunner, fth.MockHelper) c.Expect(sbmFilter.workingDirectory, gs.Equals, sbxMgrsDir) _, err := os.Stat(sbxMgrsDir) c.Expect(err, gs.IsNil) }) c.Specify("Sanity check the default sandbox configuration limits", func() { sbmFilter.Init(config) c.Expect(sbmFilter.memoryLimit, gs.Equals, uint(8*1024*1024)) c.Expect(sbmFilter.instructionLimit, gs.Equals, uint(1e6)) c.Expect(sbmFilter.outputLimit, gs.Equals, uint(63*1024)) }) c.Specify("Sanity check the user specified sandbox configuration limits", func() { config.MemoryLimit = 123456 config.InstructionLimit = 4321 config.OutputLimit = 8765 sbmFilter.Init(config) c.Expect(sbmFilter.memoryLimit, gs.Equals, config.MemoryLimit) c.Expect(sbmFilter.instructionLimit, gs.Equals, config.InstructionLimit) c.Expect(sbmFilter.outputLimit, gs.Equals, config.OutputLimit) }) }) }
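// Illustrative sketch only (not the SandboxManagerFilter's actual code): the
// two control-message specs above shift the message timestamp by roughly 5e9
// nanoseconds into the past and future and expect "5 seconds skew" and
// "-5 seconds skew" respectively, i.e. skew is reported in whole seconds as
// now minus the message timestamp, truncated toward zero. The function name is
// hypothetical. imports: "fmt", "time".
func exampleControlMessageSkew(msgTimestampNano int64) {
	skewSeconds := (time.Now().UnixNano() - msgTimestampNano) / int64(time.Second)
	fmt.Printf("Discarded control message: %d seconds skew\n", skewSeconds)
}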
func ReportSpec(c gs.Context) {
	t := new(ts.SimpleT)
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()

	pConfig := NewPipelineConfig(nil)
	chanSize := pConfig.Globals.PluginChanSize

	checkForFields := func(c gs.Context, msg *message.Message) {
		f0Val, ok := msg.GetFieldValue(f0.GetName())
		c.Expect(ok, gs.IsTrue)
		c.Expect(f0Val.(int64), gs.Equals, f0.GetValue().(int64))
		f1Val, ok := msg.GetFieldValue(f1.GetName())
		c.Expect(ok, gs.IsTrue)
		c.Expect(f1Val.(string), gs.Equals, f1.GetValue().(string))
	}

	hasChannelData := func(msg *message.Message) (ok bool) {
		capVal, _ := msg.GetFieldValue("InChanCapacity")
		lenVal, _ := msg.GetFieldValue("InChanLength")
		var i int64
		if i, ok = capVal.(int64); !ok {
			return
		}
		if ok = (i == int64(chanSize)); !ok {
			return
		}
		if i, ok = lenVal.(int64); !ok {
			return
		}
		ok = (i == int64(0))
		return
	}

	fName := "counter"
	filter := new(CounterFilter)
	fRunner := NewFORunner(fName, filter, nil, chanSize)
	var err error
	fRunner.matcher, err = NewMatchRunner("Type == ''", "", fRunner, chanSize)
	c.Assume(err, gs.IsNil)
	fRunner.matcher.inChan = make(chan *PipelinePack, chanSize)
	leakCount := 10
	fRunner.SetLeakCount(leakCount)

	iName := "stat_accum"
	input := new(StatAccumInput)
	iRunner := NewInputRunner(iName, input, nil, false)

	c.Specify("`PopulateReportMsg`", func() {
		msg := ts.GetTestMessage()

		c.Specify("w/ a filter", func() {
			err := PopulateReportMsg(fRunner, msg)
			c.Assume(err, gs.IsNil)

			c.Specify("invokes `ReportMsg` on the filter", func() {
				checkForFields(c, msg)
			})

			c.Specify("adds the channel data", func() {
				c.Expect(hasChannelData(msg), gs.IsTrue)
			})

			c.Specify("has its leak count set properly", func() {
				leakVal, ok := msg.GetFieldValue("LeakCount")
				c.Assume(ok, gs.IsTrue)
				i, ok := leakVal.(int64)
				c.Assume(ok, gs.IsTrue)
				c.Expect(int(i), gs.Equals, leakCount)
			})
		})

		c.Specify("w/ an input", func() {
			err := PopulateReportMsg(iRunner, msg)
			c.Assume(err, gs.IsNil)

			c.Specify("invokes `ReportMsg` on the input", func() {
				checkForFields(c, msg)
			})

			c.Specify("doesn't add any channel data", func() {
				capVal, ok := msg.GetFieldValue("InChanCapacity")
				c.Expect(capVal, gs.IsNil)
				c.Expect(ok, gs.IsFalse)
				lenVal, ok := msg.GetFieldValue("InChanLength")
				c.Expect(lenVal, gs.IsNil)
				c.Expect(ok, gs.IsFalse)
			})
		})
	})

	c.Specify("PipelineConfig", func() {
		pc := NewPipelineConfig(nil)
		// Initialize all of the PipelinePacks that we'll need
		pc.reportRecycleChan <- NewPipelinePack(pc.reportRecycleChan)
		pc.FilterRunners = map[string]FilterRunner{fName: fRunner}
		pc.InputRunners = map[string]InputRunner{iName: iRunner}

		c.Specify("returns full set of accurate reports", func() {
			reportChan := make(chan *PipelinePack)
			go pc.reports(reportChan)

			reports := make(map[string]*PipelinePack)
			for r := range reportChan {
				iName, ok := r.Message.GetFieldValue("name")
				c.Expect(ok, gs.IsTrue)
				name, ok := iName.(string)
				c.Expect(ok, gs.IsTrue)
				c.Expect(name, gs.Not(gs.Equals), "MISSING")
				reports[name] = r
				pc.reportRecycleChan <- NewPipelinePack(pc.reportRecycleChan)
			}

			fReport := reports[fName]
			c.Expect(fReport, gs.Not(gs.IsNil))
			checkForFields(c, fReport.Message)
			c.Expect(hasChannelData(fReport.Message), gs.IsTrue)

			iReport := reports[iName]
			c.Expect(iReport, gs.Not(gs.IsNil))
			checkForFields(c, iReport.Message)

			recycleReport := reports["inputRecycleChan"]
			c.Expect(recycleReport, gs.Not(gs.IsNil))
			capVal, ok := recycleReport.Message.GetFieldValue("InChanCapacity")
			c.Expect(ok, gs.IsTrue)
			c.Expect(capVal.(int64), gs.Equals, int64(pConfig.Globals.PoolSize))

			injectReport := reports["injectRecycleChan"]
			c.Expect(injectReport, gs.Not(gs.IsNil))
			capVal, ok = injectReport.Message.GetFieldValue("InChanCapacity")
			c.Expect(ok, gs.IsTrue)
			c.Expect(capVal.(int64), gs.Equals, int64(pConfig.Globals.PoolSize))

			routerReport := reports["Router"]
			c.Expect(routerReport, gs.Not(gs.IsNil))
			c.Expect(hasChannelData(routerReport.Message), gs.IsTrue)
		})
	})
}
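// The report specs above read plugin state back out of ordinary Heka
// messages via GetFieldValue. As a small usage sketch (reportSummary is an
// illustrative helper, not part of the pipeline package), the numeric fields
// the assertions look at can be collected into a plain map:
func reportSummary(msg *message.Message) map[string]int64 {
	summary := make(map[string]int64)
	for _, name := range []string{"InChanCapacity", "InChanLength", "LeakCount"} {
		if val, ok := msg.GetFieldValue(name); ok {
			if i, ok := val.(int64); ok {
				summary[name] = i
			}
		}
	}
	return summary
}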
func FilehandlingSpec(c gs.Context) {
	here, err := os.Getwd()
	c.Assume(err, gs.IsNil)
	dirPath := filepath.Join(here, "testdir", "filehandling")

	c.Specify("The directory scanner", func() {
		c.Specify("scans a directory properly", func() {
			matchRegex := regexp.MustCompile(dirPath + `/subdir/.*\.log(\..*)?`)
			results := ScanDirectoryForLogfiles(dirPath, matchRegex)
			c.Expect(len(results), gs.Equals, 3)
		})

		c.Specify("scans a directory with a bad regexp", func() {
			matchRegex := regexp.MustCompile(dirPath + "/subdir/.*.logg(.*)?")
			results := ScanDirectoryForLogfiles(dirPath, matchRegex)
			c.Expect(len(results), gs.Equals, 0)
		})
	})

	c.Specify("Populating logfile with match parts", func() {
		logfile := Logfile{}

		c.Specify("works without errors", func() {
			subexpNames := []string{"MonthName", "LogNumber"}
			matches := []string{"October", "24"}
			translation := make(SubmatchTranslationMap)
			logfile.PopulateMatchParts(subexpNames, matches, translation)
			c.Expect(logfile.MatchParts["MonthName"], gs.Equals, 10)
			c.Expect(logfile.MatchParts["LogNumber"], gs.Equals, 24)
		})

		c.Specify("works with bad month name", func() {
			subexpNames := []string{"MonthName", "LogNumber"}
			matches := []string{"Octoberrr", "24"}
			translation := make(SubmatchTranslationMap)
			err := logfile.PopulateMatchParts(subexpNames, matches, translation)
			c.Assume(err, gs.Not(gs.IsNil))
			c.Expect(err.Error(), gs.Equals, "Unable to locate month name: Octoberrr")
		})

		c.Specify("works with missing value in submatch translation map", func() {
			subexpNames := []string{"MonthName", "LogNumber"}
			matches := []string{"October", "24"}
			translation := make(SubmatchTranslationMap)
			translation["LogNumber"] = make(MatchTranslationMap)
			translation["LogNumber"]["23"] = 22
			translation["LogNumber"]["999"] = 999 // Non-"missing" submatches must be len > 1.
			err := logfile.PopulateMatchParts(subexpNames, matches, translation)
			c.Assume(err, gs.Not(gs.IsNil))
			c.Expect(err.Error(), gs.Equals, "Value '24' not found in translation map 'LogNumber'.")
		})

		c.Specify("works with custom value in submatch translation map", func() {
			subexpNames := []string{"MonthName", "LogNumber"}
			matches := []string{"October", "24"}
			translation := make(SubmatchTranslationMap)
			translation["LogNumber"] = make(MatchTranslationMap)
			translation["LogNumber"]["24"] = 2
			translation["LogNumber"]["999"] = 999 // Non-"missing" submatches must be len > 1.
			logfile.PopulateMatchParts(subexpNames, matches, translation)
			c.Expect(logfile.MatchParts["MonthName"], gs.Equals, 10)
			c.Expect(logfile.MatchParts["LogNumber"], gs.Equals, 2)
		})
	})

	c.Specify("Populating logfiles with match parts", func() {
		translation := make(SubmatchTranslationMap)
		matchRegex := regexp.MustCompile(dirPath + `/subdir/.*\.log\.?(?P<FileNumber>.*)?`)
		logfiles := ScanDirectoryForLogfiles(dirPath, matchRegex)

		c.Specify("is populated", func() {
			logfiles.PopulateMatchParts(matchRegex, translation)
			c.Expect(len(logfiles), gs.Equals, 3)
			c.Expect(logfiles[0].MatchParts["FileNumber"], gs.Equals, -1)
			c.Expect(logfiles[1].MatchParts["FileNumber"], gs.Equals, 1)
			c.Expect(logfiles[1].StringMatchParts["FileNumber"], gs.Equals, "1")
		})

		c.Specify("returns errors", func() {
			translation["FileNumber"] = make(MatchTranslationMap)
			translation["FileNumber"]["23"] = 22
			translation["FileNumber"]["999"] = 999 // Non-"missing" submatches must be len > 1.
			err := logfiles.PopulateMatchParts(matchRegex, translation)
			c.Assume(err, gs.Not(gs.IsNil))
			c.Expect(len(logfiles), gs.Equals, 3)
		})
	})

	c.Specify("Sorting logfiles", func() {
		translation := make(SubmatchTranslationMap)
		matchRegex := regexp.MustCompile(dirPath + `/subdir/.*\.log\.?(?P<FileNumber>.*)?`)
		logfiles := ScanDirectoryForLogfiles(dirPath, matchRegex)

		c.Specify("with no 'missing' translation value", func() {
			err := logfiles.PopulateMatchParts(matchRegex, translation)
			c.Assume(err, gs.IsNil)
			c.Expect(len(logfiles), gs.Equals, 3)

			c.Specify("can be sorted newest to oldest", func() {
				byp := ByPriority{Logfiles: logfiles, Priority: []string{"FileNumber"}}
				sort.Sort(byp)
				c.Expect(logfiles[0].MatchParts["FileNumber"], gs.Equals, -1)
				c.Expect(logfiles[1].MatchParts["FileNumber"], gs.Equals, 1)
			})

			c.Specify("can be sorted oldest to newest", func() {
				byp := ByPriority{Logfiles: logfiles, Priority: []string{"^FileNumber"}}
				sort.Sort(byp)
				c.Expect(logfiles[0].MatchParts["FileNumber"], gs.Equals, 2)
				c.Expect(logfiles[1].MatchParts["FileNumber"], gs.Equals, 1)
			})
		})

		c.Specify("with 'missing' translation value", func() {
			translation["FileNumber"] = make(MatchTranslationMap)
			translation["FileNumber"]["missing"] = 5
			err := logfiles.PopulateMatchParts(matchRegex, translation)
			c.Assume(err, gs.IsNil)
			c.Expect(len(logfiles), gs.Equals, 3)

			c.Specify("honors 'missing' translation value", func() {
				byp := ByPriority{Logfiles: logfiles, Priority: []string{"FileNumber"}}
				sort.Sort(byp)
				c.Expect(logfiles[0].MatchParts["FileNumber"], gs.Equals, 1)
				c.Expect(logfiles[1].MatchParts["FileNumber"], gs.Equals, 2)
				c.Expect(logfiles[2].MatchParts["FileNumber"], gs.Equals, 5)
			})
		})
	})

	c.Specify("Sorting out a directory of access/error logs", func() {
		translation := make(SubmatchTranslationMap)
		matchRegex := regexp.MustCompile(dirPath +
			`/(?P<Year>\d+)/(?P<Month>\d+)/(?P<Type>\w+)\.log(\.(?P<Seq>\d+))?`)
		logfiles := ScanDirectoryForLogfiles(dirPath, matchRegex)
		err := logfiles.PopulateMatchParts(matchRegex, translation)
		c.Assume(err, gs.IsNil)
		c.Expect(len(logfiles), gs.Equals, 52)

		c.Specify("can result in multiple logfile streams", func() {
			mfs := FilterMultipleStreamFiles(logfiles, []string{"Type"})
			c.Expect(len(mfs), gs.Equals, 2)
			access, ok := mfs["access"]
			c.Assume(ok, gs.IsTrue)
			c.Expect(len(access), gs.Equals, 26)
			error, ok := mfs["error"]
			c.Assume(ok, gs.IsTrue)
			c.Expect(len(error), gs.Equals, 26)

			c.Specify("can be individually sorted properly by access", func() {
				byp := ByPriority{Logfiles: mfs["access"], Priority: []string{"Year", "Month", "^Seq"}}
				sort.Sort(byp)
				lf := mfs["access"]
				c.Expect(lf[0].FileName, gs.Equals, dirPath+"/2010/05/access.log.3")
				c.Expect(lf[len(lf)-1].FileName, gs.Equals, dirPath+"/2013/08/access.log")
			})

			c.Specify("can be individually sorted properly by error", func() {
				byp := ByPriority{Logfiles: mfs["error"], Priority: []string{"Year", "Month", "^Seq"}}
				sort.Sort(byp)
				lf := mfs["error"]
				c.Expect(lf[0].FileName, gs.Equals, dirPath+"/2010/07/error.log.2")
				c.Expect(lf[len(lf)-1].FileName, gs.Equals, dirPath+"/2013/08/error.log")
			})
		})

		c.Specify("Can result in multiple logfile streams with a prefix", func() {
			mfs := FilterMultipleStreamFiles(logfiles, []string{"website-", "Type"})
			c.Expect(len(mfs), gs.Equals, 2)
			access, ok := mfs["website-access"]
			c.Assume(ok, gs.IsTrue)
			c.Expect(len(access), gs.Equals, 26)
			error, ok := mfs["website-error"]
			c.Assume(ok, gs.IsTrue)
			c.Expect(len(error), gs.Equals, 26)
		})
	})
}
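// The sorting specs above lean on ByPriority's ordering rules: each entry in
// Priority names a match part, a leading '^' reverses the comparison for
// that part, and a "missing" key in a MatchTranslationMap supplies the value
// used when the submatch is absent. A minimal sketch of that comparison,
// assuming the match parts are plain ints; lessByPriority is illustrative,
// not the package's actual Less implementation, and it assumes the standard
// strings package is imported.
func lessByPriority(a, b map[string]int, priority []string) bool {
	for _, key := range priority {
		reverse := strings.HasPrefix(key, "^")
		key = strings.TrimPrefix(key, "^")
		av, bv := a[key], b[key]
		if av == bv {
			continue // tie on this key, fall through to the next one
		}
		if reverse {
			return av > bv
		}
		return av < bv
	}
	return false
}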
func FilePollingInputSpec(c gs.Context) {
	t := new(pipeline_ts.SimpleT)
	ctrl := gomock.NewController(t)

	tmpFileName := fmt.Sprintf("filepollinginput-test-%d", time.Now().UnixNano())
	tmpFilePath := filepath.Join(os.TempDir(), tmpFileName)

	defer func() {
		ctrl.Finish()
		os.Remove(tmpFilePath)
	}()

	pConfig := NewPipelineConfig(nil)
	var wg sync.WaitGroup
	errChan := make(chan error, 1)
	bytesChan := make(chan []byte, 1)
	tickChan := make(chan time.Time)
	retPackChan := make(chan *PipelinePack, 2)
	defer close(retPackChan)

	c.Specify("A FilePollingInput", func() {
		input := new(FilePollingInput)

		ith := new(plugins_ts.InputTestHelper)
		ith.MockHelper = pipelinemock.NewMockPluginHelper(ctrl)
		ith.MockInputRunner = pipelinemock.NewMockInputRunner(ctrl)
		ith.MockSplitterRunner = pipelinemock.NewMockSplitterRunner(ctrl)

		config := input.ConfigStruct().(*FilePollingInputConfig)
		config.FilePath = tmpFilePath

		startInput := func(msgCount int) {
			wg.Add(1)
			go func() {
				errChan <- input.Run(ith.MockInputRunner, ith.MockHelper)
				wg.Done()
			}()
		}

		ith.MockInputRunner.EXPECT().Ticker().Return(tickChan)
		ith.MockHelper.EXPECT().PipelineConfig().Return(pConfig)

		c.Specify("gets updated information when reading a file", func() {
			err := input.Init(config)
			c.Assume(err, gs.IsNil)

			ith.MockInputRunner.EXPECT().NewSplitterRunner("").Return(ith.MockSplitterRunner)
			ith.MockSplitterRunner.EXPECT().UseMsgBytes().Return(false)
			ith.MockSplitterRunner.EXPECT().SetPackDecorator(gomock.Any())
			splitCall := ith.MockSplitterRunner.EXPECT().SplitStream(gomock.Any(),
				nil).Return(io.EOF).Times(2)
			splitCall.Do(func(f *os.File, del Deliverer) {
				fBytes, err := ioutil.ReadAll(f)
				if err != nil {
					fBytes = []byte(err.Error())
				}
				bytesChan <- fBytes
			})
			startInput(2)

			f, err := os.Create(tmpFilePath)
			c.Expect(err, gs.IsNil)
			_, err = f.Write([]byte("test1"))
			c.Expect(err, gs.IsNil)
			c.Expect(f.Close(), gs.IsNil)

			tickChan <- time.Now()
			msgBytes := <-bytesChan
			c.Expect(string(msgBytes), gs.Equals, "test1")

			f, err = os.Create(tmpFilePath)
			c.Expect(err, gs.IsNil)
			_, err = f.Write([]byte("test2"))
			c.Expect(err, gs.IsNil)
			c.Expect(f.Close(), gs.IsNil)

			tickChan <- time.Now()
			msgBytes = <-bytesChan
			c.Expect(string(msgBytes), gs.Equals, "test2")

			input.Stop()
			wg.Wait()
			c.Expect(<-errChan, gs.IsNil)
		})
	})
}
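// The mock expectations above mirror the loop FilePollingInput is assumed to
// run: on every tick from the runner's Ticker it reopens FilePath and hands
// the file to the splitter runner, which is why SplitStream is expected
// exactly twice for the two ticks sent before Stop. This is a rough sketch
// under those assumptions; pollLoop, its stop channel, and the error
// handling are illustrative, not the plugin's actual implementation.
func pollLoop(filePath string, tick <-chan time.Time, stop <-chan struct{},
	sr SplitterRunner) {
	for {
		select {
		case <-stop:
			return
		case <-tick:
			f, err := os.Open(filePath)
			if err != nil {
				continue // nothing readable this tick; wait for the next one
			}
			// SplitStream consumes the reader; io.EOF signals the file was
			// fully delivered, as the mock above returns.
			sr.SplitStream(f, nil)
			f.Close()
		}
	}
}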