func ProtobufDecoderSpec(c gospec.Context) {
	t := &ts.SimpleT{}
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()

	msg := ts.GetTestMessage()
	config := NewPipelineConfig(nil) // Initializes globals.

	c.Specify("A ProtobufDecoder", func() {
		encoded, err := proto.Marshal(msg)
		c.Assume(err, gs.IsNil)
		pack := NewPipelinePack(config.inputRecycleChan)
		decoder := new(ProtobufDecoder)
		decoder.sampleDenominator = 1000 // Since we don't call decoder.Init().

		c.Specify("decodes a protobuf message", func() {
			pack.MsgBytes = encoded
			_, err := decoder.Decode(pack)
			c.Expect(err, gs.IsNil)
			c.Expect(pack.Message, gs.Equals, msg)
			v, ok := pack.Message.GetFieldValue("foo")
			c.Expect(ok, gs.IsTrue)
			c.Expect(v, gs.Equals, "bar")
		})

		c.Specify("returns an error for bunk encoding", func() {
			bunk := append([]byte{0, 0, 0}, encoded...)
			pack.MsgBytes = bunk
			_, err := decoder.Decode(pack)
			c.Expect(err, gs.Not(gs.IsNil))
		})
	})
}
func TestRun(t *testing.T) {
	mockCtrl := gomock.NewController(t)
	defer mockCtrl.Finish()

	mockOR := pipelinemock.NewMockOutputRunner(mockCtrl)
	mockPH := pipelinemock.NewMockPluginHelper(mockCtrl)
	mockFirehose := NewMockRecordPutter(mockCtrl)

	firehoseOutput := FirehoseOutput{
		client: mockFirehose,
	}

	testChan := make(chan *pipeline.PipelinePack)
	mockOR.EXPECT().InChan().Return(testChan)

	// Send test input through the channel.
	input := `{"key":"value"}`
	go func() {
		testPack := pipeline.PipelinePack{
			Message: &message.Message{
				Payload: &input,
			},
		}
		testChan <- &testPack
		close(testChan)
	}()

	mockFirehose.EXPECT().PutRecord([]byte(input)).Return(nil)

	err := firehoseOutput.Run(mockOR, mockPH)
	assert.NoError(t, err, "did not expect err for valid Run()")
}
func StatsdInputSpec(c gs.Context) {
	t := &pipeline_ts.SimpleT{}
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()
	pConfig := NewPipelineConfig(nil)

	ith := new(plugins_ts.InputTestHelper)
	ith.Msg = pipeline_ts.GetTestMessage()
	ith.Pack = NewPipelinePack(pConfig.InputRecycleChan())
	ith.PackSupply = make(chan *PipelinePack, 1)

	// Specify localhost, but we're not really going to use the network.
	ith.AddrStr = "localhost:55565"
	ith.ResolvedAddrStr = "127.0.0.1:55565"

	// Set up mock helper, input runner, and stat accumulator.
	ith.MockHelper = NewMockPluginHelper(ctrl)
	ith.MockInputRunner = NewMockInputRunner(ctrl)
	mockStatAccum := NewMockStatAccumulator(ctrl)

	c.Specify("A StatsdInput", func() {
		statsdInput := StatsdInput{}
		config := statsdInput.ConfigStruct().(*StatsdInputConfig)
		config.Address = ith.AddrStr
		err := statsdInput.Init(config)
		c.Assume(err, gs.IsNil)
		realListener := statsdInput.listener
		c.Expect(realListener.LocalAddr().String(), gs.Equals, ith.ResolvedAddrStr)
		realListener.Close()
		mockListener := pipeline_ts.NewMockConn(ctrl)
		statsdInput.listener = mockListener

		ith.MockHelper.EXPECT().StatAccumulator("StatAccumInput").Return(mockStatAccum, nil)
		mockListener.EXPECT().Close()
		mockListener.EXPECT().SetReadDeadline(gomock.Any())

		c.Specify("sends a Stat to the StatAccumulator", func() {
			statName := "sample.count"
			statVal := 303
			msg := fmt.Sprintf("%s:%d|c\n", statName, statVal)
			expected := Stat{statName, strconv.Itoa(statVal), "c", float32(1)}
			mockStatAccum.EXPECT().DropStat(expected).Return(true)

			readCall := mockListener.EXPECT().Read(make([]byte, 512))
			readCall.Return(len(msg), nil)
			readCall.Do(func(msgBytes []byte) {
				copy(msgBytes, []byte(msg))
				statsdInput.Stop()
			})

			var wg sync.WaitGroup
			wg.Add(1)
			go func() {
				err = statsdInput.Run(ith.MockInputRunner, ith.MockHelper)
				c.Expect(err, gs.IsNil)
				wg.Done()
			}()
			wg.Wait()
		})
	})
}
func createFixtures(t *testing.T) (reporter *ErrorReporter, ctrl *gomock.Controller) {
	// reporter acts as a testing.T-like object that we pass to the
	// Controller. We use it to test that the mock considered tests
	// successful or failed.
	reporter = NewErrorReporter(t)
	ctrl = gomock.NewController(reporter)
	return
}
func TestPanicOverridesExpectationChecks(t *testing.T) {
	ctrl := gomock.NewController(t)
	reporter := NewErrorReporter(t)

	reporter.assertFatal(func() {
		ctrl.RecordCall(new(Subject), "FooMethod", "1")
		defer ctrl.Finish()
		reporter.Fatalf("Intentional panic")
	})
}
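// For context, a minimal sketch (not part of the original suite) of how these fixtures
// are typically exercised: record an expectation via ctrl.RecordCall and assert that
// Finish reports a fatal error when the expected call never arrives. The Subject type,
// ErrorReporter, createFixtures, and assertFatal helpers are assumed to be the ones
// defined above; the test name below is hypothetical.
func TestMissingCallReportedOnFinish(t *testing.T) {
	reporter, ctrl := createFixtures(t)

	// Expect Subject.FooMethod("argument"), but never actually invoke it.
	ctrl.RecordCall(new(Subject), "FooMethod", "argument")

	reporter.assertFatal(func() {
		// Finish should fail fatally because the recorded expectation was unmet.
		ctrl.Finish()
	})
}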
func TestCreateEndpoint(t *testing.T) {
	useMockFuncs()
	defer useStdFuncs()

	mockCtrl := gomock.NewController(t)
	defer mockCtrl.Finish()

	mckEndHandler := NewMockHandler(mockCtrl)

	Convey("Should allocate update endpoints", t, func() {
		app := NewApplication()
		app.endpointTemplate = testEndpointTemplate

		Convey("Should not encrypt endpoints without a key", func() {
			app.SetTokenKey("")
			app.SetEndpointHandler(mckEndHandler)
			mckEndHandler.EXPECT().URL().Return("https://example.com")
			endpoint, err := app.CreateEndpoint("123")
			So(err, ShouldBeNil)
			So(endpoint, ShouldEqual, "https://example.com/123")
		})

		Convey("Should encrypt endpoints with a key", func() {
			app.SetTokenKey("HVozKz_n-DPopP5W877DpRKQOW_dylVf")
			app.SetEndpointHandler(mckEndHandler)
			mckEndHandler.EXPECT().URL().Return("https://example.com")
			endpoint, err := app.CreateEndpoint("456")
			So(err, ShouldBeNil)
			So(endpoint, ShouldEqual, "https://example.com/AAAAAAAAAAAAAAAAAAAAAGMKig==")
		})

		Convey("Should reject invalid keys", func() {
			app.SetTokenKey("lLyhlLk8qus1ky4ER8yjN5o=")
			app.SetEndpointHandler(mckEndHandler)
			_, err := app.CreateEndpoint("123")
			So(err, ShouldEqual, aes.KeySizeError(17))
		})

		Convey("Should return a relative URL without an update handler", func() {
			app.SetTokenKey("O03rpLsdafhIhJEjEJt-CgVHyqHI650oy0pZZvplKDc=")
			endpoint, err := app.CreateEndpoint("789")
			So(err, ShouldBeNil)
			So(endpoint, ShouldEqual, "/AAAAAAAAAAAAAAAAAAAAAPfdsA==")
		})
	})
}
func GeoIpDecoderSpec(c gs.Context) {
	t := &ts.SimpleT{}
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()

	pConfig := NewPipelineConfig(nil)
	pConfig.Globals.ShareDir = "/foo/bar/baz"

	c.Specify("A GeoIpDecoder", func() {
		decoder := new(GeoIpDecoder)
		decoder.SetPipelineConfig(pConfig)
		rec := new(geoip.GeoIPRecord)
		conf := decoder.ConfigStruct().(*GeoIpDecoderConfig)

		c.Expect(conf.DatabaseFile, gs.Equals, "/foo/bar/baz/GeoLiteCity.dat")

		supply := make(chan *PipelinePack, 1)
		pack := NewPipelinePack(supply)

		nf, _ := message.NewField("remote_host", "74.125.142.147", "")
		pack.Message.AddField(nf)

		decoder.SourceIpField = "remote_host"
		conf.SourceIpField = "remote_host"
		decoder.Init(conf)

		rec.CountryCode = "US"
		rec.CountryCode3 = "USA"
		rec.CountryName = "United States"
		rec.Region = "CA"
		rec.City = "Mountain View"
		rec.PostalCode = "94043"
		rec.Latitude = 37.4192
		rec.Longitude = -122.0574
		rec.AreaCode = 650
		rec.CharSet = 1
		rec.ContinentCode = "NA"

		c.Specify("Test GeoIpDecoder Output", func() {
			buf := decoder.GeoBuff(rec)
			nf, _ = message.NewField("geoip", buf.Bytes(), "")
			pack.Message.AddField(nf)
			b, ok := pack.Message.GetFieldValue("geoip")
			c.Expect(ok, gs.IsTrue)
			c.Expect(string(b.([]byte)), gs.Equals, `{"latitude":37.4192008972168,"longitude":-122.0574035644531,"location":[-122.0574035644531,37.4192008972168],"coordinates":["-122.0574035644531","37.4192008972168"],"countrycode":"US","countrycode3":"USA","countryname":"United States","region":"CA","city":"Mountain View","postalcode":"94043","areacode":650,"charset":1,"continentcode":"NA"}`)
		})
	})
}
func TestSocketListenConfig(t *testing.T) {
	mockCtrl := gomock.NewController(t)
	defer mockCtrl.Finish()

	mckLogger := NewMockLogger(mockCtrl)
	mckLogger.EXPECT().ShouldLog(gomock.Any()).Return(true).AnyTimes()
	mckLogger.EXPECT().Log(gomock.Any(), gomock.Any(), gomock.Any(),
		gomock.Any()).AnyTimes()
	mckStat := NewMockStatistician(mockCtrl)
	mckListenerConfig := NewMockListenerConfig(mockCtrl)

	app := NewApplication()
	app.hostname = "example.org"
	app.SetLogger(mckLogger)
	app.SetMetrics(mckStat)

	sh := NewSocketHandler()
	sh.setApp(app)

	// Should forward Listen errors.
	listenErr := errors.New("splines not reticulated")
	mckListenerConfig.EXPECT().Listen().Return(nil, listenErr)
	if err := sh.listenWithConfig(mckListenerConfig); err != listenErr {
		t.Errorf("Wrong error: got %#v; want %#v", err, listenErr)
	}

	// Should use the wss:// scheme if UseTLS returns true.
	ml := newMockListener(netAddr{"test", "[::1]:8080"})
	gomock.InOrder(
		mckListenerConfig.EXPECT().Listen().Return(ml, nil),
		mckListenerConfig.EXPECT().UseTLS().Return(true),
		mckListenerConfig.EXPECT().GetMaxConns().Return(1),
	)
	if err := sh.listenWithConfig(mckListenerConfig); err != nil {
		t.Errorf("Error setting listener: %s", err)
	}
	if maxConns := sh.MaxConns(); maxConns != 1 {
		t.Errorf("Mismatched maximum connection count: got %d; want 1", maxConns)
	}
	expectedURL := "wss://example.org:8080"
	if url := sh.URL(); url != expectedURL {
		t.Errorf("Mismatched handler URL: got %q; want %q", url, expectedURL)
	}
}
// A more thorough test of notMatcher.
func TestNotMatcher(t *testing.T) {
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()

	mockMatcher := mock_matcher.NewMockMatcher(ctrl)
	notMatcher := gomock.Not(mockMatcher)

	mockMatcher.EXPECT().Matches(4).Return(true)
	if match := notMatcher.Matches(4); match {
		t.Errorf("notMatcher should not match 4")
	}

	mockMatcher.EXPECT().Matches(5).Return(false)
	if match := notMatcher.Matches(5); !match {
		t.Errorf("notMatcher should match 5")
	}
}
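// For reference, gomock.Not also accepts a plain (non-Matcher) value; such values are
// wrapped in an equality matcher. A one-line sketch, not part of the original suite:
//
//	someMock.EXPECT().DoSomething(gomock.Not(4)) // matches any argument except 4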
// decodeMessageAndVerifyOutput takes a decoder config, a message payload, and a
// verification function; the function holds the assertions that confirm the
// message looks as expected after decoding.
func decodeMessageAndVerifyOutput(c gs.Context, conf *JsonDecoderConfig, payload string, fn packVerifier) {
	t := &pipeline_ts.SimpleT{}
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()

	// 1. Initialize test decoder.
	decoder := new(JsonDecoder)
	err := decoder.Init(conf)
	c.Assume(err, gs.IsNil)
	dRunner := pipelinemock.NewMockDecoderRunner(ctrl)
	decoder.SetDecoderRunner(dRunner)

	// 2. Set the payload to be tested and decode it.
	supply := make(chan *PipelinePack, 1)
	pack := NewPipelinePack(supply)
	pack.Message.SetPayload(payload)
	_, err = decoder.Decode(pack)

	// 3. Assert the outcome of decoding.
	fn(c, pack)
	pack.Zero()
}
func BenchmarkCreateEndpoint(b *testing.B) {
	mockCtrl := gomock.NewController(b)
	defer mockCtrl.Finish()

	app := NewApplication()
	app.endpointTemplate = testEndpointTemplate
	app.SetTokenKey("")

	mckEndHandler := NewMockHandler(mockCtrl)
	mckEndHandler.EXPECT().URL().Return("https://example.com").Times(b.N)
	app.SetEndpointHandler(mckEndHandler)

	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		endpoint, err := app.CreateEndpoint("123")
		if err != nil {
			b.Fatalf("Error generating update endpoint: %s", err)
		}
		expected := "https://example.com/123"
		if endpoint != expected {
			b.Fatalf("Wrong endpoint: got %q; want %q", endpoint, expected)
		}
	}
}
func InputRunnerSpec(c gs.Context) {
	t := &ts.SimpleT{}
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()

	globals := &GlobalConfigStruct{
		PluginChanSize: 5,
	}
	pc := NewPipelineConfig(globals)
	mockHelper := NewMockPluginHelper(ctrl)

	c.Specify("Runner restarts a plugin on the first time only", func() {
		var pluginGlobals PluginGlobals
		pluginGlobals.Retries = RetryOptions{
			MaxDelay:   "1us",
			Delay:      "1us",
			MaxJitter:  "1us",
			MaxRetries: 1,
		}

		pw := NewPluginWrapper("stopping", pc)
		pw.ConfigCreator = func() interface{} { return nil }
		pw.PluginCreator = func() interface{} { return new(StoppingInput) }
		pc.inputWrappers["stopping"] = pw

		input := new(StoppingInput)
		iRunner := NewInputRunner("stopping", input, &pluginGlobals, false)

		var wg sync.WaitGroup
		cfgCall := mockHelper.EXPECT().PipelineConfig().Times(2)
		cfgCall.Return(pc)
		wg.Add(1)
		iRunner.Start(mockHelper, &wg)
		wg.Wait()
		c.Expect(stopinputTimes, gs.Equals, 2)
	})
}
// TestRunWithTimestamp tests that if a TimestampColumn is provided in the config,
// then the Heka message's timestamp gets added to the message with that column name.
func TestRunWithTimestamp(t *testing.T) {
	mockCtrl := gomock.NewController(t)
	defer mockCtrl.Finish()

	mockOR := pipelinemock.NewMockOutputRunner(mockCtrl)
	mockPH := pipelinemock.NewMockPluginHelper(mockCtrl)
	mockFirehose := NewMockRecordPutter(mockCtrl)

	firehoseOutput := FirehoseOutput{
		client:          mockFirehose,
		timestampColumn: "created",
	}

	testChan := make(chan *pipeline.PipelinePack)
	mockOR.EXPECT().InChan().Return(testChan)

	// Send test input through the channel.
	input := `{}`
	timestamp := time.Date(2015, 07, 1, 13, 14, 15, 0, time.UTC).UnixNano()
	go func() {
		testPack := pipeline.PipelinePack{
			Message: &message.Message{
				Payload:   &input,
				Timestamp: &timestamp,
			},
		}
		testChan <- &testPack
		close(testChan)
	}()

	expected := `{"created":"2015-07-01 13:14:15.000"}`
	mockFirehose.EXPECT().PutRecord([]byte(expected)).Return(nil)

	err := firehoseOutput.Run(mockOR, mockPH)
	assert.NoError(t, err, "did not expect err for valid Run()")
}
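// The expected "created" value above corresponds to formatting the message's
// Unix-nanosecond timestamp with a millisecond-precision layout. A sketch of what the
// output presumably does internally (the actual FirehoseOutput implementation may differ):
//
//	created := time.Unix(0, timestamp).UTC().Format("2006-01-02 15:04:05.000")
//	// => "2015-07-01 13:14:15.000"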
func ScribbleDecoderSpec(c gs.Context) {
	t := &pipeline_ts.SimpleT{}
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()

	c.Specify("A ScribbleDecoder", func() {
		decoder := new(ScribbleDecoder)
		config := decoder.ConfigStruct().(*ScribbleDecoderConfig)
		myType := "myType"
		myPayload := "myPayload"
		config.MessageFields = MessageTemplate{"Type": myType, "Payload": myPayload}
		supply := make(chan *PipelinePack, 1)
		pack := NewPipelinePack(supply)

		c.Specify("sets basic values correctly", func() {
			decoder.Init(config)
			packs, err := decoder.Decode(pack)
			c.Expect(err, gs.IsNil)
			c.Expect(len(packs), gs.Equals, 1)
			c.Expect(pack.Message.GetType(), gs.Equals, myType)
			c.Expect(pack.Message.GetPayload(), gs.Equals, myPayload)
		})
	})
}
func TestLocatorReadyNotify(t *testing.T) { mockCtrl := gomock.NewController(t) defer mockCtrl.Finish() uaid := "fce61180716a40ed8e79bf5ff0ba34bc" mckLogger := NewMockLogger(mockCtrl) mckLogger.EXPECT().ShouldLog(gomock.Any()).Return(true).AnyTimes() mckLogger.EXPECT().Log(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).AnyTimes() var ( // routerPipes maps fake peer addresses to their respective pipes. Used // by dialRouter to connect to peers. routerPipes = make(map[netAddr]*pipeListener) // contacts is a list of peer URLs for the locator. contacts []string ) // Fake listener for the sender's router, used to test self-routing. sndRouterAddr := netAddr{"tcp", "snd-router.example.com:3000"} sndRouterPipe := newPipeListener() defer sndRouterPipe.Close() routerPipes[sndRouterAddr] = sndRouterPipe contacts = append(contacts, "http://snd-router.example.com:3000") // Fake listener for the receiver's router, used to test routing updates // to different hosts. recvRouterAddr := netAddr{"tcp", "recv-router.example.com:3000"} recvRouterPipe := newPipeListener() defer recvRouterPipe.Close() routerPipes[recvRouterAddr] = recvRouterPipe contacts = append(contacts, "http://recv-router.example.com:3000") // Fake listener for the receiver's WebSocket handler, used to accept a // WebSocket client connection. socketHandlerPipe := newPipeListener() defer socketHandlerPipe.Close() // Fake locator. mckLocator := NewMockLocator(mockCtrl) mckLocator.EXPECT().Contacts(uaid).Return(contacts, nil).Times(2) // Fake dialer to connect to each peer's routing listener. dialRouter := func(network, address string) (net.Conn, error) { if pipe, ok := routerPipes[netAddr{network, address}]; ok { return pipe.Dial(network, address) } return nil, &netErr{temporary: false, timeout: false} } // Configures a fake router for the app. setRouter := func(app *Application, listener net.Listener) { r := NewBroadcastRouter() r.setApp(app) r.setClientOptions(10, 3*time.Second, 3*time.Second) // Defaults. r.setClientTransport(&http.Transport{Dial: dialRouter}) r.listenWithConfig(listenerConfig{listener: listener}) r.maxDataLen = 4096 r.server = newServeWaiter(&http.Server{Handler: r.ServeMux()}) app.SetRouter(r) } // sndApp is the server broadcasting the update. The locator returns the // addresses of the sender and receiver to test self-routing. sndApp := NewApplication() sndApp.SetLogger(mckLogger) sndStat := NewMockStatistician(mockCtrl) sndApp.SetMetrics(sndStat) sndStore := NewMockStore(mockCtrl) sndApp.SetStore(sndStore) sndApp.SetLocator(mckLocator) // Set up a fake router for the sender. setRouter(sndApp, sndRouterPipe) // recvApp is the server receiving the update. recvApp := NewApplication() recvApp.SetLogger(mckLogger) recvStat := NewMockStatistician(mockCtrl) recvApp.SetMetrics(recvStat) recvStore := NewMockStore(mockCtrl) recvApp.SetStore(recvStore) // Wrap the fake locator in a type that implements ReadyNotifier. recvLocator := newMockReadyNotifier(mckLocator) recvApp.SetLocator(recvLocator) // Set up a fake WebSocket handler for the receiver. recvSocketHandler := NewSocketHandler() recvSocketHandler.setApp(recvApp) recvSocketHandler.listenWithConfig(listenerConfig{ listener: socketHandlerPipe}) recvSocketHandler.server = newServeWaiter(&http.Server{Handler: recvSocketHandler.ServeMux()}) recvApp.SetSocketHandler(recvSocketHandler) // Set up a fake router for the receiver. setRouter(recvApp, recvRouterPipe) chid := "2b7c5c27d6224bfeaf1c158c3c57fca3" version := int64(2) data := "I'm a little teapot, short and stout." 
var wg sync.WaitGroup // Waits for the client to close. wg.Add(1) dialChan := make(chan bool) // Signals when the client connects. timeout := closeAfter(2 * time.Second) go func() { defer wg.Done() origin := &url.URL{Scheme: "ws", Host: "recv-conn.example.com"} ws, err := dialSocketListener(socketHandlerPipe, &websocket.Config{ Location: origin, Origin: origin, Version: websocket.ProtocolVersionHybi13, }) if err != nil { t.Errorf("Error dialing host: %s", err) return } defer ws.Close() err = websocket.JSON.Send(ws, struct { Type string `json:"messageType"` DeviceID string `json:"uaid"` ChannelIDs []string `json:"channelIDs"` }{"hello", uaid, []string{}}) if err != nil { t.Errorf("Error writing handshake request: %s", err) return } helloReply := new(HelloReply) if err = websocket.JSON.Receive(ws, helloReply); err != nil { t.Errorf("Error reading handshake reply: %s", err) return } select { case dialChan <- true: case <-timeout: t.Errorf("Timed out waiting for router") return } flushReply := new(FlushReply) if err = websocket.JSON.Receive(ws, flushReply); err != nil { t.Errorf("Error reading routed update: %s", err) return } ok := false expected := Update{chid, uint64(version), data} for _, update := range flushReply.Updates { if ok = update == expected; ok { break } } if !ok { t.Errorf("Missing update %#v in %#v", expected, flushReply.Updates) return } }() // Start the handlers. errChan := make(chan error, 3) go sndApp.Router().Start(errChan) go recvApp.SocketHandler().Start(errChan) go recvApp.Router().Start(errChan) // First and second routing attempts to self. sndStat.EXPECT().Increment("updates.routed.unknown").Times(2) // Initial routing attempt to peer. recvStat.EXPECT().Increment("updates.routed.unknown") // Client connects to peer. recvStat.EXPECT().Increment("client.socket.connect") recvStore.EXPECT().CanStore(0).Return(true) recvStat.EXPECT().Increment("updates.client.hello") recvStore.EXPECT().FetchAll(uaid, gomock.Any()).Return(nil, nil, nil) recvStat.EXPECT().Timer("client.flush", gomock.Any()) // Second routing attempt to peer. recvStat.EXPECT().Increment("updates.routed.incoming") recvStat.EXPECT().Increment("updates.sent") recvStat.EXPECT().Timer("client.flush", gomock.Any()) recvStat.EXPECT().Increment("updates.routed.received") recvStat.EXPECT().Timer("client.socket.lifespan", gomock.Any()) recvStat.EXPECT().Increment("client.socket.disconnect") // Initial routing attempt should fail; the WebSocket listener shouldn't // accept client connections before the locator is ready. delivered, err := sndApp.Router().Route(nil, uaid, chid, version, timeNow(), "disconnected", data) if err != nil { t.Errorf("Error routing to disconnected client: %s", err) } else if delivered { t.Error("Should not route to disconnected client") } // Signal the locator is ready, then wait for the client to connect. recvLocator.SignalReady() select { case <-dialChan: case <-time.After(5 * time.Second): t.Fatalf("Timed out waiting for the client to connect") } // Routing should succeed once the client is connected. 
delivered, err = sndApp.Router().Route(nil, uaid, chid, version, timeNow(), "connected", data) if err != nil { t.Errorf("Error routing to connected client: %s", err) } else if !delivered { t.Error("Should route to connected client") } gomock.InOrder( mckLocator.EXPECT().Close(), recvStore.EXPECT().Close(), ) if err := recvApp.Close(); err != nil { t.Errorf("Error closing peer: %s", err) } wg.Wait() gomock.InOrder( mckLocator.EXPECT().Close(), sndStore.EXPECT().Close(), ) if err := sndApp.Close(); err != nil { t.Errorf("Error closing self: %s", err) } // Wait for the handlers to stop. for i := 0; i < 3; i++ { <-errChan } }
func ReportSpec(c gs.Context) { t := new(ts.SimpleT) ctrl := gomock.NewController(t) defer ctrl.Finish() pConfig := NewPipelineConfig(nil) chanSize := pConfig.Globals.PluginChanSize checkForFields := func(c gs.Context, msg *message.Message) { f0Val, ok := msg.GetFieldValue(f0.GetName()) c.Expect(ok, gs.IsTrue) c.Expect(f0Val.(int64), gs.Equals, f0.GetValue().(int64)) f1Val, ok := msg.GetFieldValue(f1.GetName()) c.Expect(ok, gs.IsTrue) c.Expect(f1Val.(string), gs.Equals, f1.GetValue().(string)) } hasChannelData := func(msg *message.Message) (ok bool) { capVal, _ := msg.GetFieldValue("InChanCapacity") lenVal, _ := msg.GetFieldValue("InChanLength") var i int64 if i, ok = capVal.(int64); !ok { return } if ok = (i == int64(chanSize)); !ok { return } if i, ok = lenVal.(int64); !ok { return } ok = (i == int64(0)) return } fName := "counter" filter := new(CounterFilter) fRunner := NewFORunner(fName, filter, nil, chanSize) var err error fRunner.matcher, err = NewMatchRunner("Type == ''", "", fRunner, chanSize) c.Assume(err, gs.IsNil) fRunner.matcher.inChan = make(chan *PipelinePack, chanSize) leakCount := 10 fRunner.SetLeakCount(leakCount) iName := "stat_accum" input := new(StatAccumInput) iRunner := NewInputRunner(iName, input, nil, false) c.Specify("`PopulateReportMsg`", func() { msg := ts.GetTestMessage() c.Specify("w/ a filter", func() { err := PopulateReportMsg(fRunner, msg) c.Assume(err, gs.IsNil) c.Specify("invokes `ReportMsg` on the filter", func() { checkForFields(c, msg) }) c.Specify("adds the channel data", func() { c.Expect(hasChannelData(msg), gs.IsTrue) }) c.Specify("has its leak count set properly", func() { leakVal, ok := msg.GetFieldValue("LeakCount") c.Assume(ok, gs.IsTrue) i, ok := leakVal.(int64) c.Assume(ok, gs.IsTrue) c.Expect(int(i), gs.Equals, leakCount) }) }) c.Specify("w/ an input", func() { err := PopulateReportMsg(iRunner, msg) c.Assume(err, gs.IsNil) c.Specify("invokes `ReportMsg` on the input", func() { checkForFields(c, msg) }) c.Specify("doesn't add any channel data", func() { capVal, ok := msg.GetFieldValue("InChanCapacity") c.Expect(capVal, gs.IsNil) c.Expect(ok, gs.IsFalse) lenVal, ok := msg.GetFieldValue("InChanLength") c.Expect(lenVal, gs.IsNil) c.Expect(ok, gs.IsFalse) }) }) }) c.Specify("PipelineConfig", func() { pc := NewPipelineConfig(nil) // Initialize all of the PipelinePacks that we'll need pc.reportRecycleChan <- NewPipelinePack(pc.reportRecycleChan) pc.FilterRunners = map[string]FilterRunner{fName: fRunner} pc.InputRunners = map[string]InputRunner{iName: iRunner} c.Specify("returns full set of accurate reports", func() { reportChan := make(chan *PipelinePack) go pc.reports(reportChan) reports := make(map[string]*PipelinePack) for r := range reportChan { iName, ok := r.Message.GetFieldValue("name") c.Expect(ok, gs.IsTrue) name, ok := iName.(string) c.Expect(ok, gs.IsTrue) c.Expect(name, gs.Not(gs.Equals), "MISSING") reports[name] = r pc.reportRecycleChan <- NewPipelinePack(pc.reportRecycleChan) } fReport := reports[fName] c.Expect(fReport, gs.Not(gs.IsNil)) checkForFields(c, fReport.Message) c.Expect(hasChannelData(fReport.Message), gs.IsTrue) iReport := reports[iName] c.Expect(iReport, gs.Not(gs.IsNil)) checkForFields(c, iReport.Message) recycleReport := reports["inputRecycleChan"] c.Expect(recycleReport, gs.Not(gs.IsNil)) capVal, ok := recycleReport.Message.GetFieldValue("InChanCapacity") c.Expect(ok, gs.IsTrue) c.Expect(capVal.(int64), gs.Equals, int64(pConfig.Globals.PoolSize)) injectReport := reports["injectRecycleChan"] c.Expect(injectReport, 
gs.Not(gs.IsNil)) capVal, ok = injectReport.Message.GetFieldValue("InChanCapacity") c.Expect(ok, gs.IsTrue) c.Expect(capVal.(int64), gs.Equals, int64(pConfig.Globals.PoolSize)) routerReport := reports["Router"] c.Expect(routerReport, gs.Not(gs.IsNil)) c.Expect(hasChannelData(routerReport.Message), gs.IsTrue) }) }) }
func KeenOutputSpec(c gs.Context) { ctrl := gomock.NewController(universalT) defer ctrl.Finish() c.Specify("A KeenOutput", func() { successTests := []SuccessfulTestCase{ SuccessfulTestCase{ "successfully records a valid job-finished message", "{\"JobType\":\"nyc_aris\",\"SystemId\":\"1234567890abcdefghijklmn\",\"TimeCreated\":\"2014-07-03T23:35:24.000Z\",\"Duration\":38900,\"Success\":true,\"Message\":\"\"}", func(eventData map[string]interface{}) bool { return eventData["JobType"] == "nyc_aris" && eventData["SystemId"] == "1234567890abcdefghijklmn" && eventData["TimeCreated"] == "2014-07-03T23:35:24.000Z" && eventData["Duration"] == float64(38900) && eventData["Success"] == true && eventData["Message"] == "" }, }, } errorTests := []ErrorTestCase{ ErrorTestCase{ "logs an error but does not crash when the message payload is not valid JSON", "not json", "*json.SyntaxError", "invalid character 'o' in literal null (expecting 'u')", }, } for _, test := range successTests { oth := NewOutputTestHelper(universalT, ctrl) output := new(KeenOutput) output.Init(&KeenOutputConfig{Collection: "job-finished"}) mockClient := MockKeenClient{mock.Mock{}} output.client = &mockClient inChan := make(chan *pipeline.PipelinePack, 1) oth.MockOutputRunner.EXPECT().On("InChan").Return(inChan) mockClient.EXPECT().On("AddEvent", "job-finished", mock.Anything).Return(nil) pack := getEmptyKeenOutputPack() pack.Message.SetPayload(test.MessagePayload) inChan <- pack close(inChan) output.Run(oth.MockOutputRunner, oth.MockHelper) ExpectCall(universalT, &mockClient.mock, "AddEvent with expected JSON", "AddEvent", func(args []interface{}) bool { if len(args) != 2 { return false } eventData, ok := args[1].(map[string]interface{}) return ok && test.IsEventDataCorrect(eventData) }) } for _, test := range errorTests { oth := NewOutputTestHelper(universalT, ctrl) output := new(KeenOutput) output.Init(&KeenOutputConfig{}) mockClient := MockKeenClient{mock.Mock{}} output.client = &mockClient inChan := make(chan *pipeline.PipelinePack, 1) oth.MockOutputRunner.EXPECT().On("InChan").Return(inChan) oth.MockOutputRunner.EXPECT().On("LogError", mock.AnythingOfType(test.ExpectedErrorType)).Return() pack := getEmptyKeenOutputPack() pack.Message.SetPayload(test.MessagePayload) inChan <- pack close(inChan) output.Run(oth.MockOutputRunner, oth.MockHelper) ExpectCall(universalT, &oth.MockOutputRunner.mock, "Log correct error", "LogError", func(args []interface{}) bool { if len(args) != 1 { return false } err, ok := args[0].(error) return ok && err.Error() == test.ExpectedErrorMessage }) oth.MockOutputRunner.EXPECT().AssertExpectations(universalT) } }) }
func HttpInputSpec(c gs.Context) { t := &pipeline_ts.SimpleT{} ctrl := gomock.NewController(t) defer ctrl.Finish() pConfig := NewPipelineConfig(nil) json_post := `{"uuid": "xxBI3zyeXU+spG8Uiveumw==", "timestamp": 1372966886023588, "hostname": "Victors-MacBook-Air.local", "pid": 40183, "fields": [{"representation": "", "value_type": "STRING", "name": "cef_meta.syslog_priority", "value_string": [""]}, {"representation": "", "value_type": "STRING", "name": "cef_meta.syslog_ident", "value_string": [""]}, {"representation": "", "value_type": "STRING", "name": "cef_meta.syslog_facility", "value_string": [""]}, {"representation": "", "value_type": "STRING", "name": "cef_meta.syslog_options", "value_string": [""]}], "logger": "", "env_version": "0.8", "type": "cef", "payload": "Jul 04 15:41:26 Victors-MacBook-Air.local CEF:0|mozilla|weave|3|xx\\\\|x|xx\\\\|x|5|cs1Label=requestClientApplication cs1=MySuperBrowser requestMethod=GET request=/ src=127.0.0.1 dest=127.0.0.1 suser=none", "severity": 6}'` c.Specify("A HttpInput", func() { httpInput := HttpInput{} ith := new(plugins_ts.InputTestHelper) ith.MockHelper = pipelinemock.NewMockPluginHelper(ctrl) ith.MockInputRunner = pipelinemock.NewMockInputRunner(ctrl) ith.MockSplitterRunner = pipelinemock.NewMockSplitterRunner(ctrl) runOutputChan := make(chan error, 1) startInput := func() { go func() { runOutputChan <- httpInput.Run(ith.MockInputRunner, ith.MockHelper) }() } ith.Pack = NewPipelinePack(pConfig.InputRecycleChan()) // These assume that every sub-spec starts the input. config := httpInput.ConfigStruct().(*HttpInputConfig) tickChan := make(chan time.Time) ith.MockInputRunner.EXPECT().Ticker().Return(tickChan) ith.MockHelper.EXPECT().Hostname().Return("hekatests.example.com") // These assume that every sub-spec makes exactly one HTTP request. ith.MockInputRunner.EXPECT().NewSplitterRunner("0").Return(ith.MockSplitterRunner) getRecCall := ith.MockSplitterRunner.EXPECT().GetRecordFromStream(gomock.Any()) getRecCall.Return(len(json_post), []byte(json_post), io.EOF) ith.MockSplitterRunner.EXPECT().UseMsgBytes().Return(false) decChan := make(chan func(*PipelinePack), 1) packDecCall := ith.MockSplitterRunner.EXPECT().SetPackDecorator(gomock.Any()) packDecCall.Do(func(dec func(*PipelinePack)) { decChan <- dec }) ith.MockSplitterRunner.EXPECT().DeliverRecord([]byte(json_post), nil) ith.MockSplitterRunner.EXPECT().IncompleteFinal().Return(false).AnyTimes() splitter := &TokenSplitter{} // not actually used ith.MockSplitterRunner.EXPECT().Splitter().Return(splitter) c.Specify("honors time ticker to flush", func() { // Spin up a http server. server, err := plugins_ts.NewOneHttpServer(json_post, "localhost", 9876) c.Expect(err, gs.IsNil) go server.Start("/") time.Sleep(10 * time.Millisecond) config.Url = "http://localhost:9876/" err = httpInput.Init(config) c.Assume(err, gs.IsNil) startInput() tickChan <- time.Now() // Getting the decorator means we've made our HTTP request. 
<-decChan }) c.Specify("supports configuring HTTP Basic Authentication", func() { // Spin up a http server which expects username "user" and password "password" server, err := plugins_ts.NewHttpBasicAuthServer("user", "password", "localhost", 9875) c.Expect(err, gs.IsNil) go server.Start("/BasicAuthTest") time.Sleep(10 * time.Millisecond) config.Url = "http://localhost:9875/BasicAuthTest" config.User = "******" config.Password = "******" err = httpInput.Init(config) c.Assume(err, gs.IsNil) startInput() tickChan <- time.Now() dec := <-decChan dec(ith.Pack) // we expect a statuscode 200 (i.e. success) statusCode, ok := ith.Pack.Message.GetFieldValue("StatusCode") c.Assume(ok, gs.IsTrue) c.Expect(statusCode, gs.Equals, int64(200)) }) c.Specify("supports configuring a different HTTP method", func() { // Spin up a http server which expects requests with method "POST" server, err := plugins_ts.NewHttpMethodServer("POST", "localhost", 9874) c.Expect(err, gs.IsNil) go server.Start("/PostTest") time.Sleep(10 * time.Millisecond) config.Url = "http://localhost:9874/PostTest" config.Method = "POST" err = httpInput.Init(config) c.Assume(err, gs.IsNil) startInput() tickChan <- time.Now() dec := <-decChan dec(ith.Pack) // we expect a statuscode 200 (i.e. success) statusCode, ok := ith.Pack.Message.GetFieldValue("StatusCode") c.Assume(ok, gs.IsTrue) c.Expect(statusCode, gs.Equals, int64(200)) }) c.Specify("supports configuring HTTP headers", func() { // Spin up a http server which expects requests with method "POST" server, err := plugins_ts.NewHttpHeadersServer(map[string]string{"Accept": "text/plain"}, "localhost", 9873) c.Expect(err, gs.IsNil) go server.Start("/HeadersTest") time.Sleep(10 * time.Millisecond) config.Url = "http://localhost:9873/HeadersTest" config.Headers = map[string]string{"Accept": "text/plain"} err = httpInput.Init(config) c.Assume(err, gs.IsNil) startInput() tickChan <- time.Now() dec := <-decChan dec(ith.Pack) // we expect a statuscode 200 (i.e. success) statusCode, ok := ith.Pack.Message.GetFieldValue("StatusCode") c.Assume(ok, gs.IsTrue) c.Expect(statusCode, gs.Equals, int64(200)) }) c.Specify("supports configuring a request body", func() { // Spin up a http server that echoes back the request body server, err := plugins_ts.NewHttpBodyServer("localhost", 9872) c.Expect(err, gs.IsNil) go server.Start("/BodyTest") time.Sleep(10 * time.Millisecond) config.Url = "http://localhost:9872/BodyTest" config.Method = "POST" config.Body = json_post err = httpInput.Init(config) c.Assume(err, gs.IsNil) respBodyChan := make(chan []byte, 1) getRecCall.Do(func(r io.Reader) { respBody := make([]byte, len(config.Body)) n, err := r.Read(respBody) c.Expect(n, gs.Equals, len(config.Body)) c.Expect(err, gs.Equals, io.EOF) respBodyChan <- respBody }) startInput() tickChan <- time.Now() respBody := <-respBodyChan c.Expect(string(respBody), gs.Equals, json_post) }) httpInput.Stop() runOutput := <-runOutputChan c.Expect(runOutput, gs.IsNil) }) }
func LogstreamerInputSpec(c gs.Context) { t := &pipeline_ts.SimpleT{} ctrl := gomock.NewController(t) defer ctrl.Finish() here, _ := os.Getwd() dirPath := filepath.Join(here, "../../logstreamer", "testdir", "filehandling/subdir") tmpDir, tmpErr := ioutil.TempDir("", "hekad-tests") c.Expect(tmpErr, gs.Equals, nil) defer func() { tmpErr = os.RemoveAll(tmpDir) c.Expect(tmpErr, gs.IsNil) }() globals := DefaultGlobals() globals.BaseDir = tmpDir config := NewPipelineConfig(globals) ith := new(plugins_ts.InputTestHelper) ith.Msg = pipeline_ts.GetTestMessage() ith.Pack = NewPipelinePack(config.InputRecycleChan()) // Specify localhost, but we're not really going to use the network ith.AddrStr = "localhost:55565" ith.ResolvedAddrStr = "127.0.0.1:55565" // set up mock helper, decoder set, and packSupply channel ith.MockHelper = pipelinemock.NewMockPluginHelper(ctrl) ith.MockInputRunner = pipelinemock.NewMockInputRunner(ctrl) ith.Decoder = pipelinemock.NewMockDecoderRunner(ctrl) ith.PackSupply = make(chan *PipelinePack, 1) ith.DecodeChan = make(chan *PipelinePack) c.Specify("A LogstreamerInput", func() { lsInput := &LogstreamerInput{pConfig: config} lsiConfig := lsInput.ConfigStruct().(*LogstreamerInputConfig) lsiConfig.LogDirectory = dirPath lsiConfig.FileMatch = `file.log(\.?)(?P<Seq>\d+)?` lsiConfig.Differentiator = []string{"logfile"} lsiConfig.Priority = []string{"^Seq"} lsiConfig.Decoder = "decoder-name" c.Specify("w/ no translation map", func() { err := lsInput.Init(lsiConfig) c.Expect(err, gs.IsNil) c.Expect(len(lsInput.plugins), gs.Equals, 1) mockDecoderRunner := pipelinemock.NewMockDecoderRunner(ctrl) // Create pool of packs. numLines := 5 // # of lines in the log file we're parsing. packs := make([]*PipelinePack, numLines) ith.PackSupply = make(chan *PipelinePack, numLines) for i := 0; i < numLines; i++ { packs[i] = NewPipelinePack(ith.PackSupply) ith.PackSupply <- packs[i] } c.Specify("reads a log file", func() { // Expect InputRunner calls to get InChan and inject outgoing msgs ith.MockInputRunner.EXPECT().LogError(gomock.Any()).AnyTimes() ith.MockInputRunner.EXPECT().LogMessage(gomock.Any()).AnyTimes() ith.MockInputRunner.EXPECT().InChan().Return(ith.PackSupply).Times(numLines) // Expect calls to get decoder and decode each message. Since the // decoding is a no-op, the message payload will be the log file // line, unchanged. pbcall := ith.MockHelper.EXPECT().DecoderRunner(lsiConfig.Decoder, "-"+lsiConfig.Decoder) pbcall.Return(mockDecoderRunner, true) decodeCall := mockDecoderRunner.EXPECT().InChan().Times(numLines) decodeCall.Return(ith.DecodeChan) runOutChan := make(chan error, 1) go func() { err = lsInput.Run(ith.MockInputRunner, ith.MockHelper) runOutChan <- err }() d, _ := time.ParseDuration("5s") timeout := time.After(d) timed := false for x := 0; x < numLines; x++ { select { case <-ith.DecodeChan: case <-timeout: timed = true x += numLines } // Free up the scheduler while we wait for the log file lines // to be processed. 
runtime.Gosched() } lsInput.Stop() c.Expect(timed, gs.Equals, false) c.Expect(<-runOutChan, gs.Equals, nil) }) }) c.Specify("with a translation map", func() { lsiConfig.Translation = make(ls.SubmatchTranslationMap) lsiConfig.Translation["Seq"] = make(ls.MatchTranslationMap) c.Specify("allows len 1 translation map for 'missing'", func() { lsiConfig.Translation["Seq"]["missing"] = 9999 err := lsInput.Init(lsiConfig) c.Expect(err, gs.IsNil) }) c.Specify("doesn't allow len 1 map for other keys", func() { lsiConfig.Translation["Seq"]["missin"] = 9999 err := lsInput.Init(lsiConfig) c.Expect(err, gs.Not(gs.IsNil)) c.Expect(err.Error(), gs.Equals, "A translation map with one entry ('Seq') must be specifying a "+ "'missing' key.") }) }) }) }
func LogstreamerInputSpec(c gs.Context) { t := &pipeline_ts.SimpleT{} ctrl := gomock.NewController(t) defer ctrl.Finish() here, _ := os.Getwd() dirPath := filepath.Join(here, "../../logstreamer", "testdir", "filehandling/subdir") tmpDir, tmpErr := ioutil.TempDir("", "hekad-tests") c.Expect(tmpErr, gs.Equals, nil) defer func() { tmpErr = os.RemoveAll(tmpDir) c.Expect(tmpErr, gs.IsNil) }() globals := DefaultGlobals() globals.BaseDir = tmpDir pConfig := NewPipelineConfig(globals) ith := new(plugins_ts.InputTestHelper) ith.Msg = pipeline_ts.GetTestMessage() ith.Pack = NewPipelinePack(pConfig.InputRecycleChan()) // Specify localhost, but we're not really going to use the network. ith.AddrStr = "localhost:55565" ith.ResolvedAddrStr = "127.0.0.1:55565" // Set up mock helper, runner, and pack supply channel. ith.MockHelper = pipelinemock.NewMockPluginHelper(ctrl) ith.MockInputRunner = pipelinemock.NewMockInputRunner(ctrl) ith.MockDeliverer = pipelinemock.NewMockDeliverer(ctrl) ith.MockSplitterRunner = pipelinemock.NewMockSplitterRunner(ctrl) ith.PackSupply = make(chan *PipelinePack, 1) c.Specify("A LogstreamerInput", func() { lsInput := &LogstreamerInput{pConfig: pConfig} lsiConfig := lsInput.ConfigStruct().(*LogstreamerInputConfig) lsiConfig.LogDirectory = dirPath lsiConfig.FileMatch = `file.log(\.?)(?P<Seq>\d+)?` lsiConfig.Differentiator = []string{"logfile"} lsiConfig.Priority = []string{"^Seq"} c.Specify("w/ no translation map", func() { err := lsInput.Init(lsiConfig) c.Expect(err, gs.IsNil) c.Expect(len(lsInput.plugins), gs.Equals, 1) // Create pool of packs. numLines := 5 // # of lines in the log file we're parsing. packs := make([]*PipelinePack, numLines) ith.PackSupply = make(chan *PipelinePack, numLines) for i := 0; i < numLines; i++ { packs[i] = NewPipelinePack(ith.PackSupply) ith.PackSupply <- packs[i] } c.Specify("reads a log file", func() { // Expect InputRunner calls to get InChan and inject outgoing msgs. ith.MockInputRunner.EXPECT().LogError(gomock.Any()).AnyTimes() ith.MockInputRunner.EXPECT().LogMessage(gomock.Any()).AnyTimes() ith.MockInputRunner.EXPECT().NewDeliverer("1").Return(ith.MockDeliverer) ith.MockInputRunner.EXPECT().NewSplitterRunner("1").Return( ith.MockSplitterRunner) ith.MockSplitterRunner.EXPECT().UseMsgBytes().Return(false) ith.MockSplitterRunner.EXPECT().IncompleteFinal().Return(false) ith.MockSplitterRunner.EXPECT().SetPackDecorator(gomock.Any()) getRecCall := ith.MockSplitterRunner.EXPECT().GetRecordFromStream( gomock.Any()).Times(numLines) line := "boo hoo foo foo" getRecCall.Return(len(line), []byte(line), nil) getRecCall = ith.MockSplitterRunner.EXPECT().GetRecordFromStream(gomock.Any()) getRecCall.Return(0, make([]byte, 0), io.EOF) deliverChan := make(chan []byte, 1) deliverCall := ith.MockSplitterRunner.EXPECT().DeliverRecord(gomock.Any(), ith.MockDeliverer).Times(numLines) deliverCall.Do(func(record []byte, del Deliverer) { deliverChan <- record }) ith.MockDeliverer.EXPECT().Done() runOutChan := make(chan error, 1) go func() { err = lsInput.Run(ith.MockInputRunner, ith.MockHelper) runOutChan <- err }() dur, _ := time.ParseDuration("5s") timeout := time.After(dur) timed := false for x := 0; x < numLines; x++ { select { case record := <-deliverChan: c.Expect(string(record), gs.Equals, line) case <-timeout: timed = true x += numLines } // Free up the scheduler while we wait for the log file lines // to be processed. 
runtime.Gosched() } lsInput.Stop() c.Expect(timed, gs.Equals, false) c.Expect(<-runOutChan, gs.Equals, nil) }) }) c.Specify("with a translation map", func() { lsiConfig.Translation = make(ls.SubmatchTranslationMap) lsiConfig.Translation["Seq"] = make(ls.MatchTranslationMap) c.Specify("allows len 1 translation map for 'missing'", func() { lsiConfig.Translation["Seq"]["missing"] = 9999 err := lsInput.Init(lsiConfig) c.Expect(err, gs.IsNil) }) c.Specify("doesn't allow len 1 map for other keys", func() { lsiConfig.Translation["Seq"]["missin"] = 9999 err := lsInput.Init(lsiConfig) c.Expect(err, gs.Not(gs.IsNil)) c.Expect(err.Error(), gs.Equals, "A translation map with one entry ('Seq') must be specifying a "+ "'missing' key.") }) }) }) }
func TestEndpointResolveKey(t *testing.T) { mockCtrl := gomock.NewController(t) defer mockCtrl.Finish() mckLogger := NewMockLogger(mockCtrl) mckLogger.EXPECT().ShouldLog(gomock.Any()).Return(true).AnyTimes() mckLogger.EXPECT().Log(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).AnyTimes() mckStat := NewMockStatistician(mockCtrl) mckStore := NewMockStore(mockCtrl) Convey("Endpoint tokens", t, func() { app := NewApplication() app.SetLogger(mckLogger) app.SetMetrics(mckStat) app.SetStore(mckStore) Convey("Should return a 404 for invalid tokens", func() { app.SetTokenKey("c3v0AlmmxXu_LSfdZY3l3eayLsIwkX48") eh := NewEndpointHandler() eh.setApp(app) app.SetEndpointHandler(eh) resp := httptest.NewRecorder() req := &http.Request{ Method: "PUT", Header: http.Header{}, URL: &url.URL{ Path: "/update/j1bqzFq9WiwFZbqay-y7xVlfSvtO1eY="}, // "123.456" } gomock.InOrder( mckStore.EXPECT().KeyToIDs("123.456").Return("", "", ErrInvalidKey), mckStat.EXPECT().Increment("updates.appserver.invalid"), ) eh.ServeMux().ServeHTTP(resp, req) So(resp.Code, ShouldEqual, 404) body, isJSON := getJSON(resp.HeaderMap, resp.Body) So(isJSON, ShouldBeTrue) So(body.String(), ShouldEqual, `"Invalid Token"`) }) Convey("Should not decode plaintext tokens without a key", func() { var err error app.SetTokenKey("") eh := NewEndpointHandler() eh.setApp(app) app.SetEndpointHandler(eh) _, err = eh.decodePK("") So(err, ShouldNotBeNil) pk, err := eh.decodePK("123.456") So(pk, ShouldEqual, "123.456") }) Convey("Should normalize decoded tokens", func() { app.SetTokenKey("LM1xDImCx0rB46LCnx-3v4-Iyfk1LeKJbx9wuvx_z3U=") eh := NewEndpointHandler() eh.setApp(app) app.SetEndpointHandler(eh) // Hyphenated IDs should be normalized. uaid := "dbda2ba2-004c-491f-9e3d-c5950aee93de" chid := "848cd568-3f2a-4108-9ce4-bd0d928ecad4" // " \t%s.%s\r\n" % (uaid, chid) encodedKey := "qfGSdZzwf20GXiYubmZfIXj11Rx4RGJujFsjSQGdF4LRBhHbB_vt3hdW7cRvL9Fq_t_guMBGkDgebOoa5gRd1GGLN-Cv6h5hkpRTbdju8Tk-hMyC91BP4CEres_8" // decodePK should trim whitespace from encoded keys. mckStore.EXPECT().KeyToIDs( fmt.Sprintf("%s.%s", uaid, chid)).Return(uaid, chid, nil) actualUAID, actualCHID, err := eh.resolvePK(encodedKey) So(err, ShouldBeNil) So(actualUAID, ShouldEqual, uaid) So(actualCHID, ShouldEqual, chid) }) Convey("Should reject invalid tokens", func() { var err error app.SetTokenKey("IhnNwMNbsFWiafTXSgF4Ag==") eh := NewEndpointHandler() eh.setApp(app) app.SetEndpointHandler(eh) invalidKey := "b54QOw2omSWBiEq0IuyfBGxHBIR7AI9YhCMA0lP9" // "_=!@#$%^&*()[]" uaid := "82398a648c834f8b838cb3945eceaf29" chid := "af445ad07e5f46b7a6c858150fc5aa92" validKey := fmt.Sprintf("%s.%s", uaid, chid) encodedKey := "swKSH8P2qprRt5y0J4Wi7ybl-qzFv1j09WPOfuabpEJmVUqwUpxjprXc2R3Yw0ITbqc_Swntw9_EpCgo_XuRTn7Q7opQYoQUgMPhCgT0EGbK" _, _, err = eh.resolvePK(invalidKey[:8]) So(err, ShouldNotBeNil) _, _, err = eh.resolvePK(invalidKey) So(err, ShouldNotBeNil) // Reject plaintext tokens if a key is specified. _, _, err = eh.resolvePK(validKey) So(err, ShouldNotBeNil) mckStore.EXPECT().KeyToIDs(validKey).Return("", "", ErrInvalidKey) _, _, err = eh.resolvePK(encodedKey) So(err, ShouldNotBeNil) mckStore.EXPECT().KeyToIDs(validKey).Return(uaid, chid, nil) actualUAID, actualCHID, err := eh.resolvePK(encodedKey) So(err, ShouldBeNil) So(actualUAID, ShouldEqual, uaid) So(actualCHID, ShouldEqual, chid) }) }) }
func TestEndpointDelivery(t *testing.T) { useMockFuncs() defer useStdFuncs() mockCtrl := gomock.NewController(t) defer mockCtrl.Finish() mckLogger := NewMockLogger(mockCtrl) mckLogger.EXPECT().ShouldLog(gomock.Any()).Return(true).AnyTimes() mckLogger.EXPECT().Log(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).AnyTimes() mckStat := NewMockStatistician(mockCtrl) mckStore := NewMockStore(mockCtrl) mckRouter := NewMockRouter(mockCtrl) mckWorker := NewMockWorker(mockCtrl) Convey("Update delivery", t, func() { app := NewApplication() app.SetLogger(mckLogger) app.SetMetrics(mckStat) app.SetStore(mckStore) app.SetRouter(mckRouter) Convey("Should attempt local delivery if `AlwaysRoute` is disabled", func() { eh := NewEndpointHandler() eh.setApp(app) app.SetEndpointHandler(eh) Convey("Should route updates if the device is not connected", func() { uaid := "f7e9fc483f7344c398701b6fa0e85e4f" chid := "737b7a0d25674be4bb184f015fce02cf" gomock.InOrder( mckStat.EXPECT().Increment("updates.routed.outgoing"), mckRouter.EXPECT().Route(nil, uaid, chid, int64(3), timeNow().UTC(), "", "").Return(true, nil), mckStat.EXPECT().Increment("router.broadcast.hit"), mckStat.EXPECT().Timer("updates.routed.hits", gomock.Any()), mckStat.EXPECT().Increment("updates.appserver.received"), ) ok := eh.deliver(nil, uaid, chid, 3, "", "") So(ok, ShouldBeTrue) }) // A routing failure still stores the alert for potential // client delivery. We should only return 404 for absolute // failures (where the endpoint is no longer valid Convey("Should return a 202 if routing fails", func() { resp := httptest.NewRecorder() req := &http.Request{ Method: "PUT", Header: http.Header{HeaderID: {"reqID"}}, URL: &url.URL{Path: "/update/123"}, Body: formReader(url.Values{"version": {"1"}}), } gomock.InOrder( mckStore.EXPECT().KeyToIDs("123").Return("123", "456", nil), mckStat.EXPECT().Increment("updates.appserver.incoming"), mckStore.EXPECT().Update("123", "456", int64(1)).Return(nil), mckStat.EXPECT().Increment("updates.routed.outgoing"), mckRouter.EXPECT().Route(nil, "123", "456", int64(1), gomock.Any(), "reqID", "").Return(false, nil), mckStat.EXPECT().Increment("router.broadcast.miss"), mckStat.EXPECT().Timer("updates.routed.misses", gomock.Any()), mckStat.EXPECT().Increment("updates.appserver.rejected"), ) eh.ServeMux().ServeHTTP(resp, req) So(resp.Code, ShouldEqual, 202) body, isJSON := getJSON(resp.HeaderMap, resp.Body) So(isJSON, ShouldBeTrue) So(body.String(), ShouldEqual, "{}") }) Convey("Should return a 404 if local delivery fails", func() { uaid := "9e98d6415d8e4fd099ab1bad7178f750" chid := "0eecf572e99f4d508666d8da6c0b15a9" app.AddWorker(uaid, mckWorker) gomock.InOrder( mckWorker.EXPECT().Send(chid, int64(3), "").Return( errors.New("client gone")), mckStat.EXPECT().Increment("updates.appserver.rejected"), ) ok := eh.deliver(nil, uaid, chid, int64(3), "", "") So(ok, ShouldBeFalse) }) Convey("Should return an error if storage is unavailable", func() { resp := httptest.NewRecorder() req := &http.Request{ Method: "PUT", Header: http.Header{}, URL: &url.URL{Path: "/update/123"}, Body: formReader(url.Values{"version": {"2"}}), } updateErr := ErrInvalidChannel gomock.InOrder( mckStore.EXPECT().KeyToIDs("123").Return("123", "456", nil), mckStat.EXPECT().Increment("updates.appserver.incoming"), mckStore.EXPECT().Update("123", "456", int64(2)).Return(updateErr), mckStat.EXPECT().Increment("updates.appserver.error"), ) eh.ServeMux().ServeHTTP(resp, req) So(resp.Code, ShouldEqual, updateErr.Status()) body, isJSON := 
getJSON(resp.HeaderMap, resp.Body) So(isJSON, ShouldBeTrue) So(body.String(), ShouldEqual, `"Could not update channel version"`) }) }) Convey("Should always route updates if `AlwaysRoute` is enabled", func() { eh := NewEndpointHandler() eh.setApp(app) eh.alwaysRoute = true app.SetEndpointHandler(eh) uaid := "6952a68ee0e7444ebc54f935c4444b13" app.AddWorker(uaid, mckWorker) chid := "b7ede546585f4cc9b95e9340e3406951" version := int64(1) data := "Happy, happy, joy, joy!" Convey("And router delivery fails, local succeeds", func() { gomock.InOrder( mckStat.EXPECT().Increment("updates.routed.outgoing"), mckRouter.EXPECT().Route(nil, uaid, chid, version, gomock.Any(), "", data).Return(false, nil), mckStat.EXPECT().Increment("router.broadcast.miss"), mckStat.EXPECT().Timer("updates.routed.misses", gomock.Any()), mckWorker.EXPECT().Send(chid, version, data).Return(nil), mckStat.EXPECT().Increment("updates.appserver.received"), ) ok := eh.deliver(nil, uaid, chid, version, "", data) So(ok, ShouldBeTrue) }) Convey("And router delivery succeeds, local succeeds", func() { gomock.InOrder( mckStat.EXPECT().Increment("updates.routed.outgoing"), mckRouter.EXPECT().Route(nil, uaid, chid, version, gomock.Any(), "", data).Return(true, nil), mckStat.EXPECT().Increment("router.broadcast.hit"), mckStat.EXPECT().Timer("updates.routed.hits", gomock.Any()), mckWorker.EXPECT().Send(chid, version, data).Return(nil), mckStat.EXPECT().Increment("updates.appserver.received"), ) ok := eh.deliver(nil, uaid, chid, version, "", data) So(ok, ShouldBeTrue) }) Convey("And router delivery succeeds, local fails", func() { gomock.InOrder( mckStat.EXPECT().Increment("updates.routed.outgoing"), mckRouter.EXPECT().Route(nil, uaid, chid, version, gomock.Any(), "", data).Return(true, nil), mckStat.EXPECT().Increment("router.broadcast.hit"), mckStat.EXPECT().Timer("updates.routed.hits", gomock.Any()), mckWorker.EXPECT().Send(chid, version, data).Return( errors.New("client gone")), mckStat.EXPECT().Increment("updates.appserver.received"), ) ok := eh.deliver(nil, uaid, chid, version, "", data) So(ok, ShouldBeTrue) }) Convey("And router/local delivery fails", func() { gomock.InOrder( mckStat.EXPECT().Increment("updates.routed.outgoing"), mckRouter.EXPECT().Route(nil, uaid, chid, version, gomock.Any(), "", data).Return(false, nil), mckStat.EXPECT().Increment("router.broadcast.miss"), mckStat.EXPECT().Timer("updates.routed.misses", gomock.Any()), mckWorker.EXPECT().Send(chid, version, data).Return( errors.New("client gone")), mckStat.EXPECT().Increment("updates.appserver.rejected"), ) ok := eh.deliver(nil, uaid, chid, version, "", data) So(ok, ShouldBeFalse) }) }) }) }
func TestEndpointPinger(t *testing.T) { useMockFuncs() defer useStdFuncs() mockCtrl := gomock.NewController(t) defer mockCtrl.Finish() mckLogger := NewMockLogger(mockCtrl) mckLogger.EXPECT().ShouldLog(gomock.Any()).Return(true).AnyTimes() mckLogger.EXPECT().Log(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).AnyTimes() mckStat := NewMockStatistician(mockCtrl) mckPinger := NewMockPropPinger(mockCtrl) mckStore := NewMockStore(mockCtrl) mckWorker := NewMockWorker(mockCtrl) Convey("Proprietary pings", t, func() { app := NewApplication() app.SetLogger(mckLogger) app.SetMetrics(mckStat) app.SetPropPinger(mckPinger) app.SetStore(mckStore) eh := NewEndpointHandler() eh.setApp(app) eh.setMaxDataLen(4096) app.SetEndpointHandler(eh) Convey("Should return early if the pinger can bypass the WebSocket", func() { uaid := "91357e1a34714cadacb3f13cf47a2736" app.AddWorker(uaid, mckWorker) resp := httptest.NewRecorder() req := &http.Request{ Method: "PUT", Header: http.Header{}, URL: &url.URL{Path: "/update/123"}, Body: nil, } gomock.InOrder( mckStore.EXPECT().KeyToIDs("123").Return(uaid, "456", nil), mckStat.EXPECT().Increment("updates.appserver.incoming"), mckPinger.EXPECT().Send(uaid, int64(1257894000), "").Return(true, nil), mckPinger.EXPECT().CanBypassWebsocket().Return(true), mckStat.EXPECT().Increment("updates.appserver.received"), mckStat.EXPECT().Timer("updates.handled", gomock.Any()), ) eh.ServeMux().ServeHTTP(resp, req) So(resp.Code, ShouldEqual, 200) body, isJSON := getJSON(resp.HeaderMap, resp.Body) So(isJSON, ShouldBeTrue) So(body.String(), ShouldEqual, "{}") }) Convey("Should continue if the pinger cannot bypass the WebSocket", func() { uaid := "e3fc2cf1dc44424685010148b076d08b" app.AddWorker(uaid, mckWorker) data := randomText(eh.maxDataLen) vals := make(url.Values) vals.Set("data", data) resp := httptest.NewRecorder() req := &http.Request{ Method: "PUT", Header: http.Header{}, URL: &url.URL{Path: "/update/123"}, Body: formReader(vals), } gomock.InOrder( mckStore.EXPECT().KeyToIDs("123").Return(uaid, "456", nil), mckStat.EXPECT().Increment("updates.appserver.incoming"), mckPinger.EXPECT().Send(uaid, int64(1257894000), data).Return(true, nil), mckPinger.EXPECT().CanBypassWebsocket().Return(false), mckStore.EXPECT().Update(uaid, "456", int64(1257894000)), mckWorker.EXPECT().Send("456", int64(1257894000), data), mckStat.EXPECT().Increment("updates.appserver.received"), mckStat.EXPECT().Timer("updates.handled", gomock.Any()), ) eh.ServeMux().ServeHTTP(resp, req) So(resp.Code, ShouldEqual, 200) body, isJSON := getJSON(resp.HeaderMap, resp.Body) So(isJSON, ShouldBeTrue) So(body.String(), ShouldEqual, "{}") }) Convey("Should continue if the pinger fails", func() { uaid := "8f412f5cb2384183bf60f7da26737271" app.AddWorker(uaid, mckWorker) vals := make(url.Values) vals.Set("version", "7") resp := httptest.NewRecorder() req := &http.Request{ Method: "PUT", Header: http.Header{}, URL: &url.URL{Path: "/update/123"}, Body: formReader(vals), } gomock.InOrder( mckStore.EXPECT().KeyToIDs("123").Return(uaid, "456", nil), mckStat.EXPECT().Increment("updates.appserver.incoming"), mckPinger.EXPECT().Send(uaid, int64(7), "").Return( true, errors.New("oops")), mckStore.EXPECT().Update(uaid, "456", int64(7)), mckWorker.EXPECT().Send("456", int64(7), ""), mckStat.EXPECT().Increment("updates.appserver.received"), mckStat.EXPECT().Timer("updates.handled", gomock.Any()), ) eh.ServeMux().ServeHTTP(resp, req) So(resp.Code, ShouldEqual, 200) body, isJSON := getJSON(resp.HeaderMap, resp.Body) So(isJSON, 
ShouldBeTrue) So(body.String(), ShouldEqual, "{}") }) }) }
func TestEndpointInvalidParams(t *testing.T) { mockCtrl := gomock.NewController(t) defer mockCtrl.Finish() mckLogger := NewMockLogger(mockCtrl) mckLogger.EXPECT().ShouldLog(gomock.Any()).Return(true).AnyTimes() mckLogger.EXPECT().Log(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).AnyTimes() mckStat := NewMockStatistician(mockCtrl) Convey("Invalid update parameters", t, func() { app := NewApplication() app.SetLogger(mckLogger) app.SetMetrics(mckStat) eh := NewEndpointHandler() eh.setApp(app) eh.setMaxDataLen(512) app.SetEndpointHandler(eh) Convey("Should require PUT requests", func() { resp := httptest.NewRecorder() req := &http.Request{ Method: "POST", Header: http.Header{}, URL: &url.URL{Path: "/update/123"}, } mckStat.EXPECT().Increment("updates.appserver.invalid") eh.ServeMux().ServeHTTP(resp, req) So(resp.Code, ShouldEqual, 405) body, isJSON := getJSON(resp.HeaderMap, resp.Body) So(isJSON, ShouldBeTrue) So(body.String(), ShouldEqual, `"Method Not Allowed"`) }) Convey("Should reject negative versions", func() { vals := make(url.Values) vals.Set("version", "-1") resp := httptest.NewRecorder() req := &http.Request{ Method: "PUT", Header: http.Header{}, URL: &url.URL{Path: "/update/123"}, Body: formReader(vals), } mckStat.EXPECT().Increment("updates.appserver.invalid") eh.ServeMux().ServeHTTP(resp, req) So(resp.Code, ShouldEqual, 400) body, isJSON := getJSON(resp.HeaderMap, resp.Body) So(isJSON, ShouldBeTrue) So(body.String(), ShouldEqual, `"Invalid Version"`) }) Convey("Should reject invalid versions", func() { vals := make(url.Values) vals.Set("version", "abc") resp := httptest.NewRecorder() req := &http.Request{ Method: "PUT", Header: http.Header{}, URL: &url.URL{Path: "/update/123"}, Body: formReader(vals), } mckStat.EXPECT().Increment("updates.appserver.invalid") eh.ServeMux().ServeHTTP(resp, req) So(resp.Code, ShouldEqual, 400) body, isJSON := getJSON(resp.HeaderMap, resp.Body) So(isJSON, ShouldBeTrue) So(body.String(), ShouldEqual, `"Invalid Version"`) }) Convey("Should reject oversized payloads", func() { vals := make(url.Values) vals.Set("data", randomText(eh.maxDataLen+1)) resp := httptest.NewRecorder() req := &http.Request{ Method: "PUT", Header: http.Header{}, URL: &url.URL{Path: "/update/123"}, Body: formReader(vals), } mckStat.EXPECT().Increment("updates.appserver.toolong") eh.ServeMux().ServeHTTP(resp, req) So(resp.Code, ShouldEqual, 413) body, isJSON := getJSON(resp.HeaderMap, resp.Body) So(isJSON, ShouldBeTrue) So(body.String(), ShouldEqual, `"Data exceeds max length of 512 bytes"`) }) }) }
func FilePollingInputSpec(c gs.Context) { t := new(pipeline_ts.SimpleT) ctrl := gomock.NewController(t) tmpFileName := fmt.Sprintf("filepollinginput-test-%d", time.Now().UnixNano()) tmpFilePath := filepath.Join(os.TempDir(), tmpFileName) defer func() { ctrl.Finish() os.Remove(tmpFilePath) }() pConfig := NewPipelineConfig(nil) var wg sync.WaitGroup errChan := make(chan error, 1) bytesChan := make(chan []byte, 1) tickChan := make(chan time.Time) retPackChan := make(chan *PipelinePack, 2) defer close(retPackChan) c.Specify("A FilePollingInput", func() { input := new(FilePollingInput) ith := new(plugins_ts.InputTestHelper) ith.MockHelper = pipelinemock.NewMockPluginHelper(ctrl) ith.MockInputRunner = pipelinemock.NewMockInputRunner(ctrl) ith.MockSplitterRunner = pipelinemock.NewMockSplitterRunner(ctrl) config := input.ConfigStruct().(*FilePollingInputConfig) config.FilePath = tmpFilePath startInput := func(msgCount int) { wg.Add(1) go func() { errChan <- input.Run(ith.MockInputRunner, ith.MockHelper) wg.Done() }() } ith.MockInputRunner.EXPECT().Ticker().Return(tickChan) ith.MockHelper.EXPECT().PipelineConfig().Return(pConfig) c.Specify("gets updated information when reading a file", func() { err := input.Init(config) c.Assume(err, gs.IsNil) ith.MockInputRunner.EXPECT().NewSplitterRunner("").Return(ith.MockSplitterRunner) ith.MockSplitterRunner.EXPECT().UseMsgBytes().Return(false) ith.MockSplitterRunner.EXPECT().SetPackDecorator(gomock.Any()) splitCall := ith.MockSplitterRunner.EXPECT().SplitStream(gomock.Any(), nil).Return(io.EOF).Times(2) splitCall.Do(func(f *os.File, del Deliverer) { fBytes, err := ioutil.ReadAll(f) if err != nil { fBytes = []byte(err.Error()) } bytesChan <- fBytes }) startInput(2) f, err := os.Create(tmpFilePath) c.Expect(err, gs.IsNil) _, err = f.Write([]byte("test1")) c.Expect(err, gs.IsNil) c.Expect(f.Close(), gs.IsNil) tickChan <- time.Now() msgBytes := <-bytesChan c.Expect(string(msgBytes), gs.Equals, "test1") f, err = os.Create(tmpFilePath) c.Expect(err, gs.IsNil) _, err = f.Write([]byte("test2")) c.Expect(err, gs.IsNil) c.Expect(f.Close(), gs.IsNil) tickChan <- time.Now() msgBytes = <-bytesChan c.Expect(string(msgBytes), gs.Equals, "test2") input.Stop() wg.Wait() c.Expect(<-errChan, gs.IsNil) }) }) }
func FilterSpec(c gs.Context) { t := new(ts.SimpleT) ctrl := gomock.NewController(t) defer ctrl.Finish() fth := NewFilterTestHelper(ctrl) inChan := make(chan *pipeline.PipelinePack, 1) pConfig := pipeline.NewPipelineConfig(nil) c.Specify("A SandboxFilter", func() { sbFilter := new(SandboxFilter) sbFilter.SetPipelineConfig(pConfig) config := sbFilter.ConfigStruct().(*sandbox.SandboxConfig) config.MemoryLimit = 32000 config.InstructionLimit = 1000 config.OutputLimit = 1024 msg := getTestMessage() pack := pipeline.NewPipelinePack(pConfig.InjectRecycleChan()) pack.Message = msg pack.Decoded = true c.Specify("Uninitialized", func() { err := sbFilter.ReportMsg(msg) c.Expect(err, gs.IsNil) }) c.Specify("Over inject messages from ProcessMessage", func() { var timer <-chan time.Time fth.MockFilterRunner.EXPECT().Ticker().Return(timer) fth.MockFilterRunner.EXPECT().InChan().Return(inChan) fth.MockFilterRunner.EXPECT().Name().Return("processinject").Times(2) fth.MockFilterRunner.EXPECT().Inject(pack).Return(true).Times(2) fth.MockHelper.EXPECT().PipelinePack(uint(0)).Return(pack).Times(2) config.ScriptFilename = "../lua/testsupport/processinject.lua" err := sbFilter.Init(config) c.Assume(err, gs.IsNil) inChan <- pack close(inChan) err = sbFilter.Run(fth.MockFilterRunner, fth.MockHelper) termErr := pipeline.TerminatedError("exceeded InjectMessage count") c.Expect(err.Error(), gs.Equals, termErr.Error()) }) c.Specify("Over inject messages from TimerEvent", func() { var timer <-chan time.Time timer = time.Tick(time.Duration(1) * time.Millisecond) fth.MockFilterRunner.EXPECT().Ticker().Return(timer) fth.MockFilterRunner.EXPECT().InChan().Return(inChan) fth.MockFilterRunner.EXPECT().Name().Return("timerinject").Times(11) fth.MockFilterRunner.EXPECT().Inject(pack).Return(true).Times(11) fth.MockHelper.EXPECT().PipelinePack(uint(0)).Return(pack).Times(11) config.ScriptFilename = "../lua/testsupport/timerinject.lua" err := sbFilter.Init(config) c.Assume(err, gs.IsNil) go func() { time.Sleep(time.Duration(250) * time.Millisecond) close(inChan) }() err = sbFilter.Run(fth.MockFilterRunner, fth.MockHelper) termErr := pipeline.TerminatedError("exceeded InjectMessage count") c.Expect(err.Error(), gs.Equals, termErr.Error()) }) c.Specify("Preserves data", func() { var timer <-chan time.Time fth.MockFilterRunner.EXPECT().Ticker().Return(timer) fth.MockFilterRunner.EXPECT().InChan().Return(inChan) config.ScriptFilename = "../lua/testsupport/serialize.lua" config.PreserveData = true sbFilter.SetName("serialize") err := sbFilter.Init(config) c.Assume(err, gs.IsNil) close(inChan) err = sbFilter.Run(fth.MockFilterRunner, fth.MockHelper) c.Expect(err, gs.IsNil) _, err = os.Stat("sandbox_preservation/serialize.data") c.Expect(err, gs.IsNil) err = os.Remove("sandbox_preservation/serialize.data") c.Expect(err, gs.IsNil) }) }) c.Specify("A SandboxManagerFilter", func() { pConfig.Globals.BaseDir = os.TempDir() sbxMgrsDir := filepath.Join(pConfig.Globals.BaseDir, "sbxmgrs") defer func() { tmpErr := os.RemoveAll(sbxMgrsDir) c.Expect(tmpErr, gs.IsNil) }() sbmFilter := new(SandboxManagerFilter) sbmFilter.SetPipelineConfig(pConfig) config := sbmFilter.ConfigStruct().(*SandboxManagerFilterConfig) config.MaxFilters = 1 msg := getTestMessage() pack := pipeline.NewPipelinePack(pConfig.InputRecycleChan()) pack.Message = msg pack.Decoded = true c.Specify("Control message in the past", func() { sbmFilter.Init(config) pack.Message.SetTimestamp(time.Now().UnixNano() - 5e9) fth.MockFilterRunner.EXPECT().InChan().Return(inChan) 
fth.MockFilterRunner.EXPECT().Name().Return("SandboxManagerFilter") fth.MockFilterRunner.EXPECT().LogError(fmt.Errorf("Discarded control message: 5 seconds skew")) inChan <- pack close(inChan) err := sbmFilter.Run(fth.MockFilterRunner, fth.MockHelper) c.Expect(err, gs.IsNil) }) c.Specify("Control message in the future", func() { sbmFilter.Init(config) pack.Message.SetTimestamp(time.Now().UnixNano() + 5.9e9) fth.MockFilterRunner.EXPECT().InChan().Return(inChan) fth.MockFilterRunner.EXPECT().Name().Return("SandboxManagerFilter") fth.MockFilterRunner.EXPECT().LogError(fmt.Errorf("Discarded control message: -5 seconds skew")) inChan <- pack close(inChan) err := sbmFilter.Run(fth.MockFilterRunner, fth.MockHelper) c.Expect(err, gs.IsNil) }) c.Specify("Generates the right default working directory", func() { sbmFilter.Init(config) fth.MockFilterRunner.EXPECT().InChan().Return(inChan) name := "SandboxManagerFilter" fth.MockFilterRunner.EXPECT().Name().Return(name) close(inChan) err := sbmFilter.Run(fth.MockFilterRunner, fth.MockHelper) c.Expect(err, gs.IsNil) c.Expect(sbmFilter.workingDirectory, gs.Equals, sbxMgrsDir) _, err = os.Stat(sbxMgrsDir) c.Expect(err, gs.IsNil) }) c.Specify("Sanity check the default sandbox configuration limits", func() { sbmFilter.Init(config) c.Expect(sbmFilter.memoryLimit, gs.Equals, uint(8*1024*1024)) c.Expect(sbmFilter.instructionLimit, gs.Equals, uint(1e6)) c.Expect(sbmFilter.outputLimit, gs.Equals, uint(63*1024)) }) c.Specify("Sanity check the user specified sandbox configuration limits", func() { config.MemoryLimit = 123456 config.InstructionLimit = 4321 config.OutputLimit = 8765 sbmFilter.Init(config) c.Expect(sbmFilter.memoryLimit, gs.Equals, config.MemoryLimit) c.Expect(sbmFilter.instructionLimit, gs.Equals, config.InstructionLimit) c.Expect(sbmFilter.outputLimit, gs.Equals, config.OutputLimit) }) c.Specify("Creates a SandboxFilter runner", func() { sbxName := "SandboxFilter" sbxMgrName := "SandboxManagerFilter" code := ` require("cjson") function process_message() inject_payload(cjson.encode({a = "b"})) return 0 end ` cfg := ` [%s] type = "SandboxFilter" message_matcher = "TRUE" script_type = "lua" ` cfg = fmt.Sprintf(cfg, sbxName) msg.SetPayload(code) f, err := message.NewField("config", cfg, "toml") c.Assume(err, gs.IsNil) msg.AddField(f) fMatchChan := pConfig.Router().AddFilterMatcher() errChan := make(chan error) fth.MockFilterRunner.EXPECT().Name().Return(sbxMgrName) fullSbxName := fmt.Sprintf("%s-%s", sbxMgrName, sbxName) fth.MockHelper.EXPECT().Filter(fullSbxName).Return(nil, false) fth.MockFilterRunner.EXPECT().LogMessage(fmt.Sprintf("Loading: %s", fullSbxName)) sbmFilter.Init(config) go func() { err := sbmFilter.loadSandbox(fth.MockFilterRunner, fth.MockHelper, sbxMgrsDir, msg) errChan <- err }() fMatch := <-fMatchChan c.Expect(fMatch.MatcherSpecification().String(), gs.Equals, "TRUE") c.Expect(<-errChan, gs.IsNil) go func() { <-pConfig.Router().RemoveFilterMatcher() }() ok := pConfig.RemoveFilterRunner(fullSbxName) c.Expect(ok, gs.IsTrue) }) }) c.Specify("A Cpu Stats filter", func() { filter := new(SandboxFilter) filter.SetPipelineConfig(pConfig) filter.name = "cpustats" conf := filter.ConfigStruct().(*sandbox.SandboxConfig) conf.ScriptFilename = "../lua/filters/cpustats.lua" conf.ModuleDirectory = "../lua/modules" conf.Config = make(map[string]interface{}) conf.Config["rows"] = int64(3) conf.Config["sec_per_row"] = int64(1) timer := make(chan time.Time, 1) errChan := make(chan error, 1) retPackChan := make(chan *pipeline.PipelinePack, 1) recycleChan 
:= make(chan *pipeline.PipelinePack, 1) defer func() { close(errChan) close(retPackChan) }() msg := getTestMessage() fields := make([]*message.Field, 4) fields[0], _ = message.NewField("1MinAvg", 0.08, "") fields[1], _ = message.NewField("5MinAvg", 0.04, "") fields[2], _ = message.NewField("15MinAvg", 0.02, "") fields[3], _ = message.NewField("NumProcesses", 5, "") msg.Fields = fields pack := pipeline.NewPipelinePack(recycleChan) fth.MockHelper.EXPECT().PipelinePack(uint(0)).Return(pack) fth.MockFilterRunner.EXPECT().Ticker().Return(timer) fth.MockFilterRunner.EXPECT().InChan().Return(inChan) fth.MockFilterRunner.EXPECT().Name().Return("cpustats") fth.MockFilterRunner.EXPECT().Inject(pack).Do(func(pack *pipeline.PipelinePack) { retPackChan <- pack }).Return(true) err := filter.Init(conf) c.Assume(err, gs.IsNil) c.Specify("should fill a cbuf with cpuload data", func() { go func() { errChan <- filter.Run(fth.MockFilterRunner, fth.MockHelper) }() for i := 1; i <= 3; i++ { // Fill in the data t := int64(i * 1000000000) pack.Message = msg pack.Message.SetTimestamp(t) // Feed in a pack inChan <- pack pack = <-recycleChan } timer <- time.Now() p := <-retPackChan // Check the result of the filter's inject pl := `{"time":1,"rows":3,"columns":4,"seconds_per_row":1,"column_info":[{"name":"1MinAvg","unit":"Count","aggregation":"max"},{"name":"5MinAvg","unit":"Count","aggregation":"max"},{"name":"15MinAvg","unit":"Count","aggregation":"max"},{"name":"NumProcesses","unit":"Count","aggregation":"max"}]} 0.08 0.04 0.02 5 0.08 0.04 0.02 5 0.08 0.04 0.02 5 ` c.Expect(p.Message.GetPayload(), gs.Equals, pl) }) close(inChan) c.Expect(<-errChan, gs.IsNil) }) c.Specify("A Memstats filter", func() { filter := new(SandboxFilter) filter.SetPipelineConfig(pConfig) filter.name = "memstats" conf := filter.ConfigStruct().(*sandbox.SandboxConfig) conf.ScriptFilename = "../lua/filters/memstats.lua" conf.ModuleDirectory = "../lua/modules" conf.Config = make(map[string]interface{}) conf.Config["rows"] = int64(3) conf.Config["sec_per_row"] = int64(1) timer := make(chan time.Time, 1) errChan := make(chan error, 1) retPackChan := make(chan *pipeline.PipelinePack, 1) recycleChan := make(chan *pipeline.PipelinePack, 1) defer func() { close(errChan) close(retPackChan) }() msg := getTestMessage() field_names := []string{"MemFree", "Cached", "Active", "Inactive", "VmallocUsed", "Shmem", "SwapCached", "SwapTotal", "SwapFree"} fields := make([]*message.Field, len(field_names)) for i, name := range field_names { fields[i], _ = message.NewField(name, 100, "") } msg.Fields = fields pack := pipeline.NewPipelinePack(recycleChan) fth.MockHelper.EXPECT().PipelinePack(uint(0)).Return(pack) fth.MockFilterRunner.EXPECT().Ticker().Return(timer) fth.MockFilterRunner.EXPECT().InChan().Return(inChan) fth.MockFilterRunner.EXPECT().Name().Return("memstats") fth.MockFilterRunner.EXPECT().Inject(pack).Do(func(pack *pipeline.PipelinePack) { retPackChan <- pack }).Return(true) err := filter.Init(conf) c.Assume(err, gs.IsNil) c.Specify("should fill a cbuf with memstats data", func() { go func() { errChan <- filter.Run(fth.MockFilterRunner, fth.MockHelper) }() for i := 1; i <= 3; i++ { // Fill in the data t := int64(i * 1000000000) pack.Message = msg pack.Message.SetTimestamp(t) // Feed in a pack inChan <- pack pack = <-recycleChan } timer <- time.Now() p := <-retPackChan // Check the result of the filter's inject pl := 
`{"time":1,"rows":3,"columns":9,"seconds_per_row":1,"column_info":[{"name":"MemFree","unit":"Count","aggregation":"max"},{"name":"Cached","unit":"Count","aggregation":"max"},{"name":"Active","unit":"Count","aggregation":"max"},{"name":"Inactive","unit":"Count","aggregation":"max"},{"name":"VmallocUsed","unit":"Count","aggregation":"max"},{"name":"Shmem","unit":"Count","aggregation":"max"},{"name":"SwapCached","unit":"Count","aggregation":"max"},{"name":"SwapFree","unit":"Count","aggregation":"max"},{"name":"SwapUsed","unit":"Count","aggregation":"max"}]} 100 100 100 100 100 100 100 100 0 100 100 100 100 100 100 100 100 0 100 100 100 100 100 100 100 100 0 ` c.Expect(p.Message.GetPayload(), gs.Equals, pl) }) close(inChan) c.Expect(<-errChan, gs.IsNil) }) c.Specify("A diskstats filter", func() { filter := new(SandboxFilter) filter.SetPipelineConfig(pConfig) filter.name = "diskstats" conf := filter.ConfigStruct().(*sandbox.SandboxConfig) conf.ScriptFilename = "../lua/filters/diskstats.lua" conf.ModuleDirectory = "../lua/modules" conf.Config = make(map[string]interface{}) conf.Config["rows"] = int64(3) timer := make(chan time.Time, 1) errChan := make(chan error, 1) retMsgChan := make(chan *message.Message, 1) recycleChan := make(chan *pipeline.PipelinePack, 1) defer func() { close(errChan) close(retMsgChan) }() msg := getTestMessage() field_names := []string{ "WritesCompleted", "ReadsCompleted", "SectorsWritten", "SectorsRead", "WritesMerged", "ReadsMerged", "TimeWriting", "TimeReading", "TimeDoingIO", "WeightedTimeDoingIO", } num_fields := len(field_names) + 1 fields := make([]*message.Field, num_fields) msg.Fields = fields timeInterval, _ := message.NewField("TickerInterval", 1, "") fields[num_fields-1] = timeInterval fieldVal := 100 pack := pipeline.NewPipelinePack(recycleChan) fth.MockHelper.EXPECT().PipelineConfig().AnyTimes() fth.MockHelper.EXPECT().PipelinePack(uint(0)).Return(pack).AnyTimes() fth.MockFilterRunner.EXPECT().Ticker().Return(timer).AnyTimes() fth.MockFilterRunner.EXPECT().InChan().Return(inChan).AnyTimes() fth.MockFilterRunner.EXPECT().Name().Return("diskstats").AnyTimes() fth.MockFilterRunner.EXPECT().Inject(pack).Do(func(pack *pipeline.PipelinePack) { msg := pack.Message pack.Message = new(message.Message) retMsgChan <- msg }).Return(true).AnyTimes() err := filter.Init(conf) c.Assume(err, gs.IsNil) c.Specify("should fill a cbuf with diskstats data", func() { go func() { errChan <- filter.Run(fth.MockFilterRunner, fth.MockHelper) }() // Iterate 4 times since the first one doesn't actually set the cbuf // in order to set the delta in the cbuf for i := 1; i <= 4; i++ { // Fill in the fields for i, name := range field_names { fields[i], _ = message.NewField(name, fieldVal, "") } // Scale up the value so we can see the delta growing // by 100 each iteration fieldVal += i * 100 t := int64(i * 1000000000) pack.Message = msg pack.Message.SetTimestamp(t) // Feed in a pack inChan <- pack pack = <-recycleChan } testExpects := map[string]string{ "Time doing IO": `{"time":2,"rows":3,"columns":4,"seconds_per_row":1,"column_info":[{"name":"TimeWriting","unit":"ms","aggregation":"max"},{"name":"TimeReading","unit":"ms","aggregation":"max"},{"name":"TimeDoingIO","unit":"ms","aggregation":"max"},{"name":"WeightedTimeDoi","unit":"ms","aggregation":"max"}]} 200 200 200 200 400 400 400 400 700 700 700 700 `, "Disk Stats": 
`{"time":2,"rows":3,"columns":6,"seconds_per_row":1,"column_info":[{"name":"WritesCompleted","unit":"per_1_s","aggregation":"none"},{"name":"ReadsCompleted","unit":"per_1_s","aggregation":"none"},{"name":"SectorsWritten","unit":"per_1_s","aggregation":"none"},{"name":"SectorsRead","unit":"per_1_s","aggregation":"none"},{"name":"WritesMerged","unit":"per_1_s","aggregation":"none"},{"name":"ReadsMerged","unit":"per_1_s","aggregation":"none"}]} 100 100 100 100 100 100 200 200 200 200 200 200 300 300 300 300 300 300 `, } timer <- time.Now() for i := 0; i < 2; i++ { m := <-retMsgChan name, ok := m.GetFieldValue("payload_name") c.Assume(ok, gs.IsTrue) nameVal, ok := name.(string) c.Assume(ok, gs.IsTrue) c.Expect(m.GetPayload(), gs.Equals, testExpects[nameVal]) } }) close(inChan) c.Expect(<-errChan, gs.IsNil) }) c.Specify("http_status filter", func() { filter := new(SandboxFilter) filter.SetPipelineConfig(pConfig) filter.name = "http_status" conf := filter.ConfigStruct().(*sandbox.SandboxConfig) conf.ScriptFilename = "../lua/filters/http_status.lua" conf.ModuleDirectory = "../lua/modules" conf.Config = make(map[string]interface{}) conf.Config["rows"] = int64(2) conf.Config["sec_per_row"] = int64(1) timer := make(chan time.Time, 1) errChan := make(chan error, 1) retPackChan := make(chan *pipeline.PipelinePack, 1) recycleChan := make(chan *pipeline.PipelinePack, 1) defer func() { close(errChan) close(retPackChan) }() field, _ := message.NewField("status", 0, "") msg := &message.Message{} msg.SetTimestamp(0) msg.AddField(field) pack := pipeline.NewPipelinePack(recycleChan) fth.MockHelper.EXPECT().PipelinePack(uint(0)).Return(pack) fth.MockFilterRunner.EXPECT().Ticker().Return(timer) fth.MockFilterRunner.EXPECT().InChan().Return(inChan) fth.MockFilterRunner.EXPECT().Name().Return("http_status") fth.MockFilterRunner.EXPECT().Inject(pack).Do(func(pack *pipeline.PipelinePack) { retPackChan <- pack }).Return(true) err := filter.Init(conf) c.Assume(err, gs.IsNil) c.Specify("should fill a cbuf with http status data", func() { go func() { errChan <- filter.Run(fth.MockFilterRunner, fth.MockHelper) }() for i := 0; i <= 6; i++ { msg.Fields[0].ValueInteger[0] = int64(i * 100) // iterate through the status codes with a bogus status on each end pack.Message = msg inChan <- pack pack = <-recycleChan } timer <- time.Now() p := <-retPackChan // Check the result of the filter's inject pl := `{"time":0,"rows":2,"columns":6,"seconds_per_row":1,"column_info":[{"name":"HTTP_100","unit":"count","aggregation":"sum"},{"name":"HTTP_200","unit":"count","aggregation":"sum"},{"name":"HTTP_300","unit":"count","aggregation":"sum"},{"name":"HTTP_400","unit":"count","aggregation":"sum"},{"name":"HTTP_500","unit":"count","aggregation":"sum"},{"name":"HTTP_UNKNOWN","unit":"count","aggregation":"sum"}]} 1 1 1 1 1 2 nan nan nan nan nan nan ` c.Expect(p.Message.GetPayload(), gs.Equals, pl) }) close(inChan) c.Expect(<-errChan, gs.IsNil) }) }
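// NewFilterTestHelper is called at the top of FilterSpec but is not part of
// this excerpt. A plausible minimal version, assuming the generated
// pipelinemock package provides MockPluginHelper and MockFilterRunner (as it
// does for the other mocks used in these specs), simply bundles the two mocks
// that the sandbox filter specs interact with:
type FilterTestHelper struct {
	MockHelper       *pipelinemock.MockPluginHelper
	MockFilterRunner *pipelinemock.MockFilterRunner
}

func NewFilterTestHelper(ctrl *gomock.Controller) *FilterTestHelper {
	return &FilterTestHelper{
		MockHelper:       pipelinemock.NewMockPluginHelper(ctrl),
		MockFilterRunner: pipelinemock.NewMockFilterRunner(ctrl),
	}
}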
func DecoderSpec(c gs.Context) { t := new(ts.SimpleT) ctrl := gomock.NewController(t) defer ctrl.Finish() pConfig := pipeline.NewPipelineConfig(nil) c.Specify("A SandboxDecoder", func() { decoder := new(SandboxDecoder) decoder.SetPipelineConfig(pConfig) conf := decoder.ConfigStruct().(*sandbox.SandboxConfig) supply := make(chan *pipeline.PipelinePack, 1) pack := pipeline.NewPipelinePack(supply) dRunner := pm.NewMockDecoderRunner(ctrl) c.Specify("that uses lpeg and inject_message", func() { dRunner.EXPECT().Name().Return("serialize") conf.ScriptFilename = "../lua/testsupport/decoder.lua" err := decoder.Init(conf) c.Assume(err, gs.IsNil) c.Specify("decodes simple messages", func() { data := "1376389920 debug id=2321 url=example.com item=1" decoder.SetDecoderRunner(dRunner) pack.Message.SetPayload(data) _, err = decoder.Decode(pack) c.Assume(err, gs.IsNil) c.Expect(pack.Message.GetTimestamp(), gs.Equals, int64(1376389920000000000)) c.Expect(pack.Message.GetSeverity(), gs.Equals, int32(7)) var ok bool var value interface{} value, ok = pack.Message.GetFieldValue("id") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, "2321") value, ok = pack.Message.GetFieldValue("url") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, "example.com") value, ok = pack.Message.GetFieldValue("item") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, "1") decoder.Shutdown() }) c.Specify("decodes an invalid messages", func() { data := "1376389920 bogus id=2321 url=example.com item=1" decoder.SetDecoderRunner(dRunner) pack.Message.SetPayload(data) packs, err := decoder.Decode(pack) c.Expect(len(packs), gs.Equals, 0) c.Expect(err.Error(), gs.Equals, "Failed parsing: "+data) c.Expect(decoder.processMessageFailures, gs.Equals, int64(1)) decoder.Shutdown() }) c.Specify("Preserves data", func() { conf.ScriptFilename = "../lua/testsupport/serialize.lua" conf.PreserveData = true err := decoder.Init(conf) c.Assume(err, gs.IsNil) decoder.SetDecoderRunner(dRunner) decoder.Shutdown() _, err = os.Stat("sandbox_preservation/serialize.data") c.Expect(err, gs.IsNil) err = os.Remove("sandbox_preservation/serialize.data") c.Expect(err, gs.IsNil) }) }) c.Specify("that only uses write_message", func() { conf.ScriptFilename = "../lua/testsupport/write_message_decoder.lua" dRunner.EXPECT().Name().Return("write_message") err := decoder.Init(conf) decoder.SetDecoderRunner(dRunner) c.Assume(err, gs.IsNil) c.Specify("adds a string field to the message", func() { data := "string field scribble" pack.Message.SetPayload(data) packs, err := decoder.Decode(pack) c.Expect(err, gs.IsNil) c.Expect(len(packs), gs.Equals, 1) c.Expect(packs[0], gs.Equals, pack) value, ok := pack.Message.GetFieldValue("scribble") c.Expect(ok, gs.IsTrue) c.Expect(value.(string), gs.Equals, "foo") }) c.Specify("adds a numeric field to the message", func() { data := "num field scribble" pack.Message.SetPayload(data) packs, err := decoder.Decode(pack) c.Expect(err, gs.IsNil) c.Expect(len(packs), gs.Equals, 1) c.Expect(packs[0], gs.Equals, pack) value, ok := pack.Message.GetFieldValue("scribble") c.Expect(ok, gs.IsTrue) c.Expect(value.(float64), gs.Equals, float64(1)) }) c.Specify("adds a boolean field to the message", func() { data := "bool field scribble" pack.Message.SetPayload(data) packs, err := decoder.Decode(pack) c.Expect(err, gs.IsNil) c.Expect(len(packs), gs.Equals, 1) c.Expect(packs[0], gs.Equals, pack) value, ok := pack.Message.GetFieldValue("scribble") c.Expect(ok, gs.IsTrue) c.Expect(value.(bool), gs.Equals, true) }) c.Specify("sets type and 
payload", func() { data := "set type and payload" pack.Message.SetPayload(data) packs, err := decoder.Decode(pack) c.Expect(err, gs.IsNil) c.Expect(len(packs), gs.Equals, 1) c.Expect(packs[0], gs.Equals, pack) c.Expect(pack.Message.GetType(), gs.Equals, "my_type") c.Expect(pack.Message.GetPayload(), gs.Equals, "my_payload") }) c.Specify("sets field value with representation", func() { data := "set field value with representation" pack.Message.SetPayload(data) packs, err := decoder.Decode(pack) c.Expect(err, gs.IsNil) c.Expect(len(packs), gs.Equals, 1) c.Expect(packs[0], gs.Equals, pack) fields := pack.Message.FindAllFields("rep") c.Expect(len(fields), gs.Equals, 1) field := fields[0] values := field.GetValueString() c.Expect(len(values), gs.Equals, 1) c.Expect(values[0], gs.Equals, "foo") c.Expect(field.GetRepresentation(), gs.Equals, "representation") }) c.Specify("sets multiple field string values", func() { data := "set multiple field string values" pack.Message.SetPayload(data) packs, err := decoder.Decode(pack) c.Expect(err, gs.IsNil) c.Expect(len(packs), gs.Equals, 1) c.Expect(packs[0], gs.Equals, pack) fields := pack.Message.FindAllFields("multi") c.Expect(len(fields), gs.Equals, 2) values := fields[0].GetValueString() c.Expect(len(values), gs.Equals, 1) c.Expect(values[0], gs.Equals, "first") values = fields[1].GetValueString() c.Expect(len(values), gs.Equals, 1) c.Expect(values[0], gs.Equals, "second") }) c.Specify("sets field string array value", func() { data := "set field string array value" pack.Message.SetPayload(data) packs, err := decoder.Decode(pack) c.Expect(err, gs.IsNil) c.Expect(len(packs), gs.Equals, 1) c.Expect(packs[0], gs.Equals, pack) fields := pack.Message.FindAllFields("array") c.Expect(len(fields), gs.Equals, 1) values := fields[0].GetValueString() c.Expect(len(values), gs.Equals, 2) c.Expect(values[0], gs.Equals, "first") c.Expect(values[1], gs.Equals, "second") }) }) }) c.Specify("A Multipack SandboxDecoder", func() { decoder := new(SandboxDecoder) decoder.SetPipelineConfig(pConfig) conf := decoder.ConfigStruct().(*sandbox.SandboxConfig) conf.ScriptFilename = "../lua/testsupport/multipack_decoder.lua" supply := make(chan *pipeline.PipelinePack, 3) pack := pipeline.NewPipelinePack(supply) pack.Message = getTestMessage() pack1 := pipeline.NewPipelinePack(supply) pack2 := pipeline.NewPipelinePack(supply) dRunner := pm.NewMockDecoderRunner(ctrl) dRunner.EXPECT().Name().Return("SandboxDecoder") c.Specify("decodes into multiple packs", func() { err := decoder.Init(conf) c.Assume(err, gs.IsNil) decoder.SetDecoderRunner(dRunner) gomock.InOrder( dRunner.EXPECT().NewPack().Return(pack1), dRunner.EXPECT().NewPack().Return(pack2), ) packs, err := decoder.Decode(pack) c.Expect(len(packs), gs.Equals, 3) c.Expect(packs[0].Message.GetPayload(), gs.Equals, "message one") c.Expect(packs[1].Message.GetPayload(), gs.Equals, "message two") c.Expect(packs[2].Message.GetPayload(), gs.Equals, "message three") for i := 0; i < 1; i++ { c.Expect(packs[i].Message.GetType(), gs.Equals, "TEST") c.Expect(packs[i].Message.GetHostname(), gs.Equals, "my.host.name") c.Expect(packs[i].Message.GetLogger(), gs.Equals, "GoSpec") c.Expect(packs[i].Message.GetSeverity(), gs.Equals, int32(6)) } decoder.Shutdown() }) }) c.Specify("Linux Cpu Stats decoder", func() { decoder := new(SandboxDecoder) decoder.SetPipelineConfig(pConfig) conf := decoder.ConfigStruct().(*sandbox.SandboxConfig) conf.ScriptFilename = "../lua/decoders/linux_loadavg.lua" conf.ModuleDirectory = "../../../../../../modules" 
conf.MemoryLimit = 8e6 conf.Config = make(map[string]interface{}) supply := make(chan *pipeline.PipelinePack, 1) pack := pipeline.NewPipelinePack(supply) dRunner := pm.NewMockDecoderRunner(ctrl) dRunner.EXPECT().Name().Return("SandboxDecoder") err := decoder.Init(conf) c.Assume(err, gs.IsNil) decoder.SetDecoderRunner(dRunner) c.Specify("decodes a message", func() { payload := "0.00 0.01 0.05 3/153 660\n" pack.Message.SetPayload(payload) f, err := message.NewField("FilePath", "/proc/loadavg", "") c.Assume(err, gs.IsNil) pack.Message.AddField(f) _, err = decoder.Decode(pack) c.Assume(err, gs.IsNil) c.Expect(pack.Message.GetSeverity(), gs.Equals, int32(7)) var ok bool var value interface{} value, ok = pack.Message.GetFieldValue("1MinAvg") c.Expect(ok, gs.IsTrue) c.Expect(value, gs.Equals, 0.00) value, ok = pack.Message.GetFieldValue("5MinAvg") c.Expect(ok, gs.IsTrue) c.Expect(value, gs.Equals, 0.01) value, ok = pack.Message.GetFieldValue("15MinAvg") c.Expect(ok, gs.IsTrue) c.Expect(value, gs.Equals, 0.05) value, ok = pack.Message.GetFieldValue("NumProcesses") c.Expect(ok, gs.IsTrue) c.Expect(value, gs.Equals, float64(3)) value, ok = pack.Message.GetFieldValue("FilePath") c.Expect(ok, gs.IsTrue) c.Expect(value, gs.Equals, "/proc/loadavg") }) c.Specify("decodes an invalid message", func() { data := "bogus message" pack.Message.SetPayload(data) packs, err := decoder.Decode(pack) c.Expect(len(packs), gs.Equals, 0) c.Expect(err.Error(), gs.Equals, "Failed parsing: "+data) c.Expect(decoder.processMessageFailures, gs.Equals, int64(1)) decoder.Shutdown() }) }) c.Specify("Linux Mem Stats decoder", func() { decoder := new(SandboxDecoder) decoder.SetPipelineConfig(pConfig) conf := decoder.ConfigStruct().(*sandbox.SandboxConfig) conf.ScriptFilename = "../lua/decoders/linux_memstats.lua" conf.ModuleDirectory = "../../../../../../modules" conf.MemoryLimit = 8e6 conf.Config = make(map[string]interface{}) supply := make(chan *pipeline.PipelinePack, 1) pack := pipeline.NewPipelinePack(supply) dRunner := pm.NewMockDecoderRunner(ctrl) dRunner.EXPECT().Name().Return("SandboxDecoder") err := decoder.Init(conf) c.Assume(err, gs.IsNil) decoder.SetDecoderRunner(dRunner) c.Specify("decodes a message", func() { payload := `MemTotal: 4047616 kB MemFree: 3135780 kB HugePages_Free: 0 ` pack.Message.SetPayload(payload) f, err := message.NewField("FilePath", "/proc/meminfo", "") c.Assume(err, gs.IsNil) pack.Message.AddField(f) _, err = decoder.Decode(pack) c.Assume(err, gs.IsNil) c.Expect(pack.Message.GetSeverity(), gs.Equals, int32(7)) var ok bool var value interface{} value, ok = pack.Message.GetFieldValue("MemTotal") c.Expect(ok, gs.IsTrue) c.Expect(value, gs.Equals, 4.047616e+06) value, ok = pack.Message.GetFieldValue("MemFree") c.Expect(ok, gs.IsTrue) c.Expect(value, gs.Equals, 3.13578e+06) value, ok = pack.Message.GetFieldValue("HugePages_Free") c.Expect(ok, gs.IsTrue) c.Expect(value, gs.Equals, float64(0)) value, ok = pack.Message.GetFieldValue("FilePath") c.Expect(ok, gs.IsTrue) c.Expect(value, gs.Equals, "/proc/meminfo") }) c.Specify("decodes an invalid message", func() { data := "bogus message" pack.Message.SetPayload(data) packs, err := decoder.Decode(pack) c.Expect(len(packs), gs.Equals, 0) c.Expect(err.Error(), gs.Equals, "Failed parsing: "+data) c.Expect(decoder.processMessageFailures, gs.Equals, int64(1)) decoder.Shutdown() }) }) c.Specify("Linux Disk Stats decoder", func() { decoder := new(SandboxDecoder) decoder.SetPipelineConfig(pConfig) conf := decoder.ConfigStruct().(*sandbox.SandboxConfig) 
conf.ScriptFilename = "../lua/decoders/linux_diskstats.lua" conf.ModuleDirectory = "../../../../../../modules" conf.MemoryLimit = 8e6 conf.Config = make(map[string]interface{}) supply := make(chan *pipeline.PipelinePack, 1) pack := pipeline.NewPipelinePack(supply) dRunner := pm.NewMockDecoderRunner(ctrl) dRunner.EXPECT().Name().Return("SandboxDecoder") err := decoder.Init(conf) c.Assume(err, gs.IsNil) decoder.SetDecoderRunner(dRunner) c.Specify("decodes a message", func() { payload := " 13903 11393 969224 49444 10780 10161 1511920 4104 0 5064 53468\n" pack.Message.SetPayload(payload) f, err := message.NewField("FilePath", "/sys/block/sda/stat", "") c.Assume(err, gs.IsNil) pack.Message.AddField(f) f, err = message.NewField("TickerInterval", int64(2), "") c.Assume(err, gs.IsNil) pack.Message.AddField(f) _, err = decoder.Decode(pack) c.Assume(err, gs.IsNil) c.Expect(pack.Message.GetSeverity(), gs.Equals, int32(7)) var ok bool var value interface{} // These are in the same order the payload should be value, ok = pack.Message.GetFieldValue("ReadsCompleted") c.Expect(ok, gs.IsTrue) c.Expect(value, gs.Equals, float64(13903)) value, ok = pack.Message.GetFieldValue("ReadsMerged") c.Expect(ok, gs.IsTrue) c.Expect(value, gs.Equals, float64(11393)) value, ok = pack.Message.GetFieldValue("SectorsRead") c.Expect(ok, gs.IsTrue) c.Expect(value, gs.Equals, float64(969224)) value, ok = pack.Message.GetFieldValue("TimeReading") c.Expect(ok, gs.IsTrue) c.Expect(value, gs.Equals, float64(49444)) value, ok = pack.Message.GetFieldValue("WritesCompleted") c.Expect(ok, gs.IsTrue) c.Expect(value, gs.Equals, float64(10780)) value, ok = pack.Message.GetFieldValue("WritesMerged") c.Expect(ok, gs.IsTrue) c.Expect(value, gs.Equals, float64(10161)) value, ok = pack.Message.GetFieldValue("SectorsWritten") c.Expect(ok, gs.IsTrue) c.Expect(value, gs.Equals, float64(1511920)) value, ok = pack.Message.GetFieldValue("TimeWriting") c.Expect(ok, gs.IsTrue) c.Expect(value, gs.Equals, float64(4104)) value, ok = pack.Message.GetFieldValue("NumIOInProgress") c.Expect(ok, gs.IsTrue) c.Expect(value, gs.Equals, float64(0)) value, ok = pack.Message.GetFieldValue("TimeDoingIO") c.Expect(ok, gs.IsTrue) c.Expect(value, gs.Equals, float64(5064)) value, ok = pack.Message.GetFieldValue("WeightedTimeDoingIO") c.Expect(ok, gs.IsTrue) c.Expect(value, gs.Equals, float64(53468)) value, ok = pack.Message.GetFieldValue("TickerInterval") c.Expect(ok, gs.IsTrue) c.Expect(value, gs.Equals, float64(2)) value, ok = pack.Message.GetFieldValue("FilePath") c.Expect(ok, gs.IsTrue) c.Expect(value, gs.Equals, "/sys/block/sda/stat") }) c.Specify("decodes a message with no leading space", func() { payload := "19092852 0 510563170 15817012 46452019 0 1546950712 262535124 0 23823976 278362684\n" pack.Message.SetPayload(payload) _, err = decoder.Decode(pack) c.Assume(err, gs.IsNil) value, ok := pack.Message.GetFieldValue("ReadsCompleted") c.Expect(ok, gs.IsTrue) c.Expect(value, gs.Equals, float64(19092852)) }) c.Specify("decodes an invalid message", func() { data := "bogus message" pack.Message.SetPayload(data) packs, err := decoder.Decode(pack) c.Expect(len(packs), gs.Equals, 0) c.Expect(err.Error(), gs.Equals, "Failed parsing: "+data) c.Expect(decoder.processMessageFailures, gs.Equals, int64(1)) decoder.Shutdown() }) }) c.Specify("Nginx access log decoder", func() { decoder := new(SandboxDecoder) decoder.SetPipelineConfig(pConfig) conf := decoder.ConfigStruct().(*sandbox.SandboxConfig) conf.ScriptFilename = "../lua/decoders/nginx_access.lua" conf.ModuleDirectory 
= "../../../../../../modules" conf.MemoryLimit = 8e6 conf.Config = make(map[string]interface{}) conf.Config["log_format"] = "$remote_addr - $remote_user [$time_local] \"$request\" $status $body_bytes_sent \"$http_referer\" \"$http_user_agent\"" conf.Config["user_agent_transform"] = true supply := make(chan *pipeline.PipelinePack, 1) pack := pipeline.NewPipelinePack(supply) dRunner := pm.NewMockDecoderRunner(ctrl) dRunner.EXPECT().Name().Return("SandboxDecoder") err := decoder.Init(conf) c.Assume(err, gs.IsNil) decoder.SetDecoderRunner(dRunner) c.Specify("decodes simple messages", func() { data := "127.0.0.1 - - [10/Feb/2014:08:46:41 -0800] \"GET / HTTP/1.1\" 304 0 \"-\" \"Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:26.0) Gecko/20100101 Firefox/26.0\"" pack.Message.SetPayload(data) _, err = decoder.Decode(pack) c.Assume(err, gs.IsNil) c.Expect(pack.Message.GetTimestamp(), gs.Equals, int64(1392050801000000000)) c.Expect(pack.Message.GetSeverity(), gs.Equals, int32(7)) var ok bool var value interface{} value, ok = pack.Message.GetFieldValue("remote_addr") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, "127.0.0.1") value, ok = pack.Message.GetFieldValue("user_agent_browser") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, "Firefox") value, ok = pack.Message.GetFieldValue("user_agent_version") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, float64(26)) value, ok = pack.Message.GetFieldValue("user_agent_os") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, "Linux") _, ok = pack.Message.GetFieldValue("http_user_agent") c.Expect(ok, gs.Equals, false) value, ok = pack.Message.GetFieldValue("body_bytes_sent") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, float64(0)) value, ok = pack.Message.GetFieldValue("status") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, float64(304)) decoder.Shutdown() }) c.Specify("decodes an invalid messages", func() { data := "bogus message" pack.Message.SetPayload(data) packs, err := decoder.Decode(pack) c.Expect(len(packs), gs.Equals, 0) c.Expect(err.Error(), gs.Equals, "Failed parsing: "+data) c.Expect(decoder.processMessageFailures, gs.Equals, int64(1)) decoder.Shutdown() }) }) c.Specify("Apache access log decoder", func() { decoder := new(SandboxDecoder) decoder.SetPipelineConfig(pConfig) conf := decoder.ConfigStruct().(*sandbox.SandboxConfig) conf.ScriptFilename = "../lua/decoders/apache_access.lua" conf.ModuleDirectory = "../../../../../../modules" conf.MemoryLimit = 8e6 conf.Config = make(map[string]interface{}) conf.Config["log_format"] = "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\"" conf.Config["user_agent_transform"] = true supply := make(chan *pipeline.PipelinePack, 1) pack := pipeline.NewPipelinePack(supply) dRunner := pm.NewMockDecoderRunner(ctrl) dRunner.EXPECT().Name().Return("SandboxDecoder") err := decoder.Init(conf) c.Assume(err, gs.IsNil) decoder.SetDecoderRunner(dRunner) c.Specify("decodes simple messages", func() { data := "127.0.0.1 - - [10/Feb/2014:08:46:41 -0800] \"GET / HTTP/1.1\" 304 0 \"-\" \"Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:26.0) Gecko/20100101 Firefox/26.0\"" pack.Message.SetPayload(data) _, err = decoder.Decode(pack) c.Assume(err, gs.IsNil) c.Expect(pack.Message.GetTimestamp(), gs.Equals, int64(1392050801000000000)) c.Expect(pack.Message.GetSeverity(), gs.Equals, int32(7)) var ok bool var value interface{} value, ok = pack.Message.GetFieldValue("remote_addr") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, "127.0.0.1") value, ok = 
pack.Message.GetFieldValue("user_agent_browser") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, "Firefox") value, ok = pack.Message.GetFieldValue("user_agent_version") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, float64(26)) value, ok = pack.Message.GetFieldValue("user_agent_os") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, "Linux") _, ok = pack.Message.GetFieldValue("http_user_agent") c.Expect(ok, gs.Equals, false) value, ok = pack.Message.GetFieldValue("body_bytes_sent") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, float64(0)) value, ok = pack.Message.GetFieldValue("status") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, float64(304)) decoder.Shutdown() }) c.Specify("decodes an invalid messages", func() { data := "bogus message" pack.Message.SetPayload(data) packs, err := decoder.Decode(pack) c.Expect(len(packs), gs.Equals, 0) c.Expect(err.Error(), gs.Equals, "Failed parsing: "+data) c.Expect(decoder.processMessageFailures, gs.Equals, int64(1)) decoder.Shutdown() }) }) c.Specify("rsyslog decoder", func() { decoder := new(SandboxDecoder) decoder.SetPipelineConfig(pConfig) conf := decoder.ConfigStruct().(*sandbox.SandboxConfig) conf.ScriptFilename = "../lua/decoders/rsyslog.lua" conf.ModuleDirectory = "../../../../../../modules" conf.MemoryLimit = 8e6 conf.Config = make(map[string]interface{}) conf.Config["type"] = "MyTestFormat" conf.Config["template"] = "%pri% %TIMESTAMP% %TIMEGENERATED:::date-rfc3339% %HOSTNAME% %syslogtag%%msg:::sp-if-no-1st-sp%%msg:::drop-last-lf%\n" conf.Config["tz"] = "America/Los_Angeles" supply := make(chan *pipeline.PipelinePack, 1) pack := pipeline.NewPipelinePack(supply) dRunner := pm.NewMockDecoderRunner(ctrl) dRunner.EXPECT().Name().Return("SandboxDecoder") err := decoder.Init(conf) c.Assume(err, gs.IsNil) decoder.SetDecoderRunner(dRunner) c.Specify("decodes simple messages", func() { data := "28 Feb 10 12:58:58 2014-02-10T12:58:59-08:00 testhost widget[4322]: test message.\n" pack.Message.SetPayload(data) _, err = decoder.Decode(pack) c.Assume(err, gs.IsNil) // Syslog timestamp doesn't support year, so we have to calculate // it for the current year or else this test will fail every // January. 
year := time.Now().Year() tStr := fmt.Sprintf("%d Feb 10 12:58:58 -0800", year) t, err := time.Parse("2006 Jan 02 15:04:05 -0700", tStr) c.Assume(err, gs.IsNil) unixT := t.UnixNano() c.Expect(pack.Message.GetTimestamp(), gs.Equals, unixT) c.Expect(pack.Message.GetSeverity(), gs.Equals, int32(4)) c.Expect(pack.Message.GetHostname(), gs.Equals, "testhost") c.Expect(pack.Message.GetPid(), gs.Equals, int32(4322)) c.Expect(pack.Message.GetPayload(), gs.Equals, "test message.") c.Expect(pack.Message.GetType(), gs.Equals, conf.Config["type"]) var ok bool var value interface{} value, ok = pack.Message.GetFieldValue("programname") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, "widget") value, ok = pack.Message.GetFieldValue("syslogfacility") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, float64(3)) value, ok = pack.Message.GetFieldValue("timegenerated") c.Expect(ok, gs.Equals, true) c.Expect(value, gs.Equals, float64(1392065939000000000)) decoder.Shutdown() }) c.Specify("decodes an invalid messages", func() { data := "bogus message" pack.Message.SetPayload(data) packs, err := decoder.Decode(pack) c.Expect(len(packs), gs.Equals, 0) c.Expect(err.Error(), gs.Equals, "Failed parsing: "+data) c.Expect(decoder.processMessageFailures, gs.Equals, int64(1)) decoder.Shutdown() }) }) c.Specify("mysql decoder", func() { decoder := new(SandboxDecoder) decoder.SetPipelineConfig(pConfig) conf := decoder.ConfigStruct().(*sandbox.SandboxConfig) conf.ScriptFilename = "../lua/decoders/mysql_slow_query.lua" conf.ModuleDirectory = "../../../../../../modules" conf.MemoryLimit = 8e6 conf.Config = make(map[string]interface{}) conf.Config["truncate_sql"] = int64(5) supply := make(chan *pipeline.PipelinePack, 1) pack := pipeline.NewPipelinePack(supply) dRunner := pm.NewMockDecoderRunner(ctrl) dRunner.EXPECT().Name().Return("SandboxDecoder") err := decoder.Init(conf) c.Assume(err, gs.IsNil) decoder.SetDecoderRunner(dRunner) c.Specify("decode standard slow query log", func() { data := `# User@Host: syncrw[syncrw] @ [127.0.0.1] # Query_time: 2.964652 Lock_time: 0.000050 Rows_sent: 251 Rows_examined: 9773 use widget; SET last_insert_id=999,insert_id=1000,timestamp=1399500744; # administrator command: do something /* [queryName=FIND_ITEMS] */ SELECT * FROM widget WHERE id = 10;` pack.Message.SetPayload(data) _, err = decoder.Decode(pack) c.Assume(err, gs.IsNil) c.Expect(pack.Message.GetTimestamp(), gs.Equals, int64(1399500744000000000)) c.Expect(pack.Message.GetPayload(), gs.Equals, "/* [q...") c.Expect(pack.Message.GetType(), gs.Equals, "mysql.slow-query") decoder.Shutdown() }) }) c.Specify("mariadb decoder", func() { decoder := new(SandboxDecoder) decoder.SetPipelineConfig(pConfig) conf := decoder.ConfigStruct().(*sandbox.SandboxConfig) conf.ScriptFilename = "../lua/decoders/mariadb_slow_query.lua" conf.ModuleDirectory = "../../../../../../modules" conf.MemoryLimit = 8e6 conf.Config = make(map[string]interface{}) conf.Config["truncate_sql"] = int64(5) supply := make(chan *pipeline.PipelinePack, 1) pack := pipeline.NewPipelinePack(supply) dRunner := pm.NewMockDecoderRunner(ctrl) dRunner.EXPECT().Name().Return("SandboxDecoder") err := decoder.Init(conf) c.Assume(err, gs.IsNil) decoder.SetDecoderRunner(dRunner) c.Specify("decode standard slow query log", func() { data := `# User@Host: syncrw[syncrw] @ [127.0.0.1] # Thread_id: 110804 Schema: weave0 QC_hit: No # Query_time: 1.178108 Lock_time: 0.000053 Rows_sent: 198 Rows_examined: 198 SET timestamp=1399500744; /* [queryName=FIND_ITEMS] */ SELECT * FROM widget 
WHERE id = 10;` pack.Message.SetPayload(data) _, err = decoder.Decode(pack) c.Assume(err, gs.IsNil) c.Expect(pack.Message.GetTimestamp(), gs.Equals, int64(1399500744000000000)) c.Expect(pack.Message.GetPayload(), gs.Equals, "/* [q...") c.Expect(pack.Message.GetType(), gs.Equals, "mariadb.slow-query") decoder.Shutdown() }) }) }
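// getTestMessage appears throughout FilterSpec and DecoderSpec but lives in
// shared test support code. This is only a minimal sketch consistent with the
// assertions made above (Type "TEST", Logger "GoSpec", Severity 6, Hostname
// "my.host.name"); the payload and timestamp here are placeholders, and the
// real helper also sets a UUID and some message fields.
func getTestMessage() *message.Message {
	msg := &message.Message{}
	msg.SetType("TEST")
	msg.SetLogger("GoSpec")
	msg.SetSeverity(6)
	msg.SetHostname("my.host.name")
	msg.SetPayload("Test Payload")          // placeholder payload
	msg.SetTimestamp(time.Now().UnixNano()) // placeholder timestamp
	return msg
}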
func ProcessDirectoryInputSpec(c gs.Context) { t := &pipeline_ts.SimpleT{} ctrl := gomock.NewController(t) defer ctrl.Finish() pConfig := NewPipelineConfig(nil) ith := new(plugins_ts.InputTestHelper) ith.Msg = pipeline_ts.GetTestMessage() ith.Pack = NewPipelinePack(pConfig.InputRecycleChan()) // set up mock helper, decoder set, and packSupply channel ith.MockHelper = pipelinemock.NewMockPluginHelper(ctrl) ith.MockInputRunner = pipelinemock.NewMockInputRunner(ctrl) ith.MockDeliverer = pipelinemock.NewMockDeliverer(ctrl) ith.MockSplitterRunner = pipelinemock.NewMockSplitterRunner(ctrl) ith.PackSupply = make(chan *PipelinePack, 1) ith.PackSupply <- ith.Pack err := pConfig.RegisterDefault("NullSplitter") c.Assume(err, gs.IsNil) c.Specify("A ProcessDirectoryInput", func() { pdiInput := ProcessDirectoryInput{} pdiInput.SetPipelineConfig(pConfig) config := pdiInput.ConfigStruct().(*ProcessDirectoryInputConfig) workingDir, err := os.Getwd() c.Assume(err, gs.IsNil) config.ProcessDir = filepath.Join(workingDir, "testsupport", "processes") // `Ticker` is the last thing called during the setup part of the // input's `Run` method, so it triggers a waitgroup that tests can // wait on when they need to ensure initialization has finished. var started sync.WaitGroup started.Add(1) tickChan := make(chan time.Time, 1) ith.MockInputRunner.EXPECT().Ticker().Return(tickChan).Do( func() { started.Done() }) // Similarly we use a waitgroup to signal when LogMessage has been // called to know when reloads have completed. Warning: If you call // expectLogMessage with a msg that is never passed to LogMessage and // then you call loaded.Wait() then your test will hang and never // complete. var loaded sync.WaitGroup expectLogMessage := func(msg string) { loaded.Add(1) ith.MockInputRunner.EXPECT().LogMessage(msg).Do( func(msg string) { loaded.Done() }) } // Same name => same content. paths := []string{ filepath.Join(config.ProcessDir, "100", "h0.toml"), filepath.Join(config.ProcessDir, "100", "h1.toml"), filepath.Join(config.ProcessDir, "200", "h0.toml"), filepath.Join(config.ProcessDir, "300", "h1.toml"), } copyFile := func(src, dest string) { inFile, err := os.Open(src) c.Assume(err, gs.IsNil) outFile, err := os.Create(dest) c.Assume(err, gs.IsNil) _, err = io.Copy(outFile, inFile) c.Assume(err, gs.IsNil) inFile.Close() outFile.Close() } err = pdiInput.Init(config) c.Expect(err, gs.IsNil) for _, p := range paths { expectLogMessage("Added: " + p) } go pdiInput.Run(ith.MockInputRunner, ith.MockHelper) defer func() { pdiInput.Stop() for _, entry := range pdiInput.inputs { entry.ir.Input().Stop() } }() started.Wait() c.Specify("loads scheduled jobs", func() { pathIndex := func(name string) (i int) { var p string for i, p = range paths { if name == p { return } } return -1 } for name, entry := range pdiInput.inputs { i := pathIndex(name) // Make sure each file path got registered. c.Expect(i, gs.Not(gs.Equals), -1) dirName := filepath.Base(filepath.Dir(name)) dirInt, err := strconv.Atoi(dirName) c.Expect(err, gs.IsNil) // And that the ticker interval was read correctly. c.Expect(uint(dirInt), gs.Equals, entry.config.TickerInterval) } }) c.Specify("discovers and adds a new job", func() { // Copy one of the files to register a new process. newPath := filepath.Join(config.ProcessDir, "300", "h0.toml") copyFile(paths[0], newPath) defer func() { err := os.Remove(newPath) c.Assume(err, gs.IsNil) }() // Set up expectations and trigger process dir reload. 
expectLogMessage("Added: " + newPath) tickChan <- time.Now() loaded.Wait() // Make sure our plugin was loaded. c.Expect(len(pdiInput.inputs), gs.Equals, 5) newEntry, ok := pdiInput.inputs[newPath] c.Expect(ok, gs.IsTrue) c.Expect(newEntry.config.TickerInterval, gs.Equals, uint(300)) }) c.Specify("removes a deleted job", func() { err := os.Remove(paths[3]) c.Assume(err, gs.IsNil) defer func() { copyFile(paths[1], paths[3]) }() // Set up expectations and trigger process dir reload. expectLogMessage("Removed: " + paths[3]) tickChan <- time.Now() loaded.Wait() // Make sure our plugin was deleted. c.Expect(len(pdiInput.inputs), gs.Equals, 3) }) c.Specify("notices a changed job", func() { // Overwrite one job w/ a slightly different one. copyFile(paths[0], paths[3]) defer copyFile(paths[1], paths[3]) // Set up expectations and trigger process dir reload. expectLogMessage("Removed: " + paths[3]) expectLogMessage("Added: " + paths[3]) tickChan <- time.Now() loaded.Wait() // Make sure the new config was loaded. c.Expect(pdiInput.inputs[paths[3]].config.Command["0"].Args[0], gs.Equals, "hello world\n") }) }) }
func DashboardOutputSpec(c gs.Context) { t := new(pipeline_ts.SimpleT) ctrl := gomock.NewController(t) defer ctrl.Finish() pConfig := pipeline.NewPipelineConfig(nil) dashboardOutput := new(DashboardOutput) dashboardOutput.pConfig = pConfig oth := plugins_ts.NewOutputTestHelper(ctrl) oth.MockHelper = pipelinemock.NewMockPluginHelper(ctrl) oth.MockOutputRunner = pipelinemock.NewMockOutputRunner(ctrl) errChan := make(chan error, 1) startOutput := func() { go func() { errChan <- dashboardOutput.Run(oth.MockOutputRunner, oth.MockHelper) }() } if runtime.GOOS != "windows" { c.Specify("A DashboardOutput", func() { tmpdir, err := ioutil.TempDir("", "dashboard_output_test") c.Assume(err, gs.IsNil) config := dashboardOutput.ConfigStruct().(*DashboardOutputConfig) config.WorkingDirectory = tmpdir c.Specify("Init halts if basedirectory is not writable", func() { err := os.MkdirAll(tmpdir, 0400) c.Assume(err, gs.IsNil) defer os.RemoveAll(tmpdir) err = dashboardOutput.Init(config) c.Assume(err, gs.Not(gs.IsNil)) }) c.Specify("that is running", func() { startedChan := make(chan bool, 1) defer close(startedChan) ts := httptest.NewUnstartedServer(nil) dashboardOutput.starterFunc = func(hli *DashboardOutput) error { ts.Start() startedChan <- true return nil } ticker := make(chan time.Time) inChan := make(chan *pipeline.PipelinePack, 1) recycleChan := make(chan *pipeline.PipelinePack, 1) pack := pipeline.NewPipelinePack(recycleChan) pack.Message = pipeline_ts.GetTestMessage() oth.MockOutputRunner.EXPECT().InChan().Return(inChan) oth.MockOutputRunner.EXPECT().Ticker().Return(ticker) err := os.MkdirAll(tmpdir, 0700) c.Assume(err, gs.IsNil) defer os.RemoveAll(tmpdir) dashboardOutput.handler = http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { // Noop }) c.Specify("sets custom http headers", func() { config.Headers = http.Header{ "One": []string{"two", "three"}, "Four": []string{"five", "six", "seven"}, } err = dashboardOutput.Init(config) c.Assume(err, gs.IsNil) ts.Config = dashboardOutput.server startOutput() inChan <- pack <-startedChan resp, err := http.Get(ts.URL) c.Assume(err, gs.IsNil) resp.Body.Close() c.Assume(resp.StatusCode, gs.Equals, 200) // Verify headers are there eq := reflect.DeepEqual(resp.Header["One"], config.Headers["One"]) c.Expect(eq, gs.IsTrue) eq = reflect.DeepEqual(resp.Header["Four"], config.Headers["Four"]) c.Expect(eq, gs.IsTrue) }) close(inChan) c.Expect(<-errChan, gs.IsNil) ts.Close() }) }) } }
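// plugins_ts.NewOutputTestHelper, used by both the dashboard and SMTP output
// specs, is another small bundle of generated mocks. Note that
// DashboardOutputSpec immediately replaces the helper's MockHelper and
// MockOutputRunner with fresh mocks, so only the container itself matters
// there. A rough sketch, with the field types inferred from how the specs use
// them rather than taken from the original test support package:
type OutputTestHelper struct {
	MockHelper       *pipelinemock.MockPluginHelper
	MockOutputRunner *pipelinemock.MockOutputRunner
}

func NewOutputTestHelper(ctrl *gomock.Controller) *OutputTestHelper {
	return &OutputTestHelper{
		MockHelper:       pipelinemock.NewMockPluginHelper(ctrl),
		MockOutputRunner: pipelinemock.NewMockOutputRunner(ctrl),
	}
}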
func SmtpOutputSpec(c gs.Context) { t := new(pipeline_ts.SimpleT) ctrl := gomock.NewController(t) defer ctrl.Finish() oth := plugins_ts.NewOutputTestHelper(ctrl) var wg sync.WaitGroup inChan := make(chan *PipelinePack, 1) pConfig := NewPipelineConfig(nil) encoder := new(plugins.PayloadEncoder) econfig := encoder.ConfigStruct().(*plugins.PayloadEncoderConfig) econfig.AppendNewlines = false encoder.Init(econfig) c.Specify("A SmtpOutput", func() { smtpOutput := new(SmtpOutput) config := smtpOutput.ConfigStruct().(*SmtpOutputConfig) config.SendTo = []string{"root"} msg := pipeline_ts.GetTestMessage() pack := NewPipelinePack(pConfig.InputRecycleChan()) pack.Message = msg inChanCall := oth.MockOutputRunner.EXPECT().InChan().AnyTimes() inChanCall.Return(inChan) runnerName := oth.MockOutputRunner.EXPECT().Name().AnyTimes() runnerName.Return("SmtpOutput") oth.MockOutputRunner.EXPECT().Encoder().Return(encoder) encCall := oth.MockOutputRunner.EXPECT().Encode(pack) c.Specify("send email payload message", func() { err := smtpOutput.Init(config) c.Assume(err, gs.IsNil) smtpOutput.sendFunction = testSendMail outStr := "Write me out to the network" pack.Message.SetPayload(outStr) encCall.Return(encoder.Encode(pack)) wg.Add(1) go func() { smtpOutput.Run(oth.MockOutputRunner, oth.MockHelper) wg.Done() }() inChan <- pack close(inChan) wg.Wait() }) }) c.Specify("SmtpOutput Message Body Encoding", func() { smtpOutput := new(SmtpOutput) chars := "123456789012345678901234567890123456789012345678901234567" charsE := "MTIzNDU2Nzg5MDEyMzQ1Njc4OTAxMjM0NTY3ODkwMTIzNDU2Nzg5MDEyMzQ1Njc4OTAxMjM0NTY3" examples := [][]string{ {"Hello", "SGVsbG8="}, {chars, charsE}, {chars + chars, charsE + "\r\n" + charsE}, {chars + chars + "Hello", charsE + "\r\n" + charsE + "\r\n" + "SGVsbG8="}, {"", ""}, {"1", "MQ=="}, } for _, example := range examples { smtpOutput.encodeFullMsg([]byte(example[0])) c.Expect(string(smtpOutput.fullMsg), gs.Equals, example[1]) } }) // // Use this test with a real server // c.Specify("Real SmtpOutput output", func() { // smtpOutput := new(SmtpOutput) // config := smtpOutput.ConfigStruct().(*SmtpOutputConfig) // config.SendTo = []string{"root"} // msg := pipeline_ts.GetTestMessage() // pack := NewPipelinePack(pConfig.InputRecycleChan()) // pack.Message = msg // pack.Decoded = true // inChanCall := oth.MockOutputRunner.EXPECT().InChan().AnyTimes() // inChanCall.Return(inChan) // runnerName := oth.MockOutputRunner.EXPECT().Name().AnyTimes() // runnerName.Return("SmtpOutput") // oth.MockOutputRunner.EXPECT().Encoder().Return(encoder) // encCall := oth.MockOutputRunner.EXPECT().Encode(pack) // c.Specify("send a real email essage", func() { // err := smtpOutput.Init(config) // c.Assume(err, gs.IsNil) // outStr := "Write me out to the network" // pack.Message.SetPayload(outStr) // encCall.Return(encoder.Encode(pack)) // go func() { // wg.Add(1) // smtpOutput.Run(oth.MockOutputRunner, oth.MockHelper) // wg.Done() // }() // inChan <- pack // time.Sleep(1000) // allow time for the message output // close(inChan) // wg.Wait() // // manually check the mail // }) // }) }
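// testSendMail is assigned to smtpOutput.sendFunction in the spec above but is
// not included in this excerpt. Assuming sendFunction matches the
// net/smtp.SendMail signature, a no-op test double could look like the sketch
// below; it never touches the network and simply succeeds so Run can drain the
// pack. Assumes "fmt" and "net/smtp" are imported.
func testSendMail(addr string, a smtp.Auth, from string, to []string, msg []byte) error {
	if len(to) == 0 {
		return fmt.Errorf("testSendMail: no recipients")
	}
	// A fuller double might record addr, from, to, and msg for later
	// assertions; for this spec, returning nil is enough.
	return nil
}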