// TestReceivePayloadMessage drives a KafkaInput against a pair of sarama mock
// brokers: broker 1 serves metadata naming broker 2 as the partition leader,
// and broker 2 serves an offset response followed by a single two-byte ("AB")
// message. The test verifies the bytes handed to the splitter, the message
// type set by the pack decorator, and the checkpoint written on shutdown.
func TestReceivePayloadMessage(t *testing.T) {
	b1 := sarama.NewMockBroker(t, 1)
	b2 := sarama.NewMockBroker(t, 2)
	ctrl := gomock.NewController(t)
	tmpDir, tmpErr := ioutil.TempDir("", "kafkainput-tests")
	if tmpErr != nil {
		t.Errorf("Unable to create a temporary directory: %s", tmpErr)
	}

	defer func() {
		if err := os.RemoveAll(tmpDir); err != nil {
			t.Errorf("Cleanup failed: %s", err)
		}
		ctrl.Finish()
	}()

	topic := "test"

	// Broker 1 answers the metadata request and names broker 2 as the
	// leader of partition 0.
	mdr := new(sarama.MetadataResponse)
	mdr.AddBroker(b2.Addr(), b2.BrokerID())
	mdr.AddTopicPartition(topic, 0, 2)
	b1.Returns(mdr)

	// Broker 2 serves the offset request and a fetch response containing
	// the two bytes "AB".
	or := new(sarama.OffsetResponse)
	or.AddTopicPartition(topic, 0, 0)
	b2.Returns(or)

	fr := new(sarama.FetchResponse)
	fr.AddMessage(topic, 0, nil, sarama.ByteEncoder([]byte{0x41, 0x42}), 0)
	b2.Returns(fr)

	pConfig := NewPipelineConfig(nil)
	pConfig.Globals.BaseDir = tmpDir
	ki := new(KafkaInput)
	ki.SetName(topic)
	ki.SetPipelineConfig(pConfig)

	config := ki.ConfigStruct().(*KafkaInputConfig)
	config.Addrs = append(config.Addrs, b1.Addr())
	config.Topic = topic

	ith := new(plugins_ts.InputTestHelper)
	ith.Pack = NewPipelinePack(pConfig.InputRecycleChan())
	ith.MockHelper = pipelinemock.NewMockPluginHelper(ctrl)
	ith.MockInputRunner = pipelinemock.NewMockInputRunner(ctrl)
	ith.MockSplitterRunner = pipelinemock.NewMockSplitterRunner(ctrl)

	err := ki.Init(config)
	if err != nil {
		t.Fatalf("%s", err)
	}

	ith.MockInputRunner.EXPECT().NewSplitterRunner("").Return(ith.MockSplitterRunner)
	ith.MockSplitterRunner.EXPECT().UseMsgBytes().Return(false)

	// Capture the pack decorator and the bytes handed to the splitter so
	// they can be checked below.
	decChan := make(chan func(*PipelinePack), 1)
	decCall := ith.MockSplitterRunner.EXPECT().SetPackDecorator(gomock.Any())
	decCall.Do(func(dec func(pack *PipelinePack)) {
		decChan <- dec
	})

	bytesChan := make(chan []byte, 1)
	splitCall := ith.MockSplitterRunner.EXPECT().SplitBytes(gomock.Any(), nil)
	splitCall.Do(func(recd []byte, del Deliverer) {
		bytesChan <- recd
	})

	errChan := make(chan error)
	go func() {
		errChan <- ki.Run(ith.MockInputRunner, ith.MockHelper)
	}()

	recd := <-bytesChan
	if string(recd) != "AB" {
		t.Errorf("Invalid Payload Expected: AB received: %s", string(recd))
	}

	packDec := <-decChan
	packDec(ith.Pack)
	if ith.Pack.Message.GetType() != "heka.kafka" {
		t.Errorf("Invalid Type %s", ith.Pack.Message.GetType())
	}

	// Closing the consumer hangs against the mock broker; closing the
	// brokers before stopping the consumer works around the issue and is
	// good enough for this test.
	b1.Close()
	b2.Close()
	ki.Stop()
	err = <-errChan
	if err != nil {
		t.Fatal(err)
	}

	filename := filepath.Join(tmpDir, "kafka", "test.test.0.offset.bin")
	if o, err := readCheckpoint(filename); err != nil {
		t.Errorf("Could not read the checkpoint file %s: %s", filename, err)
	} else if o != 1 {
		t.Errorf("Incorrect offset Expected: 1 Received: %d", o)
	}
}
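// TestSendMessage drives a KafkaOutput against a pair of sarama mock brokers:
// broker 1 serves metadata naming broker 2 as the partition leader, and
// broker 2 acknowledges the produce request. A single pack encoded with
// PayloadEncoder is pushed through the output, and the processMessageCount
// and processMessageFailures counters are checked before and after.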
func TestSendMessage(t *testing.T) {
	ctrl := gomock.NewController(t)
	b1 := sarama.NewMockBroker(t, 1)
	b2 := sarama.NewMockBroker(t, 2)
	defer func() {
		b1.Close()
		b2.Close()
		ctrl.Finish()
	}()

	topic := "test"
	globals := DefaultGlobals()
	pConfig := NewPipelineConfig(globals)

	mdr := new(sarama.MetadataResponse)
	mdr.AddBroker(b2.Addr(), b2.BrokerID())
	mdr.AddTopicPartition(topic, 0, 2)
	b1.Returns(mdr)

	pr := new(sarama.ProduceResponse)
	pr.AddTopicPartition(topic, 0, sarama.NoError)
	b2.Returns(pr)

	ko := new(KafkaOutput)
	ko.SetPipelineConfig(pConfig)
	config := ko.ConfigStruct().(*KafkaOutputConfig)
	config.Addrs = append(config.Addrs, b1.Addr())
	config.Topic = topic
	err := ko.Init(config)
	if err != nil {
		t.Fatal(err)
	}

	oth := plugins_ts.NewOutputTestHelper(ctrl)
	encoder := new(plugins.PayloadEncoder)
	encoder.Init(encoder.ConfigStruct().(*plugins.PayloadEncoderConfig))

	inChan := make(chan *PipelinePack, 1)

	msg := pipeline_ts.GetTestMessage()
	pack := NewPipelinePack(pConfig.InputRecycleChan())
	pack.Message = msg
	pack.Decoded = true

	outStr := "Write me out to the network"
	newpack := NewPipelinePack(nil)
	newpack.Message = msg

	inChanCall := oth.MockOutputRunner.EXPECT().InChan().AnyTimes()
	inChanCall.Return(inChan)

	errChan := make(chan error)
	startOutput := func() {
		go func() {
			err := ko.Run(oth.MockOutputRunner, oth.MockHelper)
			errChan <- err
		}()
	}

	oth.MockOutputRunner.EXPECT().Encoder().Return(encoder)
	oth.MockOutputRunner.EXPECT().Encode(pack).Return(encoder.Encode(pack))

	pack.Message.SetPayload(outStr)
	startOutput()

	msgcount := atomic.LoadInt64(&ko.processMessageCount)
	if msgcount != 0 {
		t.Errorf("Invalid starting processMessageCount %d", msgcount)
	}
	msgcount = atomic.LoadInt64(&ko.processMessageFailures)
	if msgcount != 0 {
		t.Errorf("Invalid starting processMessageFailures %d", msgcount)
	}

	inChan <- pack
	close(inChan)
	err = <-errChan
	if err != nil {
		t.Errorf("Error running output %s", err)
	}

	msgcount = atomic.LoadInt64(&ko.processMessageCount)
	if msgcount != 1 {
		t.Errorf("Invalid ending processMessageCount %d", msgcount)
	}
	msgcount = atomic.LoadInt64(&ko.processMessageFailures)
	if msgcount != 0 {
		t.Errorf("Invalid ending processMessageFailures %d", msgcount)
	}
}
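// Both tests above repeat the same mock-broker wiring. The following is a
// minimal sketch, not part of the original suite, showing how that setup
// could be factored out; the name newMockBrokerPair is hypothetical, and the
// sketch uses only the sarama mock-broker calls already exercised above:
// broker 1 answers the metadata request and names broker 2 as the leader of
// partition 0 for the given topic.
func newMockBrokerPair(t *testing.T, topic string) (b1, b2 *sarama.MockBroker) {
	b1 = sarama.NewMockBroker(t, 1)
	b2 = sarama.NewMockBroker(t, 2)

	// Metadata served by broker 1 points consumers/producers at broker 2.
	mdr := new(sarama.MetadataResponse)
	mdr.AddBroker(b2.Addr(), b2.BrokerID())
	mdr.AddTopicPartition(topic, 0, 2)
	b1.Returns(mdr)

	return b1, b2
}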