Example #1
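// Run pulls packs from the output runner's input channel, encodes each one,
// and queues the encoded bytes to Kafka until the channel closes.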
func (k *KafkaOutput) Run(or pipeline.OutputRunner, h pipeline.PluginHelper) (err error) {
	defer func() {
		k.producer.Close()
		k.client.Close()
	}()

	if or.Encoder() == nil {
		return errors.New("Encoder required.")
	}

	inChan := or.InChan()
	useBuffering := or.UsesBuffering()
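	// Drain asynchronous producer errors on a separate goroutine.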
	errChan := k.producer.Errors()
	var wg sync.WaitGroup
	wg.Add(1)
	go k.processKafkaErrors(or, errChan, &wg)

	var (
		pack  *pipeline.PipelinePack
		topic = k.config.Topic
		key   sarama.Encoder
	)

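	// Consume packs until the input channel is closed.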
	for pack = range inChan {
		atomic.AddInt64(&k.processMessageCount, 1)

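		// Resolve the topic and partition key from message fields when configured.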
		if k.topicVariable != nil {
			topic = getMessageVariable(pack.Message, k.topicVariable)
		}
		if k.hashVariable != nil {
			key = sarama.StringEncoder(getMessageVariable(pack.Message, k.hashVariable))
		}

		msgBytes, err := or.Encode(pack)
		if err != nil {
			atomic.AddInt64(&k.processMessageFailures, 1)
			or.LogError(err)
			// Don't retry encoding errors.
			or.UpdateCursor(pack.QueueCursor)
			pack.Recycle(nil)
			continue
		}
		if msgBytes == nil {
			atomic.AddInt64(&k.processMessageDiscards, 1)
			or.UpdateCursor(pack.QueueCursor)
			pack.Recycle(nil)
			continue
		}
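		// Hand the encoded message off to the producer queue.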
		err = k.producer.QueueMessage(topic, key, sarama.ByteEncoder(msgBytes))
		if err != nil {
			if !useBuffering {
				atomic.AddInt64(&k.processMessageFailures, 1)
			}
			or.LogError(err)
		}
		pack.Recycle(err)
	}

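	// The input channel is closed: signal the error goroutine to stop and wait for it.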
	errChan <- Shutdown
	wg.Wait()
	return
}
Example #2
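// TestReceivePayloadMessage feeds a KafkaInput from mock brokers and verifies
// that the payload is delivered, the pack is decorated, and the offset is checkpointed.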
func TestReceivePayloadMessage(t *testing.T) {
	b1 := sarama.NewMockBroker(t, 1)
	b2 := sarama.NewMockBroker(t, 2)
	ctrl := gomock.NewController(t)
	tmpDir, tmpErr := ioutil.TempDir("", "kafkainput-tests")
	if tmpErr != nil {
		t.Errorf("Unable to create a temporary directory: %s", tmpErr)
	}

	defer func() {
		if err := os.RemoveAll(tmpDir); err != nil {
			t.Errorf("Cleanup failed: %s", err)
		}
		ctrl.Finish()
	}()

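	// Prime the mock brokers with canned metadata, offset, and fetch responses so
	// the consumer discovers the "test" topic on broker 2 and receives one message.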
	topic := "test"
	mdr := new(sarama.MetadataResponse)
	mdr.AddBroker(b2.Addr(), b2.BrokerID())
	mdr.AddTopicPartition(topic, 0, 2)
	b1.Returns(mdr)

	or := new(sarama.OffsetResponse)
	or.AddTopicPartition(topic, 0, 0)
	b2.Returns(or)

	fr := new(sarama.FetchResponse)
	fr.AddMessage(topic, 0, nil, sarama.ByteEncoder([]byte{0x41, 0x42}), 0)
	b2.Returns(fr)

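	// Point a KafkaInput at the first mock broker and consume the test topic.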
	pConfig := NewPipelineConfig(nil)
	pConfig.Globals.BaseDir = tmpDir
	ki := new(KafkaInput)
	ki.SetName(topic)
	ki.SetPipelineConfig(pConfig)
	config := ki.ConfigStruct().(*KafkaInputConfig)
	config.Addrs = append(config.Addrs, b1.Addr())
	config.Topic = topic

	ith := new(plugins_ts.InputTestHelper)
	ith.Pack = NewPipelinePack(pConfig.InputRecycleChan())
	ith.MockHelper = pipelinemock.NewMockPluginHelper(ctrl)
	ith.MockInputRunner = pipelinemock.NewMockInputRunner(ctrl)

	ith.MockSplitterRunner = pipelinemock.NewMockSplitterRunner(ctrl)

	err := ki.Init(config)
	if err != nil {
		t.Fatalf("%s", err)
	}

	ith.MockInputRunner.EXPECT().NewSplitterRunner("").Return(ith.MockSplitterRunner)
	ith.MockSplitterRunner.EXPECT().UseMsgBytes().Return(false)
	ith.MockSplitterRunner.EXPECT().Done()

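	// Capture the pack decorator so the test can apply it to a pack below.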
	decChan := make(chan func(*PipelinePack), 1)
	decCall := ith.MockSplitterRunner.EXPECT().SetPackDecorator(gomock.Any())
	decCall.Do(func(dec func(pack *PipelinePack)) {
		decChan <- dec
	})

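	// Capture the raw bytes handed to SplitBytes so the payload can be checked.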
	bytesChan := make(chan []byte, 1)
	splitCall := ith.MockSplitterRunner.EXPECT().SplitBytes(gomock.Any(), nil)
	splitCall.Do(func(recd []byte, del Deliverer) {
		bytesChan <- recd
	})

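	// Run the input in the background and collect its return value.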
	errChan := make(chan error)
	go func() {
		errChan <- ki.Run(ith.MockInputRunner, ith.MockHelper)
	}()

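	// The mock fetch response carried bytes 0x41 0x42, i.e. "AB".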
	recd := <-bytesChan
	if string(recd) != "AB" {
		t.Errorf("Invalid Payload Expected: AB received: %s", string(recd))
	}

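	// The decorator should stamp the pack's message with the heka.kafka type.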
	packDec := <-decChan
	packDec(ith.Pack)
	if ith.Pack.Message.GetType() != "heka.kafka" {
		t.Errorf("Invalid Type %s", ith.Pack.Message.GetType())
	}

	// The consumer hangs on close when used with the mock broker.
	// Closing the brokers before stopping the consumer works around
	// the issue and is good enough for this test.
	b1.Close()
	b2.Close()

	ki.Stop()
	err = <-errChan
	if err != nil {
		t.Fatal(err)
	}

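	// One message was consumed from offset 0, so the checkpoint should record offset 1.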
	filename := filepath.Join(tmpDir, "kafka", "test.test.0.offset.bin")
	if o, err := readCheckpoint(filename); err != nil {
		t.Errorf("Could not read the checkpoint file: %s", filename)
	} else {
		if o != 1 {
			t.Errorf("Incorrect offset Expected: 1 Received: %d", o)
		}
	}
}