Example no. 1
// NewKafkaInst builds a sarama sync producer from the given configuration and
// returns it wrapped in a Kafka instance together with the chosen topic,
// partition, broker list and starting offset.
func NewKafkaInst(w *logrus.Logger, conf kafka_config) *Kafka {
	var offset int64
	config := sarama.NewConfig()
	// init partitioner
	switch conf.Partitioner {
	case "hash":
		config.Producer.Partitioner = sarama.NewHashPartitioner
	case "random":
		config.Producer.Partitioner = sarama.NewRandomPartitioner
	case "manual":
		config.Producer.Partitioner = sarama.NewManualPartitioner
		if conf.Partition == -1 {
			w.WithFields(logrus.Fields{
				"module": "kafka",
			}).Fatalln("Partition is required when partitioning manually.")
		}
	default:
		w.WithFields(logrus.Fields{
			"module": "kafka",
		}).Fatalf("Partitioner %s not supported.", conf.Partitioner)
	}
	if conf.Read_from_oldest {
		offset = sarama.OffsetOldest
	} else {
		offset = sarama.OffsetNewest
	}
	partition := int32(conf.Partition)
	// init topic
	topic := conf.Topics
	// require acknowledgement from all in-sync replicas before a send succeeds
	config.Producer.RequiredAcks = sarama.WaitForAll
	// init producer
	brokerlist := conf.Brokers
	producer, err := sarama.NewSyncProducer(brokerlist, config)
	if err != nil {
		w.WithFields(logrus.Fields{
			"module": "kafka",
		}).Fatalln("Init failed:", err)
	}
	w.WithFields(logrus.Fields{
		"module": "kafka",
	}).Infoln("Init completed")
	return &Kafka{producer: producer, topic: topic, partition: partition, brokerlist: brokerlist, logger: w, offset: offset}
}
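
The constructor above only shows how the producer is wired up; the kafka_config and Kafka types it relies on are not part of the example. Below is a minimal sketch of how those types might look, reconstructed from the fields the constructor reads, plus one way it could be called. The type definitions, field types, import path (the Shopify sarama path is assumed; the module now also lives under github.com/IBM/sarama), and the broker/topic values are assumptions for illustration, not taken from the original project.

package kafka

import (
	"github.com/Shopify/sarama"
	"github.com/sirupsen/logrus"
)

// kafka_config: assumed shape, reconstructed from the fields NewKafkaInst reads.
type kafka_config struct {
	Brokers          []string // broker addresses passed to sarama.NewSyncProducer
	Topics           string   // topic the producer publishes to (could equally be a slice)
	Partitioner      string   // "hash", "random" or "manual"
	Partition        int      // concrete partition, required when Partitioner == "manual"
	Read_from_oldest bool     // start from sarama.OffsetOldest instead of OffsetNewest
}

// Kafka: assumed shape, reconstructed from the struct literal NewKafkaInst returns.
type Kafka struct {
	producer   sarama.SyncProducer
	topic      string
	partition  int32
	brokerlist []string
	logger     *logrus.Logger
	offset     int64
}

// exampleUsage sketches a call to NewKafkaInst, assuming it lives in this package.
// Broker address and topic name are placeholders.
func exampleUsage() {
	conf := kafka_config{
		Brokers:          []string{"localhost:9092"},
		Topics:           "events",
		Partitioner:      "hash",
		Partition:        -1,
		Read_from_oldest: false,
	}
	k := NewKafkaInst(logrus.New(), conf)
	// k.producer, k.topic etc. would then be used by the package's send helpers.
	_ = k
}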
Example no. 2
// NewAvroInst reads the Avro schema file referenced in conf, builds a goavro
// codec from it and returns both wrapped in an Avro instance.
func NewAvroInst(w *logrus.Logger, conf avro_config) *Avro {
	data, err := ioutil.ReadFile(conf.Schema)
	if err != nil {
		w.WithFields(logrus.Fields{
			"module": "avro",
		}).Fatalln("Failed to open schema file:", err)
	}
	recordSchemaJSON := string(data)
	codec, err := goavro.NewCodec(recordSchemaJSON)
	if err != nil {
		w.WithFields(logrus.Fields{
			"module": "avro",
		}).Fatalln("Failed to init codec from schema:", err)
	}
	w.WithFields(logrus.Fields{
		"module": "avro",
	}).Println("Init completed.")
	return &Avro{codec: codec, recordSchemaJSON: recordSchemaJSON, logger: w}
}
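
As with the Kafka example, the avro_config and Avro types are not shown. The sketch below reconstructs plausible definitions from what the constructor touches and shows one way the resulting codec could be used to encode a record. The type definitions, the goavro v2 import path, and the schema path/record fields are assumptions for illustration.

package avro

import (
	"github.com/linkedin/goavro/v2"
	"github.com/sirupsen/logrus"
)

// avro_config: assumed shape; only the Schema field is read by NewAvroInst.
type avro_config struct {
	Schema string // path to the Avro schema (.avsc) file
}

// Avro: assumed shape, reconstructed from the struct literal NewAvroInst returns.
type Avro struct {
	codec            *goavro.Codec
	recordSchemaJSON string
	logger           *logrus.Logger
}

// exampleEncode shows the codec built at init being used to serialize a record.
// The schema path and the record fields are placeholders and must match the
// actual schema in use.
func exampleEncode() {
	w := logrus.New()
	a := NewAvroInst(w, avro_config{Schema: "user.avsc"})

	native := map[string]interface{}{
		"name": "alice",
		"age":  30,
	}
	// BinaryFromNative appends the Avro binary encoding of native to the given buffer.
	bin, err := a.codec.BinaryFromNative(nil, native)
	if err != nil {
		w.WithField("module", "avro").Fatalln("Encode failed:", err)
	}
	_ = bin
}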