func NewKafkaAvroAdapter(route *router.Route) (router.LogAdapter, error) {
    brokers := readBrokers(route.Address)
    if len(brokers) == 0 {
        return nil, errorf("The Kafka broker host:port is missing. Did you specify it as a route address?")
    }

    topic := readTopic(route.Address, route.Options)
    if topic == "" {
        return nil, errorf("The Kafka topic is missing. Did you specify it as a route option?")
    }

    schemaUrl := readSchemaRegistryUrl(route.Options)
    if schemaUrl == "" {
        return nil, errorf("The schema registry URL is missing. Did you specify it as a route option?")
    }

    registry := kafkaavro.NewKafkaAvroEncoder(schemaUrl)

    schema, err := avro.ParseSchema(messageSchema)
    if err != nil {
        return nil, errorf("The message schema could not be parsed: %v", err)
    }

    if os.Getenv("DEBUG") != "" {
        log.Printf("Starting Kafka producer for address: %s, topic: %s.\n", brokers, topic)
    }

    retries, err := strconv.Atoi(os.Getenv("KAFKA_CONNECT_RETRIES"))
    if err != nil {
        retries = 3
    }

    // Retry producer creation, backing off one second between attempts.
    // Stop as soon as a producer is created; give up after the last attempt.
    var producer sarama.AsyncProducer
    for i := 0; i < retries; i++ {
        producer, err = sarama.NewAsyncProducer(brokers, newConfig())
        if err == nil {
            break
        }
        if os.Getenv("DEBUG") != "" {
            log.Println("Couldn't create Kafka producer. Retrying...", err)
        }
        if i == retries-1 {
            return nil, errorf("Couldn't create Kafka producer. %v", err)
        }
        time.Sleep(1 * time.Second)
    }

    return &KafkaAvroAdapter{
        route:    route,
        brokers:  brokers,
        topic:    topic,
        registry: registry,
        schema:   schema,
        producer: producer,
    }, nil
}
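// A minimal sketch of how this adapter could be registered with logspout,
// assuming it is compiled in as a logspout module. logspout discovers adapters
// through router.AdapterFactories; the "kafka-avro" scheme name here is
// illustrative, as is the route URL in the comment below.
func init() {
    // A route such as kafka-avro://broker1:9092,broker2:9092/?topic=logs
    // would then be handled by NewKafkaAvroAdapter (the exact option keys
    // depend on readTopic and readSchemaRegistryUrl).
    router.AdapterFactories.Register(NewKafkaAvroAdapter, "kafka-avro")
}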
// NewCodahaleKafkaReporter creates a reporter that publishes Codahale-style
// metrics to the given Kafka topic, Avro-encoded via the schema registry.
func NewCodahaleKafkaReporter(topic string, schemaRegistryUrl string, producerConfig *producer.ProducerConfig, connectorConfig *siesta.ConnectorConfig) (*CodahaleKafkaReporter, error) {
    encoder := kafkaavro.NewKafkaAvroEncoder(schemaRegistryUrl)
    connector, err := siesta.NewDefaultConnector(connectorConfig)
    if err != nil {
        return nil, err
    }

    return &CodahaleKafkaReporter{
        topic:    topic,
        producer: producer.NewKafkaProducer(producerConfig, producer.ByteSerializer, encoder.Encode, connector),
    }, nil
}
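// A hedged usage sketch for the reporter, assuming it implements io.Writer so
// it can receive JSON metric snapshots from rcrowley/go-metrics via
// metrics.WriteJSON. siesta.NewConnectorConfig and its BrokerList field are
// assumed defaults from the siesta client; the broker and registry addresses
// are illustrative.
func reportMetricsExample() error {
    connectorConfig := siesta.NewConnectorConfig() // assumed default-config helper
    connectorConfig.BrokerList = []string{"localhost:9092"}

    reporter, err := NewCodahaleKafkaReporter("metrics", "http://localhost:8081",
        producer.NewProducerConfig(), connectorConfig)
    if err != nil {
        return err
    }

    // Emit a snapshot of the default registry every 10 seconds.
    go metrics.WriteJSON(metrics.DefaultRegistry, 10*time.Second, reporter)
    return nil
}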
// NewKafkaLogEmitter creates a new KafkaLogEmitter with a provided configuration.
func NewKafkaLogEmitter(config *KafkaLogEmitterConfig) (*KafkaLogEmitter, error) {
    encoder := kafkaavro.NewKafkaAvroEncoder(config.SchemaRegistryUrl)
    connector, err := siesta.NewDefaultConnector(config.ConnectorConfig)
    if err != nil {
        return nil, err
    }

    emitter := &KafkaLogEmitter{
        config:   config,
        producer: producer.NewKafkaProducer(config.ProducerConfig, producer.ByteSerializer, encoder.Encode, connector),
    }
    return emitter, nil
}
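// A minimal construction sketch. Only the three config fields that
// NewKafkaLogEmitter itself reads are set here; the real KafkaLogEmitterConfig
// likely carries more (topic, source, log level), and any Emit/Info method on
// the returned emitter is an assumption about surrounding code not shown above.
func newEmitterExample() (*KafkaLogEmitter, error) {
    config := &KafkaLogEmitterConfig{
        SchemaRegistryUrl: "http://localhost:8081",     // illustrative address
        ConnectorConfig:   siesta.NewConnectorConfig(), // assumed default-config helper
        ProducerConfig:    producer.NewProducerConfig(),
    }
    return NewKafkaLogEmitter(config)
}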
// validate checks the required command-line options and builds the syslog
// producer configuration, exiting with an error message if anything is missing.
func validate() *syslog.SyslogProducerConfig {
    if brokerList == "" {
        fmt.Println("broker.list is required.")
        os.Exit(1)
    }

    if topic == "" {
        fmt.Println("topic is required.")
        os.Exit(1)
    }

    if sendAvro && schemaRegistryUrl == "" {
        fmt.Println("schema.registry.url is required for the --avro flag.")
        os.Exit(1)
    }

    config := syslog.NewSyslogProducerConfig()
    config.ProducerConfig = producer.NewProducerConfig()
    config.ProducerConfig.RequiredAcks = requiredAcks
    config.ProducerConfig.AckTimeoutMs = int32(acksTimeout)
    config.BrokerList = brokerList
    config.NumProducers = numProducers
    config.Topic = topic
    config.TCPAddr = fmt.Sprintf("%s:%d", tcpHost, tcpPort)
    config.UDPAddr = fmt.Sprintf("%s:%d", udpHost, udpPort)

    if sendAvro {
        serializer := kafkaavro.NewKafkaAvroEncoder(schemaRegistryUrl)
        config.ValueSerializer = serializer.Encode
        config.Transformer = avroTransformer
    }
    if sendProtobuf {
        config.ValueSerializer = producer.ByteSerializer
        config.Transformer = protobufTransformer
    }

    return config
}
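// One plausible way the package-level options read by validate() could be
// populated, using the standard flag package. Only a subset of the options is
// shown; the flag names mirror the error messages above but are otherwise
// assumptions.
var (
    brokerList        string
    topic             string
    schemaRegistryUrl string
    sendAvro          bool
)

func init() {
    flag.StringVar(&brokerList, "broker.list", "", "comma-separated Kafka broker list (required)")
    flag.StringVar(&topic, "topic", "", "Kafka topic to produce to (required)")
    flag.StringVar(&schemaRegistryUrl, "schema.registry.url", "", "schema registry URL (required with --avro)")
    flag.BoolVar(&sendAvro, "avro", false, "serialize messages as Avro")
}

func main() {
    flag.Parse()
    config := validate()
    _ = config // hand the validated config to the syslog producer from here
}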