Example 1
// parseAndValidateArgs parses the command-line flags, validates the
// required ones (broker.list, topic, non-negative queue size) and builds
// a SyslogProducerConfig from them. It terminates the process with exit
// status 1 on a missing or invalid required flag, and panics when an
// explicitly supplied producer configuration file fails validation.
func parseAndValidateArgs() *kafka.SyslogProducerConfig {
	tag = make(map[string]string)
	flag.Var(tag, "tag", "")
	flag.Parse()

	setLogLevel()
	runtime.GOMAXPROCS(*maxProcs)

	if *brokerList == "" {
		fmt.Println("broker.list is required.")
		os.Exit(1)
	}

	if *topic == "" {
		fmt.Println("Topic is required.")
		os.Exit(1)
	}

	if *queueSize < 0 {
		fmt.Println("Queue size should be equal or greater than 0")
		os.Exit(1)
	}

	config := kafka.NewSyslogProducerConfig()
	if conf, err := kafka.ProducerConfigFromFile(*producerConfig); err == nil {
		// A producer configuration file was supplied; it must be valid.
		if err = conf.Validate(); err != nil {
			panic(err)
		}
		config.ProducerConfig = conf
	} else {
		// We don't have a producer configuration file, which is ok:
		// fall back to defaults tuned by the individual flags.
		config.ProducerConfig = kafka.DefaultProducerConfig()
		config.ProducerConfig.Acks = *requiredAcks
		config.ProducerConfig.Timeout = time.Duration(*acksTimeout) * time.Millisecond
	}
	config.NumProducers = *numProducers
	config.ChannelSize = *queueSize
	config.Topic = *topic
	config.BrokerList = *brokerList
	config.TCPAddr = fmt.Sprintf("%s:%s", *tcpHost, *tcpPort)
	config.UDPAddr = fmt.Sprintf("%s:%s", *udpHost, *udpPort)

	// Install the protobuf transformer only when at least one enrichment
	// flag (source, tag, log type id) deviates from its default.
	if !(*source == "" && len(tag) == 0 && *logtypeid == math.MinInt64) {
		config.Transformer = protobufTransformer
	}

	return config
}
// parseAndValidateArgs parses the command-line flags, validates the
// required ones (broker.list, topic) and builds a
// MarathonEventProducerConfig from them. It terminates the process with
// exit status 1 on a missing required flag or an unreadable Avro schema
// file, and panics when an explicitly supplied producer configuration
// file fails validation.
func parseAndValidateArgs() *kafka.MarathonEventProducerConfig {
	flag.Parse()

	setLogLevel()
	runtime.GOMAXPROCS(*maxProcs)

	if *brokerList == "" {
		fmt.Println("broker.list is required.")
		os.Exit(1)
	}

	if *topic == "" {
		fmt.Println("Topic is required.")
		os.Exit(1)
	}

	config := kafka.NewMarathonEventProducerConfig()
	if conf, err := kafka.ProducerConfigFromFile(*producerConfig); err == nil {
		// A producer configuration file was supplied; it must be valid.
		if err = conf.Validate(); err != nil {
			panic(err)
		}
		config.ProducerConfig = conf
	} else {
		// We don't have a producer configuration file, which is ok:
		// fall back to defaults tuned by the individual flags.
		config.ProducerConfig = kafka.DefaultProducerConfig()
		config.ProducerConfig.Acks = *requiredAcks
		config.ProducerConfig.Timeout = time.Duration(*acksTimeout) * time.Millisecond
	}

	config.Topic = *topic
	config.BrokerList = *brokerList
	config.Port = *port
	config.Pattern = *pattern
	config.SchemaRegistryUrl = *registry

	// An Avro schema is only needed when events are published through a
	// schema registry.
	if config.SchemaRegistryUrl != "" {
		schema, err := avro.ParseSchemaFile(*avroSchema)
		if err != nil {
			fmt.Printf("Could not parse schema file: %s\n", err)
			os.Exit(1)
		}
		config.AvroSchema = schema
	}

	return config
}
Example 3
// startProducer builds the Kafka producer used to emit transformed
// messages and launches the produce loop in its own goroutine.
// It panics when the producer configuration file cannot be read or
// loaded as a raw property map.
func (this *TransformExecutor) startProducer() {
	producerConfig, err := kafka.ProducerConfigFromFile(this.config.ProducerConfig)
	if err != nil {
		panic(err)
	}

	// Load the same file as a raw property map to reach settings the
	// typed producer config does not expose (schema.registry.url).
	cfgMap := make(map[string]string)
	if err = cfg.Load(this.config.ProducerConfig, cfgMap); err != nil {
		panic(err)
	}

	registryURL := cfgMap["schema.registry.url"]
	this.avroDecoder = kafka.NewKafkaAvroDecoder(registryURL)

	// Keys and values share a single Avro encoder instance.
	avroEncoder := kafka.NewKafkaAvroEncoder(registryURL)
	producerConfig.KeyEncoder = avroEncoder
	producerConfig.ValueEncoder = avroEncoder
	producerConfig.SendBufferSize = 10000
	producerConfig.BatchSize = 2000
	producerConfig.MaxMessagesPerRequest = 5000

	this.producer = kafka.NewSaramaProducer(producerConfig)
	// NOTE(review): no shutdown/cancellation hook is visible here for the
	// produce goroutine — presumably the producer's close path stops it.
	go this.produceRoutine()
}