Example #1
0
// cmdBenchmarkSet benchmarks message production with default acknowledgement
// settings. It sends generated test messages of globalMsgLength bytes to
// globalTopic on the brokers in globalBrokerList, running the closure at
// globalConcurrentLevel concurrency for globalDuration via the benchmark
// tester, and returns any validation, connection, or run error.
func cmdBenchmarkSet() error {
	if len(globalBrokerList) == 0 {
		return errors.NotValidf("broker list")
	}
	if len(globalTopic) == 0 {
		// Lowercase for consistency with the "broker list" message above
		// and with Go error-string conventions.
		return errors.NotValidf("topic")
	}

	sendString := utils.GenTestMessage(globalMsgLength)
	producerConfig := siesta_producer.NewProducerConfig()
	// Flush almost immediately so per-message latency, not batching,
	// dominates the benchmark.
	producerConfig.Linger = time.Millisecond
	connConfig := siesta.NewConnectorConfig()
	brokerList := strings.Split(globalBrokerList, ",")
	producerConfig.BrokerList = brokerList
	connConfig.BrokerList = brokerList

	log.Printf("%v", brokerList)
	connector, err := siesta.NewDefaultConnector(connConfig)
	if err != nil {
		return errors.Trace(err)
	}

	producer := siesta_producer.NewKafkaProducer(producerConfig,
		siesta_producer.ByteSerializer,
		siesta_producer.ByteSerializer,
		connector)

	bt := utils.NewBenchmarkTester(globalConcurrentLevel, globalDuration,
		func(bt *utils.BenchmarkTester, index int) error {

			record := &siesta_producer.ProducerRecord{
				Topic: globalTopic,
				Value: []byte(sendString),
			}

			// Send is asynchronous; receive from the returned channel
			// to block until the broker acknowledges this record.
			recordMetadata := <-producer.Send(record)
			if recordMetadata.Error == siesta.ErrNoError {
				return nil
			}
			return recordMetadata.Error
		}, nil)
	return errors.Trace(bt.Run())
}
Example #2
0
// cmdBenchmarkSetNoAck benchmarks fire-and-forget message production
// (RequiredAcks = 0, i.e. no broker acknowledgement). It sends generated
// test messages of globalMsgLength bytes to globalTopic on the brokers in
// globalBrokerList, running the closure at globalConcurrentLevel concurrency
// for globalDuration via the benchmark tester, and returns any validation,
// connection, or run error.
func cmdBenchmarkSetNoAck() error {
	if len(globalBrokerList) == 0 {
		return errors.NotValidf("broker list")
	}
	if len(globalTopic) == 0 {
		// Lowercase for consistency with the "broker list" message above
		// and with Go error-string conventions.
		return errors.NotValidf("topic")
	}

	sendString := utils.GenTestMessage(globalMsgLength)
	producerConfig := siesta_producer.NewProducerConfig()
	producerConfig.ClientID = "Benchmark"
	// 0 = do not wait for any broker acknowledgement.
	producerConfig.RequiredAcks = 0
	connConfig := siesta.NewConnectorConfig()
	brokerList := strings.Split(globalBrokerList, ",")
	producerConfig.BrokerList = brokerList
	connConfig.BrokerList = brokerList

	log.Printf("%v", brokerList)
	connector, err := siesta.NewDefaultConnector(connConfig)
	if err != nil {
		return errors.Trace(err)
	}

	producer := siesta_producer.NewKafkaProducer(producerConfig,
		siesta_producer.ByteSerializer,
		siesta_producer.ByteSerializer,
		connector)

	bt := utils.NewBenchmarkTester(globalConcurrentLevel, globalDuration,
		func(bt *utils.BenchmarkTester, index int) error {

			record := &siesta_producer.ProducerRecord{
				Topic: globalTopic,
				Value: []byte(sendString),
			}

			// Send is asynchronous; receive from the returned channel to
			// wait for the local send result (no broker ack is requested).
			recordMetadata := <-producer.Send(record)
			if recordMetadata.Error == siesta.ErrNoError {
				return nil
			}
			return recordMetadata.Error
		}, nil)
	return errors.Trace(bt.Run())
}
Example #3
0
// validate checks the required command-line settings (broker list, topic,
// and — when --avro is set — the schema registry URL), printing a message
// and exiting the process on any missing value, then assembles and returns
// the syslog producer configuration from the parsed flags.
func validate() *syslog.SyslogProducerConfig {
	// Report a missing required setting and terminate immediately.
	die := func(msg string) {
		fmt.Println(msg)
		os.Exit(1)
	}

	if brokerList == "" {
		die("broker.list is required.")
	}
	if topic == "" {
		die("Topic is required.")
	}
	if sendAvro && schemaRegistryUrl == "" {
		die("Schema Registry URL is required for --avro flag")
	}

	config := syslog.NewSyslogProducerConfig()

	// Underlying Kafka producer settings.
	config.ProducerConfig = producer.NewProducerConfig()
	config.ProducerConfig.RequiredAcks = requiredAcks
	config.ProducerConfig.AckTimeoutMs = int32(acksTimeout)

	// Syslog server endpoints and routing.
	config.BrokerList = brokerList
	config.NumProducers = numProducers
	config.Topic = topic
	config.TCPAddr = fmt.Sprintf("%s:%d", tcpHost, tcpPort)
	config.UDPAddr = fmt.Sprintf("%s:%d", udpHost, udpPort)

	// Optional serialization modes; protobuf takes precedence if both
	// flags are set, matching the original assignment order.
	if sendAvro {
		avroEncoder := kafkaavro.NewKafkaAvroEncoder(schemaRegistryUrl)
		config.ValueSerializer = avroEncoder.Encode
		config.Transformer = avroTransformer
	}
	if sendProtobuf {
		config.ValueSerializer = producer.ByteSerializer
		config.Transformer = protobufTransformer
	}

	return config
}