Example #1
func (this *pubStore) Start() (err error) {
	if ctx.KafkaHome() == "" {
		return fmt.Errorf("empty kafka_home in ~/.gafka.cf")
	}
	if !gio.DirExists(ctx.KafkaHome()) {
		return fmt.Errorf("kafka not installed in %s, run 'gk deploy -kfkonly'", ctx.KafkaHome())
	}

	// warmup: create pools according to the current kafka topology
	for _, cluster := range meta.Default.ClusterNames() {
		this.pubPools[cluster] = newPubPool(this, cluster,
			meta.Default.BrokerList(cluster), this.pubPoolsCapcity)
	}

	this.wg.Add(1)
	go func() {
		defer this.wg.Done()

		for {
			select {
			case <-meta.Default.RefreshEvent():
				this.doRefresh()

			case <-this.shutdownCh:
				log.Trace("pub store[%s] stopped", this.Name())
				return
			}
		}
	}()

	return
}
Example #2
func (this *Migrate) executeReassignment() {
	/*
		1. kafka-reassign-partitions.sh write /admin/reassign_partitions
		2. controller listens to the path above
		3. For each topic partition, the controller does the following:
		  3.1. Start new replicas in RAR – AR (RAR = Reassigned Replicas, AR = original list of Assigned Replicas)
		  3.2. Wait until new replicas are in sync with the leader
		  3.3. If the leader is not in RAR, elect a new leader from RAR
		  3.4. Stop old replicas AR – RAR
		  3.5. Write new AR
		  3.6. Remove partition from the /admin/reassign_partitions path

	*/
	cmd := pipestream.New(fmt.Sprintf("%s/bin/kafka-reassign-partitions.sh", ctx.KafkaHome()),
		fmt.Sprintf("--zookeeper %s", this.zkcluster.ZkConnectAddr()),
		fmt.Sprintf("--reassignment-json-file %s", reassignNodeFilename),
		fmt.Sprintf("--execute"),
	)
	err := cmd.Open()
	if err != nil {
		return
	}
	defer cmd.Close()

	scanner := bufio.NewScanner(cmd.Reader())
	scanner.Split(bufio.ScanLines)
	for scanner.Scan() {
		this.Ui.Output(color.Yellow(scanner.Text()))
	}
}
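The --reassignment-json-file argument above points at reassignNodeFilename, which is generated elsewhere in the command. For reference, kafka-reassign-partitions.sh expects that file to use Kafka's standard reassignment layout; the following is a minimal sketch (not taken from gafka) that writes such a plan, with a hypothetical topic name and broker ids:

// Minimal sketch, assuming hypothetical topic/partition/replica values;
// "reassign.json" stands in for reassignNodeFilename used above.
package main

import "os"

func main() {
	plan := `{
  "version": 1,
  "partitions": [
    {"topic": "app.events", "partition": 0, "replicas": [2, 3]}
  ]
}`
	if err := os.WriteFile("reassign.json", []byte(plan), 0644); err != nil {
		panic(err)
	}
}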
Example #3
func (this *Topics) resetTopicConfig(zkcluster *zk.ZkCluster, topic string) {
	zkAddrs := zkcluster.ZkConnectAddr()
	key := "retention.ms"
	cmd := pipestream.New(fmt.Sprintf("%s/bin/kafka-topics.sh", ctx.KafkaHome()),
		fmt.Sprintf("--zookeeper %s", zkAddrs),
		fmt.Sprintf("--alter"),
		fmt.Sprintf("--topic %s", topic),
		fmt.Sprintf("--deleteConfig %s", key),
	)
	err := cmd.Open()
	swallow(err)
	defer cmd.Close()

	scanner := bufio.NewScanner(cmd.Reader())
	scanner.Split(bufio.ScanLines)

	output := make([]string, 0)
	for scanner.Scan() {
		output = append(output, scanner.Text())
	}
	swallow(scanner.Err())

	path := zkcluster.GetTopicConfigPath(topic)
	this.Ui.Info(path)

	for _, line := range output {
		this.Ui.Output(line)
	}
}
Example #4
func (this *Partition) addPartition(zkAddrs string, topic string, partitions int) error {
	log.Info("adding partitions to topic: %s", topic)

	cmd := pipestream.New(fmt.Sprintf("%s/bin/kafka-topics.sh", ctx.KafkaHome()),
		fmt.Sprintf("--zookeeper %s", zkAddrs),
		fmt.Sprintf("--alter"),
		fmt.Sprintf("--topic %s", topic),
		fmt.Sprintf("--partitions %d", partitions),
	)
	err := cmd.Open()
	if err != nil {
		return err
	}
	// close the pipe even if scanning fails below
	defer cmd.Close()

	scanner := bufio.NewScanner(cmd.Reader())
	scanner.Split(bufio.ScanLines)
	for scanner.Scan() {
		this.Ui.Output(color.Yellow(scanner.Text()))
	}
	if err = scanner.Err(); err != nil {
		return err
	}

	log.Info("added partitions to topic: %s", topic)
	return nil
}
Example #5
func (this *ZkCluster) AlterTopic(topic string, ts *sla.TopicSla) (output []string, err error) {
	zkAddrs := this.ZkConnectAddr()
	args := []string{
		fmt.Sprintf("--zookeeper %s", zkAddrs),
		fmt.Sprintf("--alter"),
		fmt.Sprintf("--topic %s", topic),
	}
	configs := ts.DumpForAlterTopic()
	if len(configs) == 0 {
		err = errors.New("no alter topic configs")
		return
	}

	args = append(args, configs...)
	cmd := pipestream.New(fmt.Sprintf("%s/bin/kafka-topics.sh", ctx.KafkaHome()),
		args...)
	if err = cmd.Open(); err != nil {
		return
	}
	defer cmd.Close()

	scanner := bufio.NewScanner(cmd.Reader())
	scanner.Split(bufio.ScanLines)

	output = make([]string, 0)
	for scanner.Scan() {
		output = append(output, scanner.Text())
	}
	if err = scanner.Err(); err != nil {
		return
	}

	return
}
Example #6
func (this *ZkCluster) DeleteTopic(topic string) (output []string, err error) {
	zkAddrs := this.ZkConnectAddr()
	args := []string{
		fmt.Sprintf("--zookeeper %s", zkAddrs),
		fmt.Sprintf("--delete"),
		fmt.Sprintf("--topic %s", topic),
	}
	cmd := pipestream.New(fmt.Sprintf("%s/bin/kafka-topics.sh", ctx.KafkaHome()), args...)
	if err = cmd.Open(); err != nil {
		return
	}
	defer cmd.Close()

	scanner := bufio.NewScanner(cmd.Reader())
	scanner.Split(bufio.ScanLines)

	output = make([]string, 0)
	for scanner.Scan() {
		output = append(output, scanner.Text())
	}
	if err = scanner.Err(); err != nil {
		return
	}

	return
}
Example #7
func (this *Deploy) Help() string {
	help := fmt.Sprintf(`
Usage: %s deploy -z zone -c cluster [options]

    %s

Options:

    -kfkonly
      Only install kafka runtime on localhost.

    -influx host:port
      InfluxDB server address used for kafka metrics reporter.
      If empty, InfluxDB metrics reporter will be disabled.

    -uninstall base dir
      Uninstall a kafka broker on localhost.

    -demo
      Demonstrate how to use this command.

    -dryrun
      Default is true.

    -root dir
      Root directory of the kafka broker.
      Defaults to /var/wd

    -ip addr
      Advertised host name of this new broker.	

    -port port
      Tcp port the broker will listen on.

    -broker.id id

    -user runAsUser
      The deployed kafka broker will run as this user.
      Defaults to sre

    -ver [2.10-0.8.1.1|2.10-0.8.2.2]
      Defaults to 2.10-0.8.2.2

    -kafka.base dir
      Kafka installation prefix dir.
      Defaults to %s

    -log.dirs dirs
      A comma-separated list of directories under which to store log files.

`, this.Cmd, this.Synopsis(), ctx.KafkaHome())
	return strings.TrimSpace(help)
}
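Putting the options above together with the flags that Run (Example #10 below) requires (-z, -c, then -broker.id, -port, -ip, -log.dirs), a representative invocation could look like the line below; the zone, cluster, broker id, address, and directories are made-up values for illustration:

    gk deploy -z test -c trade -broker.id 1 -ip 10.1.1.1 -port 11001 -log.dirs /data1/kafka,/data2/kafka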
Example #8
func (this *Rebalance) executeReassignment() {
	cmd := pipestream.New(fmt.Sprintf("%s/bin/kafka-preferred-replica-election.sh", ctx.KafkaHome()),
		fmt.Sprintf("--zookeeper %s", this.zkcluster.ZkConnectAddr()),
		fmt.Sprintf("--path-to-json-file %s", preferredReplicaJsonFile),
	)
	err := cmd.Open()
	if err != nil {
		return
	}
	defer cmd.Close()

	scanner := bufio.NewScanner(cmd.Reader())
	scanner.Split(bufio.ScanLines)
	for scanner.Scan() {
		this.Ui.Output(color.Yellow(scanner.Text()))
	}
}
Example #9
func (this *Migrate) verify() {
	cmd := pipestream.New(fmt.Sprintf("%s/bin/kafka-reassign-partitions.sh", ctx.KafkaHome()),
		fmt.Sprintf("--zookeeper %s", this.zkcluster.ZkConnectAddr()),
		fmt.Sprintf("--reassignment-json-file %s", reassignNodeFilename),
		fmt.Sprintf("--verify"),
	)
	err := cmd.Open()
	if err != nil {
		return
	}
	defer cmd.Close()

	scanner := bufio.NewScanner(cmd.Reader())
	scanner.Split(bufio.ScanLines)
	for scanner.Scan() {
		if strings.Contains(scanner.Text(), "successfully") {
			this.Ui.Info(scanner.Text())
		} else {
			this.Ui.Warn(scanner.Text())
		}
	}

}
Example #10
// TODO
// 1. broker id assignment
// 2. port assignment
func (this *Deploy) Run(args []string) (exitCode int) {
	cmdFlags := flag.NewFlagSet("deploy", flag.ContinueOnError)
	cmdFlags.Usage = func() { this.Ui.Output(this.Help()) }
	cmdFlags.StringVar(&this.zone, "z", "", "")
	cmdFlags.StringVar(&this.cluster, "c", "", "")
	cmdFlags.StringVar(&this.kafkaBaseDir, "kafka.base", ctx.KafkaHome(), "")
	cmdFlags.StringVar(&this.brokerId, "broker.id", "", "")
	cmdFlags.StringVar(&this.tcpPort, "port", "", "")
	cmdFlags.StringVar(&this.rootPah, "root", "/var/wd", "")
	cmdFlags.StringVar(&this.ip, "ip", "", "")
	cmdFlags.StringVar(&this.logDirs, "log.dirs", "", "")
	cmdFlags.StringVar(&this.runAs, "user", "sre", "")
	cmdFlags.StringVar(&this.uninstall, "uninstall", "", "")
	cmdFlags.BoolVar(&this.demoMode, "demo", false, "")
	cmdFlags.BoolVar(&this.installKafkaOnly, "kfkonly", false, "")
	cmdFlags.BoolVar(&this.dryRun, "dryrun", true, "")
	cmdFlags.StringVar(&this.influxDbAddr, "influx", "", "")
	cmdFlags.StringVar(&this.kafkaVer, "ver", "2.10-0.8.2.2", "")
	if err := cmdFlags.Parse(args); err != nil {
		return 1
	}

	if this.uninstall != "" {
		serverProperties := fmt.Sprintf("%s/config/server.properties", this.uninstall)
		lines, err := gio.ReadLines(serverProperties)
		if err != nil {
			this.Ui.Error(err.Error())
			return 2
		}
		var logDirs []string
		for _, line := range lines {
			if strings.HasPrefix(line, "log.dirs") {
				parts := strings.SplitN(line, "=", 2)
				logDirs = strings.Split(parts[1], ",")
				break
			}
		}
		if len(logDirs) == 0 {
			this.Ui.Error("empty log.dirs")
			return 2
		}

		for _, logDir := range logDirs {
			this.Ui.Output(fmt.Sprintf("rm -rf %s", logDir))
		}
		name := filepath.Base(this.uninstall)
		this.Ui.Output(fmt.Sprintf("chkconfig --del %s", name))
		this.Ui.Output(fmt.Sprintf("rm -f /etc/init.d/%s", name))
		this.Ui.Output(fmt.Sprintf("rm -rf %s", this.uninstall))
		return 0
	}

	if !ctx.CurrentUserIsRoot() {
		this.Ui.Error("requires root priviledges!")
		return 1
	}

	if !strings.HasSuffix(this.kafkaBaseDir, this.kafkaVer) {
		this.Ui.Error(fmt.Sprintf("kafka.base[%s] does not match ver[%s]",
			this.kafkaBaseDir, this.kafkaVer))
		return 1
	}

	if this.installKafkaOnly {
		this.installKafka()
		return
	}

	if validateArgs(this, this.Ui).
		require("-z", "-c").
		invalid(args) {
		return 2
	}

	this.zkzone = zk.NewZkZone(zk.DefaultConfig(this.zone, ctx.ZoneZkAddrs(this.zone)))
	clusters := this.zkzone.Clusters()
	zkchroot, present := clusters[this.cluster]
	if !present {
		this.Ui.Error(fmt.Sprintf("run 'gk clusters -z %s -add %s -p $zkchroot' first!",
			this.zone, this.cluster))
		return 1
	}

	var err error
	this.userInfo, err = user.Lookup(this.runAs)
	swallow(err)

	if this.demoMode {
		this.demo()
		return
	}

	if validateArgs(this, this.Ui).
		require("-broker.id", "-port", "-ip", "-log.dirs").
		invalid(args) {
		return 2
	}

	if this.dryRun {
		this.Ui.Output(fmt.Sprintf("mkdir %s/logs and chown to %s",
			this.instanceDir(), this.runAs))
	}
	err = os.MkdirAll(fmt.Sprintf("%s/logs", this.instanceDir()), 0755)
	swallow(err)
	chown(fmt.Sprintf("%s/logs", this.instanceDir()), this.userInfo)

	invalidDir := this.validateLogDirs(this.logDirs)
	if invalidDir != "" {
		this.Ui.Error(fmt.Sprintf("%s in log.dirs not exists!", invalidDir))
		return 2
	}

	// prepare the root directory
	this.rootPah = strings.TrimSuffix(this.rootPah, "/")
	if this.dryRun {
		this.Ui.Output(fmt.Sprintf("mkdir %s/bin and chown to %s",
			this.instanceDir(), this.runAs))
	}
	err = os.MkdirAll(fmt.Sprintf("%s/bin", this.instanceDir()), 0755)
	swallow(err)
	chown(fmt.Sprintf("%s/bin", this.instanceDir()), this.userInfo)

	if this.dryRun {
		this.Ui.Output(fmt.Sprintf("mkdir %s/config and chown to %s",
			this.instanceDir(), this.runAs))
	}
	err = os.MkdirAll(fmt.Sprintf("%s/config", this.instanceDir()), 0755)
	swallow(err)
	chown(fmt.Sprintf("%s/config", this.instanceDir()), this.userInfo)

	type templateVar struct {
		KafkaBase             string
		BrokerId              string
		TcpPort               string
		Ip                    string
		User                  string
		ZkChroot              string
		ZkAddrs               string
		InstanceDir           string
		LogDirs               string
		IoThreads             string
		NetworkThreads        string
		InfluxReporterEnabled string
		InfluxDbHost          string
		InfluxDbPort          string
	}
	if this.influxDbAddr != "" {
		this.influxDbHost, this.influxdbPort, err = net.SplitHostPort(this.influxDbAddr)
		if err != nil {
			this.Ui.Error(err.Error())
			return 2
		}
		if this.influxDbHost == "" || this.influxdbPort == "" {
			this.Ui.Error("empty influxdb host or port")
			return 2
		}
	}

	influxReporterEnabled := "false"
	if this.influxDbHost != "" {
		influxReporterEnabled = "true"
	}
	data := templateVar{
		ZkAddrs:               this.zkzone.ZkAddrs(),
		ZkChroot:              zkchroot,
		KafkaBase:             this.kafkaBaseDir,
		BrokerId:              this.brokerId,
		Ip:                    this.ip,
		InstanceDir:           this.instanceDir(),
		User:                  this.runAs,
		TcpPort:               this.tcpPort,
		LogDirs:               this.logDirs,
		InfluxReporterEnabled: influxReporterEnabled,
		InfluxDbHost:          this.influxDbHost,
		InfluxDbPort:          this.influxdbPort,
	}
	data.IoThreads = strconv.Itoa(3 * len(strings.Split(data.LogDirs, ",")))
	networkThreads := ctx.NumCPU() / 2
	if networkThreads < 2 {
		networkThreads = 2
	}
	data.NetworkThreads = strconv.Itoa(networkThreads) // TODO not used yet

	// create the log.dirs directory and chown to sre
	logDirs := strings.Split(this.logDirs, ",")
	for _, logDir := range logDirs {
		if this.dryRun {
			this.Ui.Output(fmt.Sprintf("mkdir %s and chown to %s",
				logDir, this.runAs))
		}

		swallow(os.MkdirAll(logDir, 0755))
		chown(logDir, this.userInfo)
	}

	// package the kafka runtime together
	if !gio.DirExists(this.kafkaLibDir()) {
		this.installKafka()
	}

	// bin
	writeFileFromTemplate("template/bin/kafka-topics.sh",
		fmt.Sprintf("%s/bin/kafka-topics.sh", this.instanceDir()), 0755, nil, this.userInfo)
	writeFileFromTemplate("template/bin/kafka-reassign-partitions.sh",
		fmt.Sprintf("%s/bin/kafka-reassign-partitions.sh", this.instanceDir()), 0755, nil, this.userInfo)
	writeFileFromTemplate("template/bin/kafka-preferred-replica-election.sh",
		fmt.Sprintf("%s/bin/kafka-preferred-replica-election.sh", this.instanceDir()), 0755, nil, this.userInfo)

	writeFileFromTemplate("template/bin/kafka-run-class.sh",
		fmt.Sprintf("%s/bin/kafka-run-class.sh", this.instanceDir()), 0755, data, this.userInfo)
	writeFileFromTemplate("template/bin/kafka-server-start.sh",
		fmt.Sprintf("%s/bin/kafka-server-start.sh", this.instanceDir()), 0755, data, this.userInfo)
	writeFileFromTemplate("template/bin/setenv.sh",
		fmt.Sprintf("%s/bin/setenv.sh", this.instanceDir()), 0755, data, this.userInfo)

	// /etc/init.d/
	writeFileFromTemplate("template/init.d/kafka",
		fmt.Sprintf("/etc/init.d/%s", this.clusterName()), 0755, data, nil)

	// config
	writeFileFromTemplate("template/config/server.properties",
		fmt.Sprintf("%s/config/server.properties", this.instanceDir()), 0644, data, this.userInfo)
	writeFileFromTemplate("template/config/log4j.properties",
		fmt.Sprintf("%s/config/log4j.properties", this.instanceDir()), 0644, data, this.userInfo)

	this.Ui.Warn(fmt.Sprintf("NOW, please run the following command:"))
	this.Ui.Output(color.Red("confirm log.retention.hours"))
	this.Ui.Output(color.Red("chkconfig --add %s", this.clusterName()))
	this.Ui.Output(color.Red("/etc/init.d/%s start", this.clusterName()))

	return
}