Example #1
func (d WorkerDistinctName) Run() {
	logger.Infof("[%s] worker started at %s", d.name, time.Now().UTC())

	currentDate := time.Now().UTC()
	// distinct_name:YYYY:MM (note: the month is stored zero-based, i.e. time.Month() - 1)
	monthlyBucketKey := "distinct_name:" + strconv.Itoa(currentDate.Year()) + ":" + strconv.Itoa(int(currentDate.Month())-1)
	// distinct_name:YYYY:MM:DD
	dailyBucketKey := monthlyBucketKey + ":" + strconv.Itoa(currentDate.Day())

	// Metrics older than 30 days are merged into a monthly bucket and then cleared.
	// The goroutine might fail, so it is run multiple times a day.
	go model.MergeToMonthlyBucket(dailyBucketKey, monthlyBucketKey)

	ch, err := RabbitChannel.Consume(
		Q_DISTINCT_NAME, // queue
		"",              // consumer
		false,           // auto-ack
		false,           // exclusive
		false,           // no-local
		false,           // no-wait
		nil,             // args
	)
	if err != nil {
		logger.Errf("[%s] Error while creating Consume channel, %s", d.name, err)
		return
	}

	for {
		select {

		case msg := <-ch:
			logger.Infof("[%s] Received metric: %s", d.name, msg.Body)
			metricData := new(model.MetricRedis)

			err = json.Unmarshal(msg.Body, metricData)
			if err != nil {
				logger.Errf("[%s] Error while doing JSON Unmarshal, %s", d.name, err)
				msg.Reject(true)
				break
			}

			err = metricData.Insert()
			if err != nil {
				logger.Errf("[%s] Error while Inserting data into Redis, %s", d.name, err)
				msg.Reject(true)
				break
			}
			msg.Ack(false)
			logger.Infof("[%s] Metric successfully added to Redis - %s", d.name, msg.Body)

		case <-time.After(1 * time.Minute):
			logger.Infof("[%s] Worker completed at %s", d.name, time.Now().UTC())
			return
		}
	}
}
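
The worker above only calls metricData.Insert() and leaves the key handling to the model package. A minimal sketch of what model.MetricRedis and its Insert method could look like is shown below, assuming the daily bucket is a Redis set of usernames keyed with the same distinct_name:YYYY:MM:DD layout built in Run, and assuming the redigo client (implied by the redis.Int call in MergeToMonthlyBucket further down). The struct fields, the SADD-based implementation, and the localhost dial are assumptions for illustration, not the project's actual code.

package model

import (
	"strconv"
	"time"

	"github.com/gomodule/redigo/redis" // assumed client, based on the redis.Int call in MergeToMonthlyBucket
)

// MetricRedis is an assumed shape for the message consumed by WorkerDistinctName.
type MetricRedis struct {
	Metric   string `json:"metric"`
	Username string `json:"username"`
	Count    int    `json:"count"`
}

// dailyBucketKey rebuilds the distinct_name:YYYY:MM:DD key used by the worker
// (month zero-based, matching the worker's key construction).
func dailyBucketKey(t time.Time) string {
	return "distinct_name:" + strconv.Itoa(t.Year()) + ":" +
		strconv.Itoa(int(t.Month())-1) + ":" + strconv.Itoa(t.Day())
}

// Insert adds the username to today's distinct-name set. SADD is idempotent,
// so a message redelivered after a Reject does not inflate the distinct count.
func (m *MetricRedis) Insert() error {
	// Placeholder for db.GetRedisConnection(); the real project presumably shares a pool.
	conn, err := redis.Dial("tcp", "localhost:6379")
	if err != nil {
		return err
	}
	defer conn.Close()

	_, err = conn.Do("SADD", dailyBucketKey(time.Now().UTC()), m.Username)
	return err
}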
Example #2
func (h WorkerAccountName) Run() {
	logger.Infof("[%s] worker started at %s", h.name, time.Now().UTC())

	ch, err := RabbitChannel.Consume(
		Q_ACCOUNT_NAME, // queue
		"",             // consumer
		false,          // auto-ack
		false,          // exclusive
		false,          // no-local
		false,          // no-wait
		nil,            // args
	)
	if err != nil {
		logger.Errf("[%s] Error while creating Consume channel, %s", h.name, err)
		return
	}

	for {
		select {

		case msg := <-ch:
			logger.Infof("[%s] Received metric: %s", h.name, msg.Body)
			metricData := new(model.MetricPg)

			err = json.Unmarshal(msg.Body, metricData)
			if err != nil {
				logger.Errf("[%s] Error while doing JSON Unmarshal, %s", h.name, err)
				msg.Reject(true)
				break
			}

			err = metricData.Insert()
			if err == nil {
				logger.Infof("[%s] Metric successfully added to psql - %s", h.name, msg.Body)
				msg.Ack(false)
			} else if strings.Contains(err.Error(), "duplicate key value violates unique constraint") {
				logger.Infof("[%s] %s already exists in psql", h.name, metricData.Username)
				msg.Ack(false)
			} else {
				logger.Errf("[%s] Error while Inserting data into psql, %s", h.name, err)
				msg.Reject(true)
			}
		case <-time.After(1 * time.Minute):
			logger.Infof("[%s] Worker completed at %s", h.name, time.Now().UTC())
			return
		}
	}
}
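
WorkerAccountName tolerates the Postgres duplicate-key error, which implies a unique constraint on the username column. A minimal sketch of what model.MetricPg and its Insert method might do under that assumption is below, using database/sql with the lib/pq driver; the table name, column names, and connection handling are all assumptions for illustration.

package model

import (
	"database/sql"
	"time"

	_ "github.com/lib/pq" // assumed Postgres driver
)

// pgDB stands in for however the real project shares its *sql.DB handle.
var pgDB *sql.DB

// InitPg opens the connection pool; the DSN is supplied by the caller.
func InitPg(dsn string) (err error) {
	pgDB, err = sql.Open("postgres", dsn)
	return err
}

// MetricPg is an assumed shape for the message consumed by WorkerAccountName.
type MetricPg struct {
	Metric    string    `json:"metric"`
	Username  string    `json:"username"`
	Count     int       `json:"count"`
	CreatedAt time.Time `json:"created_at"`
	UpdatedAt time.Time `json:"updated_at"`
}

// Insert writes one row; a duplicate username surfaces as the
// "duplicate key value violates unique constraint" error the worker treats as
// "already exists" and acks anyway.
func (m *MetricPg) Insert() error {
	_, err := pgDB.Exec(
		`INSERT INTO account_names (metric, username, count, created_at, updated_at)
		 VALUES ($1, $2, $3, $4, $5)`,
		m.Metric, m.Username, m.Count, m.CreatedAt, m.UpdatedAt,
	)
	return err
}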
Example #3
func (h WorkerHourlyLog) Run() {
	logger.Infof("[%s] worker started at %s", h.name, time.Now().UTC())

	ch, err := RabbitChannel.Consume(
		Q_HOURLY_LOG, // queue
		"",           // consumer
		false,        // auto-ack
		false,        // exclusive
		false,        // no-local
		false,        // no-wait
		nil,          // args
	)
	if err != nil {
		logger.Errf("[%s] Error while creating Consume channel in HourlyLog worker, %s", h.name, err)
		return
	}

	for {
		select {

		case msg := <-ch:
			logger.Infof("[%s] Received metric: %s", h.name, msg.Body)
			metricData := new(model.MetricMongo)
			metricData.Id = bson.NewObjectId()

			err = json.Unmarshal(msg.Body, metricData)
			if err != nil {
				logger.Errf("[%s] Error while doing JSON Unmarshal, %s", h.name, err)
				msg.Reject(true)
				break
			}

			err = metricData.Insert()
			if err != nil {
				logger.Errf("[%s] Error while Inserting data into Mongo, %s", h.name, err)
				msg.Reject(true)
				break
			}
			msg.Ack(false)
			logger.Infof("[%s] Metric successfully added to mongoDb - %s", h.name, msg.Body)

		case <-time.After(1 * time.Minute):
			logger.Infof("[%s] Worker completed at %s", h.name, time.Now().UTC())
			return
		}
	}
}
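
The bson.NewObjectId call implies the mgo driver. A minimal sketch of what model.MetricMongo and its Insert method might look like under that assumption follows; the session handling, database name, and collection name are illustrative guesses, not the project's actual code.

package model

import (
	"time"

	mgo "gopkg.in/mgo.v2"
	"gopkg.in/mgo.v2/bson"
)

// mongoSession stands in for however the real project shares its *mgo.Session.
var mongoSession *mgo.Session

// InitMongo dials MongoDB; the address is supplied by the caller.
func InitMongo(addr string) (err error) {
	mongoSession, err = mgo.Dial(addr)
	return err
}

// MetricMongo is an assumed shape for the message consumed by WorkerHourlyLog.
type MetricMongo struct {
	Id        bson.ObjectId `bson:"_id"        json:"id"`
	Metric    string        `bson:"metric"     json:"metric"`
	Username  string        `bson:"username"   json:"username"`
	Count     int           `bson:"count"      json:"count"`
	CreatedAt time.Time     `bson:"created_at" json:"created_at"`
}

// Insert writes one document, copying the session so the shared socket
// is not monopolized by a single consumer.
func (m *MetricMongo) Insert() error {
	s := mongoSession.Copy()
	defer s.Close()
	return s.DB("metrics").C("hourly_log").Insert(m)
}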
Example #4
func PostMetric(c *gin.Context) {
	metric := new(model.MetricStruct)
	err := c.BindJSON(metric)
	if err != nil {
		logger.Errf("Error while JSON Binding, %s", err)
		c.String(http.StatusBadRequest, "Invalid JSON, %s", err)
		return
	}

	if metric.Metric == "" || metric.Username == "" || metric.Count == 0 {
		logger.Errf("Invalid metric, %s", *metric)
		c.String(http.StatusBadRequest, "Invalid metric")
		return
	}

	metric.CreatedAt = time.Now().UTC()
	metric.UpdatedAt = metric.CreatedAt
	dataBytes, _ := json.Marshal(metric)

	err = worker.RabbitChannel.Publish(
		worker.E_METRIC_EXCHANGE, // exchange
		"",    // routing key
		false, // mandatory
		false, // immediate
		amqp.Publishing{
			DeliveryMode: amqp.Persistent,
			ContentType:  "text/plain",
			Body:         dataBytes,
		})

	if err != nil {
		logger.Errf("Failed to Publish Message %s, %s", *metric, err)
		c.String(http.StatusInternalServerError, "Failed to Publish Message.")
		return
	}

	logger.Infof("Message is published to exchange, %s", string(dataBytes))
	c.JSON(http.StatusOK, metric)
}
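
For reference, a minimal client-side sketch of calling this handler is shown below. It assumes the handler is mounted at POST /metric on localhost:8080 and that the JSON keys match the MetricStruct field names; only metric, username, and a non-zero count are required, since the handler fills in the timestamps itself.

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	payload := map[string]interface{}{
		"metric":   "login", // must be non-empty
		"username": "alice", // must be non-empty
		"count":    1,       // must be non-zero
	}
	body, _ := json.Marshal(payload)

	// Assumed route and port; adjust to wherever PostMetric is registered.
	resp, err := http.Post("http://localhost:8080/metric", "application/json", bytes.NewReader(body))
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	defer resp.Body.Close()

	// On success the handler echoes the metric back with server-side timestamps.
	fmt.Println("status:", resp.Status)
}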
Example #5
func MergeToMonthlyBucket(dailyBucketKey, monthlyBucketKey string) {
	count, err := redis.Int(db.GetRedisConnection().Do("SCARD", dailyBucketKey))
	if err != nil {
		logger.Errf("Error during SCARD %s", err.Error())
		return
	}
	if count == 0 {
		return
	}

	logger.Debugf("Merge Daily Bucket %s -> Monthly Bucket %s", dailyBucketKey, monthlyBucketKey)
	_, err = db.GetRedisConnection().Do("SUNIONSTORE", monthlyBucketKey, monthlyBucketKey, dailyBucketKey)
	if err != nil {
		logger.Errf("Error during SUNIONSTORE %s", err.Error())
		return
	}

	db.GetRedisConnection().Do("DEL", dailyBucketKey)
}
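
Because the monthly set is built with SUNIONSTORE over itself plus the daily set, and the SCARD guard skips empty daily buckets, the merge is safe to run repeatedly, which is why WorkerDistinctName fires it several times a day. The standalone sketch below reproduces the same guard/merge/clear pattern against a local Redis; the keys and the localhost address are assumptions for illustration only.

package main

import (
	"fmt"
	"log"

	"github.com/gomodule/redigo/redis"
)

func main() {
	conn, err := redis.Dial("tcp", "localhost:6379")
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	// Same distinct_name:YYYY:MM:DD layout as WorkerDistinctName (month zero-based).
	daily := "distinct_name:2024:0:15"
	monthly := "distinct_name:2024:0"

	conn.Do("SADD", daily, "alice", "bob")

	// Guard, merge, clear - mirrors MergeToMonthlyBucket.
	if n, _ := redis.Int(conn.Do("SCARD", daily)); n > 0 {
		conn.Do("SUNIONSTORE", monthly, monthly, daily)
		conn.Do("DEL", daily)
	}

	total, _ := redis.Int(conn.Do("SCARD", monthly))
	fmt.Println("distinct names this month:", total)
}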