func calcDailyBytesServed(client influx.Client, bp influx.BatchPoints, startTime time.Time, endTime time.Time, config StartupConfig) {
	bytesToTerabytes := 1000000000.00
	sampleTimeSecs := 60.00
	bitsTobytes := 8.00
	queryString := fmt.Sprintf(`select mean(value) from "monthly"."bandwidth.cdn.1min" where time > '%s' and time < '%s' group by time(1m), cdn`, startTime.Format(time.RFC3339), endTime.Format(time.RFC3339))
	log.Infof("queryString = %v\n", queryString)
	res, err := queryDB(client, queryString, "cache_stats")
	if err != nil {
		log.Error("An error occured getting max bandwidth!\n")
		return
	}
	if res != nil && len(res[0].Series) > 0 {
		for _, row := range res[0].Series {
			bytesServed := float64(0)
			cdn := row.Tags["cdn"]
			for _, record := range row.Values {
				if record[1] != nil {
					value, err := record[1].(json.Number).Float64()
					if err != nil {
						log.Errorf("Couldn't parse value from record %v\n", record)
						continue
					}
					bytesServed += value * sampleTimeSecs / bitsTobytes
				}
			}
			bytesServedTB := bytesServed / bytesToTerabytes
			log.Infof("TBytes served for cdn %v = %v", cdn, bytesServedTB)
			//write to Traffic Ops
			var statsSummary traffic_ops.StatsSummary
			statsSummary.CDNName = cdn
			statsSummary.DeliveryService = "all"
			statsSummary.StatName = "daily_bytesserved"
			statsSummary.StatValue = strconv.FormatFloat(bytesServedTB, 'f', 2, 64)
			statsSummary.SummaryTime = time.Now().Format(time.RFC3339)
			statsSummary.StatDate = startTime.Format("2006-01-02")
			go writeSummaryStats(config, statsSummary)
			//write to Influxdb
			tags := map[string]string{"cdn": cdn, "deliveryservice": "all"}
			fields := map[string]interface{}{
				"value": bytesServedTB, //converted to TB
			}
			pt, err := influx.NewPoint(
				"daily_bytesserved",
				tags,
				fields,
				startTime,
			)
			if err != nil {
				log.Errorf("error adding creating data point for max Gbps...%v\n", err)
				continue
			}
			bp.AddPoint(pt)
		}
		config.BpsChan <- bp
	}
}
func calcDailyMaxGbps(client influx.Client, bp influx.BatchPoints, startTime time.Time, endTime time.Time, config StartupConfig) {
	kilobitsToGigabits := 1000000.00
	queryString := fmt.Sprintf(`select time, cdn, max(value) from "monthly"."bandwidth.cdn.1min" where time > '%s' and time < '%s' group by cdn`, startTime.Format(time.RFC3339), endTime.Format(time.RFC3339))
	log.Infof("queryString = %v\n", queryString)
	res, err := queryDB(client, queryString, "cache_stats")
	if err != nil {
		log.Errorf("An error occured getting max bandwidth! %v\n", err)
		return
	}
	if res != nil && len(res[0].Series) > 0 {
		for _, row := range res[0].Series {
			for _, record := range row.Values {
				t := record[0].(string)
				if record[1] != nil {
					cdn := record[1].(string)
					value, err := record[2].(json.Number).Float64()
					if err != nil {
						log.Errorf("Couldn't parse value from record %v\n", record)
						continue
					}
					value = value / kilobitsToGigabits
					statTime, _ := time.Parse(time.RFC3339, t)
					log.Infof("max gbps for cdn %v = %v", cdn, value)
					var statsSummary traffic_ops.StatsSummary
					statsSummary.CDNName = cdn
					statsSummary.DeliveryService = "all"
					statsSummary.StatName = "daily_maxgbps"
					statsSummary.StatValue = strconv.FormatFloat(value, 'f', 2, 64)
					statsSummary.SummaryTime = time.Now().Format(time.RFC3339)
					statsSummary.StatDate = statTime.Format("2006-01-02")
					go writeSummaryStats(config, statsSummary)

					//write to influxdb
					tags := map[string]string{"cdn": cdn, "deliveryservice": "all"}
					fields := map[string]interface{}{
						"value": value,
					}
					pt, err := influx.NewPoint(
						"daily_maxgbps",
						tags,
						fields,
						statTime,
					)
					if err != nil {
						fmt.Printf("error adding creating data point for max Gbps...%v\n", err)
						continue
					}
					bp.AddPoint(pt)
				}
			}
		}
	}
	config.BpsChan <- bp
}
func calcDailySummary(now time.Time, config StartupConfig, runningConfig RunningConfig) {
	log.Infof("lastSummaryTime is %v", runningConfig.LastSummaryTime)
	if runningConfig.LastSummaryTime.Day() != now.Day() {
		startTime := now.Truncate(24 * time.Hour).Add(-24 * time.Hour)
		endTime := startTime.Add(24 * time.Hour)
		log.Info("Summarizing from ", startTime, " (", startTime.Unix(), ") to ", endTime, " (", endTime.Unix(), ")")

		// influx connection
		influxClient, err := influxConnect(config, runningConfig)
		if err != nil {
			log.Error("Could not connect to InfluxDb to get daily summary stats!!")
			errHndlr(err, ERROR)
			return
		}

		//create influxdb query
		q := fmt.Sprintf("SELECT sum(value)/6 FROM bandwidth where time > '%s' and time < '%s' group by time(60s), cdn fill(0)", startTime.Format(time.RFC3339), endTime.Format(time.RFC3339))
		log.Infof(q)
		res, err := queryDB(influxClient, q, "cache_stats")
		if err != nil {
			errHndlr(err, ERROR)
			return
		}

		bp, _ := influx.NewBatchPoints(influx.BatchPointsConfig{
			Database:        "daily_stats",
			Precision:       "s",
			RetentionPolicy: config.DailySummaryRetentionPolicy,
		})
		for _, row := range res[0].Series {
			prevtime := startTime
			max := float64(0)
			bytesServed := float64(0)
			cdn := row.Tags["cdn"]
			for _, record := range row.Values {
				kbps, err := record[1].(json.Number).Float64()
				if err != nil {
					errHndlr(err, ERROR)
					continue
				}
				sampleTime, err := time.Parse(time.RFC3339, record[0].(string))
				if err != nil {
					errHndlr(err, ERROR)
					continue
				}
				max = FloatMax(max, kbps)
				duration := sampleTime.Unix() - prevtime.Unix()
				bytesServed += float64(duration) * kbps / 8
				prevtime = sampleTime
			}
			maxGbps := max / 1000000
			bytesServedTb := bytesServed / 1000000000
			log.Infof("max gbps for cdn %v = %v", cdn, maxGbps)
			log.Infof("Tbytes served for cdn %v = %v", cdn, bytesServedTb)

			//write daily_maxgbps in traffic_ops
			var statsSummary traffic_ops.StatsSummary
			statsSummary.CdnName = cdn
			statsSummary.DeliveryService = "all"
			statsSummary.StatName = "daily_maxgbps"
			statsSummary.StatValue = strconv.FormatFloat(maxGbps, 'f', 2, 64)
			statsSummary.SummaryTime = now.Format(time.RFC3339)
			statsSummary.StatDate = startTime.Format("2006-01-02")
			go writeSummaryStats(config, statsSummary)

			tags := map[string]string{
				"deliveryservice": statsSummary.DeliveryService,
				"cdn":             statsSummary.CdnName,
			}

			fields := map[string]interface{}{
				"value": maxGbps,
			}
			pt, err := influx.NewPoint(
				statsSummary.StatName,
				tags,
				fields,
				startTime,
			)
			if err != nil {
				errHndlr(err, ERROR)
				continue
			}
			bp.AddPoint(pt)

			// write bytes served data to traffic_ops
			statsSummary.StatName = "daily_bytesserved"
			statsSummary.StatValue = strconv.FormatFloat(bytesServedTb, 'f', 2, 64)
			go writeSummaryStats(config, statsSummary)

			pt, err = influx.NewPoint(
				statsSummary.StatName,
				tags,
				fields,
				startTime,
			)
			if err != nil {
				errHndlr(err, ERROR)
				continue
			}
			bp.AddPoint(pt)
		}
		config.BpsChan <- bp
		log.Info("Collected daily stats @ ", now)
	}
}
func main() {
	configFile := flag.String("cfg", "", "The config file")
	test := flag.Bool("test", false, "Test mode")
	flag.Parse()
	file, err := os.Open(*configFile)
	errHndlr(err, FATAL)
	decoder := json.NewDecoder(file)
	config := &StartupConfig{}
	err = decoder.Decode(&config)
	errHndlr(err, FATAL)
	pollingInterval := 60
	if config.DailySummaryPollingInterval > 0 {
		pollingInterval = config.DailySummaryPollingInterval
	}

	logger, err := log.LoggerFromConfigAsFile(config.SeelogConfig)
	defer log.Flush()
	if err != nil {
		panic("error reading Seelog config " + config.SeelogConfig)
	}
	fmt.Println("Replacing logger, see log file according to " + config.SeelogConfig)
	if *test {
		fmt.Println("WARNING: test mode is on!")
	}
	log.ReplaceLogger(logger)

	runtime.GOMAXPROCS(runtime.NumCPU())

	t := time.Now().Add(-86400 * time.Second)
	startTime := time.Date(t.Year(), t.Month(), t.Day(), 0, 0, 0, 0, t.Location()) // reset to start of yesterday 00:00::00
	endTime := startTime.Add(86400 * time.Second)
	formatStartTime := startTime.Format("2006-01-02T15:04:05-00:00")
	formatEndTime := endTime.Format("2006-01-02T15:04:05-00:00")
	endUTime := endTime.Unix()
	startUTime := startTime.Unix()
	pts := make([]influx.Point, 0)

	<-time.NewTimer(time.Now().Truncate(time.Duration(pollingInterval) * time.Second).Add(time.Duration(pollingInterval) * time.Second).Sub(time.Now())).C
	tickerChan := time.Tick(time.Duration(pollingInterval) * time.Second)
	for now := range tickerChan {
		//get TrafficOps Data
		trafOpsData, err := getToData(config, false)
		if err != nil {
			errHndlr(err, FATAL)
		}
		lastSummaryTime, err := time.Parse("2006-01-02 15:04:05", trafOpsData.LastSummaryTime)
		if err != nil {
			errHndlr(err, ERROR)
		}
		log.Infof("lastSummaryTime is %v", lastSummaryTime)
		if lastSummaryTime.Day() != now.Day() {
			log.Info("Summarizing from ", startTime, " (", startUTime, ") to ", endTime, " (", endUTime, ")")
			// influx connection
			influxClient, err := influxConnect(config, trafOpsData)
			if err != nil {
				log.Error("Could not connect to InfluxDb to get daily summary stats!!")
				errHndlr(err, ERROR)
				continue
			}
			//create influxdb query
			log.Infof("SELECT sum(value)/6 FROM bandwidth where time > '%v' and time < '%v' group by time(60s), cdn fill(0)", formatStartTime, formatEndTime)
			q := fmt.Sprintf("SELECT sum(value)/6 FROM bandwidth where time > '%v' and time < '%v' group by time(60s), cdn fill(0)", formatStartTime, formatEndTime)
			res, err := queryDB(influxClient, q, "cache_stats")
			if err != nil {
				fmt.Printf("err = %v\n", err)
				errHndlr(err, ERROR)
				continue
			}
			//loop throgh series
			for _, row := range res[0].Series {
				prevUtime := startUTime
				var cdn string
				max := float64(0)
				bytesServed := float64(0)
				cdn = row.Tags["cdn"]
				for _, record := range row.Values {
					kbps, err := record[1].(json.Number).Float64()
					if err != nil {
						errHndlr(err, ERROR)
						continue
					}
					sampleTime, err := time.Parse("2006-01-02T15:04:05Z", record[0].(string))
					if err != nil {
						errHndlr(err, ERROR)
						continue
					}
					sampleUTime := sampleTime.Unix()
					if kbps > max {
						max = kbps
					}
					duration := sampleUTime - prevUtime
					bytesServed += float64(duration) * kbps / 8
					prevUtime = sampleUTime
				}
				maxGbps := max / 1000000
				bytesServedTb := bytesServed / 1000000000
				log.Infof("max gbps for cdn %v = %v", cdn, maxGbps)
				log.Infof("Tbytes served for cdn %v = %v", cdn, bytesServedTb)
				//write daily_maxgbps in traffic_ops
				var statsSummary traffic_ops.StatsSummary
				statsSummary.CdnName = cdn
				statsSummary.DeliveryService = "all"
				statsSummary.StatName = "daily_maxgbps"
				statsSummary.StatValue = strconv.FormatFloat(maxGbps, 'f', 2, 64)
				statsSummary.SummaryTime = now.Format("2006-01-02 15:04:05")
				statsSummary.StatDate = startTime.Format("2006-01-02")
				err = writeSummaryStats(config, statsSummary)
				if err != nil {
					log.Error("Could not store daily_maxgbps stats in traffic ops!")
					errHndlr(err, ERROR)
				}
				//write to influxdb
				pts = append(pts,
					influx.Point{
						Measurement: statsSummary.StatName,
						Tags: map[string]string{
							"deliveryservice": statsSummary.DeliveryService,
							"cdn":             statsSummary.CdnName,
						},
						Fields: map[string]interface{}{
							"value": maxGbps,
						},
						Time:      startTime,
						Precision: "s",
					},
				)
				//write bytes served data to traffic_ops
				statsSummary.StatName = "daily_bytesserved"
				statsSummary.StatValue = strconv.FormatFloat(bytesServedTb, 'f', 2, 64)
				err = writeSummaryStats(config, statsSummary)
				if err != nil {
					log.Error("Could not store daily_bytesserved stats in traffic ops!")
					errHndlr(err, ERROR)
				}
				pts = append(pts,
					influx.Point{
						Measurement: statsSummary.StatName,
						Tags: map[string]string{
							"deliveryservice": statsSummary.DeliveryService,
							"cdn":             statsSummary.CdnName,
						},
						Fields: map[string]interface{}{
							"value": bytesServedTb,
						},
						Time:      startTime,
						Precision: "s",
					},
				)
			}
			log.Infof("Writing daily stats to influxDb")
			bps := influx.BatchPoints{
				Points:          pts,
				Database:        "daily_stats",
				RetentionPolicy: "daily_stats",
			}
			_, err = influxClient.Write(bps)
			if err != nil {
				errHndlr(err, ERROR)
			}
		}
	}
}