//Tss vs Duration func tvd(user types.UserSettings, filter Filter) ([]Tvd, string) { user_id := user.Id tvd_data_points := make([]Tvd_data_point, 0) tvd_data := make([]Tvd, 0) var user_data types.Metrics var end_summary_json []byte var activity_start time.Time var tvdLegend string cluster := gocql.NewCluster(config.DbHost) cluster.Keyspace = "joulepersecond" cluster.Consistency = gocql.Quorum session, _ := cluster.CreateSession() defer session.Close() //get all of the user's data (at least all for now) TODO limit these queries by date if poss. Done! timeNow := time.Now() timeThen := timeNow.AddDate(0, 0, -filter.Historylen) iter := session.Query(`SELECT activity_start, end_summary_json FROM joulepersecond.user_activity WHERE user_id = ? AND activity_start > ? ORDER BY activity_start ASC`, user_id, timeThen).Iter() for iter.Scan(&activity_start, &end_summary_json) { var tvd_data_point Tvd_data_point json.Unmarshal(end_summary_json, &user_data) tvd_data_point.Date = user_data.StartTime tvd_data_point.Dur = user_data.Dur if user_data.Utss > 0 { tvd_data_point.Tss = user_data.Utss } else if user_data.Tss > 0 { tvd_data_point.Tss = user_data.Tss } else if user_data.Etss > 0 { tvd_data_point.Tss = user_data.Etss } else { tvd_data_point.Tss = 0 } tvd_data_points = append(tvd_data_points, tvd_data_point) } //we now have all the data... 
Now sort it sumTss := 0 var sumDur time.Duration var lastActivity time.Time //loope through each retrieved activity for i := 1; i < len(tvd_data_points); i++ { //set last activity on first iteration only if i == 1 { lastActivity = tvd_data_points[i].Date } //if we want to show data in monthly format if filter.Historylen > 366 { //show over 365 days as monthly thisDate := tvd_data_points[i].Date prevDate := lastActivity //if we're still in the current month sum these values if thisDate.Month() != prevDate.Month() || i == len(tvd_data_points)-1 { var summedMonthlyTvd Tvd summedMonthlyTvd.TotalTss = sumTss summedMonthlyTvd.TotalDur = utility.Round(sumDur.Hours(), .5, 2) var month time.Month var year string if thisDate.Month() != prevDate.Month() { month = prevDate.Month() year = strconv.Itoa(prevDate.Year()) } else { month = thisDate.Month() year = strconv.Itoa(thisDate.Year()) } monthStr := month.String() summedMonthlyTvd.TimeLabel = monthStr[0:3] + " '" + year[2:4] sumTss = 0 sumDur = 0 tvd_data = append(tvd_data, summedMonthlyTvd) //reset the last activity date for the next loop lastActivity = thisDate } sumTss += tvd_data_points[i].Tss sumDur += tvd_data_points[i].Dur tvdLegend = "By Month" } else { thisDate := tvd_data_points[i].Date //we use this to compare the activity being scanned with the last to see if it is in the same week prevDate := lastActivity //this is the last activity that we scanned that was the first of the new week, last week. Confusing init? 
we have to get the value now, and change if the weeks are not equal (new week) prevIterDate := tvd_data_points[i-1].Date //get week number for this and last activity _, thisDateWeek := thisDate.ISOWeek() prevDateYear, prevDateWeek := prevDate.ISOWeek() if thisDateWeek != prevDateWeek || i == len(tvd_data_points)-1 { var summedWeeklyTvd Tvd summedWeeklyTvd.TotalTss = sumTss summedWeeklyTvd.TotalDur = utility.Round(sumDur.Hours(), .5, 2) monthS := prevDate.Month() var monthF time.Month var dayF string if thisDateWeek != prevDateWeek { monthF = prevIterDate.Month() dayF = strconv.Itoa(prevIterDate.Day()) } else { monthF = thisDate.Month() dayF = strconv.Itoa(thisDate.Day()) } dayS := strconv.Itoa(prevDate.Day()) monthStrS := monthS.String() monthAbrS := monthStrS[0:3] monthStrF := monthF.String() monthAbrF := monthStrF[0:3] //format labels according to number to displau if filter.Historylen < 120 { summedWeeklyTvd.TimeLabel = dayS + " " + monthAbrS + " - " + dayF + " " + monthAbrF } else { summedWeeklyTvd.TimeLabel = dayS + " " + monthAbrS } sumTss = 0 sumDur = 0 tvd_data = append(tvd_data, summedWeeklyTvd) //reset the last activity date for the next loop lastActivity = thisDate } //sum the values sumTss += tvd_data_points[i].Tss sumDur += tvd_data_points[i].Dur tvdLegend = "By week number: Series ending wk" + strconv.Itoa(prevDateWeek) + ", " + strconv.Itoa(prevDateYear) } } return tvd_data, tvdLegend }
// processActivity converts the raw per-second samples of one activity into
// processed rows, power/heart/cadence time series, per-lap and whole-activity
// summary metrics, a critical-power curve, normalised power, intensity
// factors, TSS/estimated-TSS and energy estimates, then persists everything
// via saveProcessed. All averaging assumes a 1 Hz sample frequency.
func processActivity(activityId string, user types.UserSettings) {
	//define one second
	second := time.Second
	//data is now of type []map[string]interface {}
	data := getResults(activityId)
	//vars to hold a single data row (instances of our struct types)
	var row SampleRow
	var lapSummary types.Metrics
	var endSummary types.Metrics
	//slices to hold multiple rows
	rows := make([]SampleRow, 0)
	//and lap summary data
	lapSummaries := make([]types.Metrics, 0)
	//cp data
	cpRows := make([]CpRow, 0)
	//init vars
	var activityStart time.Time      //first lap start time
	var activity Samples             //aggregated activity samples
	var lap Samples                  //aggregated lap samples
	var laptime time.Time            //last iteration's lap time marker
	var sampletime time.Time         //last iteration's sample time
	var sampleDistance time.Duration //time duration between this and last iteration's sample time
	//var timeSubtract time.Duration //total time to subtract from sample time to give continuous line when using continuous axes
	var pedalcount int //temporary var storing number of samples with cadence value of 0
	var ElapsedTime time.Duration
	hasPower := true
	hasHeart := true
	hasCadence := true
	powerSeries := make([]int, 0)   //power time series data
	heartSeries := make([]int, 0)   //heart rate time series data
	cadenceSeries := make([]int, 0) //cadence time series data
	//see http://golang.org/pkg/time/#example_Parse
	// NOTE(review): layout is currently unused — candidate for removal.
	const layout = "15:04:05"
	for _, val := range data {
		//val is of type map[string]interface {}; each field must be asserted
		//to its concrete type before use in calculations etc.
		row.Heartrate = val["tp_heartrate"].(int)
		row.Power = val["tp_watts"].(int)
		row.Cadence = val["tp_cadence"].(int)
		row.Lapnumber = val["lap_number"].(int)
		row.Lapstart = val["lap_start"].(time.Time)
		//set the activity start time to that of the first lap
		if activityStart.IsZero() {
			activityStart = row.Lapstart
		}
		row.Timestamp = val["tp_timestamp"].(time.Time)
		//remember: timestamp.Sub(other) returns type time.Duration
		//subtract last sample time from this sample time to give a remainder duration
		sampleDistance = row.Timestamp.Sub(sampletime)
		//closure summing metrics; repeated for each 'missing data point'
		sum := func() {
			//sum some of the metrics for averaging
			activity.Power += row.Power
			activity.Hr += row.Heartrate
			activity.Cad += row.Cadence
			//don't add to the average cadence val when freewheeling
			// NOTE(review): this tests the running total activity.Cad rather
			// than row.Cadence, and activity.Freewheelcount is incremented
			// again below when row.Cadence == 0 — possible double count;
			// confirm intended before relying on Freewheelcount.
			if activity.Cad == 0 {
				activity.Freewheelcount++
			}
			activity.Samplecount++
			//and for each lap too...
			lap.Power += row.Power
			lap.Hr += row.Heartrate
			lap.Cad += row.Cadence
			//don't add to the average cadence val when freewheeling
			if row.Cadence == 0 {
				lap.Freewheelcount++
				activity.Freewheelcount++
			}
			lap.Samplecount++
		}
		//closure zeroing this row's metrics while still counting the sample
		setZero := func() {
			row.Power = 0
			row.Heartrate = 0
			row.Cadence = 0
			lap.Freewheelcount++
			activity.Freewheelcount++
			activity.Samplecount++
			lap.Samplecount++
		}
		//if time period is set and is greater than n seconds
		// NOTE(review): a gap exactly equal to user.Stopgap matches neither
		// branch and the sample is dropped — confirm intended.
		if ((sampleDistance / second) > user.Stopgap) && !(sampletime.IsZero()) {
			//check for error
			//fmt.Printf("Seconds: %v \n", sampleDistance)
		} else if ((sampleDistance / second) < user.Stopgap) && !(sampletime.IsZero()) {
			/**
			 * Add samples / and missing samples if required by user (up until user defined stop duration)
			 **/
			//define one second duration (shadows the outer `second` with the same value)
			second := time.Second
			//get number of seconds between samples (all based on 1hz sample frequency)
			seconds := int(sampleDistance / second)
			for i := 0; i < seconds; i++ {
				//only add samples up until the user defined number of seconds
				if i < int(user.Stopgap) {
					//fmt.Printf("adding second %d\n", i)
					//add the missing samples
					if user.Autofill == "autofill" {
						sum()
					} else if user.Autofill == "setzero" {
						//only zero for more than 1 second else we'd never have any data!
						if seconds > 1 {
							setZero()
						} else {
							sum()
						}
					}
					//add a second for each second past, but not if autofill set to 'remove'
					if (user.Autofill == "autofill" || user.Autofill == "setzero") || (user.Autofill == "remove" && seconds == 1) {
						ElapsedTime += second
						//NewTimestamp is the continuous h/m/s position on the chart axis
						row.NewTimestamp[0] = int(ElapsedTime.Hours())
						row.NewTimestamp[1] = int(ElapsedTime.Minutes()) % 60
						row.NewTimestamp[2] = int(ElapsedTime.Seconds()) % 60
						if user.Autofill == "remove" && seconds == 1 {
							sum()
						}
						//add this row's data to the slice
						rows = append(rows, row)
						//add the values to each time series
						powerSeries = append(powerSeries, row.Power)
						heartSeries = append(heartSeries, row.Heartrate)
						cadenceSeries = append(cadenceSeries, row.Cadence)
					}
				}
			}
		}
		sampletime = row.Timestamp //this might need to move position
		//if a new lap: flush the lap accumulator into a summary and reset it
		if row.Lapstart != laptime && lap.Samplecount > 0 && !(laptime.IsZero()) {
			//calculate lap totals
			lapSummary.Avpower = lap.Power / lap.Samplecount
			lapSummary.Avheart = lap.Hr / lap.Samplecount
			lapSummary.Dur = time.Duration(lap.Samplecount) * time.Second
			//samples where the rider was actually pedalling
			pedalcount = (lap.Samplecount - lap.Freewheelcount)
			if pedalcount > 0 {
				lapSummary.Avcad = lap.Cad / (lap.Samplecount - lap.Freewheelcount)
			} else {
				lapSummary.Avcad = 0
			}
			//append the summary lap data
			lapSummaries = append(lapSummaries, lapSummary)
			//reset lap
			laptime = row.Lapstart
			lap.Samplecount = 0
			lap.Freewheelcount = 0
			lap.Power = 0
			lap.Hr = 0
			lap.Cad = 0
		}
		if laptime.IsZero() {
			laptime = row.Lapstart
		}
	}
	//get the number of samples (these are already processed and are at one second intervals)
	seriesLen := len(rows)
	//***would be good to save this data in cassandra***
	activityDuration := (time.Duration(seriesLen) * time.Second).Hours() //and this
	/***
	 * Critical power
	 ***/
	var cpms types.CPMs
	var cpRow CpRow
	var maxCpVal int
	var maxCpHrVal int
	var maxCpCadVal int
	var sumCpVal int
	var sumHrVal int
	var sumCadVal int
	const accuracyVal = 2.25 //controls the accuracy of the output (lower is more accurate [more sample points])
	var isPreset bool
	//make a set of preset timecodes/snapshot times - accuracyVal should not be changed once in production!
	presets := make([]int, 0)
	for i := 0; i < seriesLen; i++ {
		preset := int(math.Pow(float64(i), accuracyVal))
		if preset <= seriesLen {
			presets = append(presets, preset)
		}
	}
	//set initial val
	logVal := math.Log(float64(seriesLen)) //TODO remove this - not required/used?
	//this loop determines the length of the rolling sampling period to average
	//(from the length of the activity down to 1 second)
	for i := seriesLen; i > 0; i-- { //3600, 3599, 3598...
		//check if this duration is one of our snapshots
		for _, presetTime := range presets {
			if i == presetTime {
				isPreset = true
				break
			}
			isPreset = false
		}
		//don't calculate ALL samples — only presets and key durations
		if isPreset || i == 1 || i == 2 || i == 3 || i == 4 || i == 5 || i == 10 || i == 20 || i == 30 || i == 60 || i == 5*60 || i == 20*60 || i == 30*60 || i == 60*60 || i == 120*60 || i == 240*60 || i == 360*60 || i == 480*60 || i == 600*60 {
			//reset max for each duration calculated
			maxCpVal = 0
			//this loop determines the point at which to start searching
			for j := 0; j <= (seriesLen - i); j++ {
				//rolling slices cover the i-second window starting at j
				rollingPowerSlice := powerSeries[j : j+i]
				rollingHeartSlice := heartSeries[j : j+i]
				rollingCadenceSlice := cadenceSeries[j : j+i]
				//reset sum of slice vals
				sumCpVal = 0
				sumHrVal = 0
				sumCadVal = 0
				for _, val := range rollingPowerSlice {
					//sum the sliding slice values
					sumCpVal += val
				}
				for _, val := range rollingHeartSlice {
					sumHrVal += val
				}
				for _, val := range rollingCadenceSlice {
					sumCadVal += val
				}
				//keep the best i-second mean power, plus its accompanying hr/cadence means
				if (sumCpVal / i) > maxCpVal {
					maxCpVal = (sumCpVal / i)
					maxCpHrVal = (sumHrVal / i)
					maxCpCadVal = (sumCadVal / i)
				}
			}
			//store the named preset duration vals (break is redundant in Go but harmless)
			switch i {
			case 5:
				cpms.FiveSecondCP = maxCpVal
				cpms.FiveSecondCPHR = maxCpHrVal
				cpms.FiveSecondCPCAD = maxCpCadVal
				break
			case 20:
				cpms.TwentySecondCP = maxCpVal
				cpms.TwentySecondCPHR = maxCpHrVal
				cpms.TwentySecondCPCAD = maxCpCadVal
				break
			case 60:
				cpms.SixtySecondCP = maxCpVal
				cpms.SixtySecondCPHR = maxCpHrVal
				cpms.SixtySecondCPCAD = maxCpCadVal
				break
			case 300:
				cpms.FiveMinuteCP = maxCpVal
				cpms.FiveMinuteCPHR = maxCpHrVal
				cpms.FiveMinuteCPCAD = maxCpCadVal
				break
			case 1200:
				cpms.TwentyMinuteCP = maxCpVal
				cpms.TwentyMinuteCPHR = maxCpHrVal
				cpms.TwentyMinuteCPCAD = maxCpCadVal
				break
			case 3600:
				cpms.SixtyMinuteCP = maxCpVal
				cpms.SixtyMinuteCPHR = maxCpHrVal
				cpms.SixtyMinuteCPCAD = maxCpCadVal
				break
			}
			//should be able to do a plot from this info
			//fmt.Printf("Max Mean power for %d seconds is %d Watts. Log of i: %v\n", i, maxCpVal, math.Log(float64(i)))
			//plotinfo
			ElapsedTime = time.Duration(i) * time.Second //convert iterator (seconds) to Duration type
			cpRow.CpTime[0] = int(ElapsedTime.Hours())
			cpRow.CpTime[1] = int(ElapsedTime.Minutes()) % 60
			cpRow.CpTime[2] = int(ElapsedTime.Seconds()) % 60
			cpRow.CpVal = maxCpVal
			cpRow.CpAhr = maxCpHrVal
			cpRow.CpAcad = maxCpCadVal
			cpRows = append(cpRows, cpRow)
			logVal -= accuracyVal
		}
	}
	//post loop calculations: flush the final (unterminated) lap and compute activity averages
	if activity.Samplecount > 0 {
		//calculate lap totals
		// NOTE(review): lap.Samplecount can be 0 here if the final row opened
		// a new lap — that would divide by zero; confirm inputs rule this out.
		lapSummary.Avpower = lap.Power / lap.Samplecount
		lapSummary.Avheart = lap.Hr / lap.Samplecount
		lapSummary.Dur = time.Duration(lap.Samplecount) * time.Second
		pedalcount = (lap.Samplecount - lap.Freewheelcount)
		if pedalcount > 0 {
			lapSummary.Avcad = lap.Cad / (lap.Samplecount - lap.Freewheelcount)
		} else {
			lapSummary.Avcad = 0
		}
		//append the summary lap data
		lapSummaries = append(lapSummaries, lapSummary)
		//calculate totals
		endSummary.Avpower = activity.Power / activity.Samplecount
		endSummary.Avheart = activity.Hr / activity.Samplecount
		pedalcount = (activity.Samplecount - activity.Freewheelcount)
		if pedalcount > 0 {
			endSummary.Avcad = activity.Cad / (activity.Samplecount - activity.Freewheelcount)
		} else {
			endSummary.Avcad = 0
		}
	}
	/***
	 * normalised power
	 ***/
	var fourthPower float64
	var thirtySecondSum int
	var thirtySecondAv float64
	for i := 30; i < seriesLen; i++ {
		//reset total
		thirtySecondSum = 0
		//get thirty second rolling slice
		rollingPowerSlice := powerSeries[i-30 : i]
		for _, val := range rollingPowerSlice {
			//sum the sliding slice values
			thirtySecondSum += val
		}
		thirtySecondAv = float64(thirtySecondSum / 30)
		//raise to the power of 4
		fourthPower += math.Pow(thirtySecondAv, 4)
	}
	//normalised power = 4th root of total of 30 second averages divided by number
	//of averages taken (total - 30 to allow for start offset and slice length)
	// NOTE(review): when seriesLen <= 30 this divides by zero or a negative
	// count — confirm short activities cannot reach here.
	normalisedPower := int(math.Pow(fourthPower/float64(seriesLen-30), 0.25)) //4th root is power 1/4 (0.25)
	endSummary.Np = normalisedPower
	/***
	 * Intensity factor
	 ***/
	intensity := float64(normalisedPower) / float64(user.Ftp)
	endSummary.If = utility.Round(intensity, .5, 2) * 100 //times by 100 and use as a percentage to avoid troubles
	/***
	 * Intensity factor from heart rate (as a percentage)
	 ***/
	maxHr := float64(user.Thr) * 1.06
	endSummary.IfHr = int((float64(endSummary.Avheart) / maxHr) * 100)
	/***
	 * TSS
	 ***/
	endSummary.Tss = int((float64(seriesLen) * float64(normalisedPower) * intensity) / (float64(user.Ftp) * 3600) * 100)
	/***
	 * Estimated TSS (from heart-rate zones relative to threshold heart rate)
	 ***/
	var etssSum int
	var inc int
	for _, val := range heartSeries {
		if val == 0 {
			inc = 0 //no hr data
		} else if float64(val) < 0.81*float64(user.Thr) {
			inc = 55 //zone 1
		} else if float64(val) > 0.81*float64(user.Thr) && float64(val) <= 0.89*float64(user.Thr) {
			inc = 60 //zone 2
		} else if float64(val) > 0.89*float64(user.Thr) && float64(val) <= 0.93*float64(user.Thr) {
			inc = 69 //zone 3
		} else if float64(val) > 0.93*float64(user.Thr) && float64(val) <= 0.99*float64(user.Thr) {
			inc = 87 //zone 4
		} else if float64(val) > 0.99*float64(user.Thr) && float64(val) <= 1.02*float64(user.Thr) {
			inc = 100 //zone 5a
		} else if float64(val) > 1.02*float64(user.Thr) && float64(val) <= 1.06*float64(user.Thr) {
			inc = 118 //zone 5b
		} else if float64(val) > 1.06*float64(user.Thr) {
			inc = 140 //zone 5c
		}
		etssSum += inc
	}
	if len(heartSeries) > 0 {
		endSummary.Etss = int(float64(etssSum/len(heartSeries)) * activityDuration)
	}
	//set page var stuff: flag which series are actually present
	if endSummary.Avpower == 0 {
		hasPower = false
	}
	if endSummary.Avheart == 0 {
		hasHeart = false
	}
	if endSummary.Avcad == 0 {
		hasCadence = false
	}
	/***
	 * Energy
	 ***/
	if endSummary.Avpower > 0 {
		endSummary.WorkDone = int(float64(endSummary.Avpower)*float64(seriesLen)) / 1000 //KJ
		endSummary.EnergyUsedKj = int(float64(endSummary.WorkDone) * 4.444444444)        //KJoules 0.4444' is 1/22.5 (22.5% efficiency)
		endSummary.EnergyUsedKc = int(float64(endSummary.EnergyUsedKj) / 4.186)          //KCals 4.186 convert KJoules -> KCals
	} else if endSummary.Avheart > 0 && endSummary.Avpower == 0 {
		//if no power present, but vo2 Max is, use that instead (http://www.shapesense.com/fitness-exercise/calculators/heart-rate-based-calorie-burn-calculator.aspx)
		if user.Vo2 > 0 && user.Gender == "male" {
			endSummary.EnergyUsedKc = int(((-95.7735 + (0.634 * float64(endSummary.Avheart)) + (0.404 * float64(user.Vo2)) + (0.394 * float64(user.Weight)) + (0.271 * float64(user.Age))) / 4.184) * 60.00 * (float64(seriesLen) / 3600.00))
			endSummary.EnergyUsedKj = int(float64(endSummary.EnergyUsedKc) * 4.186)
			endSummary.WorkDone = int(float64(endSummary.EnergyUsedKj) * 0.225)
		} else if user.Vo2 > 0 && user.Gender == "female" {
			endSummary.EnergyUsedKc = int(((-59.3954 + (0.45 * float64(endSummary.Avheart)) + (0.380 * float64(user.Vo2)) + (0.103 * float64(user.Weight)) + (0.274 * float64(user.Age))) / 4.184) * 60.00 * (float64(seriesLen) / 3600.00))
			endSummary.EnergyUsedKj = int(float64(endSummary.EnergyUsedKc) * 4.186)
			endSummary.WorkDone = int(float64(endSummary.EnergyUsedKj) * 0.225)
		}
		//if user.Vo2 Max not known, but we have hr data
		if user.Vo2 == 0 && user.Gender == "male" {
			endSummary.EnergyUsedKc = int(((-55.0969 + (0.6309 * float64(endSummary.Avheart)) + (0.1988 * float64(user.Weight)) + (0.2017 * float64(user.Age))) / 4.184) * 60 * (float64(seriesLen) / 3600.00))
			endSummary.EnergyUsedKj = int(float64(endSummary.EnergyUsedKc) * 4.186)
			endSummary.WorkDone = int(float64(endSummary.EnergyUsedKj) * 0.225)
		} else if user.Vo2 == 0 && user.Gender == "female" {
			endSummary.EnergyUsedKc = int(((-20.4022 + (0.4472 * float64(endSummary.Avheart)) + (0.1263 * float64(user.Weight)) + (0.074 * float64(user.Age))) / 4.184) * 60 * (float64(seriesLen) / 3600.00))
			endSummary.EnergyUsedKj = int(float64(endSummary.EnergyUsedKc) * 4.186)
			endSummary.WorkDone = int(float64(endSummary.EnergyUsedKj) * 0.225)
		}
	}
	const longForm = "Monday Jan 2, 2006 at 3:04pm"
	// NOTE(review): shortForm is currently unused — candidate for removal.
	const shortForm = "2006, 0, 2"
	var title = activityStart.Format(longForm)
	endSummary.Dur = time.Duration(seriesLen) * time.Second
	endSummary.StartTime = activityStart
	//**** Marshal JSON and save to Cassandra ********//
	// NOTE(review): each assignment overwrites err, so only the last Marshal
	// error is checked below — earlier failures are silently dropped.
	row_json, err := json.Marshal(rows)                   //raw processed data
	power_json, err := json.Marshal(powerSeries)          //for chart
	heart_json, err := json.Marshal(heartSeries)          //for chart
	cadence_json, err := json.Marshal(cadenceSeries)      //for chart
	cp_row_json, err := json.Marshal(cpRows)              //rows for chart
	cp_data_json, err := json.Marshal(cpms)               //critical power metrics
	lap_summaries_json, err := json.Marshal(lapSummaries) //per-lap summaries
	end_summary_json, err := json.Marshal(endSummary)     //whole-activity summary
	if err != nil {
		fmt.Println("error:", err)
	}
	saveProcessed(user, activityId, title, row_json, power_json, heart_json, cadence_json, cp_row_json, cp_data_json, lap_summaries_json, end_summary_json, hasPower, hasHeart, hasCadence, user.Ftp, user.Thr, activityStart)
}
//heart/Power by zone.
// hpbz builds the "time in heart-rate zone" and "time in power zone" chart
// series for one user: for every activity in the filter window it counts the
// seconds spent in each power zone (relative to the activity's stored FTP)
// and each heart zone (relative to its stored threshold HR), then buckets the
// counts by ISO week, or by calendar month when more than a year of history
// is requested. Zone times in the output are expressed in hours.
func hpbz(user types.UserSettings, filter Filter) ([]Hbz, []Pbz) {
	user_id := user.Id
	var user_data types.Metrics
	var end_summary_json []byte
	var heart_json []byte
	var power_json []byte
	var cur_ftp int
	var cur_thr int
	var power_series []int
	var heart_series []int
	var has_power, has_heart bool
	var activity_id string
	var activity_start time.Time
	hbz_data := make([]Hbz, 0)
	pbz_data := make([]Pbz, 0)
	var temp_row Hpbz
	temp_rows := make([]Hpbz, 0)
	cluster := gocql.NewCluster(config.DbHost)
	cluster.Keyspace = "joulepersecond"
	cluster.Consistency = gocql.Quorum
	// NOTE(review): CreateSession error is discarded — a nil session here
	// would panic below; confirm acceptable or handle the error.
	session, _ := cluster.CreateSession()
	defer session.Close()
	//per-zone running totals: sH* heart seconds, sP* power seconds
	var sH1, sH2, sH3, sH4, sH5a, sH5b, sH5c, sP1, sP2, sP3, sP4, sP5, sP6 int
	timeNow := time.Now()
	timeThen := timeNow.AddDate(0, 0, -filter.Historylen)
	//get all of the user's activities within the history window
	iter := session.Query(`SELECT activity_start, activity_id FROM joulepersecond.user_activity WHERE user_id = ? AND activity_start > ? ORDER BY activity_start ASC`, user_id, timeThen).Iter()
	for iter.Scan(&activity_start, &activity_id) {
		//fetch the processed series for this activity
		// NOTE(review): this inner iter shadows the outer one and neither is
		// ever Close()d — gocql query errors are silently dropped.
		iter := session.Query(`SELECT power_json, heart_json, end_summary_json, has_power, has_heart, cur_ftp, cur_thr FROM joulepersecond.proc_activity WHERE activity_id = ? 
`, activity_id).Iter()
		for iter.Scan(&power_json, &heart_json, &end_summary_json, &has_power, &has_heart, &cur_ftp, &cur_thr) {
			//decode errors are ignored; a bad row keeps the previous contents
			json.Unmarshal(end_summary_json, &user_data)
			json.Unmarshal(power_json, &power_series)
			json.Unmarshal(heart_json, &heart_series)
			temp_row.StartTime = activity_start
			//TODO next: Split all time series data in to zones and add it to temp_row/(s) for further date processing
			if has_power {
				temp_row.Samples = len(power_series)
				temp_row.Has_power = true
			}
			if has_heart {
				temp_row.Samples = len(heart_series)
				temp_row.Has_heart = true
			}
			//nothing to count for this activity
			if !has_heart && !has_power {
				break
			}
			//clear the values (temp_row is reused across activities)
			temp_row.CountPZ1 = 0
			temp_row.CountPZ2 = 0
			temp_row.CountPZ3 = 0
			temp_row.CountPZ4 = 0
			temp_row.CountPZ5 = 0
			temp_row.CountPZ6 = 0
			temp_row.CountHZ1 = 0
			temp_row.CountHZ2 = 0
			temp_row.CountHZ3 = 0
			temp_row.CountHZ4 = 0
			temp_row.CountHZ5a = 0
			temp_row.CountHZ5b = 0
			temp_row.CountHZ5c = 0
			if has_power {
				//classify a rolling average (window user.SampleSize) into power zones
				var sum int
				var average float64
				for i := user.SampleSize; i < temp_row.Samples; i++ {
					//reset total
					sum = 0
					//get the rolling slice ending at i
					rollingPowerSlice := power_series[i-user.SampleSize : i]
					for _, val := range rollingPowerSlice {
						//sum the sliding slice values
						sum += val
					}
					average = float64(sum / user.SampleSize)
					//zone boundaries are fractions of the activity's stored FTP
					if average < 0.55*float64(cur_ftp) {
						temp_row.CountPZ1++
					} else if average > 0.55*float64(cur_ftp) && average <= 0.74*float64(cur_ftp) {
						temp_row.CountPZ2++
					} else if average > 0.74*float64(cur_ftp) && average <= 0.89*float64(cur_ftp) {
						temp_row.CountPZ3++
					} else if average > 0.89*float64(cur_ftp) && average <= 1.04*float64(cur_ftp) {
						temp_row.CountPZ4++
					} else if average > 1.04*float64(cur_ftp) && average <= 1.2*float64(cur_ftp) {
						temp_row.CountPZ5++
					} else if average > 1.2*float64(cur_ftp) {
						temp_row.CountPZ6++
					}
				}
			}
			//loop through each sample and post the value into the correct pigeon hole
			for i := 0; i < temp_row.Samples; i++ {
				if has_heart {
					//heart zone boundaries are fractions of the activity's stored threshold HR
					if float64(heart_series[i]) < 0.81*float64(cur_thr) {
						temp_row.CountHZ1++
					} else if float64(heart_series[i]) > 0.81*float64(cur_thr) && float64(heart_series[i]) <= 0.89*float64(cur_thr) {
						temp_row.CountHZ2++
					} else if float64(heart_series[i]) > 0.89*float64(cur_thr) && float64(heart_series[i]) <= 0.93*float64(cur_thr) {
						temp_row.CountHZ3++
					} else if float64(heart_series[i]) > 0.93*float64(cur_thr) && float64(heart_series[i]) <= 0.99*float64(cur_thr) {
						temp_row.CountHZ4++
					} else if float64(heart_series[i]) > 0.99*float64(cur_thr) && float64(heart_series[i]) <= 1.02*float64(cur_thr) {
						temp_row.CountHZ5a++
					} else if float64(heart_series[i]) > 1.02*float64(cur_thr) && float64(heart_series[i]) <= 1.06*float64(cur_thr) {
						temp_row.CountHZ5b++
					} else if float64(heart_series[i]) > 1.06*float64(cur_thr) {
						temp_row.CountHZ5c++
					}
				}
			}
			temp_rows = append(temp_rows, temp_row)
		}
	}
	//reset all per-bucket zone totals after a bucket is flushed
	clearVals := func() {
		sH1 = 0
		sH2 = 0
		sH3 = 0
		sH4 = 0
		sH5a = 0
		sH5b = 0
		sH5c = 0
		sP1 = 0
		sP2 = 0
		sP3 = 0
		sP4 = 0
		sP5 = 0
		sP6 = 0
	}
	//so now for each activity we have the sum of each of the zones (value for
	//each second * number seconds) — bucket them by date
	//loop through each retrieved activity
	// NOTE(review): loop starts at 1 (prevIterDate reads i-1), so
	// temp_rows[0] never contributes to any bucket — confirm intended.
	var lastActivity time.Time
	var numResult int
	for i := 1; i < len(temp_rows); i++ {
		//set last activity on first iteration only
		if i == 1 {
			lastActivity = temp_rows[i].StartTime
		}
		//if we want to show data in monthly format
		if filter.Historylen > 366 { //show over 365 days as monthly
			thisDate := temp_rows[i].StartTime
			prevDate := lastActivity
			//flush when the month changes, or on the last activity
			if thisDate.Month() != prevDate.Month() || i == len(temp_rows)-1 {
				var summedMonthlyHbz Hbz
				var summedMonthlyPbz Pbz
				//convert per-zone seconds into hours
				summedMonthlyHbz.AvZ1 = utility.Round((float64(sH1) / 3600.0), .5, 2)
				summedMonthlyHbz.AvZ2 = utility.Round((float64(sH2) / 3600.0), .5, 2)
				summedMonthlyHbz.AvZ3 = utility.Round((float64(sH3) / 3600.0), .5, 2)
				summedMonthlyHbz.AvZ4 = utility.Round((float64(sH4) / 3600.0), .5, 2)
				summedMonthlyHbz.AvZ5a = utility.Round((float64(sH5a) / 3600.0), .5, 2)
				summedMonthlyHbz.AvZ5b = utility.Round((float64(sH5b) / 3600.0), .5, 2)
				summedMonthlyHbz.AvZ5c = utility.Round((float64(sH5c) / 3600.0), .5, 2)
				summedMonthlyPbz.AvZ1 = utility.Round((float64(sP1) / 3600.0), .5, 2)
				summedMonthlyPbz.AvZ2 = utility.Round((float64(sP2) / 3600.0), .5, 2)
				summedMonthlyPbz.AvZ3 = utility.Round((float64(sP3) / 3600.0), .5, 2)
				summedMonthlyPbz.AvZ4 = utility.Round((float64(sP4) / 3600.0), .5, 2)
				summedMonthlyPbz.AvZ5 = utility.Round((float64(sP5) / 3600.0), .5, 2)
				summedMonthlyPbz.AvZ6 = utility.Round((float64(sP6) / 3600.0), .5, 2)
				var month time.Month
				var year string
				if thisDate.Month() != prevDate.Month() {
					month = prevDate.Month()
					year = strconv.Itoa(prevDate.Year())
				} else {
					month = thisDate.Month()
					year = strconv.Itoa(thisDate.Year())
				}
				//label e.g. `Jan '15`
				monthStr := month.String()
				summedMonthlyHbz.TimeLabel = monthStr[0:3] + " '" + year[2:4]
				summedMonthlyPbz.TimeLabel = monthStr[0:3] + " '" + year[2:4]
				clearVals()
				hbz_data = append(hbz_data, summedMonthlyHbz)
				pbz_data = append(pbz_data, summedMonthlyPbz)
				//reset the last activity date for the next loop
				lastActivity = thisDate
			}
		} else {
			//weekly bucketing
			thisDate := temp_rows[i].StartTime
			prevDate := lastActivity
			prevIterDate := temp_rows[i-1].StartTime
			_, thisDateWeek := thisDate.ISOWeek()
			_, prevDateWeek := prevDate.ISOWeek()
			if thisDateWeek != prevDateWeek || i == len(temp_rows)-1 { //if new week or last activity
				var summedWeeklyHbz Hbz
				var summedWeeklyPbz Pbz
				numResult++
				//convert per-zone seconds into hours
				summedWeeklyHbz.AvZ1 = utility.Round((float64(sH1) / 3600.0), .5, 2)
				summedWeeklyHbz.AvZ2 = utility.Round((float64(sH2) / 3600.0), .5, 2)
				summedWeeklyHbz.AvZ3 = utility.Round((float64(sH3) / 3600.0), .5, 2)
				summedWeeklyHbz.AvZ4 = utility.Round((float64(sH4) / 3600.0), .5, 2)
				summedWeeklyHbz.AvZ5a = utility.Round((float64(sH5a) / 3600.0), .5, 2)
				summedWeeklyHbz.AvZ5b = utility.Round((float64(sH5b) / 3600.0), .5, 2)
				summedWeeklyHbz.AvZ5c = utility.Round((float64(sH5c) / 3600.0), .5, 2)
				summedWeeklyPbz.AvZ1 = utility.Round((float64(sP1) / 3600.0), .5, 2)
				summedWeeklyPbz.AvZ2 = utility.Round((float64(sP2) / 3600.0), .5, 2)
				summedWeeklyPbz.AvZ3 = utility.Round((float64(sP3) / 3600.0), .5, 2)
				summedWeeklyPbz.AvZ4 = utility.Round((float64(sP4) / 3600.0), .5, 2)
				summedWeeklyPbz.AvZ5 = utility.Round((float64(sP5) / 3600.0), .5, 2)
				summedWeeklyPbz.AvZ6 = utility.Round((float64(sP6) / 3600.0), .5, 2)
				monthS := prevDate.Month()
				dayS := strconv.Itoa(prevDate.Day())
				var dayF string
				var monthF time.Month
				if thisDateWeek != prevDateWeek {
					monthF = prevIterDate.Month()
					dayF = strconv.Itoa(prevIterDate.Day())
				} else {
					monthF = thisDate.Month()
					dayF = strconv.Itoa(thisDate.Day())
				}
				monthStrS := monthS.String()
				monthAbrS := monthStrS[0:3]
				monthStrF := monthF.String()
				monthAbrF := monthStrF[0:3]
				//format labels according to the number to display
				if filter.Historylen < 120 {
					summedWeeklyHbz.TimeLabel = dayS + " " + monthAbrS + " - " + dayF + " " + monthAbrF
					summedWeeklyPbz.TimeLabel = dayS + " " + monthAbrS + " - " + dayF + " " + monthAbrF
				} else {
					summedWeeklyHbz.TimeLabel = dayS + " " + monthAbrS
					summedWeeklyPbz.TimeLabel = dayS + " " + monthAbrS
				}
				clearVals()
				hbz_data = append(hbz_data, summedWeeklyHbz)
				pbz_data = append(pbz_data, summedWeeklyPbz)
				//reset the last activity date for the next loop
				lastActivity = thisDate
			}
		}
		//sum this activity's zone counts into the current bucket
		sP1 += temp_rows[i].CountPZ1
		sP2 += temp_rows[i].CountPZ2
		sP3 += temp_rows[i].CountPZ3
		sP4 += temp_rows[i].CountPZ4
		sP5 += temp_rows[i].CountPZ5
		sP6 += temp_rows[i].CountPZ6
		sH1 += temp_rows[i].CountHZ1
		sH2 += temp_rows[i].CountHZ2
		sH3 += temp_rows[i].CountHZ3
		sH4 += temp_rows[i].CountHZ4
		sH5a += temp_rows[i].CountHZ5a
		sH5b += temp_rows[i].CountHZ5b
		sH5c += temp_rows[i].CountHZ5c
	}
	return hbz_data, pbz_data
}
//get the week's activities.
// dashboard summarises the current week (Monday through today, shifted by
// user.TimeOffset days for testing) for one user: total TSS and duration
// across the week's activities, time spent in each power and heart-rate zone,
// and the absolute zone-boundary labels derived from the user's current FTP
// and threshold heart rate.
func dashboard(user types.UserSettings) (types.Tvd, types.Zones, types.ZoneLabels) {
	user_id := user.Id
	tvd_data_points := make([]types.Tvd_data_point, 0)
	// NOTE(review): tvd_data is appended to below but never returned — dead
	// code; candidate for removal.
	tvd_data := make([]types.Tvd, 0)
	var user_data types.Metrics
	var activity_id string
	var end_summary_json []byte
	var activity_start time.Time
	var heart_json []byte
	var power_json []byte
	var cur_ftp int
	var cur_thr int
	var power_series []int
	var heart_series []int
	var has_power, has_heart bool
	var zoneData types.Zones
	cluster := gocql.NewCluster(config.DbHost)
	cluster.Keyspace = "joulepersecond"
	cluster.Consistency = gocql.Quorum
	// NOTE(review): CreateSession error is discarded — a nil session here
	// would panic below; confirm acceptable or handle the error.
	session, _ := cluster.CreateSession()
	defer session.Close()
	timeNow := time.Now()
	//we can use TimeOffset to test from other dates
	timeNow = timeNow.AddDate(0, 0, user.TimeOffset)
	//timeTruncated is a time at the beginning of the day
	timeTruncated := timeNow.Truncate(time.Hour * 24)
	//we will use timeThen to refer to the beginning of the current week
	var timeThen time.Time
	dayOfWeek := int(timeTruncated.Weekday())
	if int(timeTruncated.Weekday()) != 0 { //if not equal to Sunday...
		timeThen = timeTruncated.AddDate(0, 0, -(dayOfWeek - 1)) //fetch records for the week so far (second -1 to start from Monday)
	} else {
		timeThen = timeTruncated.AddDate(0, 0, -6) //if today is Sunday, query back to Monday
	}
	//fetch this week's activity summaries
	iter := session.Query(`SELECT activity_id, activity_start, end_summary_json FROM joulepersecond.user_activity WHERE user_id = ? AND activity_start <=? AND activity_start >= ? 
`, user_id, timeNow, timeThen).Iter()
	for iter.Scan(&activity_id, &activity_start, &end_summary_json) {
		var tvd_data_point types.Tvd_data_point
		//decode errors are ignored; a bad row keeps the previous contents
		json.Unmarshal(end_summary_json, &user_data)
		tvd_data_point.Date = user_data.StartTime
		tvd_data_point.Dur = user_data.Dur
		//preference order for the TSS value: user-entered, power-derived, estimated
		if user_data.Utss > 0 {
			tvd_data_point.Tss = user_data.Utss
		} else if user_data.Tss > 0 {
			tvd_data_point.Tss = user_data.Tss
		} else if user_data.Etss > 0 {
			tvd_data_point.Tss = user_data.Etss
		} else {
			tvd_data_point.Tss = 0
		}
		tvd_data_points = append(tvd_data_points, tvd_data_point)
		//for each activity, get the extended data
		// NOTE(review): this inner iter shadows the outer one and neither is
		// ever Close()d — gocql query errors are silently dropped.
		iter := session.Query(`SELECT power_json, heart_json, end_summary_json, has_power, has_heart, cur_ftp, cur_thr FROM joulepersecond.proc_activity WHERE activity_id = ? `, activity_id).Iter()
		for iter.Scan(&power_json, &heart_json, &end_summary_json, &has_power, &has_heart, &cur_ftp, &cur_thr) {
			json.Unmarshal(end_summary_json, &user_data)
			json.Unmarshal(power_json, &power_series)
			json.Unmarshal(heart_json, &heart_series)
			var samples int
			if has_power {
				samples = len(power_series)
				has_power = true
			}
			if has_heart {
				samples = len(heart_series)
				has_heart = true
			}
			//nothing to count for this activity
			if !has_heart && !has_power {
				break
			}
			if has_power {
				zoneData.HasPower = true
				//classify a rolling average (window user.SampleSize) into power zones
				var sum int
				var average float64
				for i := user.SampleSize; i < samples; i++ {
					//reset total
					sum = 0
					//get the rolling slice ending at i
					rollingPowerSlice := power_series[i-user.SampleSize : i]
					for _, val := range rollingPowerSlice {
						//sum the sliding slice values
						sum += val
					}
					average = float64(sum / user.SampleSize)
					//zone boundaries are fractions of the activity's stored FTP
					if average < 0.55*float64(cur_ftp) {
						zoneData.Z1++
					} else if average > 0.55*float64(cur_ftp) && average <= 0.74*float64(cur_ftp) {
						zoneData.Z2++
					} else if average > 0.74*float64(cur_ftp) && average <= 0.89*float64(cur_ftp) {
						zoneData.Z3++
					} else if average > 0.89*float64(cur_ftp) && average <= 1.04*float64(cur_ftp) {
						zoneData.Z4++
					} else if average > 1.04*float64(cur_ftp) && average <= 1.2*float64(cur_ftp) {
						zoneData.Z5++
					} else if average > 1.2*float64(cur_ftp) {
						zoneData.Z6++
					}
				}
			}
			//loop through each sample and post the value into the correct pigeon hole
			if has_heart {
				zoneData.HasHeart = true
				for i := 0; i < samples; i++ {
					//heart zone boundaries are fractions of the activity's stored threshold HR
					if float64(heart_series[i]) < 0.81*float64(cur_thr) {
						zoneData.HR1++
					} else if float64(heart_series[i]) > 0.81*float64(cur_thr) && float64(heart_series[i]) <= 0.89*float64(cur_thr) {
						zoneData.HR2++
					} else if float64(heart_series[i]) > 0.89*float64(cur_thr) && float64(heart_series[i]) <= 0.93*float64(cur_thr) {
						zoneData.HR3++
					} else if float64(heart_series[i]) > 0.93*float64(cur_thr) && float64(heart_series[i]) <= 0.99*float64(cur_thr) {
						zoneData.HR4++
					} else if float64(heart_series[i]) > 0.99*float64(cur_thr) && float64(heart_series[i]) <= 1.02*float64(cur_thr) {
						zoneData.HR5a++
					} else if float64(heart_series[i]) > 1.02*float64(cur_thr) && float64(heart_series[i]) <= 1.06*float64(cur_thr) {
						zoneData.HR5b++
					} else if float64(heart_series[i]) > 1.06*float64(cur_thr) {
						zoneData.HR5c++
					}
				}
			}
		}
	}
	//we now have all the data... total it for the week
	sumTss := 0
	var sumDur time.Duration
	var summedWeeklyTvd types.Tvd
	//loop through each retrieved activity
	for i := 0; i < len(tvd_data_points); i++ {
		// NOTE(review): this appends a zero-valued struct each pass and
		// tvd_data is never used afterwards — dead code.
		tvd_data = append(tvd_data, summedWeeklyTvd)
		//sum the values
		sumTss += tvd_data_points[i].Tss
		sumDur += tvd_data_points[i].Dur
	}
	summedWeeklyTvd.TotalTss = sumTss
	summedWeeklyTvd.TotalDur = utility.Round(sumDur.Hours(), .5, 2)
	//zone boundary labels in absolute watts / bpm from the user's current settings
	var zoneLabels types.ZoneLabels
	zoneLabels.PowerZ1 = int(0.55 * float64(user.Ftp))
	zoneLabels.PowerZ2 = int(0.74 * float64(user.Ftp))
	zoneLabels.PowerZ3 = int(0.89 * float64(user.Ftp))
	zoneLabels.PowerZ4 = int(1.04 * float64(user.Ftp))
	zoneLabels.PowerZ5 = int(1.2 * float64(user.Ftp))
	zoneLabels.HeartZ1 = int(0.81 * float64(user.Thr))
	zoneLabels.HeartZ2 = int(0.89 * float64(user.Thr))
	zoneLabels.HeartZ3 = int(0.93 * float64(user.Thr))
	zoneLabels.HeartZ4 = int(0.99 * float64(user.Thr))
	zoneLabels.HeartZ5a = int(1.02 * float64(user.Thr))
	zoneLabels.HeartZ5b = int(1.06 * float64(user.Thr))
	//get the power and heartrate zone data
	return summedWeeklyTvd, zoneData, zoneLabels
}