// Enqueues one publish-complaints task into the batch queue for each day in the date range.
func publishAllComplaintsHandler(w http.ResponseWriter, r *http.Request) {
	ctx := appengine.NewContext(r)
	str := ""
	s, e, _ := widget.FormValueDateRange(r)
	days := date.IntermediateMidnights(s.Add(-1*time.Second), e) // decrement start, to include it
	url := "/backend/publish-complaints"

	for i, day := range days {
		dayStr := day.Format("2006.01.02")

		thisUrl := fmt.Sprintf("%s?datestring=%s", url, dayStr)
		if r.FormValue("skipload") != "" {
			thisUrl += "&skipload=" + r.FormValue("skipload")
		}

		t := taskqueue.NewPOSTTask(thisUrl, map[string][]string{})

		// Give ourselves time to get all these tasks posted, and stagger them out a bit
		t.Delay = time.Minute + time.Duration(i)*15*time.Second

		if _, err := taskqueue.Add(ctx, t, "batch"); err != nil {
			log.Errorf(ctx, "publishAllComplaintsHandler: enqueue: %v", err)
			http.Error(w, err.Error(), http.StatusInternalServerError)
			return
		}

		str += " * posting for " + thisUrl + "\n"
	}

	w.Header().Set("Content-Type", "text/plain")
	w.Write([]byte(fmt.Sprintf("OK, enqueued %d\n--\n%s", len(days), str)))
}
func DayWindows(s, e time.Time) [][]time.Time {
	out := [][]time.Time{}

	s = s.Add(-1 * time.Second) // Tip s into previous day, so that it counts as an 'intermediate'

	for _, tMidnight := range date.IntermediateMidnights(s, e) {
		out = append(out, []time.Time{tMidnight, tMidnight.AddDate(0, 0, 1).Add(-1 * time.Second)})
	}

	return out
}
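// Illustrative sketch (not part of the original source): shows how DayWindows might be
// called. It assumes the 'date' helpers above come from the github.com/skypies/util/date
// package, and that IntermediateMidnights excludes a start time that is itself a local
// midnight unless it is first tipped back a second - which is why DayWindows decrements s
// before calling it. The function name exampleDayWindows and the sample dates are hypothetical.
func exampleDayWindows() {
	loc, _ := time.LoadLocation("America/Los_Angeles")
	s := time.Date(2016, 3, 15, 0, 0, 0, 0, loc) // local midnight starting the 15th
	e := time.Date(2016, 3, 17, 0, 0, 0, 0, loc) // local midnight starting the 17th

	// Each window is a [midnight, 23:59:59] pair for one calendar day in the range;
	// the exact boundary behaviour depends on date.IntermediateMidnights.
	for _, window := range DayWindows(s, e) {
		fmt.Printf("window: %s -> %s\n", window[0], window[1])
	}
}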
// Enqueues one single-flight task for every flight found on each day in the date range.
func batchFlightScanHandler(w http.ResponseWriter, r *http.Request) {
	c := appengine.NewContext(r)

	tags := []string{} //"ADSB"} // Maybe make this configurable ...
	n := 0
	str := ""

	s, e, _ := widget.FormValueDateRange(r)
	job := r.FormValue("job")
	if job == "" {
		http.Error(w, "Missing argument: &job=foo", http.StatusInternalServerError)
		return
	}

	days := date.IntermediateMidnights(s.Add(-1*time.Second), e) // decrement start, to include it
	for _, day := range days {
		// Get the keys for all the flights on this day.
		fdb := oldfgae.FlightDB{C: oldappengine.NewContext(r)}
		dStart, dEnd := date.WindowForTime(day)
		dEnd = dEnd.Add(-1 * time.Second)
		keys, err := fdb.KeysInTimeRangeByTags(tags, dStart, dEnd)
		if err != nil {
			log.Errorf(c, "batchFlightScanHandler: KeysInTimeRangeByTags: %v", err)
			http.Error(w, err.Error(), http.StatusInternalServerError)
			return
		}

		singleFlightUrl := "/backend/fdb-batch/flight"
		for _, key := range keys {
			str += fmt.Sprintf("Enqueueing day=%s: %s?job=%s&key=%s\n",
				day.Format("2006.01.02"), singleFlightUrl, job, key.Encode())

			t := taskqueue.NewPOSTTask(singleFlightUrl, map[string][]string{
				"date": {day.Format("2006.01.02")},
				"key":  {key.Encode()},
				"job":  {job},
			})
			if _, err := taskqueue.Add(c, t, "batch"); err != nil {
				log.Errorf(c, "batchFlightScanHandler: enqueue: %v", err)
				http.Error(w, err.Error(), http.StatusInternalServerError)
				return
			}

			n++
		}
	}

	log.Infof(c, "enqueued %d batch items for '%s'", n, job)

	w.Header().Set("Content-Type", "text/plain")
	w.Write([]byte(fmt.Sprintf("OK, batch, enqueued %d tasks for %s\n%s", n, job, str)))
}
func (cdb ComplaintDB) ResetGlobalStats() {
	if err := cdb.DeletAllGlobalStats(); err != nil {
		cdb.Errorf("Reset/DeleteAll fail, %v", err)
		return
	}

	profiles, err := cdb.GetAllProfiles()
	if err != nil {
		return
	}

	// Upon reset (writing a fresh new singleton), we need to generate a key
	rootKey := datastore.NewKey(cdb.Ctx(), kGlobalStatsKind, "foo", 0, nil)
	key := datastore.NewIncompleteKey(cdb.Ctx(), kGlobalStatsKind, rootKey)

	gs := GlobalStats{
		DatastoreKey: key.Encode(),
	}

	// This is too slow to recalculate this way; it runs into the 10m timeout
	//start := date.ArbitraryDatestring2MidnightPdt("2015/08/09", "2006/01/02").Add(-1 * time.Second)
	start := date.ArbitraryDatestring2MidnightPdt("2016/03/15", "2006/01/02").Add(-1 * time.Second)
	end, _ := date.WindowForYesterday() // end is the final day we count for; yesterday

	midnights := date.IntermediateMidnights(start, end.Add(time.Minute))
	for _, m := range midnights {
		dayStart, dayEnd := date.WindowForTime(m)
		dc := DailyCount{Datestring: date.Time2Datestring(dayStart)}

		for _, p := range profiles {
			if keys, err := cdb.GetComplaintKeysInSpanByEmailAddress(dayStart, dayEnd, p.EmailAddress); err != nil {
				cdb.Errorf("Reset/Lookup fail, %v", err)
			} else if len(keys) > 0 {
				dc.NumComplaints += len(keys)
				dc.NumComplainers += 1
			}
		}

		gs.Counts = append(gs.Counts, dc)
	}

	if err := cdb.SaveGlobalStats(gs); err != nil {
		cdb.Errorf("Reset/Save fail, %v", err)
	}
}
func (cdb ComplaintDB) ResetGlobalStats() {
	if err := cdb.DeletAllGlobalStats(); err != nil {
		cdb.C.Errorf("Reset/DeleteAll fail, %v", err)
		return
	}

	profiles, err := cdb.GetAllProfiles()
	if err != nil {
		return
	}

	// Upon reset (writing a fresh new singleton), we need to generate a key
	rootKey := datastore.NewKey(cdb.C, kGlobalStatsKind, "foo", 0, nil)
	key := datastore.NewIncompleteKey(cdb.C, kGlobalStatsKind, rootKey)

	gs := GlobalStats{
		DatastoreKey: key.Encode(),
	}

	end, _ := date.WindowForYesterday() // end is the final day we count for; yesterday
	start := end.AddDate(0, 0, -100)

	midnights := date.IntermediateMidnights(start, end.Add(time.Minute))
	for _, m := range midnights {
		dayStart, dayEnd := date.WindowForTime(m)
		dc := DailyCount{Datestring: date.Time2Datestring(dayStart)}

		for _, p := range profiles {
			if comp, err := cdb.GetComplaintsInSpanByEmailAddress(p.EmailAddress, dayStart, dayEnd); err != nil {
				cdb.C.Errorf("Reset/Lookup fail, %v", err)
			} else if len(comp) > 0 {
				dc.NumComplaints += len(comp)
				dc.NumComplainers += 1
			}
		}

		gs.Counts = append(gs.Counts, dc)
	}

	if err := cdb.SaveGlobalStats(gs); err != nil {
		cdb.C.Errorf("Reset/Save fail, %v", err)
	}

	cdb.C.Infof("-- reset !--")
	//cdb.LoadGlobalStats();
}
// Enqueues one 'day' task per day in the range
func batchFlightDateRangeHandler(w http.ResponseWriter, r *http.Request) {
	ctx := req2ctx(r)
	n := 0
	str := ""

	s, e, _ := widget.FormValueDateRange(r)
	job := r.FormValue("job")
	if job == "" {
		http.Error(w, "Missing argument: &job=foo", http.StatusInternalServerError)
		return
	}

	str += fmt.Sprintf("** s: %s\n** e: %s\n", s, e)

	days := date.IntermediateMidnights(s.Add(-1*time.Second), e) // decrement start, to include it
	for _, day := range days {
		dayUrl := "/backend/fdb-batch/day"
		dayStr := day.Format("2006/01/02")

		str += fmt.Sprintf(" * adding %s, %s via %s\n", job, dayStr, dayUrl)

		if r.FormValue("dryrun") == "" {
			t := taskqueue.NewPOSTTask(dayUrl, map[string][]string{
				"day": {dayStr},
				"job": {job},
			})
			if _, err := taskqueue.Add(ctx, t, "batch"); err != nil {
				log.Errorf(ctx, "batchFlightDateRangeHandler: enqueue: %v", err)
				http.Error(w, err.Error(), http.StatusInternalServerError)
				return
			}
		}

		n++
	}

	log.Infof(ctx, "enqueued %d batch items for '%s'", n, job)

	w.Header().Set("Content-Type", "text/plain")
	w.Write([]byte(fmt.Sprintf("OK, batch, enqueued %d tasks for %s\n%s", n, job, str)))
}
func (cdb *ComplaintDB) GetDailyCounts(email string) ([]DailyCount, error) {
	k := fmt.Sprintf("%s:dailycounts", email) // The 'counts' is so we can have diff memcache objects
	c := []DailyCount{}

	cdb.Debugf("GDC_001", "GetDailyCounts() starting")

	// might return: datastore.ErrNoSuchEntity
	if dcs, err := cdb.fetchDailyCountSingleton(k); err == datastore.ErrNoSuchEntity {
		// Singleton not found; we don't care; treat same as empty singleton.
	} else if err != nil {
		cdb.Errorf("error getting item: %v", err)
		return c, err
	} else {
		c = dcs
	}

	cdb.Debugf("GDC_002", "singleton lookup done (%d entries)", len(c))

	end, _ := date.WindowForYesterday() // end is the final day we count for; yesterday
	start := end                        // by default, this will trigger no lookups (start=end means no missing)

	if len(c) > 0 {
		start = date.Datestring2MidnightPdt(c[0].Datestring)
	} else {
		cdb.Debugf("GDC_003", "counts empty! track down oldest ever, to start iteration range")
		if complaint, err := cdb.GetOldestComplaintByEmailAddress(email); err != nil {
			cdb.Errorf("error looking up first complaint for %s: %v", email, err)
			return c, err
		} else if complaint != nil {
			// We move a day into the past; the algo below assumes we have data for the day 'start',
			// but in this case we don't; so trick it into generating data for today.
			start = date.AtLocalMidnight(complaint.Timestamp).AddDate(0, 0, -1)
		} else {
			// cdb.Infof(" - lookup first ever, but empty\n")
		}
		cdb.Debugf("GDC_004", "start point found")
	}

	// Right after the first complaint: it set start to "now", but end is still yesterday.
	if start.After(end) {
		return c, nil
	}

	// We add a minute, to ensure that the day that contains 'end' is included
	missing := date.IntermediateMidnights(start, end.Add(time.Minute))
	if len(missing) > 0 {
		for _, m := range missing {
			cdb.Debugf("GDC_005", "looking up a single span")
			dayStart, dayEnd := date.WindowForTime(m)
			if comp, err := cdb.GetComplaintsInSpanByEmailAddress(email, dayStart, dayEnd); err != nil {
				return []DailyCount{}, err
			} else {
				c = append(c, DailyCount{date.Time2Datestring(dayStart), len(comp), 1, false, false})
			}
		}
		sort.Sort(DailyCountDesc(c))

		// Now push back into datastore+memcache
		if err := cdb.putDailyCountSingleton(k, c); err != nil {
			cdb.Errorf("error storing counts singleton item: %v", err)
		}
	}

	cdb.Debugf("GDC_006", "all done")

	return c, nil
}
func generateMonthlyCSV(cdb complaintdb.ComplaintDB, month, year int) (string, int, error) {
	ctx := cdb.Ctx()
	bucketname := "serfr0-reports"

	now := date.NowInPdt()
	s := time.Date(int(year), time.Month(month), 1, 0, 0, 0, 0, now.Location())
	e := s.AddDate(0, 1, 0).Add(-1 * time.Second)
	log.Infof(ctx, "Starting /be/month: %s", s)

	// One time, at 00:00, for each day of the given month
	days := date.IntermediateMidnights(s.Add(-1*time.Second), e)

	filename := s.Format("complaints-20060102") + e.Format("-20060102.csv")
	gcsName := "gs://" + bucketname + "/" + filename

	if exists, err := gcs.Exists(ctx, bucketname, filename); err != nil {
		return gcsName, 0, fmt.Errorf("gcs.Exists=%v for gs://%s/%s (err=%v)", exists, bucketname, filename, err)
	} else if exists {
		return gcsName, 0, nil
	}

	gcsHandle, err := gcs.OpenRW(ctx, bucketname, filename, "text/plain")
	if err != nil {
		return gcsName, 0, err
	}
	csvWriter := csv.NewWriter(gcsHandle.IOWriter())

	cols := []string{
		"CallerCode", "Name", "Address", "Zip", "Email", "HomeLat", "HomeLong",
		"UnixEpoch", "Date", "Time(PDT)", "Notes", "ActivityDisturbed", "Flightnumber", "Notes",
		// Column names above are incorrect, but BKSV are used to them.
		//
		//"CallerCode", "Name", "Address", "Zip", "Email", "HomeLat", "HomeLong",
		//"UnixEpoch", "Date", "Time(PDT)", "Notes", "Flightnumber",
		//"ActivityDisturbed", "CcSFO",
	}
	csvWriter.Write(cols)

	tStart := time.Now()
	n := 0

	for _, dayStart := range days {
		dayEnd := dayStart.AddDate(0, 0, 1).Add(-1 * time.Second)
		log.Infof(ctx, " /be/month: %s - %s", dayStart, dayEnd)

		tIter := time.Now()
		iter := cdb.NewLongBatchingIter(cdb.QueryInSpan(dayStart, dayEnd))

		for {
			c, err := iter.NextWithErr()
			if err != nil {
				return gcsName, 0, fmt.Errorf("iterator failed after %s (%s): %v",
					time.Since(tIter), time.Since(tStart), err)
			}
			if c == nil {
				break
			}

			r := []string{
				c.Profile.CallerCode, c.Profile.FullName, c.Profile.Address,
				c.Profile.StructuredAddress.Zip, c.Profile.EmailAddress,
				fmt.Sprintf("%.4f", c.Profile.Lat), fmt.Sprintf("%.4f", c.Profile.Long),
				fmt.Sprintf("%d", c.Timestamp.UTC().Unix()),
				c.Timestamp.Format("2006/01/02"),
				c.Timestamp.Format("15:04:05"),
				c.Description, c.AircraftOverhead.FlightNumber, c.Activity,
				fmt.Sprintf("%v", c.Profile.CcSfo),
			}

			if err := csvWriter.Write(r); err != nil {
				return gcsName, 0, err
			}

			n++
		}
	}

	csvWriter.Flush()

	if err := gcsHandle.Close(); err != nil {
		return gcsName, 0, err
	}

	log.Infof(ctx, "monthly CSV successfully written to %s, %d rows", gcsName, n)

	return gcsName, n, nil
}
func monthTaskHandler(w http.ResponseWriter, r *http.Request) {
	//ctx,_ := context.WithTimeout(appengine.NewContext(r), 599*time.Second)
	ctx := appengine.NewContext(r)

	cdb := complaintdb.ComplaintDB{
		//C: oldappengine.NewContext(r),
		C: oldappengine.Timeout(oldappengine.NewContext(r), 599*time.Second),
	}

	year, err := strconv.ParseInt(r.FormValue("year"), 10, 64)
	if err != nil {
		http.Error(w, "need arg 'year' (2015)", http.StatusInternalServerError)
		return
	}
	month, err := strconv.ParseInt(r.FormValue("month"), 10, 64)
	if err != nil {
		http.Error(w, "need arg 'month' (1-12)", http.StatusInternalServerError)
		return
	}

	now := date.NowInPdt()
	s := time.Date(int(year), time.Month(month), 1, 0, 0, 0, 0, now.Location())
	e := s.AddDate(0, 1, 0).Add(-1 * time.Second)
	log.Infof(ctx, "Starting /be/month: %s", s)

	// One time, at 00:00, for each day of the given month
	days := date.IntermediateMidnights(s.Add(-1*time.Second), e)

	filename := s.Format("complaints-20060102") + e.Format("-20060102.csv")

	gcsHandle, err := gcs.OpenRW(ctx, "serfr0-reports", filename, "text/plain")
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	csvWriter := csv.NewWriter(gcsHandle.IOWriter())

	cols := []string{
		"CallerCode", "Name", "Address", "Zip", "Email", "HomeLat", "HomeLong",
		"UnixEpoch", "Date", "Time(PDT)", "Notes", "ActivityDisturbed", "Flightnumber", "Notes",
		// Column names above are incorrect, but BKSV are used to them.
		//
		//"CallerCode", "Name", "Address", "Zip", "Email", "HomeLat", "HomeLong",
		//"UnixEpoch", "Date", "Time(PDT)", "Notes", "Flightnumber",
		//"ActivityDisturbed", "CcSFO",
	}
	csvWriter.Write(cols)

	for _, dayStart := range days {
		dayEnd := dayStart.AddDate(0, 0, 1).Add(-1 * time.Second)
		log.Infof(ctx, " /be/month: %s - %s", dayStart, dayEnd)

		iter := cdb.NewIter(cdb.QueryInSpan(dayStart, dayEnd))
		for {
			c, err := iter.NextWithErr()
			if err != nil {
				http.Error(w, fmt.Sprintf("iterator failed: %v", err), http.StatusInternalServerError)
				return
			}
			if c == nil {
				break
			}

			r := []string{
				c.Profile.CallerCode, c.Profile.FullName, c.Profile.Address,
				c.Profile.StructuredAddress.Zip, c.Profile.EmailAddress,
				fmt.Sprintf("%.4f", c.Profile.Lat), fmt.Sprintf("%.4f", c.Profile.Long),
				fmt.Sprintf("%d", c.Timestamp.UTC().Unix()),
				c.Timestamp.Format("2006/01/02"),
				c.Timestamp.Format("15:04:05"),
				c.Description, c.AircraftOverhead.FlightNumber, c.Activity,
				fmt.Sprintf("%v", c.Profile.CcSfo),
			}

			if err := csvWriter.Write(r); err != nil {
				http.Error(w, err.Error(), http.StatusInternalServerError)
				return
			}
		}
	}

	csvWriter.Flush()

	if err := gcsHandle.Close(); err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	log.Infof(ctx, "GCS report '%s' successfully written", filename)

	w.Header().Set("Content-Type", "text/plain")
	w.Write([]byte(fmt.Sprintf("OK!\nGCS file '%s' written to bucket", filename)))
}
// http://stackoverflow.com/questions/13264555/store-an-object-in-memcache-of-gae-in-go
func (cdb *ComplaintDB) GetDailyCounts(email string) ([]DailyCount, error) {
	// cdb.C.Infof("-- GetDaily for %s", email)
	k := fmt.Sprintf("%s:daily", email)
	c := []DailyCount{}

	if _, err := memcache.Gob.Get(cdb.C, k, &c); err == memcache.ErrCacheMiss {
		// cache miss, but we don't care
	} else if err != nil {
		cdb.C.Errorf("error getting item: %v", err)
		return c, err
	}

	end, _ := date.WindowForYesterday() // end is the final day we count for; yesterday
	start := end                        // by default, this will trigger no lookups (start=end means no missing)

	if len(c) > 0 {
		start = date.Datestring2MidnightPdt(c[0].Datestring)
	} else {
		if complaint, err := cdb.GetOldestComplaintByEmailAddress(email); err != nil {
			cdb.C.Errorf("error looking up first complaint for %s: %v", email, err)
			return c, err
		} else if complaint != nil {
			// We move a day into the past; the algo below assumes we have data for the day 'start',
			// but in this case we don't; so trick it into generating data for today.
			start = date.AtLocalMidnight(complaint.Timestamp).AddDate(0, 0, -1)
			//cdb.C.Infof(" - lookup first ever, %s", complaint.Timestamp)
		} else {
			// cdb.C.Infof(" - lookup first ever, but empty\n")
		}
	}

	// Right after the first complaint: it set start to "now", but end is still yesterday.
	if start.After(end) {
		// cdb.C.Infof("--- s>e {%s} > {%s}\n", start, end)
		return c, nil
	}

	// We add a minute, to ensure that the day that contains 'end' is included
	missing := date.IntermediateMidnights(start, end.Add(time.Minute))
	// cdb.C.Infof("--- missing? --- {%s} -> {%s} == %d\n", start, end.Add(time.Minute), len(missing))
	if len(missing) > 0 {
		for _, m := range missing {
			dayStart, dayEnd := date.WindowForTime(m)
			if comp, err := cdb.GetComplaintsInSpanByEmailAddress(email, dayStart, dayEnd); err != nil {
				return []DailyCount{}, err
			} else {
				// cdb.C.Infof(" - {%s} n=%d [%v]\n", dayStart, len(comp), m)
				c = append(c, DailyCount{date.Time2Datestring(dayStart), len(comp), 1, false, false})
			}
		}
		sort.Sort(DailyCountDesc(c))

		// Now push back into memcache
		item := memcache.Item{Key: k, Object: c}
		if err := memcache.Gob.Set(cdb.C, &item); err != nil {
			cdb.C.Errorf("error setting item: %v", err)
		}
	}
	// cdb.C.Infof("--- done")

	return c, nil
}