Example #1
// An attempt to test request timeouts in deployed appengine; reproducing them proved elusive.
func slowHandler(w http.ResponseWriter, r *http.Request) {
	c := appengine.NewContext(r)
	d, _ := time.ParseDuration(r.FormValue("d")) // a parse failure leaves d == 0, so the loop below never runs

	tStart := time.Now()

	start, end := date.WindowForTime(tStart)
	end = end.Add(-1 * time.Second)
	str := ""

	for time.Since(tStart) < d {
		q := datastore.
			NewQuery(oldfgae.KFlightKind).
			Filter("EnterUTC >= ", start).
			Filter("EnterUTC < ", end).
			KeysOnly()
		keys, err := q.GetAll(c, nil)
		if err != nil {
			log.Errorf(c, "batch/day: GetAll: %v", err)
			http.Error(w, err.Error(), http.StatusInternalServerError)
			return
		}
		str += fmt.Sprintf("Found %d flight objects at %s\n", len(keys), time.Now())
		time.Sleep(2 * time.Second)
	}

	w.Header().Set("Content-Type", "text/plain")
	w.Write([]byte(fmt.Sprintf("OK, waited for %s !\n%s", r.FormValue("d"), str)))
}
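The handler above isn't registered anywhere in these examples; a minimal, hypothetical wiring (the route name is an assumption, not taken from the original code):

func init() {
	// Hypothetical route; the real registration is not shown in this example.
	http.HandleFunc("/backend/slowtest", slowHandler)
}

// e.g. GET /backend/slowtest?d=5m keeps the request busy for five minutes,
// re-running the keys-only query roughly every two seconds.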
Example #2
// Enqueues one task per flight, for every day in the requested date range
func batchFlightScanHandler(w http.ResponseWriter, r *http.Request) {
	c := appengine.NewContext(r)

	tags := []string{} // e.g. []string{"ADSB"}; maybe make this configurable ...

	n := 0
	str := ""
	s, e, _ := widget.FormValueDateRange(r)
	job := r.FormValue("job")
	if job == "" {
		http.Error(w, "Missing argument: &job=foo", http.StatusInternalServerError)
	}

	days := date.IntermediateMidnights(s.Add(-1*time.Second), e) // decrement start, to include it
	for _, day := range days {
		// Get the keys for all the flights on this day.
		fdb := oldfgae.FlightDB{C: oldappengine.NewContext(r)}

		dStart, dEnd := date.WindowForTime(day)
		dEnd = dEnd.Add(-1 * time.Second)
		keys, err := fdb.KeysInTimeRangeByTags(tags, dStart, dEnd)
		if err != nil {
			log.Errorf(c, "upgradeHandler: enqueue: %v", err)
			http.Error(w, err.Error(), http.StatusInternalServerError)
			return
		}

		singleFlightUrl := "/backend/fdb-batch/flight"
		for _, key := range keys {
			str += fmt.Sprintf("Enqueing day=%s: %s?job=%s&key=%s\n",
				day.Format("2006.01.02"), singleFlightUrl, job, key.Encode())

			t := taskqueue.NewPOSTTask(singleFlightUrl, map[string][]string{
				"date": {day.Format("2006.01.02")},
				"key":  {key.Encode()},
				"job":  {job},
			})

			if _, err := taskqueue.Add(c, t, "batch"); err != nil {
				log.Errorf(c, "upgradeHandler: enqueue: %v", err)
				http.Error(w, err.Error(), http.StatusInternalServerError)
				return
			}

			n++
		}
	}

	log.Infof(c, "enqueued %d batch items for '%s'", n, job)

	w.Header().Set("Content-Type", "text/plain")
	w.Write([]byte(fmt.Sprintf("OK, batch, enqueued %d tasks for %s\n%s", n, job, str)))
}
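The -1*time.Second and (in later examples) +time.Minute adjustments around date.IntermediateMidnights suggest it excludes both endpoints. Its implementation isn't shown here; this is only a sketch of the contract the callers appear to rely on, not the library's actual code:

// intermediateMidnights: a guess at the behaviour the handlers above assume, namely
// every midnight strictly after 'start' and strictly before 'end'. That is why
// callers nudge start back a second (to pick up its own midnight) and push end
// forward a minute (to pick up the final day's midnight).
func intermediateMidnights(start, end time.Time, loc *time.Location) []time.Time {
	mids := []time.Time{}
	s := start.In(loc)
	m := time.Date(s.Year(), s.Month(), s.Day(), 0, 0, 0, 0, loc).AddDate(0, 0, 1)
	for m.Before(end) {
		mids = append(mids, m)
		m = m.AddDate(0, 0, 1)
	}
	return mids
}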
Example #3
// Dequeue a single day, and enqueue a job for each flight on that day
func batchFlightDayHandler(w http.ResponseWriter, r *http.Request) {
	ctx := req2ctx(r)

	tags := []string{} // e.g. []string{"ADSB"}; maybe make this configurable ...

	n := 0
	str := ""
	job := r.FormValue("job")
	if job == "" {
		http.Error(w, "Missing argument: &job=foo", http.StatusInternalServerError)
	}

	day := date.ArbitraryDatestring2MidnightPdt(r.FormValue("day"), "2006/01/02")

	fdb := oldfgae.NewDB(r)

	dStart, dEnd := date.WindowForTime(day)
	dEnd = dEnd.Add(-1 * time.Second)
	keys, err := fdb.KeysInTimeRangeByTags(tags, dStart, dEnd)
	if err != nil {
		log.Errorf(ctx, "upgradeHandler: enqueue: %v", err)
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	singleFlightUrl := "/backend/fdb-batch/flight"
	for _, key := range keys {
		str += fmt.Sprintf("Enqueing day=%s: %s?job=%s&key=%s\n",
			day.Format("2006.01.02"), singleFlightUrl, job, key.Encode())

		if r.FormValue("dryrun") == "" {
			t := taskqueue.NewPOSTTask(singleFlightUrl, map[string][]string{
				// "date": {day.Format("2006.01.02")},
				"key": {key.Encode()},
				"job": {job},
			})

			if _, err := taskqueue.Add(ctx, t, "batch"); err != nil {
				log.Errorf(ctx, "upgradeHandler: enqueue: %v", err)
				http.Error(w, err.Error(), http.StatusInternalServerError)
				return
			}
		}

		n++
	}

	log.Infof(ctx, "enqueued %d batch items for '%s'", n, job)

	w.Header().Set("Content-Type", "text/plain")
	w.Write([]byte(fmt.Sprintf("OK, batch, enqueued %d tasks for %s\n%s", n, job, str)))
}
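Each task enqueued above POSTs a 'job' and an encoded datastore 'key' to /backend/fdb-batch/flight; the per-flight worker on the receiving end is not part of these examples. A hypothetical stub, just to show the shape of the consumer (name and body are assumptions):

// Hypothetical sketch of the per-flight worker these handlers enqueue work for.
func batchSingleFlightHandler(w http.ResponseWriter, r *http.Request) {
	ctx := req2ctx(r)

	job := r.FormValue("job")
	key, err := datastore.DecodeKey(r.FormValue("key"))
	if err != nil {
		http.Error(w, err.Error(), http.StatusBadRequest)
		return
	}

	// ... load the flight behind 'key' and run 'job' against it ...
	log.Infof(ctx, "would run job '%s' on flight %s", job, key.Encode())
	w.WriteHeader(http.StatusOK)
}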
Example #4
func (cdb ComplaintDB) ResetGlobalStats() {
	if err := cdb.DeletAllGlobalStats(); err != nil {
		cdb.Errorf("Reset/DeleteAll fail, %v", err)
		return
	}

	profiles, err := cdb.GetAllProfiles()
	if err != nil {
		cdb.Errorf("Reset/GetAllProfiles fail, %v", err)
		return
	}

	// Upon reset (writing a fresh new singleton), we need to generate a key
	rootKey := datastore.NewKey(cdb.Ctx(), kGlobalStatsKind, "foo", 0, nil)
	key := datastore.NewIncompleteKey(cdb.Ctx(), kGlobalStatsKind, rootKey)
	gs := GlobalStats{
		DatastoreKey: key.Encode(),
	}

	// This is too slow to recalculate this way; it runs into the 10m timeout
	//start := date.ArbitraryDatestring2MidnightPdt("2015/08/09", "2006/01/02").Add(-1 * time.Second)
	start := date.ArbitraryDatestring2MidnightPdt("2016/03/15", "2006/01/02").Add(-1 * time.Second)
	end, _ := date.WindowForYesterday() // end is the final day we count for; yesterday

	midnights := date.IntermediateMidnights(start, end.Add(time.Minute))
	for _, m := range midnights {
		dayStart, dayEnd := date.WindowForTime(m)

		dc := DailyCount{Datestring: date.Time2Datestring(dayStart)}

		for _, p := range profiles {
			if keys, err := cdb.GetComplaintKeysInSpanByEmailAddress(dayStart, dayEnd, p.EmailAddress); err != nil {
				cdb.Errorf("Reset/Lookup fail, %v", err)
			} else if len(keys) > 0 {
				dc.NumComplaints += len(keys)
				dc.NumComplainers += 1
			}
		}
		gs.Counts = append(gs.Counts, dc)
	}

	if err := cdb.SaveGlobalStats(gs); err != nil {
		cdb.Errorf("Reset/Save fail, %v", err)
	}
}
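GlobalStats and DailyCount aren't defined in these examples; the following is only a sketch of the shape implied by the fields used above (the two trailing bool field names are guesses, prompted by the positional literals in Examples #6 and #7):

// Sketched from usage only; the real definitions live elsewhere and may differ.
type DailyCount struct {
	Datestring     string // as produced by date.Time2Datestring
	NumComplaints  int
	NumComplainers int
	// Examples #6 and #7 construct DailyCount with two extra bool fields;
	// these names are hypothetical.
	IsMaxSoFar bool
	IsBackfill bool
}

type GlobalStats struct {
	DatastoreKey string // encoded *datastore.Key of the singleton
	Counts       []DailyCount
}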
Example #5
func (cdb ComplaintDB) ResetGlobalStats() {
	if err := cdb.DeletAllGlobalStats(); err != nil {
		cdb.C.Errorf("Reset/DeleteAll fail, %v", err)
		return
	}

	profiles, err := cdb.GetAllProfiles()
	if err != nil {
		cdb.C.Errorf("Reset/GetAllProfiles fail, %v", err)
		return
	}

	// Upon reset (writing a fresh new singleton), we need to generate a key
	rootKey := datastore.NewKey(cdb.C, kGlobalStatsKind, "foo", 0, nil)
	key := datastore.NewIncompleteKey(cdb.C, kGlobalStatsKind, rootKey)
	gs := GlobalStats{
		DatastoreKey: key.Encode(),
	}

	end, _ := date.WindowForYesterday() // end is the final day we count for; yesterday
	start := end.AddDate(0, 0, -100)
	midnights := date.IntermediateMidnights(start, end.Add(time.Minute))
	for _, m := range midnights {
		dayStart, dayEnd := date.WindowForTime(m)

		dc := DailyCount{Datestring: date.Time2Datestring(dayStart)}

		for _, p := range profiles {
			if comp, err := cdb.GetComplaintsInSpanByEmailAddress(p.EmailAddress, dayStart, dayEnd); err != nil {
				cdb.C.Errorf("Reset/Lookup fail, %v", err)
			} else if len(comp) > 0 {
				dc.NumComplaints += len(comp)
				dc.NumComplainers += 1
			}
		}
		gs.Counts = append(gs.Counts, dc)
	}

	if err := cdb.SaveGlobalStats(gs); err != nil {
		cdb.C.Errorf("Reset/Save fail, %v", err)
	}
	cdb.C.Infof("-- reset !--")
	//cdb.LoadGlobalStats();
}
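Example #5 is evidently an older revision of Example #4: it fetches full complaint entities (GetComplaintsInSpanByEmailAddress) where #4 only needs keys to count them. Against the newer context-based datastore package, such a keys-only lookup might look roughly like this (kind and property names are assumptions):

// Hypothetical keys-only span lookup; the caller only needs len(keys).
func complaintKeysInSpan(ctx context.Context, email string, s, e time.Time) ([]*datastore.Key, error) {
	q := datastore.
		NewQuery("complaint").                   // assumed kind name
		Filter("Profile.EmailAddress =", email). // assumed property names
		Filter("Timestamp >=", s).
		Filter("Timestamp <", e).
		KeysOnly()
	return q.GetAll(ctx, nil)
}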
Example #6
func (cdb *ComplaintDB) GetDailyCounts(email string) ([]DailyCount, error) {
	k := fmt.Sprintf("%s:dailycounts", email) // The 'counts' is so we can have diff memcache objects
	c := []DailyCount{}

	cdb.Debugf("GDC_001", "GetDailyCounts() starting")

	// might return: datastore.ErrNoSuchEntity
	if dcs, err := cdb.fetchDailyCountSingleton(k); err == datastore.ErrNoSuchEntity {
		// Singleton not found; we don't care; treat same as empty singleton.
	} else if err != nil {
		cdb.Errorf("error getting item: %v", err)
		return c, err
	} else {
		c = dcs
	}
	cdb.Debugf("GDC_002", "singleton lookup done (%d entries)", len(c))

	end, _ := date.WindowForYesterday() // end is the final day we count for; yesterday
	start := end                        // by default, this will trigger no lookups (start=end means no missing)

	if len(c) > 0 {
		start = date.Datestring2MidnightPdt(c[0].Datestring)
	} else {
		cdb.Debugf("GDC_003", "counts empty ! track down oldest every, to start iteration range")
		if complaint, err := cdb.GetOldestComplaintByEmailAddress(email); err != nil {
			cdb.Errorf("error looking up first complaint for %s: %v", email, err)
			return c, err
		} else if complaint != nil {
			// We move a day into the past; the algo below assumes we have data for the day 'start',
			// but in this case we don't; so trick it into generating data for today.
			start = date.AtLocalMidnight(complaint.Timestamp).AddDate(0, 0, -1)
		} else {
			// cdb.Infof("  - lookup first ever, but empty\n")
		}
		cdb.Debugf("GDC_004", "start point found")
	}

	// Right after the first complaint: it set start to "now", but end is still yesterday.
	if start.After(end) {
		return c, nil
	}

	// We add a minute, to ensure that the day that contains 'end' is included
	missing := date.IntermediateMidnights(start, end.Add(time.Minute))
	if len(missing) > 0 {
		for _, m := range missing {
			cdb.Debugf("GDC_005", "looking up a single span")
			dayStart, dayEnd := date.WindowForTime(m)
			if comp, err := cdb.GetComplaintsInSpanByEmailAddress(email, dayStart, dayEnd); err != nil {
				return []DailyCount{}, err
			} else {
				c = append(c, DailyCount{date.Time2Datestring(dayStart), len(comp), 1, false, false})
			}
		}
		sort.Sort(DailyCountDesc(c))

		// Now push back into datastore+memcache
		if err := cdb.putDailyCountSingleton(k, c); err != nil {
			cdb.Errorf("error storing counts singleton item: %v", err)
		}
	}
	cdb.Debugf("GDC_006", "all done")

	return c, nil
}
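sort.Sort(DailyCountDesc(c)) implies a sort.Interface wrapper that orders the counts newest-first (consistent with c[0].Datestring being treated as the most recent day). It isn't defined in these examples; a sketch of what it presumably looks like:

// Presumed shape of DailyCountDesc: most recent datestring first.
type DailyCountDesc []DailyCount

func (a DailyCountDesc) Len() int           { return len(a) }
func (a DailyCountDesc) Swap(i, j int)      { a[i], a[j] = a[j], a[i] }
func (a DailyCountDesc) Less(i, j int) bool { return a[i].Datestring > a[j].Datestring }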
Example #7
// http://stackoverflow.com/questions/13264555/store-an-object-in-memcache-of-gae-in-go
func (cdb *ComplaintDB) GetDailyCounts(email string) ([]DailyCount, error) {
	// cdb.C.Infof("-- GetDaily for %s", email)

	k := fmt.Sprintf("%s:daily", email)
	c := []DailyCount{}

	if _, err := memcache.Gob.Get(cdb.C, k, &c); err == memcache.ErrCacheMiss {
		// cache miss, but we don't care
	} else if err != nil {
		cdb.C.Errorf("error getting item: %v", err)
		return c, err
	}

	end, _ := date.WindowForYesterday() // end is the final day we count for; yesterday
	start := end                        // by default, this will trigger no lookups (start=end means no missing)

	if len(c) > 0 {
		start = date.Datestring2MidnightPdt(c[0].Datestring)
	} else {
		if complaint, err := cdb.GetOldestComplaintByEmailAddress(email); err != nil {
			cdb.C.Errorf("error looking up first complaint for %s: %v", email, err)
			return c, err
		} else if complaint != nil {
			// We move a day into the past; the algo below assumes we have data for the day 'start',
			// but in this case we don't; so trick it into generating data for today.
			start = date.AtLocalMidnight(complaint.Timestamp).AddDate(0, 0, -1)
			//cdb.C.Infof("  - lookup first ever, %s", complaint.Timestamp)
		} else {
			// cdb.C.Infof("  - lookup first ever, but empty\n")
		}
	}

	// Right after the first complaint: it set start to "now", but end is still yesterday.
	if start.After(end) {
		// cdb.C.Infof("--- s>e {%s} > {%s}\n", start, end)
		return c, nil
	}

	// We add a minute, to ensure that the day that contains 'end' is included
	missing := date.IntermediateMidnights(start, end.Add(time.Minute))
	// cdb.C.Infof("--- missing? --- {%s} -> {%s} == %d\n", start, end.Add(time.Minute), len(missing))
	if len(missing) > 0 {
		for _, m := range missing {
			dayStart, dayEnd := date.WindowForTime(m)
			if comp, err := cdb.GetComplaintsInSpanByEmailAddress(email, dayStart, dayEnd); err != nil {
				return []DailyCount{}, err
			} else {
				// cdb.C.Infof("  -  {%s}  n=%d [%v]\n", dayStart, len(comp), m)
				c = append(c, DailyCount{date.Time2Datestring(dayStart), len(comp), 1, false, false})
			}
		}
		sort.Sort(DailyCountDesc(c))

		// Now push back into memcache
		item := memcache.Item{Key: k, Object: c}
		if err := memcache.Gob.Set(cdb.C, &item); err != nil {
			cdb.C.Errorf("error setting item: %v", err)
		}
	}

	// cdb.C.Infof("--- done")
	return c, nil
}
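The memcache.Gob codec used here (see the Stack Overflow link above) gob-encodes arbitrary Go values under a string key. Extracted into a standalone sketch, with the helper names being placeholders:

// Minimal sketch of the classic App Engine memcache gob-codec pattern used above.
func cachedCounts(c appengine.Context, key string) ([]DailyCount, bool) {
	counts := []DailyCount{}
	if _, err := memcache.Gob.Get(c, key, &counts); err != nil {
		return nil, false // treat ErrCacheMiss and any other error as a miss
	}
	return counts, true
}

func storeCounts(c appengine.Context, key string, counts []DailyCount) error {
	return memcache.Gob.Set(c, &memcache.Item{Key: key, Object: counts})
}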