Example No. 1
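// UpdateFeeds finds Feed keys whose "n" property is not after the current time,
// enqueues an update-feed POST task for each key in batches of 100, and deletes
// any keys that have an empty string ID.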
func UpdateFeeds(c mpg.Context, w http.ResponseWriter, r *http.Request) {
	gn := goon.FromContext(c)
	q := datastore.NewQuery(gn.Key(&Feed{}).Kind()).KeysOnly()
	q = q.Filter("n <=", time.Now())

	q = q.Limit(1000)
	it := gn.Run(q)
	var keys []*datastore.Key
	var del []*datastore.Key
	for {
		k, err := it.Next(nil)
		if err == datastore.Done {
			break
		} else if err != nil {
			c.Errorf("next error: %v", err.Error())
			break
		} else if len(k.StringID()) == 0 {
			del = append(del, k)
			continue
		}
		keys = append(keys, k)
	}
	tasks := make([]*taskqueue.Task, len(keys))
	for i, k := range keys {
		tasks[i] = taskqueue.NewPOSTTask(routeUrl("update-feed"), url.Values{
			"feed": {k.StringID()},
		})
	}
	var ts []*taskqueue.Task
	const taskLimit = 100
	for len(tasks) > 0 {
		if len(tasks) > taskLimit {
			ts = tasks[:taskLimit]
			tasks = tasks[taskLimit:]
		} else {
			ts = tasks
			tasks = tasks[0:0]
		}
		if _, err := taskqueue.AddMulti(c, ts, "update-feed"); err != nil {
			c.Errorf("taskqueue error: %v", err.Error())
		}
	}
	c.Infof("updating %d feeds", len(keys))

	if len(del) > 0 {
		c.Errorf("attempt to delete %v feeds", len(del))
		if err := gn.DeleteMulti(del); err != nil {
			c.Errorf("delete error: %v", err.Error())
		}
		fmt.Fprintf(w, `<html><head><meta http-equiv="refresh" content="0"></head></html>`)
		fmt.Fprintf(w, "attempt to delete %v feeds", len(del))
		for _, k := range del {
			fmt.Fprintf(w, "\n<br>%v", k)
		}
	}
	fmt.Fprintf(w, "updating %d feeds", len(keys))
}
Example No. 2
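// taskHandler enqueues 12 POST tasks to the "read" queue with a single AddMulti call.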
func taskHandler(w http.ResponseWriter, r *http.Request) {
	max := 12
	var tasks []*taskqueue.Task
	for i := 0; i < max; i++ {
		tasks = append(tasks, taskqueue.NewPOSTTask("/", nil))
	}
	c := appengine.NewContext(r)
	if _, err := taskqueue.AddMulti(c, tasks, "read"); err != nil {
		c.Errorf("AddMulti: %v", err)
	}
}
Example No. 3
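// TestTasks exercises Add, QueueStats, Purge, and AddMulti against a test queue.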
func TestTasks(t *testing.T) {

	c, err := NewContext(&Options{TaskQueues: []string{"testQueue"}})
	if err != nil {
		t.Fatalf("NewContext: %v", err)
	}
	defer c.Close()

	task := taskqueue.NewPOSTTask("/post", map[string][]string{})
	_, err = taskqueue.Add(c, task, "testQueue")
	if err != nil {
		t.Fatalf("Could not add task to queue")
	}
	stats, err := taskqueue.QueueStats(c, []string{"testQueue"}, 0) // fetch all of them
	if err != nil {
		t.Fatalf("Could not get taskqueue statistics")
	}
	t.Logf("TaskStatistics = %#v", stats)
	if len(stats) == 0 {
		t.Fatalf("Queue statistics are empty")
	} else if stats[0].Tasks != 1 {
		t.Fatalf("Could not find the task we just added")
	}

	err = taskqueue.Purge(c, "testQueue")
	if err != nil {
		t.Fatalf("Could not purge the queue")
	}
	stats, err = taskqueue.QueueStats(c, []string{"testQueue"}, 0) // fetch all of them
	if err != nil {
		t.Fatalf("Could not get taskqueue statistics")
	}
	if len(stats) == 0 {
		t.Fatalf("Queue statistics are empty")
	}
	if stats[0].Tasks != 0 {
		t.Fatalf("Purge command not successful")
	}

	tasks := []*taskqueue.Task{
		taskqueue.NewPOSTTask("/post1", map[string][]string{}),
		taskqueue.NewPOSTTask("/post2", map[string][]string{}),
	}
	_, err = taskqueue.AddMulti(c, tasks, "testQueue")
	if err != nil {
		t.Fatalf("Could not add bulk tasklist to queue")
	}
	stats, err = taskqueue.QueueStats(c, []string{"testQueue"}, 0) // fetch all of them
	if err != nil {
		t.Fatalf("Could not get taskqueue statistics")
	}
	if len(stats) == 0 {
		t.Fatalf("Could not find the tasks we just added")
	} else if stats[0].Tasks != 2 {
		t.Fatalf("Could not find the tasks we just added")
	}

}
Example No. 4
// addTasks adds the provided tasks in batches of 100 or less.
// This sidesteps the taskqueue API's limit of 100 tasks per AddMulti call.
func addTasks(c appengine.Context, tasks []*taskqueue.Task, queue string) error {
	n := 100
	for len(tasks) > 0 {
		if len(tasks) < n {
			n = len(tasks)
		}
		_, err := taskqueue.AddMulti(c, tasks[:n], queue)
		if err != nil {
			return err
		}
		tasks = tasks[n:]
	}
	return nil
}
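A minimal usage sketch for the helper above, assuming a hypothetical caller that fans work out to a queue named "work" (the function name, task path, and queue name are illustrative, not part of the original example):

func enqueueWork(c appengine.Context, ids []string) error {
	// Build one POST task per id; "/work" and the "id" field are placeholders.
	tasks := make([]*taskqueue.Task, 0, len(ids))
	for _, id := range ids {
		tasks = append(tasks, taskqueue.NewPOSTTask("/work", url.Values{"id": {id}}))
	}
	// addTasks splits the slice into chunks of at most 100 per AddMulti call.
	return addTasks(c, tasks, "work")
}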
Example No. 5
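// submitBulkUpdateUnique re-submits the posted form as N bulkUpdateUnique tasks
// (N comes from the "times" form value, defaulting to 1), flushing them to the
// "bulkupdateunique" queue in batches of 100.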
func submitBulkUpdateUnique(c appengine.Context, w http.ResponseWriter, r *http.Request) {
	r.ParseForm()
	times := 1
	tstr := r.FormValue("times")
	if tstr != "" {
		var err error
		times, err = strconv.Atoi(tstr)
		if err != nil {
			times = 1
		}
	}

	header := http.Header{
		"Content-Type": []string{"application/x-www-form-urlencoded"},
	}

	tasks := []*taskqueue.Task{}
	for ; times > 0; times-- {
		tasks = append(tasks, &taskqueue.Task{
			Path:    "/batch/bulkUpdateUnique",
			Payload: []byte(r.Form.Encode()),
			Header:  header,
		})
		if len(tasks) >= 100 {
			_, err := taskqueue.AddMulti(c, tasks, "bulkupdateunique")
			c.Infof("Submitted %v tasks: %v", len(tasks), err)
			tasks = nil
		}
	}
	if len(tasks) > 0 { // flush the final partial batch
		_, err := taskqueue.AddMulti(c, tasks, "bulkupdateunique")
		c.Infof("Submitted %v tasks (final): %v", len(tasks), err)
	}

	w.WriteHeader(202)
}
Example No. 6
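// UpdateFeeds collects up to 3000 due feed keys, restarting the query on
// iterator errors (up to 5 retries), then enqueues an update-feed task per key
// in batches of 100.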
func UpdateFeeds(c mpg.Context, w http.ResponseWriter, r *http.Request) {
	q := datastore.NewQuery("F").KeysOnly()
	q = q.Filter("n <=", time.Now())
	q = q.Limit(3000)
	var keys []*datastore.Key
	it := q.Run(c)
	retry := 0
	for {
		k, err := it.Next(nil)
		if err == datastore.Done {
			break
		} else if err != nil {
			c.Errorf("next error: %v, retry: %v", err.Error(), retry)
			if retry == 5 {
				break
			}
			retry++
			// Restart the query and drop keys collected so far to avoid duplicates.
			keys = keys[:0]
			it = q.Run(c)
		} else {
			keys = append(keys, k)
		}
	}
	if len(keys) == 0 {
		c.Errorf("giving up")
		return
	}

	tasks := make([]*taskqueue.Task, len(keys))
	for i, k := range keys {
		c.Infof("task: %v, %v", i, k)
		tasks[i] = taskqueue.NewPOSTTask(routeUrl("update-feed"), url.Values{
			"feed": {k.StringID()},
		})
	}
	var ts []*taskqueue.Task
	const taskLimit = 100
	for len(tasks) > 0 {
		if len(tasks) > taskLimit {
			ts = tasks[:taskLimit]
			tasks = tasks[taskLimit:]
		} else {
			ts = tasks
			tasks = tasks[0:0]
		}
		if _, err := taskqueue.AddMulti(c, ts, "update-feed"); err != nil {
			c.Errorf("taskqueue error: %v", err.Error())
		}
	}
	c.Infof("updating %d feeds", len(keys))
}
Example No. 7
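// UpdateFeeds collects due feed keys under a 60-second datastore timeout and
// enqueues an update-feed task per key in batches of 100.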
func UpdateFeeds(c mpg.Context, w http.ResponseWriter, r *http.Request) {
	q := datastore.NewQuery("F").KeysOnly().Filter("n <=", time.Now())
	q = q.Limit(10 * 60 * 20) // 10/s queue, 20 min cron
	var keys []*datastore.Key
	it := q.Run(appengine.Timeout(c, time.Second*60))
	for {
		k, err := it.Next(nil)
		if err == datastore.Done {
			break
		} else if err != nil {
			c.Errorf("next error: %v", err.Error())
			break
		}
		keys = append(keys, k)
	}

	if len(keys) == 0 {
		c.Errorf("no results")
		return
	}
	c.Infof("updating %d feeds", len(keys))

	var tasks []*taskqueue.Task
	for _, k := range keys {
		tasks = append(tasks, taskqueue.NewPOSTTask(routeUrl("update-feed"), url.Values{
			"feed": {k.StringID()},
		}))
	}
	var ts []*taskqueue.Task
	const taskLimit = 100
	for len(tasks) > 0 {
		if len(tasks) > taskLimit {
			ts = tasks[:taskLimit]
			tasks = tasks[taskLimit:]
		} else {
			ts = tasks
			tasks = tasks[0:0]
		}
		if _, err := taskqueue.AddMulti(c, ts, "update-feed"); err != nil {
			c.Errorf("taskqueue error: %v", err.Error())
		}
	}
}
Example No. 8
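// taskSender batches tasks received on tc and adds them to the named queue
// 100 at a time, flushing any remainder once tc is closed.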
func taskSender(c mpg.Context, queue string, tc chan *taskqueue.Task, done chan bool) {
	const taskLimit = 100
	tasks := make([]*taskqueue.Task, 0, taskLimit)
	send := func() {
		if _, err := taskqueue.AddMulti(c, tasks, queue); err != nil {
			c.Errorf("AddMulti: %v", err)
		}
		c.Infof("added %v tasks", len(tasks))
		tasks = tasks[0:0]
	}
	for t := range tc {
		tasks = append(tasks, t)
		if len(tasks) == taskLimit {
			send()
		}
	}
	if len(tasks) > 0 {
		send()
	}
	done <- true
}
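A short sketch of how the sender above might be driven from a producer; enqueueFeedUpdates, the feedUrls slice, and the "update-manual" queue name are assumptions for illustration (routeUrl is the same helper used elsewhere on this page):

func enqueueFeedUpdates(c mpg.Context, feedUrls []string) {
	tc := make(chan *taskqueue.Task)
	done := make(chan bool)
	go taskSender(c, "update-manual", tc, done)
	for _, u := range feedUrls {
		tc <- taskqueue.NewPOSTTask(routeUrl("update-feed"), url.Values{"feed": {u}})
	}
	close(tc) // no more tasks: the sender flushes the final partial batch
	<-done    // wait until the last AddMulti has been issued
}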
Example No. 9
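// UpdateFeeds queries up to 2500 due feed keys and enqueues an update-feed
// task per key in batches of 100, reporting the count in the response.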
func UpdateFeeds(c mpg.Context, w http.ResponseWriter, r *http.Request) {
	gn := goon.FromContext(c)
	q := datastore.NewQuery(gn.Key(&Feed{}).Kind()).KeysOnly()
	q = q.Filter("n <=", time.Now())

	q = q.Limit(2500)
	it := gn.Run(q)
	var keys []*datastore.Key
	for {
		k, err := it.Next(nil)
		if err == datastore.Done {
			break
		} else if err != nil {
			c.Errorf("next error: %v", err.Error())
			break
		}
		keys = append(keys, k)
	}
	tasks := make([]*taskqueue.Task, len(keys))
	for i, k := range keys {
		tasks[i] = taskqueue.NewPOSTTask(routeUrl("update-feed"), url.Values{
			"feed": {k.StringID()},
		})
	}
	var ts []*taskqueue.Task
	const taskLimit = 100
	for len(tasks) > 0 {
		if len(tasks) > taskLimit {
			ts = tasks[:taskLimit]
			tasks = tasks[taskLimit:]
		} else {
			ts = tasks
			tasks = tasks[0:0]
		}
		if _, err := taskqueue.AddMulti(c, ts, "update-feed"); err != nil {
			c.Errorf("taskqueue error: %v", err.Error())
		}
	}
	c.Infof("updating %d feeds", len(keys))
	fmt.Fprintf(w, "updating %d feeds", len(keys))
}
Example No. 10
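// UpdateFeeds fetches up to 3000 due feed keys with GetAll, retrying the query
// up to 5 times, then enqueues an update-feed task per key in batches of 100.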
func UpdateFeeds(c mpg.Context, w http.ResponseWriter, r *http.Request) {
	q := datastore.NewQuery("F").KeysOnly()
	q = q.Filter("n <=", time.Now())
	q = q.Limit(3000)
	var keys []*datastore.Key
	for i := 0; i < 5; i++ {
		if _keys, err := q.GetAll(c, nil); err != nil {
			c.Errorf("get all error: %v, retry %v", err.Error(), i)
		} else {
			c.Errorf("got %v keys", len(_keys))
			keys = _keys
			break
		}
	}
	if len(keys) == 0 {
		c.Errorf("giving up")
		return
	}

	tasks := make([]*taskqueue.Task, len(keys))
	for i, k := range keys {
		tasks[i] = taskqueue.NewPOSTTask(routeUrl("update-feed"), url.Values{
			"feed": {k.StringID()},
		})
	}
	var ts []*taskqueue.Task
	const taskLimit = 100
	for len(tasks) > 0 {
		if len(tasks) > taskLimit {
			ts = tasks[:taskLimit]
			tasks = tasks[taskLimit:]
		} else {
			ts = tasks
			tasks = tasks[0:0]
		}
		if _, err := taskqueue.AddMulti(c, ts, "update-feed"); err != nil {
			c.Errorf("taskqueue error: %v", err.Error())
		}
	}
	c.Infof("updating %d feeds", len(keys))
}
Example No. 11
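// RSVPMeetupEvents purges the "futurersvp" queue, builds one task per event
// accepted by ProcessEvent, and re-adds them with a single AddMulti call.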
func RSVPMeetupEvents(events []MeetupEvent, r *http.Request) string {
	c := appengine.NewContext(r)
	queuetasks := make([]*taskqueue.Task, 0)

	taskqueue.Purge(c, "futurersvp")

	for _, event := range events {
		t := ProcessEvent(event, r)
		if t != nil {
			queuetasks = append(queuetasks, t)
		}
	}

	if _, err := taskqueue.AddMulti(c, queuetasks, "futurersvp"); err != nil {
		log.Printf("AddMulti error: %v", err)
	}

	str := fmt.Sprintf("Total Number of Meetups processed: %d\n", len(events))
	str = str + fmt.Sprintf("Total Number of future Meetups queued: %d\n", len(queuetasks))
	log.Print(str)

	return str
}
Example No. 12
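// load enqueues "count" POST tasks (default 10) on the auto-worker-push queue,
// each carrying the requested delay, and reports the outcome to the caller.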
func load(w http.ResponseWriter, r *http.Request, c appengine.Context, output *crBuffer) int {
	// Determine how many tasks to queue
	count := 10
	countStr := r.URL.Query().Get("count")
	if countStr != "" {
		parsedCount, err := strconv.ParseInt(countStr, 10, 64)
		if err != nil {
			output.WriteLine("Could not parse count: %s", countStr)
			return http.StatusBadRequest
		}
		if parsedCount <= 0 {
			output.WriteLine("Invalid count specified: %d", parsedCount)
			return http.StatusBadRequest
		}
		count = int(parsedCount)
	}

	delayStr := r.URL.Query().Get("delay")
	if delayStr == "" {
		delayStr = "10"
	}

	tasks := make([]*taskqueue.Task, count)
	for i := 0; i < count; i++ {
		tasks[i] = taskqueue.NewPOSTTask(
			"/work",
			url.Values{
				"delay": []string{delayStr},
			})
	}
	_, err := taskqueue.AddMulti(c, tasks, "auto-worker-push")
	if err != nil {
		c.Errorf("Error loading tasks: %s", err)
		output.WriteLine("Error loading tasks")
		return http.StatusInternalServerError
	}

	output.WriteLine("Loaded %d tasks into queue with delay %s", count, delayStr)
	return http.StatusOK
}
Example No. 13
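// DeleteOldFeeds walks Feed keys whose "n" equals timeMax, enqueues up to 100
// delete-old-feed tasks per run, and re-enqueues itself with a cursor until the
// query is exhausted.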
func DeleteOldFeeds(c mpg.Context, w http.ResponseWriter, r *http.Request) {
	ctx := appengine.Timeout(c, time.Minute)
	gn := goon.FromContext(c)
	q := datastore.NewQuery(gn.Kind(&Feed{})).Filter("n=", timeMax).KeysOnly()
	if cur, err := datastore.DecodeCursor(r.FormValue("c")); err == nil {
		q = q.Start(cur)
	}
	it := q.Run(ctx)
	done := false
	var tasks []*taskqueue.Task
	for i := 0; i < 10000 && len(tasks) < 100; i++ {
		k, err := it.Next(nil)
		if err == datastore.Done {
			c.Criticalf("done")
			done = true
			break
		} else if err != nil {
			c.Errorf("err: %v", err)
			continue
		}
		values := make(url.Values)
		values.Add("f", k.StringID())
		tasks = append(tasks, taskqueue.NewPOSTTask("/tasks/delete-old-feed", values))
	}
	if len(tasks) > 0 {
		c.Errorf("deleting %v feeds", len(tasks))
		if _, err := taskqueue.AddMulti(c, tasks, ""); err != nil {
			c.Errorf("err: %v", err)
		}
	}
	if !done {
		if cur, err := it.Cursor(); err == nil {
			values := make(url.Values)
			values.Add("c", cur.String())
			taskqueue.Add(c, taskqueue.NewPOSTTask("/tasks/delete-old-feeds", values), "")
		} else {
			c.Errorf("err: %v", err)
		}
	}
}
Example No. 14
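// batchPrepareUnique reads a gzipped JSON list of datastore keys, loads the
// corresponding Stats entities, and enqueues a tagged pull task for each entity
// with a UUID before kicking off a bulk updater task.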
func batchPrepareUnique(c appengine.Context, w http.ResponseWriter, r *http.Request) {
	keyStr := []string{}
	z, err := gzip.NewReader(r.Body)
	maybePanic(err)
	d := json.NewDecoder(z)
	maybePanic(d.Decode(&keyStr))
	c.Infof("Got %v keys to pull", len(keyStr))

	keys := []*datastore.Key{}
	obs := make([]oldLoader, len(keyStr))
	for i, k := range keyStr {
		key, err := datastore.DecodeKey(k)
		if err != nil {
			c.Errorf("Error decoding key: %v: %v", k, err)
			http.Error(w, err.Error(), 500)
			return
		}
		keys = append(keys, key)

		obs[i] = oldLoader{c: c, into: &Stats{}}
	}

	err = datastore.GetMulti(c, keys, obs)
	if err != nil {
		c.Errorf("Error grabbing the things: %v", err)
		http.Error(w, err.Error(), 500)
		return
	}

	tasks := []*taskqueue.Task{}
	for i, ol := range obs {
		s := ol.into.(*Stats)
		s.uncompress()
		uuid, err := getUUID(*s)
		if err != nil {
			c.Errorf("Couldn't find UUID from %v: %v", keyStr[i], err)
		}
		if uuid != "" {
			tasks = append(tasks, &taskqueue.Task{
				Method:  "PULL",
				Tag:     uuid,
				Payload: []byte(keyStr[i]),
			})
		}
	}

	_, err = taskqueue.AddMulti(c, tasks, "updateUniquePull")
	c.Infof("Response for adding %v pull tasks: %v", len(tasks), err)
	if err != nil {
		c.Errorf("Error queueing tasks:  %v", err)
		http.Error(w, err.Error(), 500)
		return
	}
	c.Infof("Queued %v tasks", len(tasks))

	// Try to submit a bulk updater.  This can cause a chain
	// reaction.  It'll be awesome
	taskqueue.Add(c, taskqueue.NewPOSTTask("/batch/bulkUpdateUnique", url.Values{}),
		"bulkupdateunique")

	w.WriteHeader(204)
}
Example No. 15
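// CFixer pages through up to 1000 Feed keys per run (continuing from an
// optional cursor), re-enqueues itself with the next cursor, and adds a cfix
// task per key to the "cfixer" queue in batches of 100.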
func CFixer(c mpg.Context, w http.ResponseWriter, r *http.Request) {
	q := datastore.NewQuery("F").KeysOnly()
	q = q.Limit(1000)
	cs := r.FormValue("c")
	if len(cs) > 0 {
		if cur, err := datastore.DecodeCursor(cs); err == nil {
			q = q.Start(cur)
			c.Infof("starting at %v", cur)
		} else {
			c.Errorf("cursor error %v", err.Error())
		}
	}
	var keys []*datastore.Key
	it := q.Run(Timeout(c, time.Second*15))
	for {
		k, err := it.Next(nil)
		if err == datastore.Done {
			break
		} else if err != nil {
			c.Errorf("next error: %v", err.Error())
			break
		}
		keys = append(keys, k)
	}

	if len(keys) == 0 {
		c.Errorf("no results")
		return
	} else {
		cur, err := it.Cursor()
		if err != nil {
			c.Errorf("to cur error %v", err.Error())
		} else {
			c.Infof("add with cur %v", cur)
			t := taskqueue.NewPOSTTask("/tasks/cfixer", url.Values{
				"c": {cur.String()},
			})
			taskqueue.Add(c, t, "cfixer")
		}
	}
	c.Infof("fixing %d feeds", len(keys))

	var tasks []*taskqueue.Task
	for _, k := range keys {
		c.Infof("f: %v", k.StringID())
		tasks = append(tasks, taskqueue.NewPOSTTask("/tasks/cfix", url.Values{
			"feed": {k.StringID()},
		}))
	}
	var ts []*taskqueue.Task
	const taskLimit = 100
	for len(tasks) > 0 {
		if len(tasks) > taskLimit {
			ts = tasks[:taskLimit]
			tasks = tasks[taskLimit:]
		} else {
			ts = tasks
			tasks = tasks[0:0]
		}
		if _, err := taskqueue.AddMulti(c, ts, "cfixer"); err != nil {
			c.Errorf("taskqueue error: %v", err.Error())
		}
	}
}
Example No. 16
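// UpdateFeeds processes 100 due feeds per run: without a cursor it first bails
// out if the update-feed queue is still busy, then re-enqueues itself with the
// next cursor and adds an update-feed task per key.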
func UpdateFeeds(c mpg.Context, w http.ResponseWriter, r *http.Request) {
	q := datastore.NewQuery("F").KeysOnly().Filter("n <=", time.Now())
	q = q.Limit(100)
	cs := r.FormValue("c")
	hasCursor := false
	if len(cs) > 0 {
		if cur, err := datastore.DecodeCursor(cs); err == nil {
			q = q.Start(cur)
			hasCursor = true
			c.Infof("starting at %v", cur)
		} else {
			c.Errorf("cursor error %v", err.Error())
		}
	}
	if !hasCursor {
		qs, err := taskqueue.QueueStats(c, []string{"update-feed"}, 0)
		if err != nil {
			c.Errorf("queue stats error: %v", err.Error())
			return
		}
		if !qs[0].OldestETA.IsZero() {
			c.Errorf("already %v (%v) tasks", qs[0].Tasks, qs[0].Executed1Minute)
			return
		}
	}
	var keys []*datastore.Key
	it := q.Run(Timeout(c, time.Second*60))
	for {
		k, err := it.Next(nil)
		if err == datastore.Done {
			break
		} else if err != nil {
			c.Errorf("next error: %v", err.Error())
			break
		}
		keys = append(keys, k)
	}

	if len(keys) == 0 {
		c.Errorf("no results")
		return
	} else {
		cur, err := it.Cursor()
		if err != nil {
			c.Errorf("to cur error %v", err.Error())
		} else {
			c.Infof("add with cur %v", cur)
			t := taskqueue.NewPOSTTask(routeUrl("update-feeds"), url.Values{
				"c": {cur.String()},
			})
			taskqueue.Add(c, t, "update-feed")
		}
	}
	c.Infof("updating %d feeds", len(keys))

	var tasks []*taskqueue.Task
	for _, k := range keys {
		tasks = append(tasks, taskqueue.NewPOSTTask(routeUrl("update-feed"), url.Values{
			"feed": {k.StringID()},
		}))
	}
	var ts []*taskqueue.Task
	const taskLimit = 100
	for len(tasks) > 0 {
		if len(tasks) > taskLimit {
			ts = tasks[:taskLimit]
			tasks = tasks[taskLimit:]
		} else {
			ts = tasks
			tasks = tasks[0:0]
		}
		if _, err := taskqueue.AddMulti(c, ts, "update-feed"); err != nil {
			c.Errorf("taskqueue error: %v", err.Error())
		}
	}
}
Example No. 17
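// ListFeeds loads the current user's OPML subscriptions, gathers unread stories
// per feed in parallel, enqueues manual update tasks for stale feeds in batches
// of 100, trims the result to the story limit, and writes feeds, stories,
// icons, and options as JSON.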
func ListFeeds(c mpg.Context, w http.ResponseWriter, r *http.Request) {
	cu := user.Current(c)
	gn := goon.FromContext(c)
	u := &User{Id: cu.ID}
	ud := &UserData{Id: "data", Parent: gn.Key(u)}
	gn.GetMulti([]interface{}{u, ud})
	putU := false
	putUD := false
	fixRead := false
	if time.Since(u.Read) > oldDuration {
		u.Read = time.Now().Add(-oldDuration)
		putU = true
		fixRead = true
	}

	read := make(Read)
	var uf Opml
	c.Step("unmarshal user data", func() {
		gob.NewDecoder(bytes.NewReader(ud.Read)).Decode(&read)
		json.Unmarshal(ud.Opml, &uf)
	})
	var feeds []*Feed
	opmlMap := make(map[string]*OpmlOutline)
	var merr error
	c.Step("fetch feeds", func() {
		for _, outline := range uf.Outline {
			if outline.XmlUrl == "" {
				for _, so := range outline.Outline {
					feeds = append(feeds, &Feed{Url: so.XmlUrl})
					opmlMap[so.XmlUrl] = so
				}
			} else {
				feeds = append(feeds, &Feed{Url: outline.XmlUrl})
				opmlMap[outline.XmlUrl] = outline
			}
		}
		merr = gn.GetMulti(feeds)
	})
	lock := sync.Mutex{}
	fl := make(map[string][]*Story)
	q := datastore.NewQuery(gn.Key(&Story{}).Kind())
	hasStories := false
	updatedLinks := false
	icons := make(map[string]string)
	noads := make(map[string]bool)
	now := time.Now()
	numStories := 0

	c.Step("feed fetch + wait", func() {
		queue := make(chan *Feed)
		tc := make(chan *taskqueue.Task)
		wg := sync.WaitGroup{}
		feedProc := func() {
			for f := range queue {
				defer wg.Done()
				var stories []*Story

				if u.Read.Before(f.Date) {
					fk := gn.Key(f)
					sq := q.Ancestor(fk).Filter(IDX_COL+" >", u.Read).KeysOnly().Order("-" + IDX_COL)
					keys, _ := gn.GetAll(sq, nil)
					stories = make([]*Story, len(keys))
					for j, key := range keys {
						stories[j] = &Story{
							Id:     key.StringID(),
							Parent: fk,
						}
					}
					gn.GetMulti(stories)
				}
				if f.Link != opmlMap[f.Url].HtmlUrl {
					updatedLinks = true
					opmlMap[f.Url].HtmlUrl = f.Link
				}
				manualDone := false
				if time.Since(f.LastViewed) > time.Hour*24*2 {
					if f.NextUpdate.Equal(timeMax) {
						tc <- taskqueue.NewPOSTTask(routeUrl("update-feed-manual"), url.Values{
							"feed": {f.Url},
							"last": {"1"},
						})
						manualDone = true
					} else {
						tc <- taskqueue.NewPOSTTask(routeUrl("update-feed-last"), url.Values{
							"feed": {f.Url},
						})
					}
				}
				if !manualDone && now.Sub(f.NextUpdate) >= 0 {
					tc <- taskqueue.NewPOSTTask(routeUrl("update-feed-manual"), url.Values{
						"feed": {f.Url},
					})
				}
				lock.Lock()
				fl[f.Url] = stories
				numStories += len(stories)
				if len(stories) > 0 {
					hasStories = true
				}
				if f.Image != "" {
					icons[f.Url] = f.Image
				}
				if f.NoAds {
					noads[f.Url] = true
				}
				lock.Unlock()
			}
		}
		go func() {
			var tasks []*taskqueue.Task
			for t := range tc {
				tasks = append(tasks, t)
				if len(tasks) == 100 {
					taskqueue.AddMulti(c, tasks, "update-manual")
					c.Infof("added %v tasks", len(tasks))
					tasks = tasks[0:0]
				}
			}
			if len(tasks) > 0 {
				taskqueue.AddMulti(c, tasks, "update-manual")
				c.Infof("added %v tasks", len(tasks))
			}
			wg.Done()
		}()
		for i := 0; i < 20; i++ {
			go feedProc()
		}
		for i, f := range feeds {
			if goon.NotFound(merr, i) {
				continue
			}
			wg.Add(1)
			queue <- f
		}
		close(queue)
		// wait for feeds to complete so there are no more tasks to queue
		wg.Wait()
		wg.Add(1)
		// then finish enqueuing tasks
		close(tc)
		wg.Wait()
	})
	if numStories > numStoriesLimit {
		c.Step("numStories", func() {
			c.Infof("too many stories: %v", numStories)
			stories := make([]*Story, 0, numStories)
			for _, v := range fl {
				stories = append(stories, v...)
			}
			sort.Sort(sort.Reverse(Stories(stories)))
			last := stories[numStoriesLimit].Created
			stories = stories[:numStoriesLimit]
			u.Read = last
			putU = true
			fixRead = true
			fl = make(map[string][]*Story)
			for _, s := range stories {
				fk := s.Parent.StringID()
				p := fl[fk]
				fl[fk] = append(p, s)
			}
			c.Infof("filtered: %v, %v", len(stories), last)
		})
	}
	if fixRead {
		c.Step("fix read", func() {
			nread := make(Read)
			for k, v := range fl {
				for _, s := range v {
					rs := readStory{Feed: k, Story: s.Id}
					if read[rs] {
						nread[rs] = true
					}
				}
			}
			read = nread
			var b bytes.Buffer
			gob.NewEncoder(&b).Encode(&read)
			ud.Read = b.Bytes()
			putUD = true
		})
	}
	for k, v := range fl {
		newStories := make([]*Story, 0, len(v))
		for _, s := range v {
			if !read[readStory{Feed: k, Story: s.Id}] {
				newStories = append(newStories, s)
			}
		}
		fl[k] = newStories
	}
	if !hasStories {
		var last time.Time
		for _, f := range feeds {
			if last.Before(f.Date) {
				last = f.Date
			}
		}
		if u.Read.Before(last) {
			c.Debugf("setting %v read to %v", cu.ID, last)
			putU = true
			putUD = true
			u.Read = last
			ud.Read = nil
		}
	}
	if updatedLinks {
		backupOPML(c)
		if o, err := json.Marshal(&uf); err == nil {
			ud.Opml = o
			putUD = true
		} else {
			saveError(c, fmt.Sprintf("%v", uf), err)
			c.Errorf("json UL err: %v, %v", err, uf)
		}
	}
	if putU {
		gn.Put(u)
	}
	if putUD {
		gn.Put(ud)
	}
	c.Step("json marshal", func() {
		o := struct {
			Opml    []*OpmlOutline
			Stories map[string][]*Story
			Icons   map[string]string
			NoAds   map[string]bool
			Options string
		}{
			Opml:    uf.Outline,
			Stories: fl,
			Icons:   icons,
			NoAds:   noads,
			Options: u.Options,
		}
		b, err := json.Marshal(o)
		if err != nil {
			c.Errorf("cleaning")
			for _, v := range fl {
				for _, s := range v {
					n := sanitizer.CleanNonUTF8(s.Summary)
					if n != s.Summary {
						s.Summary = n
						c.Errorf("cleaned %v", s.Id)
						gn.Put(s)
					}
				}
			}
			b, _ = json.Marshal(o)
		}
		w.Write(b)
	})
}