// AddSubscription subscribes the current user to the feed given in the
// "url" form value, merges it into the user's stored OPML, and logs the
// action. The user's OPML is backed up both before and after the change.
func AddSubscription(c mpg.Context, w http.ResponseWriter, r *http.Request) {
	backupOPML(c)
	cu := user.Current(c)
	url := r.FormValue("url")
	// Wrap the single feed URL in an outline so it can be merged into the
	// user's existing OPML tree.
	o := &OpmlOutline{
		Outline: []*OpmlOutline{
			{XmlUrl: url},
		},
	}
	if err := addFeed(c, cu.ID, o); err != nil {
		c.Errorf("add sub error (%s): %s", url, err.Error())
		serveError(w, err)
		return
	}
	gn := goon.FromContext(c)
	ud := UserData{Id: "data", Parent: gn.Key(&User{Id: cu.ID})}
	// Best effort: a missing UserData record merges into the zero value.
	gn.Get(&ud)
	if err := mergeUserOpml(c, &ud, o); err != nil {
		c.Errorf("add sub error opml (%v): %v", url, err)
		serveError(w, err)
		return
	}
	gn.PutMulti([]interface{}{&ud, &Log{
		Parent: ud.Parent,
		Id:     time.Now().UnixNano(),
		Text:   fmt.Sprintf("add sub: %v", url),
	}})
	// GET requests come from the browser UI; send the user back to the app.
	if r.Method == "GET" {
		http.Redirect(w, r, routeUrl("main"), http.StatusFound)
	}
	backupOPML(c)
}
func Main(c mpg.Context, w http.ResponseWriter, r *http.Request) { if err := templates.ExecuteTemplate(w, "base.html", includes(c, w, r)); err != nil { c.Errorf("%v", err) serveError(w, err) } return }
// UpdateFeeds enqueues an update task for every feed whose next-update
// time (the "n" property) has passed. Run from cron; the query limit
// matches a 10/s task queue drained over a 2-minute cron interval.
func UpdateFeeds(c mpg.Context, w http.ResponseWriter, r *http.Request) {
	q := datastore.NewQuery("F").KeysOnly().Filter("n <=", time.Now())
	q = q.Limit(10 * 60 * 2) // 10/s queue, 2 min cron
	it := q.Run(appengine.Timeout(c, time.Minute))
	tc := make(chan *taskqueue.Task)
	done := make(chan bool)
	i := 0
	u := routeUrl("update-feed")
	// taskSender batches tasks arriving on tc onto the queue and signals
	// done once tc is closed and drained.
	go taskSender(c, "update-feed", tc, done)
	for {
		k, err := it.Next(nil)
		if err == datastore.Done {
			break
		} else if err != nil {
			c.Errorf("next error: %v", err.Error())
			break
		}
		tc <- taskqueue.NewPOSTTask(u, url.Values{
			"feed": {k.StringID()},
		})
		i++
	}
	close(tc)
	<-done
	c.Infof("updating %d feeds", i)
}
func Main(c mpg.Context, w http.ResponseWriter, r *http.Request) { if cu := user.Current(c); cu != nil { if !strings.HasSuffix(cu.Email, "pretlist.com") && (cu.Email != "*****@*****.**") && (cu.Email != "*****@*****.**") { http.Redirect(w, r, routeUrl("logout"), http.StatusFound) } } if err := templates.ExecuteTemplate(w, "base.html", includes(c, w, r)); err != nil { c.Errorf("%v", err) serveError(w, err) } return }
func Charge(c mpg.Context, w http.ResponseWriter, r *http.Request) { cu := user.Current(c) gn := goon.FromContext(c) u := User{Id: cu.ID} uc := &UserCharge{Id: 1, Parent: gn.Key(&u)} if err := gn.Get(&u); err != nil { serveError(w, err) return } else if u.Account != AFree { serveError(w, fmt.Errorf("You're already subscribed.")) return } if err := gn.Get(uc); err == nil && len(uc.Customer) > 0 { serveError(w, fmt.Errorf("You're already subscribed.")) return } else if err != datastore.ErrNoSuchEntity { serveError(w, err) return } resp, err := stripe(c, "POST", "customers", url.Values{ "email": {u.Email}, "description": {u.Id}, "card": {r.FormValue("token")}, "plan": {r.FormValue("plan")}, }.Encode()) if err != nil { serveError(w, err) return } else if resp.StatusCode != http.StatusOK { var se StripeError defer resp.Body.Close() b, _ := ioutil.ReadAll(resp.Body) if err := json.Unmarshal(b, &se); err == nil { serveError(w, fmt.Errorf(se.Error.Message)) } else { serveError(w, fmt.Errorf("Error")) } c.Errorf("status: %v, %s", resp.StatusCode, b) return } uc, err = setCharge(c, resp) if err != nil { serveError(w, err) return } b, _ := json.Marshal(&uc) w.Write(b) }
// SubscribeCallback implements the PubSubHubbub callback endpoint.
// GET requests are hub verification challenges; any other method carries
// pushed feed content for a subscribed feed.
func SubscribeCallback(c mpg.Context, w http.ResponseWriter, r *http.Request) {
	gn := goon.FromContext(c)
	furl := r.FormValue("feed")
	// The feed URL arrives base64-encoded in the callback URL.
	b, _ := base64.URLEncoding.DecodeString(furl)
	f := Feed{Url: string(b)}
	c.Infof("url: %v", f.Url)
	if err := gn.Get(&f); err != nil {
		http.Error(w, "", http.StatusNotFound)
		return
	}
	if r.Method == "GET" {
		// Hub verification: only confirm subscriptions for feeds still being
		// viewed and whose topic matches our stored URL.
		if f.NotViewed() || r.FormValue("hub.mode") != "subscribe" || r.FormValue("hub.topic") != f.Url {
			http.Error(w, "", http.StatusNotFound)
			return
		}
		w.Write([]byte(r.FormValue("hub.challenge")))
		// Record when the hub lease expires.
		i, _ := strconv.Atoi(r.FormValue("hub.lease_seconds"))
		f.Subscribed = time.Now().Add(time.Second * time.Duration(i))
		gn.PutMulti([]interface{}{&f, &Log{
			Parent: gn.Key(&f),
			Id:     time.Now().UnixNano(),
			Text:   "SubscribeCallback - subscribed - " + f.Subscribed.String(),
		}})
		c.Debugf("subscribed: %v - %v", f.Url, f.Subscribed)
		return
	} else if !f.NotViewed() {
		// Content push: parse the posted body and merge it into the feed.
		c.Infof("push: %v", f.Url)
		gn.Put(&Log{
			Parent: gn.Key(&f),
			Id:     time.Now().UnixNano(),
			Text:   "SubscribeCallback - push update",
		})
		defer r.Body.Close()
		b, _ := ioutil.ReadAll(r.Body)
		nf, ss, err := ParseFeed(c, r.Header.Get("Content-Type"), f.Url, f.Url, b)
		if err != nil {
			c.Errorf("parse error: %v", err)
			return
		}
		if err := updateFeed(c, f.Url, nf, ss, false, true, false); err != nil {
			c.Errorf("push error: %v", err)
		}
	} else {
		c.Infof("not viewed")
	}
}
func ClearRead(c mpg.Context, w http.ResponseWriter, r *http.Request) { if !isDevServer { return } cu := user.Current(c) gn := goon.FromContext(c) u := &User{Id: cu.ID} ud := &UserData{Id: "data", Parent: gn.Key(u)} if err := gn.Get(u); err != nil { c.Errorf("err: %v", err.Error()) return } gn.Get(ud) u.Read = time.Time{} ud.Read = nil gn.PutMulti([]interface{}{u, ud}) http.Redirect(w, r, "/", http.StatusFound) }
// Task used to subscribe a feed to push. func SubscribeFeed(c mpg.Context, w http.ResponseWriter, r *http.Request) { start := time.Now() gn := goon.FromContext(c) f := Feed{Url: r.FormValue("feed")} fk := gn.Key(&f) s := "" defer func() { gn.Put(&Log{ Parent: fk, Id: time.Now().UnixNano(), Text: "SubscribeFeed - start " + start.String() + " - f.sub " + f.Subscribed.String() + " - " + s, }) }() if err := gn.Get(&f); err != nil { c.Errorf("%v: %v", err, f.Url) serveError(w, err) s += "err" return } else if f.IsSubscribed() { s += "is subscribed" return } u := url.Values{} u.Add("hub.callback", f.PubSubURL()) u.Add("hub.mode", "subscribe") u.Add("hub.verify", "sync") fu, _ := url.Parse(f.Url) fu.Fragment = "" u.Add("hub.topic", fu.String()) req, err := http.NewRequest("POST", f.Hub, strings.NewReader(u.Encode())) req.Header.Set("Content-Type", "application/x-www-form-urlencoded") cl := &http.Client{ Transport: &urlfetch.Transport{ Context: c, Deadline: time.Minute, }, } resp, err := cl.Do(req) if err != nil { c.Errorf("req error: %v", err) } else if resp.StatusCode != http.StatusNoContent { f.Subscribed = time.Now().Add(time.Hour * 48) gn.Put(&f) if resp.StatusCode != http.StatusConflict { c.Errorf("resp: %v - %v", f.Url, resp.Status) c.Errorf("%s", resp.Body) } s += "resp err" } else { c.Infof("subscribed: %v", f.Url) s += "success" } }
func backupOPML(c mpg.Context) { cu := user.Current(c) gn := goon.FromContext(c) u := User{Id: cu.ID} ud := UserData{Id: "data", Parent: gn.Key(&u)} if err := gn.Get(&ud); err != nil { return } uo := UserOpml{Id: time.Now().UnixNano(), Parent: gn.Key(&u)} buf := &bytes.Buffer{} if gz, err := gzip.NewWriterLevel(buf, gzip.BestCompression); err == nil { gz.Write([]byte(ud.Opml)) gz.Close() uo.Compressed = buf.Bytes() } else { c.Errorf("gz err: %v", err) uo.Opml = ud.Opml } gn.Put(&uo) }
func SetStar(c mpg.Context, w http.ResponseWriter, r *http.Request) { feed := r.FormValue("feed") story := r.FormValue("story") if len(feed) == 0 || len(story) == 0 { return } del := r.FormValue("del") != "" us := starKey(c, feed, story) gn := goon.FromContext(c) if del { gn.Delete(gn.Key(us)) } else { us.Created = time.Now() _, err := gn.Put(us) if err != nil { c.Errorf("star put err: %v", err) serveError(w, err) } } }
func DeleteAccount(c mpg.Context, w http.ResponseWriter, r *http.Request) { if _, err := doUncheckout(c); err != nil { c.Errorf("uncheckout err: %v", err) } cu := user.Current(c) gn := goon.FromContext(c) u := User{Id: cu.ID} uk := gn.Key(&u) q := datastore.NewQuery("").KeysOnly().Ancestor(uk) keys, err := gn.GetAll(q, nil) if err != nil { serveError(w, err) return } err = gn.DeleteMulti(keys) if err != nil { serveError(w, err) return } http.Redirect(w, r, routeUrl("logout"), http.StatusFound) }
// UploadOpml replaces the current user's stored OPML with the JSON-encoded
// outline posted in the "opml" form value. The previous OPML is backed up
// before and after the swap, and the size change is logged.
func UploadOpml(c mpg.Context, w http.ResponseWriter, r *http.Request) {
	opml := Opml{}
	if err := json.Unmarshal([]byte(r.FormValue("opml")), &opml.Outline); err != nil {
		serveError(w, err)
		return
	}
	// Reject explicit JSON nulls; later code assumes non-nil outlines.
	for _, o := range opml.Outline {
		if o == nil {
			serveError(w, fmt.Errorf("null in opml"))
			return
		}
	}
	backupOPML(c)
	cu := user.Current(c)
	gn := goon.FromContext(c)
	u := User{Id: cu.ID}
	ud := UserData{Id: "data", Parent: gn.Key(&u)}
	if err := gn.Get(&ud); err != nil {
		serveError(w, err)
		c.Errorf("get err: %v", err)
		return
	}
	if b, err := json.Marshal(&opml); err != nil {
		serveError(w, err)
		c.Errorf("json err: %v", err)
		return
	} else {
		// Log the old and new OPML byte lengths for auditing.
		l := Log{
			Parent: ud.Parent,
			Id:     time.Now().UnixNano(),
			Text:   fmt.Sprintf("upload opml: %v -> %v", len(ud.Opml), len(b)),
		}
		ud.Opml = b
		if _, err := gn.PutMulti([]interface{}{&ud, &l}); err != nil {
			serveError(w, err)
			return
		}
		backupOPML(c)
	}
}
// UpdateFeed is a task handler that fetches and updates the single feed
// named by the "feed" form value. A non-empty "last" value marks a
// last-viewed refresh, which bypasses the NextUpdate throttle. On fetch
// errors the next update is pushed back roughly one hour per accumulated
// error, capped at a week.
func UpdateFeed(c mpg.Context, w http.ResponseWriter, r *http.Request) {
	gn := goon.FromContext(appengine.Timeout(c, time.Minute))
	url := r.FormValue("feed")
	if url == "" {
		c.Errorf("empty update feed")
		return
	}
	c.Debugf("update feed %s", url)
	last := len(r.FormValue("last")) > 0
	f := Feed{Url: url}
	s := ""
	// Record the outcome of this attempt regardless of how the handler exits.
	defer func() {
		gn.Put(&Log{
			Parent: gn.Key(&f),
			Id:     time.Now().UnixNano(),
			Text:   "UpdateFeed - " + s,
		})
	}()
	if err := gn.Get(&f); err == datastore.ErrNoSuchEntity {
		c.Errorf("no such entity - " + url)
		s += "NSE"
		return
	} else if err != nil {
		s += "err - " + err.Error()
		return
	} else if last {
		// noop: a last-viewed refresh always proceeds
	} else if time.Now().Before(f.NextUpdate) {
		c.Errorf("feed %v already updated: %v", url, f.NextUpdate)
		s += "already updated"
		return
	}
	// feedError bumps the error count and schedules the retry: one hour per
	// error+1, capped at a week, except the very first error retries
	// immediately (v forced to 0).
	feedError := func(err error) {
		s += "feed err - " + err.Error()
		f.Errors++
		v := f.Errors + 1
		const max = 24 * 7
		if v > max {
			v = max
		} else if f.Errors == 1 {
			v = 0
		}
		f.NextUpdate = time.Now().Add(time.Hour * time.Duration(v))
		gn.Put(&f)
		c.Warningf("error with %v (%v), bump next update to %v, %v", url, f.Errors, f.NextUpdate, err)
	}
	if feed, stories, err := fetchFeed(c, f.Url, f.Url); err == nil {
		if err := updateFeed(c, f.Url, feed, stories, false, false, last); err != nil {
			feedError(err)
		} else {
			s += "success"
		}
	} else {
		feedError(err)
	}
	// Re-attempt a push subscription if the feed advertises a hub.
	f.Subscribe(c)
}
// doUncheckout cancels the current user's Stripe subscription. It deletes
// the Stripe customer, then transactionally downgrades the account to
// AFree, extends u.Until through the already-paid period, and removes the
// UserCharge record. Returns the removed charge on success.
func doUncheckout(c mpg.Context) (*UserCharge, error) {
	cu := user.Current(c)
	gn := goon.FromContext(c)
	u := User{Id: cu.ID}
	uc := UserCharge{Id: 1, Parent: gn.Key(&u)}
	if err := gn.Get(&u); err != nil {
		return nil, err
	}
	// No charge record, or no Stripe customer on it, means nothing to cancel.
	if err := gn.Get(&uc); err != nil || len(uc.Customer) == 0 {
		return nil, err
	}
	resp, err := stripe(c, "DELETE", "customers/"+uc.Customer, "")
	if err != nil {
		return nil, err
	} else if resp.StatusCode != http.StatusOK {
		// Stripe-side deletion failed; clean up local state anyway.
		c.Errorf("%s", resp.Body)
		c.Errorf("stripe delete error, but proceeding")
	}
	if err := gn.RunInTransaction(func(gn *goon.Goon) error {
		if err := gn.Get(&u); err != nil && err != datastore.ErrNoSuchEntity {
			return err
		}
		u.Account = AFree
		// Keep any paid-through time the user has already been billed for.
		if uc.Next.After(u.Until) {
			u.Until = uc.Next
		}
		if err := gn.Delete(gn.Key(&uc)); err != nil {
			return err
		}
		_, err := gn.Put(&u)
		return err
	}, nil); err != nil {
		return nil, err
	}
	return &uc, nil
}
func ClearFeeds(c mpg.Context, w http.ResponseWriter, r *http.Request) { if !isDevServer { return } cu := user.Current(c) gn := goon.FromContext(c) done := make(chan bool) go func() { u := &User{Id: cu.ID} defer func() { done <- true }() ud := &UserData{Id: "data", Parent: gn.Key(u)} if err := gn.Get(u); err != nil { c.Errorf("user del err: %v", err.Error()) return } gn.Get(ud) u.Read = time.Time{} ud.Read = nil ud.Opml = nil gn.PutMulti([]interface{}{u, ud}) c.Infof("%v cleared", u.Email) }() del := func(kind string) { defer func() { done <- true }() q := datastore.NewQuery(kind).KeysOnly() keys, err := gn.GetAll(q, nil) if err != nil { c.Errorf("err: %v", err.Error()) return } if err := gn.DeleteMulti(keys); err != nil { c.Errorf("err: %v", err.Error()) return } c.Infof("%v deleted", kind) } types := []interface{}{ &Feed{}, &Story{}, &StoryContent{}, &Log{}, &UserOpml{}, } for _, i := range types { k := gn.Kind(i) go del(k) } for i := 0; i < len(types); i++ { <-done } http.Redirect(w, r, fmt.Sprintf("%s?url=http://localhost:8080%s", routeUrl("add-subscription"), routeUrl("test-atom")), http.StatusFound) }
// DeleteOldFeeds scans for feeds whose next update equals timeMax (feeds
// that are no longer polled), enqueues a delete task for each, and
// re-queues itself with a datastore cursor until the scan is exhausted.
func DeleteOldFeeds(c mpg.Context, w http.ResponseWriter, r *http.Request) {
	ctx := appengine.Timeout(c, time.Minute)
	gn := goon.FromContext(c)
	q := datastore.NewQuery(gn.Kind(&Feed{})).Filter("n=", timeMax).KeysOnly()
	// Resume from the cursor of a previous invocation, if one was passed.
	if cur, err := datastore.DecodeCursor(r.FormValue("c")); err == nil {
		q = q.Start(cur)
	}
	it := q.Run(ctx)
	done := false
	var tasks []*taskqueue.Task
	// Collect up to 100 delete tasks, scanning at most 10000 keys.
	for i := 0; i < 10000 && len(tasks) < 100; i++ {
		k, err := it.Next(nil)
		if err == datastore.Done {
			c.Criticalf("done")
			done = true
			break
		} else if err != nil {
			c.Errorf("err: %v", err)
			continue
		}
		values := make(url.Values)
		values.Add("f", k.StringID())
		tasks = append(tasks, taskqueue.NewPOSTTask("/tasks/delete-old-feed", values))
	}
	if len(tasks) > 0 {
		c.Errorf("deleting %v feeds", len(tasks))
		if _, err := taskqueue.AddMulti(c, tasks, ""); err != nil {
			c.Errorf("err: %v", err)
		}
	}
	// More keys may remain: chain the next scan from the current cursor.
	if !done {
		if cur, err := it.Cursor(); err == nil {
			values := make(url.Values)
			values.Add("c", cur.String())
			taskqueue.Add(c, taskqueue.NewPOSTTask("/tasks/delete-old-feeds", values), "")
		} else {
			c.Errorf("err: %v", err)
		}
	}
}
func DeleteBlobs(c mpg.Context, w http.ResponseWriter, r *http.Request) { ctx := appengine.Timeout(c, time.Minute) q := datastore.NewQuery("__BlobInfo__").KeysOnly() it := q.Run(ctx) wg := sync.WaitGroup{} something := false for _i := 0; _i < 20; _i++ { var bk []appengine.BlobKey for i := 0; i < 1000; i++ { k, err := it.Next(nil) if err == datastore.Done { break } else if err != nil { c.Errorf("err: %v", err) continue } bk = append(bk, appengine.BlobKey(k.StringID())) } if len(bk) == 0 { break } go func(bk []appengine.BlobKey) { something = true c.Errorf("deleteing %v blobs", len(bk)) err := blobstore.DeleteMulti(ctx, bk) if err != nil { c.Errorf("blobstore delete err: %v", err) } wg.Done() }(bk) wg.Add(1) } wg.Wait() if something { taskqueue.Add(c, taskqueue.NewPOSTTask("/tasks/delete-blobs", nil), "") } }
// updateFeed merges freshly fetched feed data and stories into the
// datastore. updateAll forces rewriting stories even when unchanged,
// fromSub marks a PubSubHubbub push (which also defers the next poll by at
// least six hours), and updateLast refreshes the feed's LastViewed time.
func updateFeed(c mpg.Context, url string, feed *Feed, stories []*Story, updateAll, fromSub, updateLast bool) error {
	gn := goon.FromContext(c)
	f := Feed{Url: url}
	if err := gn.Get(&f); err != nil {
		return fmt.Errorf("feed not found: %s", url)
	}
	gn.Put(&Log{
		Parent: gn.Key(&f),
		Id:     time.Now().UnixNano(),
		Text:   "feed update",
	})

	// Compare the feed's listed update to the story's update.
	// Note: these may not be accurate, hence, only compare them to each other,
	// since they should have the same relative error.
	storyDate := f.Updated

	hasUpdated := !feed.Updated.IsZero()
	isFeedUpdated := f.Updated.Equal(feed.Updated)
	if !hasUpdated {
		feed.Updated = f.Updated
	}
	// Carry stored bookkeeping fields over to the fetched copy, then adopt
	// the fetched copy as the entity to store.
	feed.Date = f.Date
	feed.Average = f.Average
	feed.LastViewed = f.LastViewed
	f = *feed
	if updateLast {
		f.LastViewed = time.Now()
	}

	if hasUpdated && isFeedUpdated && !updateAll && !fromSub {
		// Nothing new: just refresh the timestamp and reschedule.
		c.Infof("feed %s already updated to %v, putting", url, feed.Updated)
		f.Updated = time.Now()
		scheduleNextUpdate(c, &f)
		gn.Put(&f)
		return nil
	}

	c.Debugf("hasUpdate: %v, isFeedUpdated: %v, storyDate: %v, stories: %v", hasUpdated, isFeedUpdated, storyDate, len(stories))
	puts := []interface{}{&f}

	// find non existant stories
	fk := gn.Key(&f)
	getStories := make([]*Story, len(stories))
	for i, s := range stories {
		getStories[i] = &Story{Id: s.Id, Parent: fk}
	}
	err := gn.GetMulti(getStories)
	// A MultiError just means some stories are new; any other error is fatal.
	if _, ok := err.(appengine.MultiError); err != nil && !ok {
		c.Errorf("GetMulti error: %v", err)
		return err
	}
	var updateStories []*Story
	for i, s := range getStories {
		if goon.NotFound(err, i) {
			// Brand-new story.
			updateStories = append(updateStories, stories[i])
		} else if (!stories[i].Updated.IsZero() && !stories[i].Updated.Equal(s.Updated)) || updateAll {
			// Changed story: preserve the original creation/publish times.
			if !s.Created.IsZero() {
				stories[i].Created = s.Created
			}
			if !s.Published.IsZero() {
				stories[i].Published = s.Published
			}
			updateStories = append(updateStories, stories[i])
		}
	}
	c.Debugf("%v update stories", len(updateStories))

	for _, s := range updateStories {
		puts = append(puts, s)
		// Store the story body in a separate StoryContent entity, gzipped
		// when compression succeeds, raw otherwise.
		sc := StoryContent{
			Id:     1,
			Parent: gn.Key(s),
		}
		buf := &bytes.Buffer{}
		if gz, err := gzip.NewWriterLevel(buf, gzip.BestCompression); err == nil {
			gz.Write([]byte(s.content))
			gz.Close()
			sc.Compressed = buf.Bytes()
		}
		if len(sc.Compressed) == 0 {
			sc.Content = s.content
		}
		if _, err := gn.Put(&sc); err != nil {
			c.Errorf("put sc err: %v", err)
			return err
		}
	}

	c.Debugf("putting %v entities", len(puts))
	if len(puts) > 1 {
		// New/changed stories arrived: refresh the posting-rate average.
		updateAverage(&f, f.Date, len(puts)-1)
		f.Date = time.Now()
		if !hasUpdated {
			f.Updated = f.Date
		}
	}
	scheduleNextUpdate(c, &f)
	if fromSub {
		// Pushed feeds don't need polling again soon; wait at least 6 hours.
		wait := time.Now().Add(time.Hour * 6)
		if f.NextUpdate.Before(wait) {
			f.NextUpdate = wait
		}
	}
	delay := f.NextUpdate.Sub(time.Now())
	c.Infof("next update scheduled for %v from now", delay-delay%time.Second)
	_, err = gn.PutMulti(puts)
	if err != nil {
		c.Errorf("update put err: %v", err)
	}
	return err
}
// ImportOpmlTask imports an uploaded OPML blob for a user in chunks of
// IMPORT_LIMIT feeds. "skip" counts feeds already handled; the task
// re-queues itself with an increased skip until the whole file has been
// processed, then deletes the blob.
func ImportOpmlTask(c mpg.Context, w http.ResponseWriter, r *http.Request) {
	gn := goon.FromContext(c)
	userid := r.FormValue("user")
	bk := r.FormValue("key")
	// del discards the uploaded blob once it is no longer needed.
	del := func() {
		blobstore.Delete(c, appengine.BlobKey(bk))
	}
	var skip int
	if s, err := strconv.Atoi(r.FormValue("skip")); err == nil {
		skip = s
	}
	c.Debugf("reader import for %v, skip %v", userid, skip)

	d := xml.NewDecoder(blobstore.NewReader(c, appengine.BlobKey(bk)))
	d.CharsetReader = charset.NewReader
	d.Strict = false
	opml := Opml{}
	err := d.Decode(&opml)
	if err != nil {
		del()
		c.Warningf("gob decode failed: %v", err.Error())
		return
	}

	// Walk the outline tree, flattening it into one single-feed outline per
	// feed, skipping the first `skip` feeds and stopping after IMPORT_LIMIT.
	remaining := skip
	var userOpml []*OpmlOutline
	var proc func(label string, outlines []*OpmlOutline)
	proc = func(label string, outlines []*OpmlOutline) {
		for _, o := range outlines {
			if o.Title == "" {
				o.Title = o.Text
			}
			if o.XmlUrl != "" {
				if remaining > 0 {
					remaining--
				} else if len(userOpml) < IMPORT_LIMIT {
					userOpml = append(userOpml, &OpmlOutline{
						Title:   label,
						Outline: []*OpmlOutline{o},
					})
				}
			}
			if o.Title != "" && len(o.Outline) > 0 {
				proc(o.Title, o.Outline)
			}
		}
	}
	proc("", opml.Outline)

	// todo: refactor below with similar from ImportReaderTask

	// Add every feed in this chunk concurrently.
	wg := sync.WaitGroup{}
	wg.Add(len(userOpml))
	for i := range userOpml {
		go func(i int) {
			o := userOpml[i].Outline[0]
			if err := addFeed(c, userid, userOpml[i]); err != nil {
				c.Warningf("opml import error: %v", err.Error())
				// todo: do something here?
			}
			c.Debugf("opml import: %s, %s", o.Title, o.XmlUrl)
			wg.Done()
		}(i)
	}
	wg.Wait()

	// Merge the chunk into the user's stored OPML transactionally.
	ud := UserData{Id: "data", Parent: gn.Key(&User{Id: userid})}
	if err := gn.RunInTransaction(func(gn *goon.Goon) error {
		gn.Get(&ud)
		if err := mergeUserOpml(c, &ud, userOpml...); err != nil {
			return err
		}
		_, err := gn.Put(&ud)
		return err
	}, nil); err != nil {
		w.WriteHeader(http.StatusInternalServerError)
		c.Errorf("ude update error: %v", err.Error())
		return
	}

	if len(userOpml) == IMPORT_LIMIT {
		// A full chunk means more may remain; queue the next slice.
		task := taskqueue.NewPOSTTask(routeUrl("import-opml-task"), url.Values{
			"key":  {bk},
			"user": {userid},
			"skip": {strconv.Itoa(skip + IMPORT_LIMIT)},
		})
		taskqueue.Add(c, task, "import-reader")
	} else {
		del()
		c.Infof("opml import done: %v", userid)
	}
}
// ImportOpml accepts a blobstore upload of an OPML file (or a Google
// Reader takeout zip containing one), verifies it parses, and queues an
// ImportOpmlTask to perform the actual import.
func ImportOpml(c mpg.Context, w http.ResponseWriter, r *http.Request) {
	cu := user.Current(c)
	gn := goon.FromContext(c)
	u := User{Id: cu.ID}
	if err := gn.Get(&u); err != nil {
		serveError(w, err)
		return
	}
	backupOPML(c)

	blobs, _, err := blobstore.ParseUpload(r)
	if err != nil {
		serveError(w, err)
		return
	}
	fs := blobs["file"]
	if len(fs) == 0 {
		serveError(w, fmt.Errorf("no uploaded file found"))
		return
	}
	file := fs[0]
	fr := blobstore.NewReader(c, file.BlobKey)
	// del discards the uploaded blob when it turns out to be unusable.
	del := func() {
		blobstore.Delete(c, file.BlobKey)
	}

	fdata, err := ioutil.ReadAll(fr)
	if err != nil {
		del()
		serveError(w, err)
		return
	}
	buf := bytes.NewReader(fdata)
	// attempt to extract from google reader takeout zip
	if zb, zerr := zip.NewReader(buf, int64(len(fdata))); zerr == nil {
		for _, f := range zb.File {
			if strings.HasSuffix(f.FileHeader.Name, "Reader/subscriptions.xml") {
				if rc, rerr := f.Open(); rerr == nil {
					if fb, ferr := ioutil.ReadAll(rc); ferr == nil {
						fdata = fb
						break
					}
				}
			}
		}
	}

	// Preflight the OPML, so we can report any errors.
	d := xml.NewDecoder(bytes.NewReader(fdata))
	d.CharsetReader = charset.NewReader
	d.Strict = false
	opml := Opml{}
	if err := d.Decode(&opml); err != nil {
		del()
		serveError(w, err)
		c.Errorf("opml error: %v", err.Error())
		return
	}

	// The heavy lifting happens in a task; pass the blob key along.
	task := taskqueue.NewPOSTTask(routeUrl("import-opml-task"), url.Values{
		"key":  {string(file.BlobKey)},
		"user": {cu.ID},
	})
	taskqueue.Add(c, task, "import-reader")
}
// ListFeeds returns the current user's feeds, unread stories, and stars as
// JSON. Along the way it fixes stale feed links in the OPML, prunes the
// read-story set, advances the user's read watermark, enforces the free
// trial, and enqueues update tasks for feeds that are stale or no longer
// being polled.
func ListFeeds(c mpg.Context, w http.ResponseWriter, r *http.Request) {
	cu := user.Current(c)
	gn := goon.FromContext(c)
	u := &User{Id: cu.ID}
	ud := &UserData{Id: "data", Parent: gn.Key(u)}
	// Missing UserData (index 1) is tolerated; any other error is fatal.
	if err := gn.GetMulti([]interface{}{u, ud}); err != nil && !goon.NotFound(err, 1) {
		serveError(w, err)
		return
	}
	l := &Log{
		Parent: ud.Parent,
		Id:     time.Now().UnixNano(),
		Text:   "list feeds",
	}
	l.Text += fmt.Sprintf(", len opml %v", len(ud.Opml))
	putU := false
	putUD := false
	fixRead := false
	// Never look back further than oldDuration for unread stories.
	if time.Since(u.Read) > oldDuration {
		u.Read = time.Now().Add(-oldDuration)
		putU = true
		fixRead = true
		l.Text += ", u.Read"
	}
	trialRemaining := 0
	if STRIPE_KEY != "" && ud.Opml != nil && u.Account == AFree && u.Until.Before(time.Now()) {
		if u.Created.IsZero() {
			u.Created = time.Now()
			putU = true
		} else if time.Since(u.Created) > accountFreeDuration {
			// Free trial expired: tell the client and stop here.
			b, _ := json.Marshal(struct {
				ErrorSubscription bool
			}{
				true,
			})
			w.Write(b)
			return
		}
		trialRemaining = int((accountFreeDuration-time.Since(u.Created))/time.Hour/24) + 1
	}
	read := make(Read)
	var uf Opml
	c.Step("unmarshal user data", func(c mpg.Context) {
		gob.NewDecoder(bytes.NewReader(ud.Read)).Decode(&read)
		json.Unmarshal(ud.Opml, &uf)
	})
	var feeds []*Feed
	opmlMap := make(map[string]*OpmlOutline)
	var merr error
	c.Step("fetch feeds", func(c mpg.Context) {
		gn := goon.FromContext(appengine.Timeout(c, time.Minute))
		// Flatten the OPML (folders are one level deep) into a feed list.
		for _, outline := range uf.Outline {
			if outline.XmlUrl == "" {
				for _, so := range outline.Outline {
					feeds = append(feeds, &Feed{Url: so.XmlUrl})
					opmlMap[so.XmlUrl] = so
				}
			} else {
				feeds = append(feeds, &Feed{Url: outline.XmlUrl})
				opmlMap[outline.XmlUrl] = outline
			}
		}
		merr = gn.GetMulti(feeds)
	})
	lock := sync.Mutex{}
	fl := make(map[string][]*Story)
	// Base query for a feed's stories newer than the read watermark;
	// narrowed per feed with an Ancestor filter below.
	q := datastore.NewQuery(gn.Kind(&Story{})).
		Filter(IDX_COL+" >=", u.Read).
		KeysOnly().
		Order("-" + IDX_COL).
		Limit(250)
	updatedLinks := false
	now := time.Now()
	numStories := 0
	var stars []string
	c.Step(fmt.Sprintf("feed unreads: %v", u.Read), func(c mpg.Context) {
		queue := make(chan *Feed)
		tc := make(chan *taskqueue.Task)
		done := make(chan bool)
		wg := sync.WaitGroup{}
		// feedProc drains the queue: loads each feed's recent stories,
		// syncs its OPML link, and queues update tasks for stale feeds.
		feedProc := func() {
			for f := range queue {
				c.Step(f.Title, func(c mpg.Context) {
					defer wg.Done()
					var stories []*Story
					gn := goon.FromContext(appengine.Timeout(c, time.Minute))
					// Only query feeds that have stories newer than u.Read.
					if !f.Date.Before(u.Read) {
						fk := gn.Key(f)
						sq := q.Ancestor(fk)
						keys, _ := gn.GetAll(sq, nil)
						stories = make([]*Story, len(keys))
						for j, key := range keys {
							stories[j] = &Story{
								Id:     key.StringID(),
								Parent: fk,
							}
						}
						gn.GetMulti(stories)
					}
					// Keep the OPML's stored site link in sync with the feed.
					if f.Link != opmlMap[f.Url].HtmlUrl {
						l.Text += fmt.Sprintf(", link: %v -> %v", opmlMap[f.Url].HtmlUrl, f.Link)
						updatedLinks = true
						opmlMap[f.Url].HtmlUrl = f.Link
					}
					manualDone := false
					if time.Since(f.LastViewed) > time.Hour*24*2 {
						if !f.NextUpdate.Before(timeMax) {
							// Polling was disabled; force a manual update.
							tc <- taskqueue.NewPOSTTask(routeUrl("update-feed-manual"), url.Values{
								"feed": {f.Url},
								"last": {"1"},
							})
							manualDone = true
						} else {
							tc <- taskqueue.NewPOSTTask(routeUrl("update-feed-last"), url.Values{
								"feed": {f.Url},
							})
						}
					}
					// Feed is past due for its scheduled update.
					if !manualDone && now.Sub(f.NextUpdate) >= 0 {
						tc <- taskqueue.NewPOSTTask(routeUrl("update-feed-manual"), url.Values{
							"feed": {f.Url},
						})
					}
					lock.Lock()
					fl[f.Url] = stories
					numStories += len(stories)
					lock.Unlock()
				})
			}
		}
		go taskSender(c, "update-manual", tc, done)
		// 20 workers drain the feed queue.
		for i := 0; i < 20; i++ {
			go feedProc()
		}
		for i, f := range feeds {
			if goon.NotFound(merr, i) {
				continue
			}
			wg.Add(1)
			queue <- f
		}
		close(queue)
		// Fetch starred-story ids while the workers run.
		c.Step("stars", func(c mpg.Context) {
			gn := goon.FromContext(c)
			q := datastore.NewQuery(gn.Kind(&UserStar{})).
				Ancestor(ud.Parent).
				KeysOnly().
				Filter("c >=", u.Read).
				Order("-c")
			keys, _ := gn.GetAll(q, nil)
			stars = make([]string, len(keys))
			for i, key := range keys {
				stars[i] = starID(key)
			}
		})
		// wait for feeds to complete so there are no more tasks to queue
		wg.Wait()
		// then finish enqueuing tasks
		close(tc)
		<-done
	})
	if numStories > 0 {
		c.Step("numStories", func(c mpg.Context) {
			// Cap the response at the newest numStoriesLimit stories.
			stories := make([]*Story, 0, numStories)
			for _, v := range fl {
				stories = append(stories, v...)
			}
			sort.Sort(sort.Reverse(Stories(stories)))
			if len(stories) > numStoriesLimit {
				stories = stories[:numStoriesLimit]
				fl = make(map[string][]*Story)
				for _, s := range stories {
					fk := s.Parent.StringID()
					p := fl[fk]
					fl[fk] = append(p, s)
				}
			}
			// Move the watermark up to the oldest story still shown.
			last := stories[len(stories)-1].Created
			if u.Read.Before(last) {
				u.Read = last
				putU = true
				fixRead = true
			}
		})
	}
	if fixRead {
		c.Step("fix read", func(c mpg.Context) {
			// Drop read markers for stories no longer in the visible window.
			nread := make(Read)
			for k, v := range fl {
				for _, s := range v {
					rs := readStory{Feed: k, Story: s.Id}
					if read[rs] {
						nread[rs] = true
					}
				}
			}
			if len(nread) != len(read) {
				read = nread
				var b bytes.Buffer
				gob.NewEncoder(&b).Encode(&read)
				ud.Read = b.Bytes()
				putUD = true
				l.Text += ", fix read"
			}
		})
	}
	// Filter out stories the user has already read.
	numStories = 0
	for k, v := range fl {
		newStories := make([]*Story, 0, len(v))
		for _, s := range v {
			if !read[readStory{Feed: k, Story: s.Id}] {
				newStories = append(newStories, s)
			}
		}
		numStories += len(newStories)
		fl[k] = newStories
	}
	if numStories == 0 {
		// Everything is read: clear the read set and advance the watermark
		// to the newest feed date.
		l.Text += ", clear read"
		fixRead = false
		if ud.Read != nil {
			putUD = true
			ud.Read = nil
		}
		last := u.Read
		for _, v := range feeds {
			if last.Before(v.Date) {
				last = v.Date
			}
		}
		c.Infof("nothing here, move up: %v -> %v", u.Read, last)
		if u.Read.Before(last) {
			putU = true
			u.Read = last
		}
	}
	if updatedLinks {
		// OPML site links changed above; persist the updated tree.
		backupOPML(c)
		if o, err := json.Marshal(&uf); err == nil {
			ud.Opml = o
			putUD = true
			l.Text += ", update links"
		} else {
			c.Errorf("json UL err: %v, %v", err, uf)
		}
	}
	if putU {
		gn.Put(u)
		l.Text += ", putU"
	}
	if putUD {
		gn.Put(ud)
		l.Text += ", putUD"
	}
	l.Text += fmt.Sprintf(", len opml %v", len(ud.Opml))
	gn.Put(l)
	c.Step("json marshal", func(c mpg.Context) {
		gn := goon.FromContext(c)
		o := struct {
			Opml           []*OpmlOutline
			Stories        map[string][]*Story
			Options        string
			TrialRemaining int
			Feeds          []*Feed
			Stars          []string
			UnreadDate     time.Time
			UntilDate      int64
		}{
			Opml:           uf.Outline,
			Stories:        fl,
			Options:        u.Options,
			TrialRemaining: trialRemaining,
			Feeds:          feeds,
			Stars:          stars,
			UnreadDate:     u.Read,
			UntilDate:      u.Until.Unix(),
		}
		b, err := json.Marshal(o)
		if err != nil {
			// Marshal can fail on invalid UTF-8 in summaries; clean, persist
			// the cleaned stories, and retry once.
			c.Errorf("cleaning")
			for _, v := range fl {
				for _, s := range v {
					n := sanitizer.CleanNonUTF8(s.Summary)
					if n != s.Summary {
						s.Summary = n
						c.Errorf("cleaned %v", s.Id)
						gn.Put(s)
					}
				}
			}
			b, _ = json.Marshal(o)
		}
		w.Write(b)
	})
}