func QueryKeys(c appengine.Context, cursor string) ([]Key, string, error) { var items []Key q := datastore.NewQuery("Key") if cursor != "" { dCursor, err := datastore.DecodeCursor(string(cursor)) if err == nil { q = q.Start(dCursor) } } t := q.Run(c) for i := 0; i < 10; i++ { var item Key k, err := t.Next(&item) if err == datastore.Done { break } if err != nil { return items, "", nil } item.Id = k.StringID() item.Agent.Get(c, item.AgentId) item.Portal.Get(c, item.PortalId) items = append(items, item) } returnedCursor, err := t.Cursor() if err != nil { return items, "", err } return items, returnedCursor.String(), nil }
// GetNewestItems returns the latest number elements for a specific namespace func GetNewestItems(c appengine.Context, namespace string, limit int, cursor string) ([]Item, string, error) { q := datastore.NewQuery("Item"). Filter("Namespace =", namespace). Order("-CreatedAt"). Limit(limit) if cursor, err := datastore.DecodeCursor(cursor); err == nil { q = q.Start(cursor) } var is []Item var err error t := q.Run(c) for { var i Item _, err = t.Next(&i) if err == datastore.Done { break } is = append(is, i) if err != nil { c.Errorf("Error fetching next item for namespace %v: %v", namespace, err) return nil, "", err } } if cursor, err := t.Cursor(); err == nil { return is, cursor.String(), nil } return nil, "", err }
func query2(w http.ResponseWriter, r *http.Request) { c := appengine.NewContext(r) // start cursor1 OMIT q := datastore.NewQuery("Book").Filter("Author=", "author-1").Order("-CreatedAt") pCursor := r.FormValue("cursor") if pCursor != "" { cursor, err := datastore.DecodeCursor(pCursor) if err != nil { http.Error(w, err.Error(), http.StatusInternalServerError) return } q.Start(cursor) } // end cursor1 OMIT // start cursor2 OMIT var books []Book t := q.Run(c) for i := 0; i < 10; i++ { var book Book key, err := t.Next(&book) if err == datastore.Done { break } if err != nil { http.Error(w, err.Error(), http.StatusInternalServerError) return } c.Debugf("#v", key) books = append(books, book) } // end cursor2 OMIT response := struct { Cursor string Books []Book }{ Books: books, } // start cursor3 OMIT response.Books = books if cursor, err := t.Cursor(); err == nil { response.Cursor = cursor.String() } // end cursor3 OMIT je := json.NewEncoder(w) if err := je.Encode(response); err != nil { http.Error(w, err.Error(), http.StatusInternalServerError) return } }
// GetFeed serves one page (up to 20) of stories for the feed named by form
// value "f", newest first, as JSON with a continuation cursor. On the first
// page (no "c" cursor supplied) it concurrently fetches the user's star IDs
// for this feed and includes them in the response.
func GetFeed(c mpg.Context, w http.ResponseWriter, r *http.Request) {
	gn := goon.FromContext(c)
	f := Feed{Url: r.FormValue("f")}
	var stars []string
	wg := sync.WaitGroup{}
	fk := gn.Key(&f)
	q := datastore.NewQuery(gn.Kind(&Story{})).Ancestor(fk).KeysOnly()
	q = q.Order("-" + IDX_COL)
	if cur := r.FormValue("c"); cur != "" {
		// A bad cursor is silently ignored; the listing restarts from the top.
		if dc, err := datastore.DecodeCursor(cur); err == nil {
			q = q.Start(dc)
		}
	} else {
		// grab the stars list on the first run
		wg.Add(1)
		go c.Step("stars", func(c mpg.Context) {
			gn := goon.FromContext(c)
			usk := starKey(c, f.Url, "")
			q := datastore.NewQuery(gn.Kind(&UserStar{})).Ancestor(gn.Key(usk).Parent()).KeysOnly()
			keys, _ := gn.GetAll(q, nil)
			// stars is written only by this goroutine; wg.Wait below
			// synchronizes before it is read.
			stars = make([]string, len(keys))
			for i, key := range keys {
				stars[i] = starID(key)
			}
			wg.Done()
		})
	}
	iter := gn.Run(q)
	var stories []*Story
	for i := 0; i < 20; i++ {
		if k, err := iter.Next(nil); err == nil {
			// Keys-only scan: build stubs now, hydrate via GetMulti below.
			stories = append(stories, &Story{
				Id:     k.StringID(),
				Parent: k.Parent(),
			})
		} else if err == datastore.Done {
			break
		} else {
			serveError(w, err)
			return
		}
	}
	cursor := ""
	if ic, err := iter.Cursor(); err == nil {
		cursor = ic.String()
	}
	gn.GetMulti(&stories)
	// Wait for the stars goroutine (no-op when "c" was supplied).
	wg.Wait()
	b, _ := json.Marshal(struct {
		Cursor  string
		Stories []*Story
		Stars   []string `json:",omitempty"`
	}{
		Cursor:  cursor,
		Stories: stories,
		Stars:   stars,
	})
	w.Write(b)
}
// GetStars serves the current user's starred stories as JSON: up to 20 stars
// ordered by star-creation time descending, the stories grouped by parent
// feed URL, plus a cursor (form value "c") for the next page.
func GetStars(c mpg.Context, w http.ResponseWriter, r *http.Request) {
	gn := goon.FromContext(c)
	cu := user.Current(c)
	u := User{Id: cu.ID}
	q := datastore.NewQuery(gn.Key(&UserStar{}).Kind()).
		Ancestor(gn.Key(&u)).
		Order("-c").
		Limit(20)
	// A bad cursor is silently ignored and the query starts from the top.
	if cur := r.FormValue("c"); cur != "" {
		if dc, err := datastore.DecodeCursor(cur); err == nil {
			q = q.Start(dc)
		}
	}
	iter := gn.Run(q)
	stars := make(map[string]int64)
	var us UserStar
	var stories []*Story
	for {
		if k, err := iter.Next(&us); err == nil {
			// Map star ID -> star time; build a story stub from the key so
			// GetMulti can hydrate it below.
			stars[starID(k)] = us.Created.Unix()
			stories = append(stories, &Story{
				Id:     k.StringID(),
				Parent: gn.Key(&Feed{Url: k.Parent().StringID()}),
			})
		} else if err == datastore.Done {
			break
		} else {
			serveError(w, err)
			return
		}
	}
	cursor := ""
	if ic, err := iter.Cursor(); err == nil {
		cursor = ic.String()
	}
	// Hydrate the stubs and bucket stories by their parent feed URL.
	var smap map[string][]*Story
	if len(stories) > 0 {
		gn.GetMulti(&stories)
		smap = make(map[string][]*Story)
		for _, s := range stories {
			f := s.Parent.StringID()
			smap[f] = append(smap[f], s)
		}
	}
	b, _ := json.Marshal(struct {
		Cursor  string
		Stories map[string][]*Story
		Stars   map[string]int64
	}{
		Cursor:  cursor,
		Stories: smap,
		Stars:   stars,
	})
	w.Write(b)
}
func applyCursor(query_in *datastore.Query, cursor_in string) (query_out *datastore.Query, err error) { if cursor_in != "" { cursor, err := datastore.DecodeCursor(cursor_in) if err == nil { return query_in.Start(cursor), nil } else { return query_in, err } } else { return query_in, nil } }
func decodeDatastoreCursorValue(d *Decoder, v reflect.Value) error { s, err := d.DecodeString() if err != nil { return err } cursor, err := ds.DecodeCursor(s) if err != nil { return err } v.Set(reflect.ValueOf(cursor)) return nil }
// SitemapFeed renders sitemap-feed.html for the feed whose encoded datastore
// key arrives in the "feed" URL variable: up to Limit story IDs (each paired
// with its base64url form) plus a continuation cursor when the page was full.
func SitemapFeed(c mpg.Context, w http.ResponseWriter, r *http.Request) {
	vars := mux.Vars(r)
	feed := vars["feed"]
	fk, err := datastore.DecodeKey(feed)
	if err != nil {
		serveError(w, err)
		return
	}
	// base64url form of the feed URL, for use in sitemap links.
	bf := base64.URLEncoding.EncodeToString([]byte(fk.StringID()))
	q := datastore.NewQuery("S").KeysOnly().Ancestor(fk)
	q = q.Limit(Limit)
	cs := r.FormValue("c")
	if len(cs) > 0 {
		// A bad cursor is ignored; the listing restarts from the beginning.
		if cur, err := datastore.DecodeCursor(cs); err == nil {
			q = q.Start(cur)
		}
	}
	stories := make(map[string]string)
	it := q.Run(c)
	for {
		k, err := it.Next(nil)
		if err == datastore.Done {
			break
		} else if err != nil {
			c.Errorf("next error: %v", err)
			break
		}
		stories[k.StringID()] = base64.URLEncoding.EncodeToString([]byte(k.StringID()))
	}
	// Only hand out a cursor when the page was full, i.e. more may follow.
	cs = ""
	if len(stories) == Limit {
		if cur, err := it.Cursor(); err == nil {
			cs = cur.String()
		}
	}
	if err := templates.ExecuteTemplate(w, "sitemap-feed.html", struct {
		Feed, Feed64 string
		Stories      map[string]string
		Cursor       string
	}{
		Feed:    feed,
		Feed64:  bf,
		Stories: stories,
		Cursor:  cs,
	}); err != nil {
		c.Errorf("%v", err)
		serveError(w, err)
		return
	}
}
// BackendStart walks all "F" (feed) keys in batches of sz on a backend
// instance, re-subscribing each feed in its own goroutine, and reschedules
// itself via RunInBackground with the batch cursor carried in the captured
// variable cs.
//
// NOTE(review): the bare return on the first line disables this handler
// entirely; everything below it is unreachable until that return is removed —
// presumably a deliberate kill switch, confirm before re-enabling.
func BackendStart(c mpg.Context, w http.ResponseWriter, r *http.Request) {
	return
	const sz = 100
	ic := 0
	var f func(appengine.Context)
	var cs string
	f = func(c appengine.Context) {
		gn := goon.FromContext(c)
		c.Errorf("ic: %d", ic)
		wg := sync.WaitGroup{}
		wg.Add(sz)
		var j int64
		q := datastore.NewQuery("F").KeysOnly()
		if cs != "" {
			// Resume from the cursor saved by the previous batch.
			if cur, err := datastore.DecodeCursor(cs); err == nil {
				q = q.Start(cur)
				c.Errorf("cur start: %v", cur)
			}
		}
		it := q.Run(c)
		for j = 0; j < sz; j++ {
			k, err := it.Next(nil)
			c.Errorf("%v: %v, %v", j, k, err)
			if err != nil {
				c.Criticalf("err: %v", err)
				return
			}
			// Subscribe each feed concurrently; k is passed as an argument to
			// avoid the shared loop-variable capture.
			go func(k *datastore.Key) {
				f := Feed{Url: k.StringID()}
				if err := gn.Get(&f); err == nil {
					f.Subscribe(c)
				}
				wg.Done()
			}(k)
		}
		// Remember where this batch ended so the rescheduled run resumes here.
		cur, err := it.Cursor()
		if err == nil {
			cs = cur.String()
		}
		wg.Wait()
		ic++
		runtime.RunInBackground(c, f)
	}
	runtime.RunInBackground(c, f)
}
// 画像のメタデータ一覧をDSから取得します。 // TODO: 表示する画像数を絞る必要がないなら、Cursor必要ないかも。 func GetImages(c appengine.Context, cursorStr string) ([]Image, string, error) { q := datastore.NewQuery("Image").Order("-Date") if len(cursorStr) != 0 { cursor, err := datastore.DecodeCursor(cursorStr) if err != nil { return []Image{}, "", err } q = q.Start(cursor) } images := []Image{} iter := q.Run(c) isNext := true for { var img Image _, err := iter.Next(&img) if err == datastore.Done { isNext = false break } if err != nil { c.Errorf("fetching next Person: %s", err.Error()) break } err = img.setThumbnailURL(thumbnailsLongestSide, false) if err != nil { c.Errorf("%s", err.Error()) break } images = append(images, img) } if isNext { next_cursor, err := iter.Cursor() if err != nil { c.Errorf("%s", err.Error()) return []Image{}, "", err } return images, next_cursor.String(), nil } else { return images, "", nil } }
// GetPortals returns up to 10 Portal entities read in this call (the query
// limit is 30 without labels, 10 when filtering on the first label), loading
// each portal's Keys via a per-portal sub-query. When checkfavorited is set,
// only portals whose Id appears in favorited are kept. The incoming cursor
// resumes a previous page; the returned cursor continues it.
func GetPortals(c appengine.Context, labels string, checkfavorited bool, favorited []string, cursor string) ([]Portal, string, error) {
	q := datastore.NewQuery("Portal")
	if cursor != "" {
		// An undecodable cursor is ignored and the scan starts over.
		dCursor, err := datastore.DecodeCursor(string(cursor))
		if err == nil {
			q = q.Start(dCursor)
		}
	}
	if len(labels) == 0 {
		q = q.Limit(30)
	} else {
		// Only the first whitespace-separated label is used as a filter.
		splits := strings.Split(labels, " ")
		c.Infof("query....%s", splits)
		q = q.Filter("Labels=", splits[0]).Limit(10)
	}
	var portals []Portal
	t := q.Run(c)
	// NOTE(review): the loop reads at most 10 entities even when the
	// no-label query limit is 30 — confirm whether that is intended.
	for i := 0; i < 10; i++ {
		var portal Portal
		_, err := t.Next(&portal)
		if err == datastore.Done {
			break
		}
		if err != nil {
			return portals, "", err
		}
		// N+1 pattern: one extra query per portal to attach its keys.
		if _, err := datastore.NewQuery("Key").Filter("PortalId=", portal.Id).GetAll(c, &portal.Keys); err != nil {
			return portals, "", err
		}
		if checkfavorited {
			for _, portalid := range favorited {
				if portalid == portal.Id {
					portals = append(portals, portal)
				}
			}
		} else {
			portals = append(portals, portal)
		}
	}
	cursor1, err := t.Cursor()
	if err != nil {
		return portals, "", err
	}
	return portals, cursor1.String(), nil
}
func doExpire(c appengine.Context, expireTime time.Time, cursorString string) { query := getExpiredQuery(expireTime).KeysOnly() if len(cursorString) > 0 { if cursor, err := datastore.DecodeCursor(cursorString); err != nil { c.Errorf("Failed to decode cursor: %s", err) return } else { query = query.Start(cursor) } } for { toDelete := make([]*datastore.Key, 0, 100) for queryIterator := query.Run(c); ; { peerKey, err := queryIterator.Next(nil) if len(toDelete) >= 100 { break } else if err == datastore.Done { c.Infof("Done finding expired peers") break } else if err != nil { c.Criticalf("Failed to get next peer: %#v (%s)", err, err) return } toDelete = append(toDelete, peerKey) } deleted := len(toDelete) c.Infof("Deleting %d expired peers", deleted) if err := ds.DeleteMulti(c, toDelete); err != nil { c.Criticalf("Failed to delete peers: %#v (%s)", err, err) return } if deleted <= 0 { break } } c.Infof("Finished deleting expired peers") }
// DeleteOldFeeds scans feeds whose next-update time equals timeMax, queues up
// to 100 per-feed "delete-old-feed" tasks per run, and re-queues itself with
// a cursor until the scan reports Done.
func DeleteOldFeeds(c mpg.Context, w http.ResponseWriter, r *http.Request) {
	// One-minute deadline for the long keys-only scan.
	ctx := appengine.Timeout(c, time.Minute)
	gn := goon.FromContext(c)
	q := datastore.NewQuery(gn.Kind(&Feed{})).Filter("n=", timeMax).KeysOnly()
	// A missing or bad cursor simply starts the scan from the beginning.
	if cur, err := datastore.DecodeCursor(r.FormValue("c")); err == nil {
		q = q.Start(cur)
	}
	it := q.Run(ctx)
	done := false
	var tasks []*taskqueue.Task
	for i := 0; i < 10000 && len(tasks) < 100; i++ {
		k, err := it.Next(nil)
		if err == datastore.Done {
			c.Criticalf("done")
			done = true
			break
		} else if err != nil {
			c.Errorf("err: %v", err)
			continue
		}
		values := make(url.Values)
		values.Add("f", k.StringID())
		tasks = append(tasks, taskqueue.NewPOSTTask("/tasks/delete-old-feed", values))
	}
	if len(tasks) > 0 {
		c.Errorf("deleting %v feeds", len(tasks))
		if _, err := taskqueue.AddMulti(c, tasks, ""); err != nil {
			c.Errorf("err: %v", err)
		}
	}
	// Not finished: schedule the next chunk starting at the current cursor.
	if !done {
		if cur, err := it.Cursor(); err == nil {
			values := make(url.Values)
			values.Add("c", cur.String())
			taskqueue.Add(c, taskqueue.NewPOSTTask("/tasks/delete-old-feeds", values), "")
		} else {
			c.Errorf("err: %v", err)
		}
	}
}
func GetFeed(c mpg.Context, w http.ResponseWriter, r *http.Request) { gn := goon.FromContext(c) f := Feed{Url: r.FormValue("f")} fk := gn.Key(&f) q := datastore.NewQuery(gn.Key(&Story{}).Kind()).Ancestor(fk).KeysOnly() q = q.Order("-" + IDX_COL) if c := r.FormValue("c"); c != "" { if dc, err := datastore.DecodeCursor(c); err == nil { q = q.Start(dc) } } iter := gn.Run(q) var stories []*Story for i := 0; i < 20; i++ { if k, err := iter.Next(nil); err == nil { stories = append(stories, &Story{ Id: k.StringID(), Parent: k.Parent(), }) } else if err == datastore.Done { break } else { serveError(w, err) return } } cursor := "" if ic, err := iter.Cursor(); err == nil { cursor = ic.String() } gn.GetMulti(&stories) b, _ := json.Marshal(struct { Cursor string Stories []*Story }{ Cursor: cursor, Stories: stories, }) w.Write(b) }
func Sitemap(c mpg.Context, w http.ResponseWriter, r *http.Request) { q := datastore.NewQuery("F").KeysOnly() q = q.Limit(Limit) cs := r.FormValue("c") if len(cs) > 0 { if cur, err := datastore.DecodeCursor(cs); err == nil { q = q.Start(cur) } } var keys []*datastore.Key it := q.Run(c) for { k, err := it.Next(nil) if err == datastore.Done { break } else if err != nil { c.Errorf("next error: %v", err) break } keys = append(keys, k) } cs = "" if len(keys) == Limit { if cur, err := it.Cursor(); err == nil { cs = cur.String() } } if err := templates.ExecuteTemplate(w, "sitemap.html", struct { Keys []*datastore.Key Cursor string }{ Keys: keys, Cursor: cs, }); err != nil { c.Errorf("%v", err) serveError(w, err) return } }
// Next advances the iterator. Results are served from an in-memory buffer of
// entity IDs; when the buffer is exhausted, the next page of up to bufferSize
// entities is fetched from the datastore, resuming from the cursor saved in
// it.last. Entries marked as deleted are skipped. Next returns false when the
// scan is exhausted or a datastore error occurred (stored in it.err).
func (it *Iterator) Next() bool {
	// Serve from the buffer while it lasts.
	if it.offset+1 < len(it.buffer) {
		it.offset++
		it.result = &Token{Kind: it.kind, Hash: it.buffer[it.offset]}
		return true
	}
	if it.done {
		return false
	}
	// Reset buffer and offset
	it.offset = 0
	it.buffer = make([]string, 0, bufferSize)
	// Create query
	// TODO (panamafrancis) Keys only query?
	q := datastore.NewQuery(it.kind).Limit(bufferSize)
	if !it.isAll {
		// Filter on the direction {subject,objekt...}
		q = q.Filter(it.dir.String()+" =", it.name)
	}
	// Get last cursor position; an empty/invalid saved cursor just means
	// the query starts from the beginning.
	cursor, err := datastore.DecodeCursor(it.last)
	if err == nil {
		q = q.Start(cursor)
	}
	// Buffer the keys of the next 50 matches
	t := q.Run(it.qs.context)
	for {
		// Quirk of the datastore, you cannot pass a nil value to to Next()
		// even if you just want the keys
		var k *datastore.Key
		skip := false
		if it.kind == quadKind {
			temp := new(QuadEntry)
			k, err = t.Next(temp)
			// Skip if quad has been deleted
			if len(temp.Added) <= len(temp.Deleted) {
				skip = true
			}
		} else {
			temp := new(NodeEntry)
			k, err = t.Next(temp)
			// Skip if node has been deleted
			if temp.Size == 0 {
				skip = true
			}
		}
		if err == datastore.Done {
			it.done = true
			break
		}
		if err != nil {
			glog.Errorf("Error fetching next entry %v", err)
			it.err = err
			return false
		}
		if !skip {
			it.buffer = append(it.buffer, k.StringID())
		}
	}
	// Save cursor position
	cursor, err = t.Cursor()
	if err == nil {
		it.last = cursor.String()
	}
	// Protect against bad queries
	if it.done && len(it.buffer) == 0 {
		glog.Warningf("Query did not return any results")
		return false
	}
	// First result
	it.result = &Token{Kind: it.kind, Hash: it.buffer[it.offset]}
	return true
}
// UpdateFeeds finds up to 100 feeds due for an update (n <= now), fans out a
// per-feed "update-feed" task for each, and chains itself with a cursor so
// the next batch continues where this one stopped. On a cursor-less first run
// it bails out if the update-feed queue already has pending work.
func UpdateFeeds(c mpg.Context, w http.ResponseWriter, r *http.Request) {
	q := datastore.NewQuery("F").KeysOnly().Filter("n <=", time.Now())
	q = q.Limit(100)
	cs := r.FormValue("c")
	hasCursor := false
	if len(cs) > 0 {
		if cur, err := datastore.DecodeCursor(cs); err == nil {
			q = q.Start(cur)
			hasCursor = true
			c.Infof("starting at %v", cur)
		} else {
			c.Errorf("cursor error %v", err.Error())
		}
	}
	if !hasCursor {
		// First run of a chain: skip if tasks are already queued/executing.
		qs, err := taskqueue.QueueStats(c, []string{"update-feed"}, 0)
		if err != nil || !qs[0].OldestETA.IsZero() {
			c.Errorf("already %v (%v) tasks", qs[0].Tasks, qs[0].Executed1Minute)
			return
		}
	}
	var keys []*datastore.Key
	it := q.Run(Timeout(c, time.Second*60))
	for {
		k, err := it.Next(nil)
		if err == datastore.Done {
			break
		} else if err != nil {
			c.Errorf("next error: %v", err.Error())
			break
		}
		keys = append(keys, k)
	}
	if len(keys) == 0 {
		c.Errorf("no results")
		return
	} else {
		// Chain the next batch from the current cursor position.
		cur, err := it.Cursor()
		if err != nil {
			c.Errorf("to cur error %v", err.Error())
		} else {
			c.Infof("add with cur %v", cur)
			t := taskqueue.NewPOSTTask(routeUrl("update-feeds"), url.Values{
				"c": {cur.String()},
			})
			taskqueue.Add(c, t, "update-feed")
		}
	}
	c.Infof("updating %d feeds", len(keys))
	var tasks []*taskqueue.Task
	for _, k := range keys {
		tasks = append(tasks, taskqueue.NewPOSTTask(routeUrl("update-feed"), url.Values{
			"feed": {k.StringID()},
		}))
	}
	// Submit the per-feed tasks in chunks of at most taskLimit per AddMulti.
	var ts []*taskqueue.Task
	const taskLimit = 100
	for len(tasks) > 0 {
		if len(tasks) > taskLimit {
			ts = tasks[:taskLimit]
			tasks = tasks[taskLimit:]
		} else {
			ts = tasks
			tasks = tasks[0:0]
		}
		if _, err := taskqueue.AddMulti(c, ts, "update-feed"); err != nil {
			c.Errorf("taskqueue error: %v", err.Error())
		}
	}
}
// batchMap is the first stage of a task-queue map pipeline: it collects up to
// 10000 keys of the entity kind named by form value "type" (resuming from
// form value "cursor"), posts them to the stage-2 handler (form value "next")
// in gzipped JSON chunks of 100 keys, and re-queues itself with the updated
// cursor when more keys remain.
func batchMap(c appengine.Context, w http.ResponseWriter, r *http.Request) {
	start := time.Now()
	if !queueMore(c) {
		c.Infof("Too many jobs queued, backing off")
		http.Error(w, "Busy", 503)
		return
	}
	q := datastore.NewQuery(r.FormValue("type")).KeysOnly()
	if cstr := r.FormValue("cursor"); cstr != "" {
		// A bad cursor is fatal here: maybePanic aborts the request.
		cursor, err := datastore.DecodeCursor(cstr)
		maybePanic(err)
		c.Infof("Starting from cursor %v", cstr)
		q = q.Start(cursor)
	}
	keys := []string{}
	finished := false
	t := q.Run(c)
	for i := 0; i < 10000; i++ {
		k, err := t.Next(nil)
		if err == datastore.Done {
			finished = true
			break
		} else if err != nil {
			http.Error(w, err.Error(), 500)
			return
		}
		keys = append(keys, k.Encode())
	}
	c.Infof("Got %v %v keys in %v, finished=%v", len(keys), r.FormValue("type"), time.Since(start), finished)
	// Ship the keys to stage 2 in chunks of at most 100, gzip-compressed.
	for len(keys) > 0 && r.FormValue("next") != "" {
		subkeys := keys
		if len(subkeys) > 100 {
			subkeys = keys[:100]
			keys = keys[100:]
		} else {
			keys = nil
		}
		buf := &bytes.Buffer{}
		z := gzip.NewWriter(buf)
		e := json.NewEncoder(z)
		maybePanic(e.Encode(subkeys))
		maybePanic(z.Flush())
		maybePanic(z.Close())
		c.Infof("Queueing %v with %v bytes of compressed keys", mapStage2, buf.Len())
		task := &taskqueue.Task{
			Path:    r.FormValue("next"),
			Payload: buf.Bytes(),
		}
		_, err := taskqueue.Add(c, task, mapStage2)
		maybePanic(err)
	}
	// More keys remain: re-enqueue this handler with the updated cursor.
	if !finished {
		cursor, err := t.Cursor()
		maybePanic(err)
		c.Infof("Requesting more from %v", cursor.String())
		r.Form.Set("cursor", cursor.String())
		taskqueue.Add(c, taskqueue.NewPOSTTask("/batch/map", r.Form), mapStage1)
	}
	w.WriteHeader(201)
}
func (runner *runner) StartFrom(cursor string) *runner { c, _ := datastore.DecodeCursor(cursor) runner.query = runner.query.Start(c) return runner }
// logs is the logic implementation of the Logs page.
// It validates the selected device, parses the Before/After/Location filters,
// runs a cursor-paginated query over the device's GPS records, maintains the
// semicolon-separated per-page cursor list carried between requests, and
// fills p.Custom with everything the template needs.
func logs(p *page.Params) {
	c := p.AppCtx
	// First get devices
	var devices []*ds.Device
	if devices, p.Err = cache.GetDevListForAccKey(c, p.Account.GetKey(c)); p.Err != nil {
		return
	}
	p.Custom["Devices"] = devices
	fv := p.Request.FormValue
	p.Custom["Before"] = fv("before")
	p.Custom["After"] = fv("after")
	p.Custom["SearchLoc"] = fv("loc")
	if fv("devID") == "" {
		// No device chosen yet
		return
	}
	var err error
	var devID int64
	if devID, err = strconv.ParseInt(string(fv("devID")), 10, 64); err != nil {
		p.ErrorMsg = "Invalid Device! Please select a Device from the list below."
		return
	}
	// Check if device is owned by the user:
	var dev *ds.Device
	for _, d := range devices {
		if d.KeyID == devID {
			dev = d
			p.Custom["Device"] = d
			break
		}
	}
	if dev == nil {
		p.ErrorMsg = "You do not have access to the specified Device! Please select a Device from the list below."
		return
	}
	// Parse filters:
	var before time.Time
	if fv("before") != "" {
		if before, err = p.ParseTime(timeLayout, strings.TrimSpace(fv("before"))); err != nil {
			p.ErrorMsg = template.HTML(`Invalid <span class="highlight">Before</span>!`)
			return
		}
		// Add 1 second to the parsed time because fraction of a second is not parsed but exists,
		// so this new time will also include records which has the same time up to the second part and has millisecond part too.
		before = before.Add(time.Second)
	}
	var after time.Time
	if fv("after") != "" {
		if after, err = p.ParseTime(timeLayout, strings.TrimSpace(fv("after"))); err != nil {
			p.ErrorMsg = template.HTML(`Invalid <span class="highlight">After</span>!`)
			return
		}
	}
	var searchLoc appengine.GeoPoint
	areaCode := int64(-1)
	if dev.Indexed() && fv("loc") != "" {
		// GPS coordinates; lat must be in range -90..90, lng must be in range -180..180
		baseErr := template.HTML(`Invalid <span class="highlight">Location</span>!`)
		var coords = strings.Split(strings.TrimSpace(fv("loc")), ",")
		if len(coords) != 2 {
			p.ErrorMsg = baseErr
			return
		}
		searchLoc.Lat, err = strconv.ParseFloat(coords[0], 64)
		if err != nil {
			p.ErrorMsg = baseErr
			return
		}
		searchLoc.Lng, err = strconv.ParseFloat(coords[1], 64)
		if err != nil {
			p.ErrorMsg = baseErr
			return
		}
		if !searchLoc.Valid() {
			p.ErrorMsg = template.HTML(`Invalid <span class="highlight">Location</span> specified by latitude and longitude! Valid range: [-90, 90] latitude and [-180, 180] longitude`)
			return
		}
		areaCode = AreaCodeForGeoPt(dev.AreaSize, searchLoc.Lat, searchLoc.Lng)
	}
	var page int
	cursorsString := fv("cursors")
	var cursors = strings.Split(cursorsString, ";")[1:] // Split always returns at least 1 element (and we use semicolon separator before cursors)
	// Form values
	if fv("page") == "" {
		page = 1
	} else {
		page, err = strconv.Atoi(fv("page"))
		if err != nil || page < 1 {
			page = 1
		}
		if page > len(cursors) {
			// If page is provided, so are (should be) the cursors
			page = len(cursors)
		}
	}
	// Navigation buttons adjust the requested page after validation.
	switch {
	case fv("submitFirstPage") != "":
		page = 1
	case fv("submitPrevPage") != "":
		if page > 1 {
			page--
		}
	case fv("submitNextPage") != "":
		page++
	}
	pageSize := p.Account.GetLogsPageSize()
	if ps := fv("pageSize"); ps != "" && ps != strconv.Itoa(pageSize) {
		// Page size has been changed (on Settings page), drop cursors, render page 1
		page = 1
		cursorsString = ""
		cursors = make([]string, 0, 1)
	}
	// 'ts all good, proceed with the query:
	q := datastore.NewQuery(ds.ENameGPS).Filter(ds.PNameDevKeyID+"=", devID)
	if !before.IsZero() {
		q = q.Filter(ds.PNameCreated+"<", before)
	}
	if !after.IsZero() {
		q = q.Filter(ds.PNameCreated+">", after)
	}
	if areaCode >= 0 {
		q = q.Filter(ds.PNameAreaCodes+"=", areaCode)
	}
	q = q.Order("-" + ds.PNameCreated).Limit(pageSize)
	var records = make([]*ds.GPS, 0, pageSize)
	// If there is a cursor, set it.
	// Page - cursor index mapping: cursors[page-2]
	// 1st page: no cursor, 2nd page: cursors[0], 3nd page: cursors[1], ...
	if page > 1 && page <= len(cursors)+1 {
		var cursor datastore.Cursor
		if cursor, p.Err = datastore.DecodeCursor(cursors[page-2]); p.Err != nil {
			return
		}
		q = q.Start(cursor)
	}
	// Iterate over the results:
	t := q.Run(c)
	for {
		r := new(ds.GPS)
		_, err := t.Next(r)
		if err == datastore.Done {
			break
		}
		if err != nil {
			// Datastore error
			p.Err = err
			return
		}
		records = append(records, r)
		r.Dd = -1 // For now, will be set if applicable
		if r.Track() {
			// Check the previous (in time) record and calculate distance.
			// If previous is not a Track, check the one before that etc.
			for i := len(records) - 2; i >= 0; i-- {
				if prev := records[i]; prev.Track() {
					prev.Dd = Distance(r.GeoPoint.Lat, r.GeoPoint.Lng, prev.GeoPoint.Lat, prev.GeoPoint.Lng)
					prev.Dt = prev.Created.Sub(r.Created)
					break
				}
			}
		}
	}
	if len(records) == 0 {
		// End of list reached, disable Next page button:
		p.Custom["EndOfList"] = true
	}
	if page == 1 || page > len(cursors) {
		// Get updated cursor and store it for next page:
		var cursor datastore.Cursor
		if cursor, p.Err = t.Cursor(); p.Err != nil {
			return
		}
		cursorString := cursor.String()
		if page == 1 {
			// If new records were inserted, they appear on the first page in which case
			// the cursor for the 2nd page changes (and all other cursors will change).
			// In this case drop all the cursors:
			if len(cursors) > 0 && cursors[0] != cursorString {
				cursorsString = ""
				cursors = make([]string, 0, 1)
			}
		} else {
			// When end of list is reached, the same cursor will be returned
			if len(records) == 0 && page == len(cursors)+1 && cursors[page-2] == cursorString {
				// Add 1 extra, empty page, but not more.
				if page > 2 && cursors[page-3] == cursorString {
					// An extra, empty page has already been added, do not add more:
					page--
				}
			}
		}
		if page > len(cursors) {
			cursors = append(cursors, cursorString)
			cursorsString += ";" + cursorString
		}
	}
	// Calculate labels: '1'..'9' then 'A'...
	for i, lbl := len(records)-1, '1'; i >= 0; i-- {
		if r := records[i]; r.Track() {
			r.Label = lbl
			if lbl == '9' {
				lbl = 'A' - 1
			}
			lbl++
		}
	}
	// Hand everything to the template.
	p.Custom["CursorList"] = cursors
	p.Custom["Cursors"] = cursorsString
	p.Custom["Page"] = page
	p.Custom["PageSize"] = pageSize
	p.Custom["RecordOffset"] = (page-1)*pageSize + 1
	p.Custom["Records"] = records
	if p.Mobile {
		p.Custom["MapWidth"], p.Custom["MapHeight"] = p.Account.GetMobMapPrevSize()
		p.Custom["MapImgFormat"] = p.Account.GetMobMapImgFormat()
	} else {
		p.Custom["MapWidth"], p.Custom["MapHeight"] = p.Account.GetMapPrevSize()
	}
	p.Custom["APIKey"] = "AIzaSyCEU_tZ1n0-mMg4woGKIfPqdbi0leSKvjg"
	p.Custom["AllMarkers"] = allMarkers(records)
	if len(records) == 0 {
		if page == 1 {
			if before.IsZero() && after.IsZero() && areaCode < 0 {
				p.Custom["PrintNoRecordsForDev"] = true
			} else {
				p.Custom["PrintNoMatchForFilters"] = true
			}
		} else {
			p.Custom["PrintNoMoreRecords"] = true
		}
	}
}
// CFixer scans up to 1000 feed keys per run (resuming from form value "c"),
// queues a per-feed "cfix" task for each, and re-queues itself with the end
// cursor so the next run continues the scan.
func CFixer(c mpg.Context, w http.ResponseWriter, r *http.Request) {
	q := datastore.NewQuery("F").KeysOnly()
	q = q.Limit(1000)
	cs := r.FormValue("c")
	if len(cs) > 0 {
		if cur, err := datastore.DecodeCursor(cs); err == nil {
			q = q.Start(cur)
			c.Infof("starting at %v", cur)
		} else {
			c.Errorf("cursor error %v", err.Error())
		}
	}
	var keys []*datastore.Key
	it := q.Run(Timeout(c, time.Second*15))
	for {
		k, err := it.Next(nil)
		if err == datastore.Done {
			break
		} else if err != nil {
			c.Errorf("next error: %v", err.Error())
			break
		}
		keys = append(keys, k)
	}
	if len(keys) == 0 {
		c.Errorf("no results")
		return
	} else {
		// Chain the next scan chunk from the current cursor position.
		cur, err := it.Cursor()
		if err != nil {
			c.Errorf("to cur error %v", err.Error())
		} else {
			c.Infof("add with cur %v", cur)
			t := taskqueue.NewPOSTTask("/tasks/cfixer", url.Values{
				"c": {cur.String()},
			})
			taskqueue.Add(c, t, "cfixer")
		}
	}
	c.Infof("fixing %d feeds", len(keys))
	var tasks []*taskqueue.Task
	for _, k := range keys {
		c.Infof("f: %v", k.StringID())
		tasks = append(tasks, taskqueue.NewPOSTTask("/tasks/cfix", url.Values{
			"feed": {k.StringID()},
		}))
	}
	// Submit the per-feed tasks in chunks of at most taskLimit per AddMulti.
	var ts []*taskqueue.Task
	const taskLimit = 100
	for len(tasks) > 0 {
		if len(tasks) > taskLimit {
			ts = tasks[:taskLimit]
			tasks = tasks[taskLimit:]
		} else {
			ts = tasks
			tasks = tasks[0:0]
		}
		if _, err := taskqueue.AddMulti(c, ts, "cfixer"); err != nil {
			c.Errorf("taskqueue error: %v", err.Error())
		}
	}
}
// NewArticlePage loads one page (articlePageSize entries) of articles
// matching the filter, optionally resuming from the encoded cursor in start,
// and hydrates each article with its Entry details and media. The returned
// page carries a continuation cursor only when a full page was read.
func NewArticlePage(c appengine.Context, filter ArticleFilter, start string) (*ArticlePage, error) {
	scopeKey, err := filter.key(c)
	if err != nil {
		return nil, err
	}
	q := datastore.NewQuery("Article").Ancestor(scopeKey).Order("-Fetched").Order("-Published")
	if filter.Property != "" {
		q = q.Filter("Properties = ", filter.Property)
	} else if filter.Tag != "" {
		q = q.Filter("Tags = ", filter.Tag)
	}
	if start != "" {
		// Unlike most handlers in this codebase, a bad cursor is a hard error here.
		if cursor, err := datastore.DecodeCursor(start); err == nil {
			q = q.Start(cursor)
		} else {
			return nil, err
		}
	}
	t := q.Run(c)
	articles := make([]Article, articlePageSize)
	entryKeys := make([]*datastore.Key, articlePageSize)
	var readCount int
	for readCount = 0; readCount < articlePageSize; readCount++ {
		article := &articles[readCount]
		if _, err := t.Next(article); err != nil && err == datastore.Done {
			break
		} else if IsFieldMismatch(err) {
			// Ignore - migration issue
		} else if err != nil {
			return nil, err
		}
		entryKey := article.Entry
		article.ID = entryKey.StringID()
		article.Source = entryKey.Parent().StringID()
		entryKeys[readCount] = entryKey
	}
	// A full page implies there may be more; record where to continue.
	continueFrom := ""
	if readCount >= articlePageSize {
		if cursor, err := t.Cursor(); err == nil {
			continueFrom = cursor.String()
		}
	}
	// Trim the pre-sized slices down to what was actually read.
	articles = articles[:readCount]
	entryKeys = entryKeys[:readCount]
	entries := make([]Entry, readCount)
	if err := datastore.GetMulti(c, entryKeys, entries); err != nil {
		if multiError, ok := err.(appengine.MultiError); ok {
			for _, singleError := range multiError {
				if singleError != nil {
					// Safely ignore ErrFieldMismatch
					if !IsFieldMismatch(singleError) {
						return nil, err
					}
				}
			}
		} else {
			return nil, err
		}
	}
	for i, _ := range articles {
		if entries[i].HasMedia {
			if media, err := MediaForEntry(c, entryKeys[i]); err != nil {
				c.Warningf("Error loading media for entry: %s", err)
			} else {
				articles[i].Media = media
			}
		}
		articles[i].Details = &entries[i]
		if articles[i].Tags == nil {
			// Ensure Tags serializes as [] rather than null.
			articles[i].Tags = make([]string, 0)
		}
	}
	page := ArticlePage{
		Articles: articles,
		Continue: continueFrom,
	}
	return &page, nil
}
func guestViewCursor(w http.ResponseWriter, r *http.Request, m map[string]interface{}) { c := appengine.NewContext(r) q := ds.NewQuery(gbp.GbEntryKind) q.Order("-Date") b1 := new(bytes.Buffer) cur_start, err := memcache.Get(c, "greeting_cursor") if err == nil { str_curs := string(cur_start.Value) if len(cur_start.Value) > 0 { cursor, err := ds.DecodeCursor(str_curs) // inverse is string() loghttp.E(w, r, err, false) if err == nil { b1.WriteString("found cursor from memcache -" + stringspb.Ellipsoider(str_curs, 10) + "-<br>\n") q = q.Start(cursor) } } } iter := q.Run(c) var cntr int = 0 for { var g gbp.GbEntryRetr cntr++ if cntr > 2 { b1.WriteString(" batch complete -" + string(cntr) + "-<br>\n") break } _, err := iter.Next(&g) if err == ds.Done { b1.WriteString("scan complete -" + string(cntr) + "-<br>\n") break } if fmt.Sprintf("%T", err) == fmt.Sprintf("%T", new(ds.ErrFieldMismatch)) { err = nil // ignore this one - it's caused by our deliberate differences between gbsaveEntry and gbEntrieRetr } if err != nil { b1.WriteString("error fetching next: " + err.Error() + "<br>\n") break } b1.WriteString(" - " + g.String()) } // Get updated cursor and store it for next time. if cur_end, err := iter.Cursor(); err == nil { str_c_end := cur_end.String() // inverse is decode() val := []byte(str_c_end) mi_save := &memcache.Item{ Key: "greeting_cursor", Value: val, Expiration: 60 * time.Second, } if err := memcache.Set(c, mi_save); err != nil { b1.WriteString("error adding memcache item " + err.Error() + "<br>\n") } else { b1.WriteString("wrote cursor to memcache -" + stringspb.Ellipsoider(str_c_end, 10) + "-<br>\n") } } else { b1.WriteString("could not retrieve cursor_end " + err.Error() + "<br>\n") } w.Header().Set("Content-Type", "text/html") w.Write(b1.Bytes()) w.Write([]byte("<br>----<br>")) }
// updateProfilesFromRadars tallies radar counts per user for up to 500 Radar
// entities per chunk (the PutMulti batch limit), writes the aggregated
// profiles, and then either triggers the next chunk via an HTTP request
// carrying the cursor, or moves the pipeline on to the "comments" source
// when the last chunk is done.
func updateProfilesFromRadars(
	context appengine.Context,
	generation string,
	cursor_string string,
	chunk int) (count int, err error) {
	context.Infof("Radars Chunk %d", chunk)
	profiles := make(map[string]*Profile)
	// Projection query: only the UserName property is fetched.
	query := datastore.NewQuery("Radar").Project("UserName")
	if cursor_string != "" {
		cursor, err := datastore.DecodeCursor(cursor_string)
		if err == nil {
			query = query.Start(cursor)
		} else {
			return count, err
		}
	}
	iterator := query.Run(context)
	count = 0
	for {
		var radar Radar
		_, err := iterator.Next(&radar)
		if err == datastore.Done {
			break
		} else if err != nil {
			// Per-entity errors are logged and skipped, not fatal.
			context.Errorf("Radar iterator error: %v", err)
			continue
		} else {
			username := radar.UserName
			profile, found := profiles[username]
			if !found {
				profile = createProfile(username, generation)
				profiles[username] = profile
			}
			profile.RadarCount++
		}
		count = count + 1
		// we can't write more than 500 entities in a single call to PutMulti,
		// so we stop after 500 radars
		if count == 500 {
			break
		}
	}
	updateProfileEntities(context, profiles, generation, "radars", count)
	if count == 500 {
		// in case there are more radars, continue using a cursor based on the current position
		if cursor, err := iterator.Cursor(); err == nil {
			data := url.Values{}
			data.Set("generation", generation)
			data.Set("cursor", cursor.String())
			data.Set("chunk", strconv.Itoa(chunk+1))
			client := urlfetch.Client(context)
			request, _ := http.NewRequest("GET", "http://openradar-golang.appspot.com/api/updateprofiles"+"?"+data.Encode(), nil)
			response, err := client.Do(request)
			context.Infof("response %+v (%+v)", response, err)
		}
	} else {
		// if this was our last time through, start looping through comments
		data := url.Values{}
		data.Set("generation", generation)
		data.Set("source", "comments")
		client := urlfetch.Client(context)
		request, _ := http.NewRequest("GET", "http://openradar-golang.appspot.com/api/updateprofiles"+"?"+data.Encode(), nil)
		response, err := client.Do(request)
		context.Infof("response %+v (%+v)", response, err)
	}
	logMemoryUsage(context)
	return count, err
}