func queuePush(w http.ResponseWriter, r *http.Request, mx map[string]interface{}) {
	lg, b := loghttp.BuffLoggerUniversal(w, r)
	_ = b

	c := appengine.NewContext(r)

	m := map[string][]string{"counter_name": []string{nscStringKey}}
	t := taskqueue.NewPOSTTask("/_ah/namespaced-counters/queue-pop", m)
	taskqueue.Add(c, t, "")

	c, err := appengine.Namespace(c, altNamespace)
	lg(err)
	taskqueue.Add(c, t, "")

	io.WriteString(w, "tasks enqueued\n")
	io.WriteString(w, "\ncounter values now: \n")
	readBothNamespaces(w, r, mx)

	io.WriteString(w, "\n\n...sleeping... \n")
	time.Sleep(time.Duration(400) * time.Millisecond)
	readBothNamespaces(w, r, mx)
}
func asyncStoreStats(c appengine.Context, st Stats, uuid string) error {
	buf := bytes.Buffer{}
	e := gob.NewEncoder(&buf)
	err := e.Encode(st)
	if err != nil {
		return err
	}

	cherr := make(chan error, 2)

	go func() {
		task := &taskqueue.Task{
			Path:    "/asyncStoreStats",
			Payload: buf.Bytes(),
		}
		if uuid != "" {
			task.Header = http.Header{uuidHeader: []string{uuid}}
		}
		// Use a goroutine-local err to avoid racing on the outer variable.
		_, err := taskqueue.Add(c, task, "asyncstore")
		cherr <- err
	}()

	go func() {
		task := &taskqueue.Task{
			Path:    "/asyncRollupRecent",
			Payload: buf.Bytes(),
		}
		_, err := taskqueue.Add(c, task, "rolluprecent")
		cherr <- err
	}()

	return anyErr(<-cherr, <-cherr)
}
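// anyErr is used above but not defined in this snippet. A minimal sketch of
// such a helper (an assumption, not necessarily the original implementation)
// returns the first non-nil error it is given:
func anyErr(errs ...error) error {
	for _, err := range errs {
		if err != nil {
			return err
		}
	}
	return nil
}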
func UpdateFeeds(c mpg.Context, w http.ResponseWriter, r *http.Request) {
	gn := goon.FromContext(c)
	q := datastore.NewQuery(gn.Key(&Feed{}).Kind()).KeysOnly()
	q = q.Filter("n <=", time.Now())
	retry, _ := strconv.Atoi(r.FormValue("retry"))
	c.Errorf("retry: %v", retry)

	//* iterator
	it := gn.Run(q)
	i := 0
	done := false
	for {
		k, err := it.Next(nil)
		if err == datastore.Done {
			done = true
			break
		} else if err != nil {
			c.Errorf("next error: %v", err.Error())
			break
		}
		t := taskqueue.NewPOSTTask(routeUrl("update-feed"), url.Values{
			"feed": {k.StringID()},
		})
		if _, err := taskqueue.Add(c, t, "update-feed"); err != nil {
			c.Errorf("taskqueue error: %v", err.Error())
		}
		i++
	}
	c.Infof("updating %d feeds", i)
	fmt.Fprintf(w, "updating %d feeds", i)
	if !done {
		time.Sleep(time.Second * time.Duration(i) / 50) // sleep about the time it'll take to process them
		t := taskqueue.NewPOSTTask("/tasks/update-feeds", url.Values{
			"retry": {strconv.Itoa(retry + 1)},
		})
		if _, err := taskqueue.Add(c, t, "update-feeds"); err != nil {
			c.Errorf("taskqueue update feeds error: %v", err.Error())
		}
		c.Errorf("ran update again")
		fmt.Fprintf(w, "\nran update again")
	}
	//*/

	/* get all
	q = q.Limit(1000)
	keys, _ := gn.GetAll(q, nil)
	for _, k := range keys {
		t := taskqueue.NewPOSTTask(routeUrl("update-feed"), url.Values{
			"feed": {k.StringID()},
		})
		if _, err := taskqueue.Add(c, t, "update-feed"); err != nil {
			c.Errorf("taskqueue error: %v", err.Error())
		}
	}
	c.Infof("updating %d feeds", len(keys))
	fmt.Fprintf(w, "updating %d feeds", len(keys))
	//*/
}
func saveConfHandler(w io.Writer, r *http.Request, ctx appengine.Context, u *user.User) error {
	c, err := confFromRequest(r)
	if err != nil {
		return fmt.Errorf("conf from request: %v", err)
	}

	err = datastore.RunInTransaction(ctx, func(ctx appengine.Context) error {
		// Save the conference and generate the tickets
		if err := c.Save(ctx); err != nil {
			return fmt.Errorf("save conference: %v", err)
		}
		if err := c.CreateAndSaveTickets(ctx); err != nil {
			return fmt.Errorf("generate tickets: %v", err)
		}

		// Announce the conference
		a := conf.NewAnnouncement(fmt.Sprintf(
			"A new conference has just been scheduled! %s in %s. Don't wait; book now!",
			c.Name, c.City))
		if err := a.Save(ctx); err != nil {
			return fmt.Errorf("announce conference: %v", err)
		}

		// Queue a task to email interested users.
		task := taskqueue.NewPOSTTask(
			"/notifyinterestedusers",
			url.Values{"conf_id": []string{c.ID()}},
		)
		if _, err = taskqueue.Add(ctx, task, ""); err != nil {
			return fmt.Errorf("add task to default queue: %v", err)
		}

		// Queue a task to review the conference.
		task = &taskqueue.Task{
			Method:  "PULL",
			Payload: []byte(c.ID()),
		}
		task, err := taskqueue.Add(ctx, task, "review-conference-queue")
		if err != nil {
			return fmt.Errorf("add task to review queue: %v", err)
		}
		return nil
	}, &datastore.TransactionOptions{XG: true})
	if err != nil {
		return err
	}

	return RedirectTo("/showtickets?conf_id=" + url.QueryEscape(c.ID()))
}
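// The review task above uses Method "PULL", so it is never dispatched to a
// URL; some worker has to lease it from the queue. A rough sketch of such a
// consumer, assuming a hypothetical reviewConference helper (not part of the
// original), could look like this:
func reviewPending(c appengine.Context) error {
	// Lease up to 10 tasks for 60 seconds from the pull queue.
	tasks, err := taskqueue.Lease(c, 10, "review-conference-queue", 60)
	if err != nil {
		return err
	}
	for _, t := range tasks {
		confID := string(t.Payload)
		if err := reviewConference(c, confID); err != nil {
			continue // leave the task to be leased again after the lease expires
		}
		// Delete the task once it has been handled successfully.
		taskqueue.Delete(c, t, "review-conference-queue")
	}
	return nil
}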
func batchCouch(c appengine.Context, w http.ResponseWriter, r *http.Request) {
	keys := []string{}
	z, err := gzip.NewReader(r.Body)
	maybePanic(err)
	d := json.NewDecoder(z)
	maybePanic(d.Decode(&keys))

	c.Infof("Got %v keys to couch", len(keys))

	wg := sync.WaitGroup{}
	ch := make(chan string)
	for i := 0; i < 8; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			for kstr := range ch {
				err := postToCouchCommon(c, kstr, true)
				if err != nil {
					c.Errorf("Error on %v: %v", kstr, err)
					taskqueue.Add(c, taskqueue.NewPOSTTask("/couchit",
						url.Values{"key": []string{kstr}}), "couchit")
				}
			}
		}()
	}

	for _, kstr := range keys {
		ch <- kstr
	}
	close(ch)

	wg.Wait()
	w.WriteHeader(204)
}
func del(w http.ResponseWriter, r *http.Request) {
	c := appengine.NewContext(r)
	t := taskqueue.NewPOSTTask("/worker", nil)
	if _, err := taskqueue.Add(c, t, ""); err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
	}
}
// Look for new flights that we should add to our database. Invoked by cron.
func scanHandler(w http.ResponseWriter, r *http.Request) {
	c := appengine.NewContext(r)

	if db, err1 := fdb24.NewFlightDBFr24(urlfetch.Client(c)); err1 != nil {
		c.Errorf(" /mdb/scan: newdb: %v", err1)
		http.Error(w, err1.Error(), http.StatusInternalServerError)
	} else {
		if flights, err2 := db.LookupList(sfo.KBoxSFO120K); err2 != nil {
			c.Errorf(" /mdb/scan: lookup: %v", err2)
			http.Error(w, err2.Error(), http.StatusInternalServerError)
		} else {
			set := ftype.FIFOSet{}
			if err3 := loadFIFOSet(c, &set); err3 != nil {
				c.Errorf(" /mdb/scan: loadcache: %v", err3)
				http.Error(w, err3.Error(), http.StatusInternalServerError)
			}
			new := set.FindNew(flights)
			if err4 := saveFIFOSet(c, set); err4 != nil {
				c.Errorf(" /mdb/scan: savecache: %v", err4)
				http.Error(w, err4.Error(), http.StatusInternalServerError)
			}

			// Enqueue the new flights
			n := 1000
			for i, fs := range new {
				if i >= n {
					break
				}
				if fsStr, err5 := fs.Base64Encode(); err5 != nil {
					http.Error(w, err5.Error(), http.StatusInternalServerError)
					return
				} else {
					url := fmt.Sprintf("/fdb/addflight?deb=%s", fs.F.UniqueIdentifier())
					t := taskqueue.NewPOSTTask(url, map[string][]string{
						"flightsnapshot": {fsStr},
					})
					// We could be smarter about this.
					t.Delay = time.Minute * 45
					if _, err6 := taskqueue.Add(c, t, "addflight"); err6 != nil {
						c.Errorf(" /mdb/scan: enqueue: %v", err6)
						http.Error(w, err6.Error(), http.StatusInternalServerError)
						return
					}
				}
			}

			var params = map[string]interface{}{
				"New":     new,
				"Flights": flights,
			}
			if err7 := templates.ExecuteTemplate(w, "fdb-scan", params); err7 != nil {
				http.Error(w, err7.Error(), http.StatusInternalServerError)
			}
		}
	}
}
// send uses the Channel API to send the provided message in JSON-encoded form
// to the client identified by clientID.
//
// Channels created with one version of an app (eg, the default frontend)
// cannot be sent on from another version (eg, a backend). This is a limitation
// of the Channel API that should be fixed at some point.
// The send function creates a task that runs on the frontend (where the
// channel was created). The task handler makes the channel.Send API call.
func send(c appengine.Context, clientID string, m Message) {
	if clientID == "" {
		c.Debugf("no channel; skipping message send")
		return
	}
	switch {
	case m.TilesDone:
		c.Debugf("tiles done")
	case m.ZipDone:
		c.Debugf("zip done")
	default:
		c.Debugf("%d tiles", len(m.IDs))
	}
	b, err := json.Marshal(m)
	if err != nil {
		panic(err)
	}
	task := taskqueue.NewPOSTTask("/send", url.Values{
		"clientID": {clientID},
		"msg":      {string(b)},
	})
	host := appengine.DefaultVersionHostname(c)
	task.Header.Set("Host", host)
	if _, err := taskqueue.Add(c, task, sendQueue); err != nil {
		c.Errorf("add send task failed: %v", err)
	}
}
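// The handler behind the "/send" task above is not shown in this snippet.
// A minimal sketch (sendHandler is a hypothetical name, assuming the
// appengine/channel package) would read the two form values set above and
// relay the JSON payload over the Channel API:
func sendHandler(w http.ResponseWriter, r *http.Request) {
	c := appengine.NewContext(r)
	clientID := r.FormValue("clientID")
	msg := r.FormValue("msg")
	if err := channel.Send(c, clientID, msg); err != nil {
		c.Errorf("channel.Send failed: %v", err)
		// A non-2xx response makes the task queue retry the delivery.
		http.Error(w, err.Error(), http.StatusInternalServerError)
	}
}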
func addCurriculo(c appengine.Context, r render.Render, req *http.Request) {
	curriculo := new(Curriculo)
	decoder := json.NewDecoder(req.Body)
	err := decoder.Decode(&curriculo)
	if err != nil {
		log.Println(err)
		r.JSON(http.StatusInternalServerError, err.Error())
		return
	}

	key := datastore.NewKey(c, "Curriculo", curriculo.Email, 0, nil)
	_, err = datastore.Put(c, key, curriculo)
	if err != nil {
		log.Println(err)
		r.JSON(http.StatusInternalServerError, err.Error())
		return
	}

	t := taskqueue.NewPOSTTask("/matcher/curriculos", map[string][]string{
		"curriculo": {curriculo.Email},
	})
	if _, err := taskqueue.Add(c, t, ""); err != nil {
		log.Println(err)
		r.JSON(http.StatusInternalServerError, err.Error())
		return
	}

	r.JSON(http.StatusOK, "success")
}
func freshen(c appengine.Context, key string) error {
	s, ok := Sources[key]
	if !ok {
		return fmt.Errorf("%q not found", key)
	}
	item, err := memcache.Get(c, key+"_fresh")
	if err == memcache.ErrCacheMiss {
		return fetch(c, key)
	} else if err != nil {
		return err
	}
	fresh, err := strconv.ParseInt(string(item.Value), 10, 64)
	if err != nil {
		return err
	}
	if time.Now().Unix() < fresh+int64(s.Refresh.Seconds()) {
		return nil
	}
	t := &taskqueue.Task{Path: "/fetch/" + key}
	if _, err := taskqueue.Add(c, t, "fetch-"+key); err != nil {
		return err
	}
	return nil
}
func UpdateFeeds(c mpg.Context, w http.ResponseWriter, r *http.Request) {
	gn := goon.FromContext(c)
	q := datastore.NewQuery(gn.Key(&Feed{}).Kind()).KeysOnly()
	q = q.Filter("n <=", time.Now()).Limit(3000)
	it := gn.Run(q)
	i := 0
	for {
		k, err := it.Next(nil)
		if err == datastore.Done {
			break
		} else if err != nil {
			c.Errorf("next error: %v", err.Error())
			break
		}
		t := taskqueue.NewPOSTTask(routeUrl("update-feed"), url.Values{
			"feed": {k.StringID()},
		})
		if _, err := taskqueue.Add(c, t, "update-feed"); err != nil {
			c.Errorf("taskqueue error: %v", err.Error())
		}
		i++
	}
	c.Infof("updating %d feeds", i)
	fmt.Fprintf(w, "updating %d feeds", i)
}
// addTaskClientGroupPut receives a list of ClientGroups to put into datastore
// and stores it temporarily into memcache. It then submits the key as a
// taskqueue task.
func addTaskClientGroupPut(c appengine.Context, dateStr string, cgs []ClientGroup) {
	// Create unique key for memcache
	key := cgMemcachePutKey()

	// Store CGs into memcache
	item := &memcache.Item{
		Key:    key,
		Object: cgs,
	}
	if err := memcache.Gob.Set(c, item); err != nil {
		c.Errorf("rtt.addTaskClientGroupPut:memcache.Set: %s", err)
		return
	}

	// Submit taskqueue task
	values := make(url.Values)
	values.Add(FormKeyPutKey, key)
	values.Add(FormKeyImportDate, dateStr)
	task := taskqueue.NewPOSTTask(URLTaskImportPut, values)
	_, err := taskqueue.Add(c, task, TaskQueueNameImportPut)
	if err != nil {
		c.Errorf("rtt.addTaskClientGroupPut:taskqueue.Add: %s", err)
		return
	}
}
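// The task handler behind URLTaskImportPut is not part of this snippet; a
// rough sketch (the handler name and its exact signature are assumptions)
// would pull the ClientGroups back out of memcache using the submitted key:
func handleImportPut(c appengine.Context, r *http.Request) {
	key := r.FormValue(FormKeyPutKey)
	var cgs []ClientGroup
	if _, err := memcache.Gob.Get(c, key, &cgs); err != nil {
		c.Errorf("rtt.handleImportPut:memcache.Get: %s", err)
		return
	}
	// ... put cgs into datastore ...
}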
// Insert a task to reset the running timer. Should be done in a transaction.
func resetTimer(c appengine.Context, owner string) error {
	t := taskqueue.NewPOSTTask("/task/reset", url.Values{
		"owner": []string{owner},
	})
	_, err := taskqueue.Add(c, t, "")
	return err
}
func collectUrls(w http.ResponseWriter, r *http.Request) {
	c := appengine.NewContext(r)
	url := "http://catoverflow.com/api/query?offset=0&limit=1000"
	client := urlfetch.Client(c)
	resp, err := client.Get(url)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	defer resp.Body.Close()
	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	// Split the body by new lines to get the url for each image.
	s := string(body)
	urls := strings.Fields(s)
	for _, u := range urls {
		t := taskqueue.NewPOSTTask("/worker", map[string][]string{"url": {u}})
		if _, err := taskqueue.Add(c, t, ""); err != nil {
			http.Error(w, err.Error(), http.StatusInternalServerError)
			return
		}
	}
}
func backfill(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
	c := appengine.NewContext(r)
	index, err := search.Open(xkcdIndex)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	current, _ := GetCurrent(c)
	for i := 1; i <= current.Num; i++ {
		// xkcd returns 404 for issue 404
		if i == 404 {
			continue
		}
		comicNum := strconv.Itoa(i)
		force := r.FormValue("force")
		if force != "yes" {
			var s ComicSearch
			err := index.Get(c, comicNum, &s)
			if err == nil {
				continue
			}
		}
		t := taskqueue.NewPOSTTask("/index", map[string][]string{"id": {comicNum}})
		if _, err := taskqueue.Add(c, t, ""); err != nil {
			http.Error(w, err.Error(), http.StatusInternalServerError)
			return
		}
	}
}
// Grab all users, and enqueue them for batch processing
func upgradeHandler(w http.ResponseWriter, r *http.Request) {
	c := appengine.NewContext(r)
	cdb := ComplaintDB{C: c, Memcache: false}
	var cps = []types.ComplainerProfile{}
	cps, err := cdb.GetAllProfiles()
	if err != nil {
		c.Errorf("upgradeHandler: getallprofiles: %v", err)
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	for _, cp := range cps {
		b64 := ""
		if b64, err = cp.Base64Encode(); err != nil {
			c.Errorf("upgradeHandler: profile encode: %v", err)
			http.Error(w, err.Error(), http.StatusInternalServerError)
			return
		}

		t := taskqueue.NewPOSTTask("/batch/upgradeuser", map[string][]string{
			"profile": {b64},
		})

		if _, err := taskqueue.Add(c, t, "batch"); err != nil {
			c.Errorf("upgradeHandler: enqueue: %v", err)
			http.Error(w, err.Error(), http.StatusInternalServerError)
			return
		}
	}

	c.Infof("enqueued %d batch", len(cps))
	w.Write([]byte(fmt.Sprintf("OK, enqueued %d", len(cps))))
}
func cron(w http.ResponseWriter, r *http.Request) {
	c := appengine.NewContext(r)
	q := datastore.NewQuery("asin")
	for t := q.Run(c); ; {
		var x Asin
		_, err := t.Next(&x)
		if err == datastore.Done {
			break
		}
		if err != nil {
			return
		}
		task := taskqueue.NewPOSTTask("/task/fetching", url.Values{
			"asin": {x.Name},
		})
		if !appengine.IsDevAppServer() {
			host := backendName + "." + appengine.DefaultVersionHostname(c)
			task.Header.Set("Host", host)
		}
		if _, err := taskqueue.Add(c, task, ""); err != nil {
			c.Errorf("add fetching task: %v", err)
			http.Error(w, "Error: couldn't schedule fetching task", 500)
			return
		}
		fmt.Fprintf(w, "OK")
	}
}
func ImportOpml(c mpg.Context, w http.ResponseWriter, r *http.Request) {
	cu := user.Current(c)
	gn := goon.FromContext(c)
	u := User{Id: cu.ID}
	if err := gn.Get(&u); err != nil {
		serveError(w, err)
		return
	}
	backupOPML(c)

	if file, _, err := r.FormFile("file"); err == nil {
		if fdata, err := ioutil.ReadAll(file); err == nil {
			buf := bytes.NewReader(fdata)
			// attempt to extract from google reader takeout zip
			if zb, zerr := zip.NewReader(buf, int64(len(fdata))); zerr == nil {
				for _, f := range zb.File {
					if strings.HasSuffix(f.FileHeader.Name, "Reader/subscriptions.xml") {
						if rc, rerr := f.Open(); rerr == nil {
							if fb, ferr := ioutil.ReadAll(rc); ferr == nil {
								fdata = fb
								break
							}
						}
					}
				}
			}

			// Preflight the OPML, so we can report any errors.
			d := xml.NewDecoder(bytes.NewReader(fdata))
			d.CharsetReader = charset.NewReader
			d.Strict = false
			opml := Opml{}
			if err := d.Decode(&opml); err != nil {
				serveError(w, err)
				c.Errorf("opml error: %v", err.Error())
				return
			}

			var b bytes.Buffer
			enc := gob.NewEncoder(&b)
			err := enc.Encode(&opml)
			if err != nil {
				serveError(w, err)
				return
			}
			bk, err := saveFile(c, b.Bytes())
			if err != nil {
				serveError(w, err)
				return
			}
			task := taskqueue.NewPOSTTask(routeUrl("import-opml-task"), url.Values{
				"key":  {string(bk)},
				"user": {cu.ID},
			})
			taskqueue.Add(c, task, "import-reader")
		}
	}
}
func UpdateCLLater(c appengine.Context, n string, delay time.Duration) {
	t := taskqueue.NewPOSTTask("/update-cl", url.Values{
		"cl": []string{n},
	})
	t.Delay = delay
	if _, err := taskqueue.Add(c, t, "update-cl"); err != nil {
		c.Errorf("Failed adding task: %v", err)
	}
}
func delayedDelete(c appengine.Context, fi *FileInfo) {
	if key := string(fi.Key); key != "" {
		task := &taskqueue.Task{
			Path:   "/" + url.QueryEscape(key) + "/-",
			Method: "DELETE",
			Delay:  EXPIRATION_TIME * 1000000,
		}
		taskqueue.Add(c, task, "")
	}
}
func queueProcessing(filename string, c appengine.Context) error {
	t := taskqueue.NewPOSTTask("/", url.Values{
		"filename": {filename},
	})
	_, err := taskqueue.Add(c, t, "image-processor")
	return err
}
func doEnqueuePaymentDoneEmail(reqCode, method string, c *Context) error {
	c.Aec().Infof("Enqueuing payment done email for reqCode=%q, method=%q", reqCode, method)
	v := url.Values{}
	v.Set("reqCode", reqCode)
	v.Set("method", method)
	t := taskqueue.NewPOSTTask("/tasks/send-payment-done-email", v)
	_, err := taskqueue.Add(c.Aec(), t, "")
	return err
}
func delayedDelete(c appengine.Context, fi *FileInfo) {
	if key := string(fi.Key); key != "" {
		task := &taskqueue.Task{
			Path:   "/" + escape(key) + "/-",
			Method: "DELETE",
			Delay:  time.Duration(EXPIRATION_TIME) * time.Second,
		}
		taskqueue.Add(c, task, "")
	}
}
func TestTasks(t *testing.T) {
	c, err := NewContext(&Options{TaskQueues: []string{"testQueue"}})
	if err != nil {
		t.Fatalf("NewContext: %v", err)
	}
	defer c.Close()

	task := taskqueue.NewPOSTTask("/post", map[string][]string{})
	_, err = taskqueue.Add(c, task, "testQueue")
	if err != nil {
		t.Fatalf("Could not add task to queue")
	}

	stats, err := taskqueue.QueueStats(c, []string{"testQueue"}, 0) // fetch all of them
	if err != nil {
		t.Fatalf("Could not get taskqueue statistics")
	}
	t.Logf("TaskStatistics = %#v", stats)
	if len(stats) == 0 {
		t.Fatalf("Queue statistics are empty")
	} else if stats[0].Tasks != 1 {
		t.Fatalf("Could not find the task we just added")
	}

	err = taskqueue.Purge(c, "testQueue")
	if err != nil {
		t.Fatalf("Could not purge the queue")
	}

	stats, err = taskqueue.QueueStats(c, []string{"testQueue"}, 0) // fetch all of them
	if len(stats) == 0 {
		t.Fatalf("Queue statistics are empty")
	}
	if stats[0].Tasks != 0 {
		t.Fatalf("Purge command not successful")
	}

	tasks := []*taskqueue.Task{
		taskqueue.NewPOSTTask("/post1", map[string][]string{}),
		taskqueue.NewPOSTTask("/post2", map[string][]string{}),
	}
	_, err = taskqueue.AddMulti(c, tasks, "testQueue")
	if err != nil {
		t.Fatalf("Could not add bulk tasklist to queue")
	}

	stats, err = taskqueue.QueueStats(c, []string{"testQueue"}, 0) // fetch all of them
	if err != nil {
		t.Fatalf("Could not get taskqueue statistics")
	}
	if len(stats) == 0 {
		t.Fatalf("Could not find the tasks we just added")
	} else if stats[0].Tasks != 2 {
		t.Fatalf("Could not find the tasks we just added")
	}
}
func doEnqueuePayRequestEmails(reqCodes []string, c *Context) error {
	c.Aec().Infof("Enqueuing pay request emails for reqCodes: %v", reqCodes)
	if len(reqCodes) == 0 {
		return nil
	}
	v := url.Values{}
	v.Set("reqCodes", strings.Join(reqCodes, ","))
	t := taskqueue.NewPOSTTask("/tasks/send-pay-request-emails", v)
	_, err := taskqueue.Add(c.Aec(), t, "")
	return err
}
func couchit(c appengine.Context, k *datastore.Key, v url.Values) error {
	vals := url.Values{
		"key": []string{k.Encode()},
	}
	for k, v := range v {
		vals[k] = v
	}
	_, err := taskqueue.Add(c, taskqueue.NewPOSTTask("/couchit", vals), "couchit")
	return err
}
func (f Feed) Subscribe(c appengine.Context) {
	if !f.IsSubscribed() {
		t := taskqueue.NewPOSTTask(routeUrl("subscribe-feed"), url.Values{
			"feed": {f.Url},
		})
		if _, err := taskqueue.Add(c, t, "update-manual"); err != nil {
			c.Errorf("taskqueue error: %v", err.Error())
		} else {
			c.Warningf("subscribe feed: %v", f.Url)
		}
	}
}
func UploadHandler(w http.ResponseWriter, r *http.Request) {
	c := appengine.NewContext(r)
	u := user.Current(c)
	if u == nil {
		url, _ := user.LoginURL(c, r.URL.String())
		w.Header().Set("Location", url)
		w.WriteHeader(http.StatusFound)
		return
	}

	id := r.FormValue("id")
	if len(id) > 0 {
		w.Header().Set("Location", "/upload2?id=has_key:"+id)
		w.WriteHeader(http.StatusFound)
		// uploadTemplate.Execute(w, id)
		return
	}

	blobs, other_params, err := blobstore.ParseUpload(r)
	if len(blobs) == 0 {
		// w.WriteHeader(http.StatusBadRequest)
		// fmt.Fprintf(w, "No data '%v'", err)
		w.Header().Set("Location", "/upload2?id=Bad+upload:"+err.Error())
		w.WriteHeader(http.StatusFound)
		return
	}
	file := blobs["file_data"]
	if len(file) == 0 {
		// w.WriteHeader(http.StatusBadRequest)
		// fmt.Fprintf(w, "No data")
		w.Header().Set("Location", "/upload2?id=No_file_data")
		w.WriteHeader(http.StatusFound)
		return
	}
	key := string(file[0].BlobKey)
	if other_params == nil {
		other_params = make(map[string][]string)
	}
	other_params["key"] = append(other_params["key"], key)

	task := taskqueue.NewPOSTTask("/process/gedcom", other_params)
	task.Name = key
	if _, err := taskqueue.Add(c, task, ""); err != nil {
		// http.Error(w, err.Error(), http.StatusInternalServerError)
		w.Header().Set("Location", "/upload2?id=bad_task:"+err.Error())
		w.WriteHeader(http.StatusFound)
		return
	}

	w.Header().Set("Location", "/upload?id="+key)
	w.WriteHeader(http.StatusFound)
}
// SendConfirmation schedules a task to email a confirmation request
// to a new user.
func (u *Account) SendConfirmation(c appengine.Context) error {
	t, err := delayedConfirmAccount.Task(*u)
	if err != nil {
		return fmt.Errorf("error getting function task: %s", err)
	}
	t.RetryOptions = &taskqueue.RetryOptions{
		RetryLimit: 3,
	}
	if _, err := taskqueue.Add(c, t, ""); err != nil {
		return fmt.Errorf("error adding confirmation to taskqueue: %s", err)
	}
	return nil
}
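// delayedConfirmAccount is not defined in this snippet. Given the call to its
// Task method above, it is presumably a *delay.Function from the
// appengine/delay package; a sketch of how it might be declared (the body and
// the registration key are assumptions) is:
var delayedConfirmAccount = delay.Func("confirm-account", func(c appengine.Context, u Account) error {
	// Build and send the confirmation email for u here.
	return nil
})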
func queuePush(w http.ResponseWriter, r *http.Request) {
	c := appengine.NewContext(r)

	m := map[string][]string{"counter_name": []string{nscStringKey}}
	t := taskqueue.NewPOSTTask("/_ah/namespaced-counters/queue-pop", m)
	taskqueue.Add(c, t, "")

	c, err := appengine.Namespace(c, altNamespace)
	util_err.Err_log(err)
	taskqueue.Add(c, t, "")

	io.WriteString(w, "tasks enqueued\n")
	io.WriteString(w, "\ncounter values now: \n")
	readBothNamespaces(w, r)

	io.WriteString(w, "\n\n...sleeping... \n")
	time.Sleep(time.Duration(400) * time.Millisecond)
	readBothNamespaces(w, r)
}