func sendYoHandler(w http.ResponseWriter, r *http.Request) {
	client := getYoClient()
	timeNow := time.Now()
	time1hr := timeNow.Add(-59 * time.Minute)
	q := datastore.NewQuery("Reminder").Filter("Delivered =", 0).Filter("TimeStamp <=", time1hr)
	var reminders []Reminder
	c := appengine.NewContext(r)
	keys, err := q.GetAll(c, &reminders)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	for i := range reminders {
		user := reminders[i].UserName
		if err := client.YoUser(user, r); err != nil {
			// leave Delivered at 0 so the reminder is retried on the next run
			c.Errorf("sending yo to %s failed: %v", user, err)
			continue
		}
		reminders[i].Delivered = 1
	}
	if _, err := datastore.PutMulti(c, keys, reminders); err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
	}
}
func createAllStatusTypes(c appengine.Context) error {
	names := [...]string{
		STATUS_NEW_APP,
		STATUS_EMAIL_SEARCHING,
		STATUS_EMAIL_READY,
		STATUS_EMAIL_BODY_READY,
		STATUS_EMAIL_BODY_GENERATING,
		STATUS_EMAIL_SENT,
		STATUS_EMAIL_VIEWED,
		STATUS_EMAIL_CLICKED,
	}

	var keys []*datastore.Key
	var values []*Status
	for _, value := range names {
		status := &Status{
			Name:    value,
			Created: time.Now(),
		}
		key := datastore.NewKey(c, DATASTORE_STATUS, value, 0, nil)
		keys = append(keys, key)
		values = append(values, status)
	}

	_, err := datastore.PutMulti(c, keys, values)
	return err
}
func updateProfileEntities(
	context appengine.Context,
	profiles map[string]*Profile,
	generation string,
	kind string,
	count int) {
	// collect keys and new profile values into arrays
	keys := make([]*datastore.Key, 0)
	newvalues := make([]*Profile, 0)
	for username, profile := range profiles {
		key := datastore.NewKey(context, "Profile", username, 0, nil)
		keys = append(keys, key)
		newvalues = append(newvalues, profile)
	}
	// get all of the old profile values; the error is intentionally ignored
	// because missing old profiles are expected
	oldvalues := make([]*Profile, len(keys))
	datastore.GetMulti(context, keys, oldvalues)
	// if the old values are from the current generation, add their counts into the new values
	for i := 0; i < len(keys); i++ {
		if (oldvalues[i] != nil) && (oldvalues[i].Generation == generation) {
			newvalues[i].RadarCount += oldvalues[i].RadarCount
			newvalues[i].CommentCount += oldvalues[i].CommentCount
		}
	}
	// store all of the new values
	_, err := datastore.PutMulti(context, keys, newvalues)
	if err == nil {
		context.Infof("Updated %d profiles for %d %s", len(profiles), count, kind)
	} else {
		context.Infof("Error updating %d profiles for %d %s (%v)", len(profiles), count, kind, err)
	}
}
func webVersionPOST(w http.ResponseWriter, r *http.Request) {
	c := appengine.NewContext(r)
	r.ParseForm()
	versions := []Version{}
	keys := []*datastore.Key{}
	for _, os := range r.Form["OS"] {
		v := Version{
			Current:      r.FormValue("Current"),
			New:          r.FormValue("New"),
			OS:           os,
			Info:         r.FormValue("Info"),
			Download:     r.FormValue("Download"),
			ReleaseNotes: r.FormValue("ReleaseNotes"),
		}
		versions = append(versions, v)
		keys = append(keys, datastore.NewIncompleteKey(c, "Version", nil))
	}
	_, err := datastore.PutMulti(c, keys, versions)
	if err != nil {
		c.Errorf("Error storing new version: %v", err)
		http.Error(w, "I had a problem storing your new version: "+err.Error(), 500)
		return
	}
	http.Redirect(w, r, "/admin/version/list", 303)
}
func CFix(c mpg.Context, w http.ResponseWriter, r *http.Request) {
	gn := goon.FromContext(c)
	url := r.FormValue("feed")
	c.Infof("fix feed %s", url)
	f := Feed{Url: url}
	if err := gn.Get(&f); err != nil {
		c.Criticalf("cfix err: %v", err)
		serveError(w, err)
		return
	}
	q := datastore.NewQuery("S").Ancestor(gn.Key(&f))
	var ss []*Story
	keys, err := q.GetAll(c, &ss)
	if err != nil {
		c.Errorf("getall err: %v", err)
		serveError(w, err)
		return
	}
	c.Infof("trying to fix %v stories", len(ss))
	const putLimit = 500
	for i := 0; i <= len(keys)/putLimit; i++ {
		lo := i * putLimit
		hi := (i + 1) * putLimit
		if hi > len(keys) {
			hi = len(keys)
		}
		c.Infof("%v - %v", lo, hi)
		if _, err := datastore.PutMulti(c, keys[lo:hi], ss[lo:hi]); err != nil {
			c.Errorf("err: %v, %v, %v", lo, hi, err)
		}
	}
}
func (d *Driver) PutMulti(key []*datastore.Key, src interface{}) ([]*datastore.Key, error) {
	var keyLen = len(key)
	var fromIdx, toIdx int
	var v = reflect.ValueOf(src)
	var resultKeys = make([]*datastore.Key, 0)
	// TODO: split multiple goroutine
	for {
		fromIdx = toIdx
		toIdx = fromIdx + d.PutSplitThreshold
		if toIdx > keyLen {
			toIdx = keyLen
		}
		_keys := key[fromIdx:toIdx]
		_data := v.Slice(fromIdx, toIdx).Interface()
		d.logOps(opRead, len(_keys), "PutMulti")
		if updatedKeys, err := datastore.PutMulti(d.ctx, _keys, _data); err != nil {
			return resultKeys, err
		} else {
			resultKeys = append(resultKeys, updatedKeys...)
		}
		if toIdx == keyLen {
			break
		}
	}
	return resultKeys, nil
}
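// The Driver method above illustrates splitting a large PutMulti into windows of
// PutSplitThreshold entities. Below is a minimal, standalone sketch of the same
// batching idea, assuming a plain appengine.Context and a batch size of 500
// (historically the datastore's per-call entity limit). The name putInBatches is
// illustrative only and is not part of any snippet above.
func putInBatches(c appengine.Context, keys []*datastore.Key, src []interface{}) ([]*datastore.Key, error) {
	const batchSize = 500 // assumed per-call limit
	out := make([]*datastore.Key, 0, len(keys))
	for lo := 0; lo < len(keys); lo += batchSize {
		hi := lo + batchSize
		if hi > len(keys) {
			hi = len(keys)
		}
		// write one window of keys and values, preserving their order in the result
		stored, err := datastore.PutMulti(c, keys[lo:hi], src[lo:hi])
		if err != nil {
			return out, err
		}
		out = append(out, stored...)
	}
	return out, nil
}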
func DeleteTag(c appengine.Context, tag string) (err os.Error) {
	// Fetch bookmarks with this tag
	q := datastore.NewQuery("Bookmark").Filter("UserId=", user.Current(c).Id).Filter("Tags=", tag)
	var bms []Bookmark
	keys, err := q.GetAll(c, &bms)
	if err != nil {
		return err
	}
	// Remove tag from bookmark
	bmsRef := make([]interface{}, len(bms))
	for i := 0; i < len(bms); i++ {
		bmsRef[i] = &bms[i]
		btags := bms[i].Tags
		for j := 0; j < len(btags); j++ {
			if btags[j] == tag {
				bms[i].Tags = append(btags[:j], btags[j+1:]...)
				break
			}
		}
	}
	// Put them back on the datastore
	_, err = datastore.PutMulti(c, keys, bmsRef)
	return err
}
// perform an import using the data we've decoded in jsonData
func (self *importer) doImport() {
	// initialize our list of dirty cache entries with the top-level items
	// these (and more) will be flushed from the cache when we're done
	self.dirtyCacheEntries = append(self.dirtyCacheEntries, []string{
		"/dish", "/dish/",
		"/menu", "/menu/",
		"/ingredient", "/ingredient/",
	}...)
	// build an index of the tags currently in the datastore
	self.indexCurrentTags()
	self.importIngredients()
	self.importDishes()
	self.importMeasuredIngredients()
	self.importPairings()
	self.importMenus()
	// add the tags we collected
	_, err := datastore.PutMulti(self.c, self.newTagKeys, self.newTags)
	check(err)
	// clear the cache
	lid := self.lid.Encode()
	// prefix each entry with the library id
	for i := range self.dirtyCacheEntries {
		self.dirtyCacheEntries[i] = lid + self.dirtyCacheEntries[i]
	}
	// clear them all
	memcache.DeleteMulti(self.c, self.dirtyCacheEntries)
}
func putMulti(w http.ResponseWriter, r *http.Request) {
	c := appengine.NewContext(r)
	// start putMulti1 OMIT
	keys := make([]*datastore.Key, 10)
	for i := range keys {
		keys[i] = datastore.NewKey(c, "Book", "", int64(i+1), nil) // HL
	}
	books := make([]Book, 10)
	for i := range books {
		number := i + 1
		books[i] = Book{
			fmt.Sprintf("book-%d", number),
			fmt.Sprintf("author-%d", number%2),
			number * 100,
			time.Now(),
		}
	}
	_, err := datastore.PutMulti(c, keys, books) // HL
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	// end putMulti1 OMIT
	w.Write([]byte("success"))
}
func (db *AppEngineDB) SaveUsages(u []Usage) error {
	n := len(u)
	keys := make([]*datastore.Key, n)
	for i, v := range u {
		if v.UserID != "" {
			if v.Key != "" {
				key, err := datastore.DecodeKey(v.Key)
				if err == nil {
					keys[i] = key
				}
			} else {
				keys[i] = datastore.NewIncompleteKey(db.ctx, "Usage", nil)
			}
		}
	}
	keys, err := datastore.PutMulti(db.ctx, keys, u)
	if err != nil {
		return err
	}
	for i, v := range keys {
		u[i].Key = v.Encode()
	}
	return nil
}
// Save stores the article and its tags in a single cross-group transaction
func (this *ArticleMetaData) Save(ctx Context) (err error) {
	c := ctx.GAEContext
	uuid, err := GenUUID()
	if err != nil {
		return err
	}
	this.Id = uuid
	k := datastore.NewKey(c, "Article", uuid, 0, nil)
	err = datastore.RunInTransaction(c, func(c appengine.Context) error {
		if len(this.Tags) > 0 {
			tags := make([]Tags, len(this.Tags))
			tagsKey := make([]*datastore.Key, len(this.Tags))
			for id, tag := range this.Tags {
				tags[id].ArticleId = uuid
				tags[id].Tag = tag
				tagId := uuid + tag
				tagsKey[id] = datastore.NewKey(c, "Tags", tagId, 0, nil)
			}
			_, err = datastore.PutMulti(c, tagsKey, tags)
			if err != nil {
				return err
			}
		}
		_, err = datastore.Put(c, k, this)
		return err
	}, &datastore.TransactionOptions{XG: true})
	return err
}
func (d *DatastoreStorage) StorePosts(req *http.Request, posts []blogplus.Activity) {
	c := appengine.NewContext(req)
	var keys []*datastore.Key
	var src []interface{}
	for _, post := range posts {
		if d.filter != nil && !d.filter(post) {
			c.Debugf("ignore post:%s", post.Id)
			continue
		}
		datespec := blogplus.GetDatespec(post.Published)
		data, err := blogplus.EncodeActivity(post)
		if err != nil {
			c.Errorf("encode error:%#v", err)
			continue
		}
		c.Infof("store %s datespec %s", post.Id, datespec)
		datekey := datastore.NewKey(c, activityRef, post.Id, 0,
			datastore.NewKey(c, datespecKind, datespec, 0, nil))
		key := datastore.NewKey(c, activityKind, post.Id, 0, nil)
		keys = append(keys, datekey)
		src = append(src, &DatespecEntity{Id: key})
		keys = append(keys, key)
		src = append(src, &ActivityEntity{
			Id:        post.Id,
			Published: post.Published,
			Post:      data,
		})
	}
	_, err := datastore.PutMulti(c, keys, src)
	if err != nil {
		c.Errorf("put error:%#v", err)
	}
}
// import all of the dishes from jsonData
func (self *importer) importDishes() {
	// get the previously imported dishes and build an index by import id
	prevDishesByImportId := self.indexItems(self.NewQuery("Dish"), &Dish{},
		func(key *datastore.Key, item interface{}) string {
			return item.(*Dish).Id
		})
	// lists for dishes being written
	count := len(self.jsonData.Dishes)
	putItems := make([]interface{}, 0, count)
	putKeys := make([]*datastore.Key, 0, count)
	putIds := make([]string, 0, count)
	// prepare all the dishes
	for index := range self.jsonData.Dishes {
		i := &self.jsonData.Dishes[index]
		id := i.Id
		key := self.restoreKey(id, self.lid)
		if key.Incomplete() {
			// check if we have an item with the same import id already
			if ikey, ok := prevDishesByImportId[id]; ok {
				self.fixUpKeys[id] = ikey
				key = ikey
			}
		}
		putItems = append(putItems, i)
		putKeys = append(putKeys, key)
		putIds = append(putIds, id)
	}
	// put all the dishes
	outKeys, err := datastore.PutMulti(self.c, putKeys, putItems)
	check(err)
	// update the fixUpKeys for any new items
	for index, putKey := range putKeys {
		if putKey.Incomplete() {
			self.fixUpKeys[putIds[index]] = outKeys[index]
		} else {
			self.dirtyCacheEntries = append(self.dirtyCacheEntries, "/dish/"+putKey.Encode())
			self.dirtyCacheEntries = append(self.dirtyCacheEntries, "/dish/"+putKey.Encode()+"/tags/")
			self.dirtyCacheEntries = append(self.dirtyCacheEntries, "/dish/"+putKey.Encode()+"/keywords/")
			self.dirtyCacheEntries = append(self.dirtyCacheEntries, "/dish/"+putKey.Encode()+"/pairing/")
			self.dirtyCacheEntries = append(self.dirtyCacheEntries, "/dish/"+putKey.Encode()+"/mi/")
		}
	}
	// add tags
	self.importTags(putIds, outKeys)
	// update keywords
	for index := range putItems {
		dish := putItems[index].(*Dish)
		words := make(map[string]bool)
		addWords(dish.Name, words)
		addWords(dish.Source, words)
		for tag := range self.allTags[outKeys[index].Encode()] {
			addWords(tag, words)
		}
		updateKeywords(self.c, outKeys[index], words)
	}
}
func PutMemo(c appengine.Context, username string, m Memo) (err error) {
	memos := []Memo{m}
	var keys = []*datastore.Key{
		datastore.NewIncompleteKey(c, "Memo", UserMemoKey(c, username)),
	}
	_, err = datastore.PutMulti(c, keys, memos)
	return
}
// PutMulti is a batch version of Put.
//
// src must satisfy the same conditions as the dst argument to GetMulti.
func PutMulti(c appengine.Context, key []*datastore.Key, src interface{}) ([]*datastore.Key, error) {
	if Debug {
		c.Debugf("writing to datastore: %#v", src)
	}
	key, errd := datastore.PutMulti(c, key, src)
	memcache.DeleteMulti(c, encodeKeys(key))
	return key, errd
}
// Sort pages
func SortPages(c appengine.Context, slugs []string) {
	pages := make([]Page, 0)
	q := datastore.NewQuery("Page")
	keys, _ := q.GetAll(c, &pages)
	for i := 0; i < len(keys); i++ {
		pages[i].Position = LookFor(slugs, pages[i].Name) + 1
	}
	datastore.PutMulti(c, keys, pages)
}
func createAll(c appengine.Context, tags ...*Tag) {
	keys := make([]*datastore.Key, len(tags))
	for i, tag := range tags {
		appx.NewKeyResolver(c).Resolve(tag)
		keys[i] = tag.Key()
	}
	datastore.PutMulti(c, keys, tags)
	time.Sleep(2 * time.Second)
}
// PutKeys is a helper function that performs a PutMulti on the set of
// keys and values. If a failure occurred, false is returned and an error
// response has already been written to the request. This case should be terminal.
func PutKeys(c appengine.Context, w http.ResponseWriter, r *http.Request,
	keys []*datastore.Key, values interface{}) bool {
	if _, err := datastore.PutMulti(c, keys, values); err != nil {
		LogAndUnexpected(c, w, r, err)
		return false
	}
	return true
}
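// A minimal usage sketch for the PutKeys helper above. The Widget type and
// saveWidgetsHandler are hypothetical names invented for illustration; they are
// not part of the original snippet. The point is the early-return pattern:
// when PutKeys returns false, the error response has already been written.
type Widget struct {
	Name string
}

func saveWidgetsHandler(c appengine.Context, w http.ResponseWriter, r *http.Request) {
	widgets := []Widget{{Name: "a"}, {Name: "b"}}
	keys := []*datastore.Key{
		datastore.NewIncompleteKey(c, "Widget", nil),
		datastore.NewIncompleteKey(c, "Widget", nil),
	}
	// PutKeys logs and responds on failure, so the handler just stops
	if !PutKeys(c, w, r, keys, widgets) {
		return
	}
	fmt.Fprintln(w, "saved")
}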
func (db *DocDB) PutMulti(ids []string, docs interface{}) ErrorSlice {
	if len(ids) == 0 {
		return nil
	}
	keys := make([]*datastore.Key, len(ids))
	for i, id := range ids {
		keys[i] = datastore.NewKey(db.c, db.kind, id, 0, nil)
	}
	_, err := datastore.PutMulti(db.c, keys, docs)
	return ErrorSliceFromError(err, len(ids))
}
func (session *Session) UploadComments() (err error) {
	c, err := session.AppEngineContext()
	if err != nil {
		return err
	}
	keys := []*datastore.Key{}
	comments := make([]Comment, 0)
	fmt.Printf("Comments: %d\n", len(session.Comments))
	total := 0
	for number, comment := range session.Comments {
		comments = append(comments, comment)
		keys = append(keys, datastore.NewKey(c, "Comment", "", number, nil))
		if len(comments) == 100 {
			_, err = datastore.PutMulti(c, keys, comments)
			if err != nil {
				fmt.Printf("ERROR: %+v\n", err)
			} else {
				total = total + len(comments)
				fmt.Printf("Put %d (%d) OK\n", len(comments), total)
			}
			keys = []*datastore.Key{}
			comments = make([]Comment, 0)
		}
	}
	if len(comments) > 0 {
		_, err = datastore.PutMulti(c, keys, comments)
		if err != nil {
			fmt.Printf("ERROR: %+v\n", err)
		} else {
			total = total + len(comments)
			fmt.Printf("Put %d (%d) OK\n", len(comments), total)
		}
	}
	return
}
func (session *Session) UploadRadars() (err error) {
	c, err := session.AppEngineContext()
	if err != nil {
		return err
	}
	keys := []*datastore.Key{}
	radars := make([]Radar, 0)
	fmt.Printf("Radars: %d\n", len(session.Radars))
	total := 0
	for number, radar := range session.Radars {
		radars = append(radars, radar)
		keys = append(keys, datastore.NewKey(c, "Radar", "", number, nil))
		if len(radars) == 100 {
			_, err = datastore.PutMulti(c, keys, radars)
			if err != nil {
				fmt.Printf("ERROR: %+v\n", err)
			} else {
				total = total + len(radars)
				fmt.Printf("Put %d (%d) OK\n", len(radars), total)
			}
			keys = []*datastore.Key{}
			radars = make([]Radar, 0)
		}
	}
	if len(radars) > 0 {
		_, err = datastore.PutMulti(c, keys, radars)
		if err != nil {
			fmt.Printf("ERROR: %+v\n", err)
		} else {
			total = total + len(radars)
			fmt.Printf("Put %d (%d) OK\n", len(radars), total)
		}
	}
	return
}
// processTaskRTTCGPut processes a taskqueue task for the putting of new
// ClientGroups into datastore.
func processTaskRTTCGPut(w http.ResponseWriter, r *http.Request) {
	c := appengine.NewContext(r)

	// Get memcache key to use from POST parameters
	dataKey := r.FormValue(rtt.FormKeyPutKey)
	var data []rtt.ClientGroup
	_, err := memcache.Gob.Get(c, dataKey, &data)
	if err != nil {
		// Don't return HTTP error since nothing can be done if data
		// is missing or corrupt. Just log to GAE to see how often this
		// happens.
		c.Errorf("handlers.processTaskRTTCGPut:memcache.Get: %s", err)
		return
	}

	// Create lists of keys to use in datastore.PutMulti
	parentKey := rtt.DatastoreParentKey(c)
	keys := make([]*datastore.Key, 0, len(data))
	var key *datastore.Key
	for _, cg := range data {
		key = datastore.NewKey(c, "ClientGroup", net.IP(cg.Prefix).String(), 0, parentKey)
		keys = append(keys, key)
	}

	// Put data into datastore
	_, err = datastore.PutMulti(c, keys, data)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		c.Errorf("handlers.processTaskRTTCGPut:datastore.PutMulti: %s", err)
		return
	}

	// Remove cached CGs
	if err := memcache.Delete(c, dataKey); err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		c.Errorf("handlers.processTaskRTTCGPut:memcache.Delete: %s", err)
		return
	}

	dateStr := r.FormValue(rtt.FormKeyImportDate)
	c.Infof("handlers: %d ClientGroups were successfully put into datastore. (%s)", len(data), dateStr)

	// Get which date this import is for
	t, err := time.Parse(rtt.DateFormat, dateStr)
	if err != nil {
		// Don't return HTTP error since an incorrect date cannot be fixed.
		c.Errorf("handlers.processTaskRTTCGPut:time.Parse: %s", err)
		return
	}
	rtt.UpdateLastSuccessfulImportDate(c, t)
}
// import all measured ingredients from jsonData
// jsonData.MeasuredIngredients map[string][]MeasuredIngredient
func (self *importer) importMeasuredIngredients() {
	// index existing items by their parent dish and the ingredient
	// they reference
	miKeyFunc := func(key *datastore.Key, item interface{}) string {
		return key.Parent().Encode() + item.(*MeasuredIngredient).Ingredient.Encode()
	}
	prevMIs := self.indexItems(self.NewQuery("MeasuredIngredient"), &MeasuredIngredient{}, miKeyFunc)
	// slices of items to be written
	count := len(self.jsonData.MeasuredIngredients)
	putItems := make([]interface{}, 0, count)
	putKeys := make([]*datastore.Key, 0, count)
	for dishId, jsonMis := range self.jsonData.MeasuredIngredients {
		dishKey := self.restoreKey(dishId, self.lid)
		dishKeyEncoded := dishKey.Encode()
		for index := range jsonMis {
			jsonMi := &jsonMis[index]
			// restore the key for the measured ingredient AND the ingredient reference
			miKey := self.restoreKey(jsonMi.Id, dishKey)
			ingKey := self.restoreKey(jsonMi.Ingredient.Encode(), self.lid)
			// if we didn't import the ingredient, we need to skip this one
			if ingKey.Incomplete() {
				continue
			}
			// if we don't have an entry yet, check if we already have
			// a reference to this ingredient for this dish
			if miKey.Incomplete() {
				miIndexKey := dishKeyEncoded + ingKey.Encode()
				if existingKey, found := prevMIs[miIndexKey]; found {
					miKey = existingKey
				}
			}
			jsonMi.Ingredient = ingKey
			jsonMi.Id = ""
			putItems = append(putItems, jsonMi)
			putKeys = append(putKeys, miKey)
		}
	}
	if len(putKeys) > 0 {
		_, err := datastore.PutMulti(self.c, putKeys, putItems)
		check(err)
		// any modified entries need to be cleared from the cache
		for _, putKey := range putKeys {
			if !putKey.Incomplete() {
				self.dirtyCacheEntries = append(self.dirtyCacheEntries,
					"/dish/"+putKey.Parent().Encode()+"/mi/"+putKey.Encode())
			}
		}
	}
}
// registerSite puts a Site and corresponding SliverTools in the datastore
func registerSite(c appengine.Context, site *data.Site) ([]*datastore.Key, error) {
	key := datastore.NewKey(c, "Site", site.SiteID, 0, nil)
	site.When = time.Now()
	_, err := datastore.Put(c, key, site)
	if err != nil {
		return nil, err
	}

	q := datastore.NewQuery("Tool")
	var tools []*data.Tool
	_, err = q.GetAll(c, &tools)
	if err != nil {
		return nil, err
	}

	sliverTools := make([]*data.SliverTool, len(tools)*numServers)
	slKeys := make([]*datastore.Key, len(tools)*numServers)
	i := 0
	for _, tool := range tools {
		for _, serverID := range serverIDs {
			sliverToolID := data.GetSliverToolID(tool.ToolID, tool.SliceID, serverID, site.SiteID)
			sliceParts := strings.Split(tool.SliceID, "_")
			sliverTool := &data.SliverTool{
				ToolID:                 tool.ToolID,
				SliceID:                tool.SliceID,
				SiteID:                 site.SiteID,
				ServerID:               serverID,
				FQDN:                   fmt.Sprintf("%s.%s.%s.%s.%s", sliceParts[1], sliceParts[0], serverID, site.SiteID, "measurement-lab.org"),
				ServerPort:             "",
				HTTPPort:               tool.HTTPPort,
				SliverIPv4:             "off",
				SliverIPv6:             "off",
				UpdateRequestTimestamp: site.RegistrationTimestamp,
				StatusIPv4:             "offline",
				StatusIPv6:             "offline",
				Latitude:               site.Latitude,
				Longitude:              site.Longitude,
				City:                   site.City,
				Country:                site.Country,
				When:                   time.Now(),
			}
			slKey := datastore.NewKey(c, "SliverTool", sliverToolID, 0, nil)
			slKeys[i] = slKey
			sliverTools[i] = sliverTool
			i++
		}
	}
	return datastore.PutMulti(c, slKeys, sliverTools)
}
// import all dish pairings
// jsonData.Pairings map[string][]Pairing
func (self *importer) importPairings() {
	// index existing items by their parent dish, the dish they reference,
	// and the pairing description
	pairingKeyFunc := func(key *datastore.Key, item interface{}) string {
		return key.Parent().Encode() + item.(*Pairing).Other.Encode() + item.(*Pairing).Description
	}
	prevPairings := self.indexItems(self.NewQuery("Pairing"), &Pairing{}, pairingKeyFunc)
	// slices of items to be added
	count := len(self.jsonData.Pairings)
	putItems := make([]interface{}, 0, count)
	putKeys := make([]*datastore.Key, 0, count)
	// walk each pairing, finding if we need to add the pairing given
	for dishId, jsonPairings := range self.jsonData.Pairings {
		dishKey := self.restoreKey(dishId, self.lid)
		dishKeyEncoded := dishKey.Encode()
		for index := range jsonPairings {
			jsonPairing := &jsonPairings[index]
			// restore our own key and the reference key
			pairingKey := self.restoreKey(jsonPairing.Id, dishKey)
			otherKey := self.restoreKey(jsonPairing.Other.Encode(), self.lid)
			if otherKey.Incomplete() {
				// if we didn't import the referenced item
				// we have to skip this one
				continue
			}
			pairingIndexKey := dishKeyEncoded + otherKey.Encode() + jsonPairing.Description
			// add the new pairing only if it wasn't already present
			if _, found := prevPairings[pairingIndexKey]; !found {
				jsonPairing.Other = otherKey
				jsonPairing.Id = ""
				putItems = append(putItems, jsonPairing)
				putKeys = append(putKeys, pairingKey)
			}
		}
	}
	// store the new pairings
	if len(putKeys) > 0 {
		_, err := datastore.PutMulti(self.c, putKeys, putItems)
		check(err)
		// any modified entries need to be cleared from the cache
		for _, putKey := range putKeys {
			if !putKey.Incomplete() {
				self.dirtyCacheEntries = append(self.dirtyCacheEntries,
					"/dish/"+putKey.Parent().Encode()+"/pairing/"+putKey.Encode())
			}
		}
	}
}
// Store multiple models, calling SetKey after generating the key (if it
// doesn't have one already)
func PutModels(ctx appengine.Context, modelSlice []models.Model) error {
	keys := make([]*datastore.Key, len(modelSlice))
	var err error
	for index, model := range modelSlice {
		keys[index], err = model.Key()
		if err == models.NoKeyLoaded {
			keys[index] = datastore.NewIncompleteKey(ctx, model.Kind(), nil)
		}
	}
	newKeys, err := datastore.PutMulti(ctx, keys, modelSlice)
	if err != nil {
		// leave the models' keys untouched if the put failed
		return err
	}
	for index, model := range modelSlice {
		model.SetKey(newKeys[index])
	}
	return nil
}
func rehasher(w http.ResponseWriter, r *http.Request) {
	c := appengine.NewContext(r)
	var items []*Item
	ks, err := datastore.NewQuery("item").GetAll(c, &items)
	if err != nil {
		handleError(w, err)
		return
	}
	_, err = datastore.PutMulti(c, ks, items)
	if err != nil {
		handleError(w, err)
		return
	}
	fmt.Fprintln(w, "OK!")
}
// import all menus from jsonData
func (self *importer) importMenus() {
	menuKeyFunc := func(key *datastore.Key, item interface{}) string {
		return item.(*Menu).Name
	}
	// index existing items by their name
	prevMenus := self.indexItems(self.NewQuery("Menu"), &Menu{}, menuKeyFunc)
	count := len(self.jsonData.Menus)
	// slices of menu items to be stored
	putItems := make([]interface{}, 0, count)
	putKeys := make([]*datastore.Key, 0, count)
	// walk each menu
	for index := range self.jsonData.Menus {
		jsonMenu := &self.jsonData.Menus[index]
		// get the key to store to
		key := self.restoreKey(jsonMenu.Id, self.lid)
		if key.Incomplete() {
			// check if we already have a menu by this name
			if existingKey, found := prevMenus[jsonMenu.Name]; found {
				key = existingKey
			}
		}
		// walk the dishes, keeping only the ones we can reference properly
		newDishes := make([]*datastore.Key, 0, len(jsonMenu.Dishes))
		for _, dishKey := range jsonMenu.Dishes {
			destKey := self.restoreKey(dishKey.Encode(), self.lid)
			if !destKey.Incomplete() {
				newDishes = append(newDishes, destKey)
			}
		}
		// add this menu to the list to be added
		jsonMenu.Dishes = newDishes
		jsonMenu.Id = ""
		putItems = append(putItems, jsonMenu)
		putKeys = append(putKeys, key)
	}
	// store the menus and clear the cache
	if len(putKeys) > 0 {
		_, err := datastore.PutMulti(self.c, putKeys, putItems)
		check(err)
		// any modified entries need to be cleared from the cache
		for _, putKey := range putKeys {
			self.dirtyCacheEntries = append(self.dirtyCacheEntries, "/menu/"+putKey.Encode())
		}
	}
}
func updateCountsHandler(w http.ResponseWriter, r *http.Request) {
	c := aeContext(r)
	client := urlfetch.Client(c)

	twitterChans := make([]chan int, len(shareUrls))
	plusOnesChans := make([]chan int, len(shareUrls))
	plusCommentsChans := make([]chan int, len(shareUrls))
	for i, shareUrl := range shareUrls {
		twitterChans[i] = fetchTwitterCountAsync(client, shareUrl)
		plusOnesChans[i] = fetchPlusOnesAsync(client, shareUrl)
		plusCommentsChans[i] = fetchPlusCommentsAsync(client, shareUrl)
	}
	fbCounts := <-fetchFacebookCountsAsync(client, shareUrls)

	shareKeys := make([]*datastore.Key, len(shareUrls))
	for i, shareUrl := range shareUrls {
		shareKeys[i] = datastore.NewKey(c, "Share", shareUrl, 0, nil)
	}
	shares := make([]Share, len(shareUrls))
	if err := datastore.GetMulti(c, shareKeys, shares); err != nil {
		log.Printf("GetMulti failed: %v", err.Error())
	}
	for i, shareUrl := range shareUrls {
		shares[i].Url = shareUrl
		shares[i].PlusOnes = max(shares[i].PlusOnes, <-plusOnesChans[i])
		shares[i].PlusComments = max(shares[i].PlusComments, <-plusCommentsChans[i])
		shares[i].TwitterCount = max(shares[i].TwitterCount, <-twitterChans[i])
		shares[i].FacebookCount = max(shares[i].FacebookCount, fbCounts[i])
	}

	var page Page
	page.Path = "/page/counts.json"
	page.ContentType = "application/json"
	page.Payload, _ = json.Marshal(shares)

	err := func() error {
		if _, err := datastore.PutMulti(c, shareKeys, shares); err != nil {
			return err
		}
		pageKey := datastore.NewKey(c, "Page", page.Path, 0, nil)
		if _, err := datastore.Put(c, pageKey, &page); err != nil {
			return err
		}
		return nil
	}()
	if err != nil {
		log.Println(err.Error())
		http.Error(w, "FAIL", http.StatusInternalServerError)
		return
	}
	fmt.Fprintln(w, "OK")
}
func SavePlayers(c appengine.Context, s *Season, players []Player) []*datastore.Key {
	keys := make([]*datastore.Key, len(players))
	var name, year string
	if s != nil {
		name = s.Name
		year = s.Year
	}
	for index, player := range players {
		keys[index] = PlayerKey(c, name, year, player.Name)
	}
	keys, err := datastore.PutMulti(c, keys, players)
	if err != nil {
		panic(err)
	}
	return keys
}