// Should return users that are level 3 or above func TestIndexAuthors(t *testing.T) { ctx, done, err := aetest.NewContext() if err != nil { t.Fatal(err) } defer done() key := []*datastore.Key{ datastore.NewKey(ctx, "User", "dranton", 0, nil), datastore.NewKey(ctx, "User", "testertim", 0, nil), datastore.NewKey(ctx, "User", "rookierook", 0, nil), } seededUsers := []*models.User{ &models.User{FirstName: "Anton", Level: 5}, &models.User{FirstName: "Tim", Level: 3}, &models.User{FirstName: "Rookie", Level: 1}, } if _, err := datastore.PutMulti(ctx, key, seededUsers); err != nil { t.Fatal(err) } time.Sleep(300 * time.Millisecond) var authors []models.User authors, err = repositories.GetUsersAboveLevel(ctx, 3) if err != nil { t.Errorf("Error: %v", err) } if len(authors) != 2 { t.Errorf("Got %d authors, want %d", len(authors), 2) } if authors[0].Username != "testertim" { t.Errorf("Got username %d, want %d", authors[1].Username, "testertim") } }
//WriteLinkTweet writes a given Tweet to the datastore func (mb MapBuilder) writeLinkTweet(tweets <-chan anaconda.Tweet, wg *sync.WaitGroup) { defer wg.Done() var keys []*datastore.Key var values []*int64 for tweet := range tweets { key := datastore.NewIncompleteKey(mb.c, linkTweetKind, getTweetKey(mb.c)) keys = append(keys, key) values = append(values, &tweet.Id) } err := datastore.RunInTransaction(mb.c, func(c context.Context) error { _, err := datastore.PutMulti(c, keys, values) if err != nil { log.Errorf(c, "Failed to write LinkTweet to datastore. %v", err.Error()) return err } return nil }, nil) if err != nil { log.Errorf(mb.c, "Failed to write LinkTweet to datastore. %v", err.Error()) } }
// 2015-11-06 to force field EditSummary (even if empty) on every IdiomHistory persisted entity. func resaveAllIdiomHistory(c context.Context) error { defer memcache.Flush(c) saved := 0 keys, err := datastore.NewQuery("IdiomHistory").KeysOnly().GetAll(c, nil) if err != nil { return err } nbEntities := len(keys) defer func() { log.Infof(c, "Resaved %d IdiomHistory entities out of %d.", saved, nbEntities) }() for len(keys) > 0 { bunch := 100 if len(keys) < bunch { bunch = len(keys) } histories := make([]*IdiomHistory, bunch) err := datastore.GetMulti(c, keys[:bunch], histories) if err != nil { return err } _, err = datastore.PutMulti(c, keys[:bunch], histories) if err != nil { return err } saved += bunch // Remove processed keys keys = keys[bunch:] } return nil }
// PutMulti wraps datastore.PutMulti func (d *Driver) PutMulti(key []*datastore.Key, src interface{}) ([]*datastore.Key, error) { var keyLen = len(key) var fromIdx, toIdx int var v = reflect.ValueOf(src) var resultKeys = make([]*datastore.Key, 0) // TODO: split multiple goroutine for { fromIdx = toIdx toIdx = fromIdx + d.PutSplitThreshold if toIdx > keyLen { toIdx = keyLen } _keys := key[fromIdx:toIdx] _data := v.Slice(fromIdx, toIdx).Interface() d.logOps(opRead, len(_keys), "PutMulti") updatedKeys, err := datastore.PutMulti(d.ctx, _keys, _data) if err != nil { return resultKeys, err } resultKeys = append(resultKeys, updatedKeys...) if toIdx == keyLen { break } } return resultKeys, nil }
func savePostsToDB(c context.Context, posts []reddit.Post, key string) error { var keys []*datastore.Key for _, element := range posts { keys = append(keys, datastore.NewKey(c, key, element.Title, 0, getParentKey(c, key))) } _, err := datastore.PutMulti(c, keys, posts) return err }
// adminAutoPay records a completion for every enabled automatic Task whose
// Next run time has passed: each task's schedule is advanced and a LoggedTask
// entry is written for its assignee, all in a single PutMulti. Responds 204
// on success (or when nothing is due) and 500 on lookup failures.
func adminAutoPay(w http.ResponseWriter, r *http.Request) {
	now := time.Now()
	c := appengine.NewContext(r)
	// Enabled, automatic tasks that are due.
	q := datastore.NewQuery("Task").
		Filter("Disabled =", false).
		Filter("Automatic = ", true).
		Filter("Next < ", now)
	tasks := []*Task{}
	if err := fillKeyQuery(c, q, &tasks); err != nil {
		log.Warningf(c, "Error finding automatic things: %v", err)
		w.WriteHeader(500)
		return
	}
	if len(tasks) == 0 {
		log.Infof(c, "No automatic tasks.")
		w.WriteHeader(204)
		return
	}
	// Two entities per task: the updated Task and its LoggedTask record.
	storeKeys := make([]*datastore.Key, 0, 2*len(tasks))
	vals := []interface{}{}
	for i := range tasks {
		log.Infof(c, "Recording automatic task %q for %v at %s",
			tasks[i].Name, tasks[i].Assignee, moneyFmt(tasks[i].Value))
		su, err := getUserByEmail(c, tasks[i].Assignee)
		if err != nil {
			log.Warningf(c, "Failed to look up user %v: %v", tasks[i].Assignee, err)
			w.WriteHeader(500)
			return
		}
		// Advance the task's schedule and remember when it last ran.
		tasks[i].updateTime()
		tasks[i].Prev = now
		storeKeys = append(storeKeys, tasks[i].Key)
		vals = append(vals, tasks[i])
		storeKeys = append(storeKeys, datastore.NewIncompleteKey(c, "LoggedTask", nil))
		vals = append(vals, &LoggedTask{
			Task:      tasks[i].Key,
			User:      su.Key,
			Completed: now,
			Who:       su.Name,
			Name:      tasks[i].Name,
			Amount:    tasks[i].Value,
		})
	}
	if _, err := datastore.PutMulti(c, storeKeys, vals); err != nil {
		panic(err)
	}
	w.WriteHeader(204)
}
// storeNextSessions saves IDs of items under kindNext entity kind, // keyed by "sessionID:eventSession.Update". func storeNextSessions(c context.Context, items []*eventSession) error { pkey := nextSessionParent(c) keys := make([]*datastore.Key, len(items)) for i, s := range items { id := s.ID + ":" + s.Update keys[i] = datastore.NewKey(c, kindNext, id, 0, pkey) } zeros := make([]struct{}, len(keys)) _, err := datastore.PutMulti(c, keys, zeros) return err }
// example demonstrates batched datastore operations (put, get, delete) for
// the docs snippet between the [START batch]/[END batch] markers. The ctx,
// k1..k3 keys, e1..e3 entities, err, and T are declared elsewhere.
func example() {
	// [START batch]
	// A batch put.
	_, err = datastore.PutMulti(ctx, []*datastore.Key{k1, k2, k3}, []interface{}{e1, e2, e3})

	// A batch get.
	var entities = make([]*T, 3)
	err = datastore.GetMulti(ctx, []*datastore.Key{k1, k2, k3}, entities)

	// A batch delete.
	err = datastore.DeleteMulti(ctx, []*datastore.Key{k1, k2, k3})
	// [END batch]
	// The snippet intentionally ignores errors; keep the compiler happy.
	_ = err
}
func (d rdsImpl) PutMulti(keys []ds.Key, vals []ds.PropertyMap, cb ds.PutMultiCB) error { rkeys := dsMF2R(keys) rvals := make([]datastore.PropertyLoadSaver, len(vals)) for i, val := range vals { rvals[i] = &typeFilter{val} } rkeys, err := datastore.PutMulti(d, rkeys, rvals) return idxCallbacker(err, len(keys), func(idx int, err error) { k := ds.Key(nil) if err == nil { k = dsR2F(rkeys[idx]) } cb(k, err) }) }
// Load reads the JSON representation of entities from the io.Reader "r",
// and stores them in the Datastore using the given context.Context.
// The Options parameter allows you to configure how the dump will work.
// If there are any parsing errors, improper format, or datastore failures
// during the process, that error is returned and processing stops. The
// error may be returned after some entities were loaded: there is no
// parsing cache.
func Load(c context.Context, r io.Reader, o *Options) error {
	entities, err := DecodeEntities(c, r)
	if err != nil {
		return err
	}
	if len(entities) == 0 {
		log.Infof(c, "Skipping load of 0 entities")
		return nil
	}

	// Write in batches to stay within per-call datastore limits.
	batchSize := o.BatchSize
	if batchSize <= 0 {
		batchSize = 50 // default batch size
	}

	for start, end := 0, 0; start < len(entities); {
		end += batchSize
		if end > len(entities) {
			end = len(entities)
		}
		keys := make([]*datastore.Key, 0, end-start)
		values := make([]datastore.PropertyList, 0, cap(keys))
		for _, e := range entities[start:end] {
			keys = append(keys, e.Key)
			values = append(values, e.Properties)
		}
		keys, err = datastore.PutMulti(c, keys, values)
		if err != nil {
			return err
		}
		log.Infof(c, "Loaded %d entities ...", len(keys))
		if o.GetAfterPut {
			// Optional read-back of the batch to force consistency before
			// the next batch; the fetched values are discarded.
			log.Infof(c, "Making a read to force consistency ...")
			l := make([]Entity, len(keys))
			err := datastore.GetMulti(c, keys, l)
			if err != nil {
				return err
			}
		}
		start = end
	}
	return nil
}
func (a *GaeDatastoreAccessor) saveAppConfig(c context.Context, appConfig ApplicationConfig) error { keys := make([]*datastore.Key, len(appConfig.Toggles)) properties := make([]*AppConfigProperty, len(appConfig.Toggles)) i := 0 for name, value := range appConfig.Toggles { prop := AppConfigProperty{ AppConfigId: 0, // TODO: meaningful appConfigId Name: name, Value: value, } keystr := fmt.Sprintf("%d_%s", prop.AppConfigId, prop.Name) keys[i] = datastore.NewKey(c, "AppConfigProperty", keystr, 0, nil) properties[i] = &prop i++ } _, err := datastore.PutMulti(c, keys, properties) return err }
func adminMarkPaid(w http.ResponseWriter, r *http.Request) { c := appengine.NewContext(r) action := r.FormValue("action") keys := make([]*datastore.Key, 0, len(r.Form["pay"])) for _, s := range r.Form["pay"] { k, err := datastore.DecodeKey(s) if err != nil { panic(err) } keys = append(keys, k) } if action == "Mark Paid" { tasks := make([]LoggedTask, len(keys)) err := datastore.GetMulti(c, keys, tasks) if err != nil { panic(err) } now := time.Now().UTC() for i := range tasks { tasks[i].Paid = true tasks[i].PaidTime = now } _, err = datastore.PutMulti(c, keys, tasks) if err != nil { panic(err) } } else if action == "Delete" { err := datastore.DeleteMulti(c, keys) if err != nil { panic(err) } } else { panic("Unhandled action: " + action) } http.Redirect(w, r, "/admin/", 303) }
func (c *DSContext) PutMultiTeams(teams []*Team) error { keys := make([]*datastore.Key, len(teams)) for i, m := range teams { if m.ID == 0 { keys[i] = datastore.NewIncompleteKey(c, KindTeam, nil) } else { keys[i] = datastore.NewKey(c, KindTeam, "", m.ID, nil) } } keys, err := datastore.PutMulti(c, keys, teams) if err != nil { return err } for i := range teams { teams[i].ID = keys[i].IntID() } return nil }
func (c *DSContext) PutMultiLeagues(leagues []*League) error { keys := make([]*datastore.Key, len(leagues)) for i, m := range leagues { if m.ID == 0 { keys[i] = datastore.NewIncompleteKey(c, KindLeague, nil) } else { keys[i] = datastore.NewKey(c, KindLeague, "", m.ID, nil) } } keys, err := datastore.PutMulti(c, keys, leagues) if err != nil { return err } for i := range leagues { leagues[i].ID = keys[i].IntID() } return nil }
func (c *DSContext) PutMultiMatches(matches []*Match) error { keys := make([]*datastore.Key, len(matches)) for i, m := range matches { if m.ID == 0 { keys[i] = datastore.NewIncompleteKey(c, KindMatch, nil) } else { keys[i] = datastore.NewKey(c, KindMatch, "", m.ID, nil) } } keys, err := datastore.PutMulti(c, keys, matches) if err != nil { return err } for i := range matches { matches[i].ID = keys[i].IntID() } return nil }
func (a *GaeDatastoreAccessor) getMessagesForUser(c context.Context, username string) ([]*datastore.Key, []*MessageForUser, error) { var dateZero time.Time q := datastore.NewQuery("MessageForUser"). Filter("Username ="******"DismissalDate =", dateZero) messages := make([]*MessageForUser, 0) keys, err := q.GetAll(c, &messages) // Mark as seen now := time.Now() for _, msg := range messages { msg.LastViewDate = now if msg.FirstViewDate == dateZero { msg.FirstViewDate = now } } _, err = datastore.PutMulti(c, keys, messages) if err != nil { log.Warningf(c, "Could not save messages view dates: %v", err) } return keys, messages, err }
func handleRemove(w http.ResponseWriter, r *http.Request, apiKey APIKey) *appError { if r.Method != "DELETE" { return &appError{nil, fmt.Sprintf("Invalid request method: %s", r.Method), 401} } c := appengine.NewContext(r) strChatID := r.FormValue("chatID") rmPath := r.FormValue("path") if rmPath == "" { return &appError{nil, "Missing path.", 401} } var fbChatID int64 = -1 var chatKey *datastore.Key var err error if strChatID != "" { fbChatID, err = strconv.ParseInt(strChatID, 10, 64) if err != nil { return &appError{nil, "Bad chat ID", 400} } chatKeys, err := datastore.NewQuery("Chat").Filter("FacebookChatID =", fbChatID). KeysOnly().GetAll(c, nil) if err != nil { return &appError{err, "Datastore error: " + err.Error(), 400} } else if len(chatKeys) == 0 { return &appError{nil, "Bad chat ID", 400} } chatKey = chatKeys[0] } else { chatKey = nil } deleted := make([]Link, 0) keysToRemove, err := datastore.NewQuery("Link"). Filter("Path =", rmPath).Filter("ChatKey =", chatKey).GetAll(c, &deleted) if len(keysToRemove) != 0 { newKeys := make([]*datastore.Key, len(keysToRemove)) for i := range keysToRemove { newKeys[i] = datastore.NewIncompleteKey(c, "DeletedLink", nil) } err = datastore.RunInTransaction(c, func(tc context.Context) (err error) { _, err = datastore.PutMulti(c, newKeys, deleted) if err != nil { return } err = datastore.DeleteMulti(c, keysToRemove) return }, nil) } var resp RemoveResponse if err != nil { resp = RemoveResponse{ false, 0, err.Error(), } } else { resp = RemoveResponse{ true, len(keysToRemove), "", } } respJSON, _ := json.Marshal(resp) w.Write(respJSON) return nil }
// ReplaceAuthDB updates database with given AuthDBSnapshot if it is new.
// The first return value indicates database was updated,
// and the second one has the latest AuthReplicationState.
func ReplaceAuthDB(c context.Context, newData AuthDBSnapshot) (bool, *AuthReplicationState, error) {
	var stat *AuthReplicationState
	updated := false
	err := datastore.RunInTransaction(c, func(c context.Context) error {
		// Fetch the current replication state and verify the snapshot comes
		// from the same primary before touching anything.
		curstat := &AuthReplicationState{}
		if err := GetReplicationState(c, curstat); err != nil {
			return err
		}
		if newData.ReplicationState.PrimaryID != curstat.PrimaryID {
			return fmt.Errorf("primary id mismatch. incoming=%q; want=%q", newData.ReplicationState.PrimaryID, curstat.PrimaryID)
		}
		// database is already up-to-date.
		if curstat.AuthDBRev >= newData.ReplicationState.AuthDBRev {
			stat = curstat
			return nil
		}
		dbsnap := &AuthDBSnapshot{}
		if err := currentAuthDBSnapshot(c, dbsnap); err != nil {
			return err
		}
		// Diff the incoming snapshot against the stored one to collect the
		// entities to (re)write and the keys to delete.
		var newEntities []entity
		var delKeys []*datastore.Key
		// Going to update database.
		if !reflect.DeepEqual(newData.GlobalConfig, dbsnap.GlobalConfig) {
			newEntities = append(newEntities, newData.GlobalConfig)
		}
		newGrps, delGrKeys := groupsDiff(dbsnap.Groups, newData.Groups)
		newEntities = append(newEntities, newGrps...)
		delKeys = append(delKeys, delGrKeys...)
		newWls, delWlKeys := whitelistsDiff(dbsnap.IPWhitelists, newData.IPWhitelists)
		newEntities = append(newEntities, newWls...)
		delKeys = append(delKeys, delWlKeys...)
		if !reflect.DeepEqual(newData.IPWhitelistAssignments, dbsnap.IPWhitelistAssignments) {
			newEntities = append(newEntities, newData.IPWhitelistAssignments)
		}
		curstat.AuthDBRev = newData.ReplicationState.AuthDBRev
		curstat.ModifiedTimestamp = newData.ReplicationState.ModifiedTimestamp
		// Run the state put, the entity puts, and the deletions in parallel.
		// ch is buffered to 3 so no worker can block even if we return early
		// from the error-draining loop below.
		var wg sync.WaitGroup
		ch := make(chan error, 3)
		wg.Add(3)
		go func() {
			defer wg.Done()
			if _, err := datastore.Put(c, ReplicationStateKey(c), curstat); err != nil {
				ch <- err
				return
			}
			ch <- nil
		}()
		go func() {
			defer wg.Done()
			keys := make([]*datastore.Key, 0, len(newEntities))
			for _, n := range newEntities {
				keys = append(keys, n.key())
			}
			if _, err := datastore.PutMulti(c, keys, newEntities); err != nil {
				ch <- err
				return
			}
			ch <- nil
		}()
		go func() {
			defer wg.Done()
			if err := datastore.DeleteMulti(c, delKeys); err != nil {
				ch <- err
				return
			}
			ch <- nil
		}()
		// Closer: once all three workers are done, close ch so the range
		// below terminates.
		go func() {
			wg.Wait()
			close(ch)
		}()
		// Return the first worker error, if any.
		for err := range ch {
			if err != nil {
				return err
			}
		}
		stat = curstat
		updated = true
		return nil
	}, &datastore.TransactionOptions{
		// XG: the writes above span multiple entity groups.
		XG: true,
	})
	if err != nil {
		return false, nil, err
	}
	return updated, stat, nil
}
// TestPutGetDelete checks that an nds.Put hits memcache and the datastore in
// the expected order (set memcache, put datastore, delete memcache), and that
// values round-trip correctly through Put/Get/Delete, including cached reads.
func TestPutGetDelete(t *testing.T) {
	c, closeFunc := NewContext(t)
	defer closeFunc()

	type testEntity struct {
		IntVal int
	}

	// Check we set memcache, put datastore and delete memcache.
	// Instrumented hooks record the call order into seq (buffered to 3 so
	// the hooks never block).
	seq := make(chan string, 3)
	nds.SetMemcacheSetMulti(func(c context.Context, items []*memcache.Item) error {
		seq <- "memcache.SetMulti"
		return memcache.SetMulti(c, items)
	})
	nds.SetDatastorePutMulti(func(c context.Context, keys []*datastore.Key, vals interface{}) ([]*datastore.Key, error) {
		seq <- "datastore.PutMulti"
		return datastore.PutMulti(c, keys, vals)
	})
	nds.SetMemcacheDeleteMulti(func(c context.Context, keys []string) error {
		seq <- "memcache.DeleteMulti"
		// Closing here marks the delete as the final expected call.
		close(seq)
		return memcache.DeleteMulti(c, keys)
	})

	incompleteKey := datastore.NewIncompleteKey(c, "Entity", nil)
	key, err := nds.Put(c, incompleteKey, &testEntity{43})
	if err != nil {
		t.Fatal(err)
	}

	// Restore the real implementations before asserting.
	nds.SetMemcacheSetMulti(memcache.SetMulti)
	nds.SetDatastorePutMulti(datastore.PutMulti)
	nds.SetMemcacheDeleteMulti(memcache.DeleteMulti)

	if s := <-seq; s != "memcache.SetMulti" {
		t.Fatal("memcache.SetMulti not", s)
	}
	if s := <-seq; s != "datastore.PutMulti" {
		t.Fatal("datastore.PutMulti not", s)
	}
	if s := <-seq; s != "memcache.DeleteMulti" {
		t.Fatal("memcache.DeleteMulti not", s)
	}
	// Check chan is closed.
	<-seq

	if key.Incomplete() {
		t.Fatal("Key is incomplete")
	}

	te := &testEntity{}
	if err := nds.Get(c, key, te); err != nil {
		t.Fatal(err)
	}
	if te.IntVal != 43 {
		t.Fatal("te.Val != 43", te.IntVal)
	}

	// Get from cache.
	te = &testEntity{}
	if err := nds.Get(c, key, te); err != nil {
		t.Fatal(err)
	}
	if te.IntVal != 43 {
		t.Fatal("te.Val != 43", te.IntVal)
	}

	// Change value.
	if _, err := nds.Put(c, key, &testEntity{64}); err != nil {
		t.Fatal(err)
	}

	// Get from cache.
	te = &testEntity{}
	if err := nds.Get(c, key, te); err != nil {
		t.Fatal(err)
	}
	if te.IntVal != 64 {
		t.Fatal("te.Val != 64", te.IntVal)
	}

	if err := nds.Delete(c, key); err != nil {
		t.Fatal(err)
	}
	// After deletion the entity must be gone from both cache and datastore.
	if err := nds.Get(c, key, &testEntity{}); err != datastore.ErrNoSuchEntity {
		t.Fatal("expected datastore.ErrNoSuchEntity")
	}
}
//I need to implement a token on this form func csvimport(w http.ResponseWriter, r *http.Request) *appError { c := appengine.NewContext(r) log.Infof(c, "method: ", r.Method) if r.Method != "POST" { return &appError{ errors.New("Unsupported method call to import"), "Imports most be POSTed", http.StatusMethodNotAllowed, } } //this block for check the user's credentials should eventually be broken out into a filter u := user.Current(c) if u == nil { url, err := user.LoginURL(c, r.URL.String()) if err != nil { return &appError{err, "Could not determine LoginURL", http.StatusInternalServerError} } w.Header().Set("Location", url) w.WriteHeader(http.StatusFound) return nil } //some crappy security so that only a certain person can upload things //we should probably have a users entity in datastore that we manage manually for this kinda thing if u.Email != "*****@*****.**" { return &appError{ errors.New("Illegal import attempted by " + u.Email), "Your user is not allowed to import", http.StatusForbidden, } } //r.ParseMultipartForm(1 << 10) file, handler, err := r.FormFile("uploadfile") if err != nil { return &appError{err, "Error uploading file", http.StatusInternalServerError} } defer file.Close() log.Infof(c, "New import file: %s ", handler.Filename) cr := csv.NewReader(file) var res []*Resource var keys []*datastore.Key //at the moment we always insert a new item, this should be an insert or update based on OrganizationName //if we get a large enough data set we'll need to implement two loops so that we only batch a certain number of records at a time for { rec, err := cr.Read() if err == io.EOF { break } if err != nil { return &appError{err, "Error reading file", http.StatusInternalServerError} } //if the first row has column headers then skip to the next one if strings.ToLower(strings.Trim(rec[1], " ")) == "category" { continue } //Search for this Resource by OrganizationName q := datastore.NewQuery("Resource").Filter("organizationname =", rec[2]).KeysOnly().Limit(2) 
tmpKey := datastore.NewIncompleteKey(c, "Resource", nil) if tmpKeys, err := q.GetAll(c, nil); len(tmpKeys) == 1 && err == nil { tmpKey = tmpKeys[0] } //we may want IDs in there eventually //_, err = strconv.ParseInt(rec[0], 2, 64) tmp := &Resource{ Category: rec[1], //getSliceFromString(rec[1]), OrganizationName: rec[2], Address: rec[3], ZipCode: rec[4], Days: GetDays(rec[5:8]), TimeOpenClose: GetTimes(rec[5:8], c), PeopleServed: getSliceFromString(rec[8]), Description: rec[9], PhoneNumber: rec[10], LastUpdatedBy: u.Email, LastUpdatedTime: time.Now().UTC(), IsActive: true, Location: appengine.GeoPoint{}, } //log.Infof(c, "len slice check: %x, len rec LatLng check: %x, check for comma: %x", len(rec) > 11, len(rec[11]) > 0, strings.Index(rec[11], ",") != -1) if len(rec) > 11 && len(rec[11]) > 0 && strings.Index(rec[11], ",") != -1 { tmp.Location.Lng, _ = strconv.ParseFloat(strings.Split(rec[11], ",")[0], 64) tmp.Location.Lat, _ = strconv.ParseFloat(strings.Split(rec[11], ",")[1], 64) //log.Println(tmp.Location) } res = append(res, tmp) keys = append(keys, tmpKey) } _, err = datastore.PutMulti(c, keys, res) if err != nil { log.Debugf(c, err.Error()) http.Error(w, err.Error(), http.StatusInternalServerError) return &appError{err, "Error updating database", http.StatusInternalServerError} } // clear the cache memcache.Flush(c) http.Redirect(w, r, "/index.html", http.StatusFound) return nil }
// createSampleEntities writes `size` sample User entities with a rich mix of
// property types (indexed/unindexed strings, numbers, multi-valued values,
// a blob, and key references to child Friend entities), flushing to the
// datastore in batches of 10.
func createSampleEntities(c context.Context, size int) error {
	buff := make([]Entity, 0, 10)
	keys := make([]*datastore.Key, 0, 10)
	for i := 1; i <= size; i++ {
		k := datastore.NewKey(c, "User", "", int64(i), nil)
		e := Entity{Key: k}
		e.Add(datastore.Property{Name: "Title", Value: lorem.Sentence(5, 10)})
		e.Add(datastore.Property{
			Name:    "SubTitle",
			Value:   lorem.Sentence(3, 5),
			NoIndex: true,
		})
		e.Add(datastore.Property{
			Name:    "Description",
			Value:   lorem.Paragraph(3, 5),
			NoIndex: true,
		})
		e.Add(datastore.Property{Name: "Size", Value: int64(32)})
		// Multi-valued string property (5 tags).
		for j := 0; j < 5; j++ {
			e.Add(datastore.Property{
				Name:     "Tags",
				Value:    lorem.Word(5, 10),
				Multiple: true,
			})
		}
		e.Add(datastore.Property{Name: "Price", Value: float64(123.45)})
		// Multi-valued float property (10 descending price points).
		for j := 0; j < 10; j++ {
			e.Add(datastore.Property{
				Name:     "PriceHistory",
				Value:    float64(123.45) - float64(j),
				Multiple: true,
			})
		}
		// icon and blobKey are package-level fixtures defined elsewhere.
		e.Add(datastore.Property{Name: "Favicon", Value: icon, NoIndex: true})
		e.Add(datastore.Property{Name: "FaviconSource", Value: blobKey})
		// Key-valued properties referencing child Friend entities.
		for j := 1; j <= 3; j++ {
			e.Add(datastore.Property{
				Name:     "Friends",
				Value:    datastore.NewKey(c, "Friend", "", int64(j), k),
				Multiple: true,
			})
		}
		buff = append(buff, e)
		keys = append(keys, k)
		// Flush every full batch of 10 entities.
		if len(buff) == 10 {
			_, err := datastore.PutMulti(c, keys, buff)
			if err != nil {
				return err
			}
			// Read-back after the put; the result is deliberately ignored
			// (presumably to force consistency — TODO confirm).
			_ = datastore.GetMulti(c, keys, buff)
			buff = make([]Entity, 0, 10)
			keys = make([]*datastore.Key, 0, 10)
		}
	}
	// Flush the final partial batch, if any.
	if len(buff) > 0 {
		k, err := datastore.PutMulti(c, keys, buff)
		if err != nil {
			return err
		}
		_ = datastore.GetMulti(c, k, buff)
	}
	return nil
}
// PutMulti is a batch version of Put. // // src must be a *[]S, *[]*S, *[]I, []S, []*S, or []I, for some struct type S, // or some interface type I. If *[]I or []I, each element must be a struct pointer. func (g *Goon) PutMulti(src interface{}) ([]*datastore.Key, error) { keys, err := g.extractKeys(src, true) // allow incomplete keys on a Put request if err != nil { return nil, err } var memkeys []string for _, key := range keys { if !key.Incomplete() { memkeys = append(memkeys, memkey(key)) } } // Memcache needs to be updated after the datastore to prevent a common race condition, // where a concurrent request will fetch the not-yet-updated data from the datastore // and populate memcache with it. if g.inTransaction { for _, mk := range memkeys { g.toDeleteMC[mk] = true } } else { defer memcache.DeleteMulti(g.Context, memkeys) } v := reflect.Indirect(reflect.ValueOf(src)) multiErr, any := make(appengine.MultiError, len(keys)), false goroutines := (len(keys)-1)/putMultiLimit + 1 var wg sync.WaitGroup wg.Add(goroutines) for i := 0; i < goroutines; i++ { go func(i int) { defer wg.Done() lo := i * putMultiLimit hi := (i + 1) * putMultiLimit if hi > len(keys) { hi = len(keys) } rkeys, pmerr := datastore.PutMulti(g.Context, keys[lo:hi], v.Slice(lo, hi).Interface()) if pmerr != nil { any = true // this flag tells PutMulti to return multiErr later merr, ok := pmerr.(appengine.MultiError) if !ok { g.error(pmerr) for j := lo; j < hi; j++ { multiErr[j] = pmerr } return } copy(multiErr[lo:hi], merr) } for i, key := range keys[lo:hi] { if multiErr[lo+i] != nil { continue // there was an error writing this value, go to next } vi := v.Index(lo + i).Interface() if key.Incomplete() { g.setStructKey(vi, rkeys[i]) keys[i] = rkeys[i] } if g.inTransaction { mk := memkey(rkeys[i]) delete(g.toDelete, mk) g.toSet[mk] = vi } else { g.putMemory(vi) } } }(i) } wg.Wait() if any { return keys, realError(multiErr) } return keys, nil }
// serveComplete records completion of the tasks selected in the submitted
// form for the current user: each task that is actually due gets its schedule
// advanced and a LoggedTask entry written, then the user is redirected home.
func serveComplete(w http.ResponseWriter, r *http.Request) {
	c := appengine.NewContext(r)
	u := user.Current(c)
	su, err := getUser(c, u)
	if err != nil {
		w.WriteHeader(400)
		fmt.Fprintf(w, "You are not permitted, %v", u)
		return
	}
	if err := r.ParseForm(); err != nil {
		http.Error(w, err.Error(), 400)
		fmt.Fprintf(w, "Can't parse form from %v", u)
		return
	}
	// Decode the selected task keys from the form.
	taskIds := []*datastore.Key{}
	for _, s := range r.Form["task"] {
		k, err := datastore.DecodeKey(s)
		if err != nil {
			panic(err)
		}
		taskIds = append(taskIds, k)
	}
	log.Infof(c, "Doing tasks for %v: %v", su, taskIds)
	tasks := make([]Task, len(taskIds))
	err = datastore.GetMulti(c, taskIds, tasks)
	if err != nil {
		panic(err)
	}
	now := time.Now()
	// Two entities per completed task: the updated Task and its LoggedTask.
	storeKeys := make([]*datastore.Key, 0, 2*len(taskIds))
	vals := []interface{}{}
	for i := range tasks {
		// Only record tasks that are actually due.
		if tasks[i].Next.Before(now) {
			tasks[i].updateTime()
			tasks[i].Prev = now
			storeKeys = append(storeKeys, taskIds[i])
			vals = append(vals, &tasks[i])
			storeKeys = append(storeKeys, datastore.NewIncompleteKey(c, "LoggedTask", nil))
			vals = append(vals, &LoggedTask{
				Task:      taskIds[i],
				User:      su.Key,
				Completed: now,
				Who:       su.Name,
				Name:      tasks[i].Name,
				Amount:    tasks[i].Value,
			})
		}
	}
	log.Infof(c, "Putting %#v in %v", vals, storeKeys)
	_, err = datastore.PutMulti(c, storeKeys, vals)
	if err != nil {
		http.Error(w, err.Error(), 500)
		log.Errorf(c, "Error saving stuff: %v", err)
		return
	}
	http.Redirect(w, r, "/", http.StatusTemporaryRedirect)
}
// adminRepairHistoryVersions renumbers the Version field of every
// IdiomHistory entity of one idiom (given by the idiomId form param) to match
// chronological order, then fixes the Idiom's own Version to the latest.
// Memcache is flushed on exit; a JSON success message is written on success.
func adminRepairHistoryVersions(w http.ResponseWriter, r *http.Request) error {
	c := appengine.NewContext(r)
	defer memcache.Flush(c)
	idiomIDStr := r.FormValue("idiomId")
	if idiomIDStr == "" {
		return PiError{"Mandatory param: idiomId", http.StatusBadRequest}
	}
	idiomID := String2Int(idiomIDStr)
	// Warning: fetching the whole history of 1 idiom
	// may have quite a big memory footprint
	log.Infof(c, "Repairing versions for idiom: %v", idiomID)
	q := datastore.NewQuery("IdiomHistory").
		Filter("Id =", idiomID).
		Order("VersionDate")
	histories := make([]*IdiomHistory, 0)
	historyKeys, err := q.GetAll(c, &histories)
	if err != nil {
		return err
	}
	// Sanity check: adjacent pairs (i, i+1) must be in chronological order.
	for i := range histories[1:] {
		if histories[i].VersionDate.After(histories[i+1].VersionDate) {
			return PiError{ErrorText: "History items not well sorted", Code: 500}
		}
	}
	// Renumber versions 1..n in chronological order.
	for i := range histories {
		histories[i].Version = 1 + i
	}
	lastVersion := len(histories)
	log.Infof(c, "\tSaving %v history entities.", len(histories))
	// Save in bunches of at most 10 entities per datastore call.
	for len(historyKeys) > 0 {
		bunch := 10
		if len(historyKeys) < 10 {
			bunch = len(historyKeys)
		}
		_, err = datastore.PutMulti(c, historyKeys[:bunch], histories[:bunch])
		if err != nil {
			return err
		}
		// Remove processed items
		historyKeys = historyKeys[bunch:]
		histories = histories[bunch:]
	}
	// Align the Idiom entity's Version with the newest history version.
	var idiom Idiom
	idiomKey := newIdiomKey(c, idiomID)
	err = datastore.Get(c, idiomKey, &idiom)
	if err != nil {
		return err
	}
	if idiom.Version == lastVersion {
		log.Infof(c, "\tIdiom version %v already clean", idiom.Version)
	} else {
		log.Infof(c, "\tFixing idiom version %v -> %v", idiom.Version, lastVersion)
		idiom.Version = lastVersion
		_, err = datastore.Put(c, idiomKey, &idiom)
		if err != nil {
			return err
		}
	}
	w.Header().Set("Content-Type", "application/json")
	fmt.Fprintln(w, Response{"success": true, "message": "History repaired for idiom " + idiomIDStr})
	return nil
}