func TestRaceNonConflictingPuts(t *testing.T) {
	t.Parallel()

	ds := datastore.Get(Use(context.Background()))

	num := int32(0)

	wg := sync.WaitGroup{}
	for i := 0; i < 100; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			err := ds.RunInTransaction(func(c context.Context) error {
				ds := datastore.Get(c)
				return ds.Put(pmap(
					"$kind", "Thing", Next,
					"Value", 100))
			}, nil)
			if err != nil {
				t.Fatal("error during transaction", err)
			}
			atomic.AddInt32(&num, 1)
		}()
	}
	wg.Wait()

	if num != 100 {
		t.Fatal("expected 100 runs, got", num)
	}
}
// SetGlobalEnable is a convenience function for manipulating the GlobalConfig.
//
// It's meant to be called from admin handlers on your app to turn dscache
// functionality on or off in emergencies.
func SetGlobalEnable(c context.Context, memcacheEnabled bool) error {
	// always go to the default namespace
	c, err := info.Get(c).Namespace("")
	if err != nil {
		return err
	}
	return datastore.Get(c).RunInTransaction(func(c context.Context) error {
		ds := datastore.Get(c)

		cfg := &GlobalConfig{Enable: true}
		if err := ds.Get(cfg); err != nil && err != datastore.ErrNoSuchEntity {
			return err
		}
		if cfg.Enable == memcacheEnabled {
			return nil
		}
		cfg.Enable = memcacheEnabled
		if memcacheEnabled {
			// when going false -> true, wipe memcache.
			if err := memcache.Get(c).Flush(); err != nil {
				return err
			}
		}
		return ds.Put(cfg)
	}, nil)
}
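// Usage sketch (not part of the dscache package): one way an app's admin
// endpoint might call SetGlobalEnable to shut the cache off in an emergency.
// The handler name and signature are hypothetical, the surrounding router /
// middleware is assumed to have installed the production service
// implementations on `c`, and the snippet assumes "fmt" and "net/http" are
// imported.
func emergencyDisableDSCache(c context.Context, rw http.ResponseWriter) {
	if err := SetGlobalEnable(c, false); err != nil {
		http.Error(rw, err.Error(), http.StatusInternalServerError)
		return
	}
	// Other instances observe the change the next time they poll the
	// GlobalConfig entity (see IsGloballyEnabled below).
	fmt.Fprintln(rw, "dscache disabled")
}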
func TestRaceGetPut(t *testing.T) {
	t.Parallel()

	value := int32(0)
	num := int32(0)

	ds := datastore.Get(Use(context.Background()))

	wg := sync.WaitGroup{}
	for i := 0; i < 100; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			err := ds.RunInTransaction(func(c context.Context) error {
				atomic.AddInt32(&num, 1)

				ds := datastore.Get(c)

				obj := pmap("$key", ds.MakeKey("Obj", 1))
				if err := ds.Get(obj); err != nil && err != datastore.ErrNoSuchEntity {
					t.Fatal("error get", err)
				}
				cur := int64(0)
				if ps, ok := obj["Value"]; ok {
					cur = ps[0].Value().(int64)
				}
				cur++
				obj["Value"] = []datastore.Property{prop(cur)}
				return ds.Put(obj)
			}, &datastore.TransactionOptions{Attempts: 200})
			if err != nil {
				t.Fatal("error during transaction", err)
			}
			atomic.AddInt32(&value, 1)
		}()
	}
	wg.Wait()

	obj := pmap("$key", ds.MakeKey("Obj", 1))
	if ds.Get(obj) != nil {
		t.FailNow()
	}
	t.Logf("Ran %d inner functions", num)
	if int64(value) != obj["Value"][0].Value().(int64) {
		t.Fatalf("wrong value: %d vs %d", value, obj["Value"][0].Value().(int64))
	}
}
func TestBrokenFeatures(t *testing.T) {
	t.Parallel()

	e := errors.New("default err")

	Convey("BrokenFeatures", t, func() {
		c := memory.Use(context.Background())

		Convey("Can break ds", func() {
			Convey("without a default", func() {
				c, bf := FilterRDS(c, nil)
				ds := datastore.Get(c)
				vals := []datastore.PropertyMap{{
					"$key": {datastore.MkPropertyNI(ds.NewKey("Wut", "", 1, nil))},
				}}

				Convey("by specifying an error", func() {
					bf.BreakFeatures(e, "GetMulti", "PutMulti")
					So(ds.GetMulti(vals), ShouldEqual, e)

					Convey("and you can unbreak them as well", func() {
						bf.UnbreakFeatures("GetMulti")
						So(errors.SingleError(ds.GetMulti(vals)), ShouldEqual, datastore.ErrNoSuchEntity)

						Convey("no broken features at all is a shortcut", func() {
							bf.UnbreakFeatures("PutMulti")
							So(errors.SingleError(ds.GetMulti(vals)), ShouldEqual, datastore.ErrNoSuchEntity)
						})
					})
				})

				Convey("Not specifying an error gets you a generic error", func() {
					bf.BreakFeatures(nil, "GetMulti")
					So(ds.GetMulti(vals).Error(), ShouldContainSubstring, `feature "GetMulti" is broken`)
				})
			})

			Convey("with a default", func() {
				c, bf := FilterRDS(c, e)
				ds := datastore.Get(c)
				vals := []datastore.PropertyMap{{
					"$key": {datastore.MkPropertyNI(ds.NewKey("Wut", "", 1, nil))},
				}}
				bf.BreakFeatures(nil, "GetMulti")
				So(ds.GetMulti(vals), ShouldEqual, e)
			})
		})
	})
}
func ExampleFilterRDS() {
	// Set up your context using a base service implementation (memory or prod)
	c := memory.Use(context.Background())

	// Apply the counter.FilterRDS
	c, counter := FilterRDS(c)

	// functions use ds from the context like normal... they don't need to know
	// that there are any filters at all.
	someCalledFunc := func(c context.Context) {
		ds := datastore.Get(c)
		vals := []datastore.PropertyMap{{
			"FieldName": {datastore.MkProperty(100)},
			"$key":      {datastore.MkProperty(ds.NewKey("Kind", "", 1, nil))},
		}}
		if err := ds.PutMulti(vals); err != nil {
			panic(err)
		}
	}

	// Using the other function.
	someCalledFunc(c)
	someCalledFunc(c)

	// Then we can see what happened!
	fmt.Printf("%d\n", counter.PutMulti.Successes())
	// Output:
	// 2
}
func mkds(data []*Foo) (under, over *count.DSCounter, ds datastore.Interface) {
	c := memory.UseWithAppID(context.Background(), "something~else")
	ds = datastore.Get(c)
	_, err := ds.AllocateIDs(ds.KeyForObj(data[0]), 100)
	if err != nil {
		panic(err)
	}
	if err := ds.PutMulti(data); err != nil {
		panic(err)
	}

	c, under = count.FilterRDS(c)
	c = FilterRDS(c)
	c, over = count.FilterRDS(c)
	ds = datastore.Get(c)
	return
}
func testGetMeta(c context.Context, k *dsS.Key) int64 {
	ds := dsS.Get(c)
	mg := &MetaGroup{Parent: k.Root()}
	if err := ds.Get(mg); err != nil {
		panic(err)
	}
	return mg.Version
}
func withTxnBuf(ctx context.Context, cb func(context.Context) error, opts *datastore.TransactionOptions) error {
	inf := info.Get(ctx)
	ns := inf.GetNamespace()

	parentState, _ := ctx.Value(dsTxnBufParent).(*txnBufState)
	roots := stringset.New(0)
	rootLimit := 1
	if opts != nil && opts.XG {
		rootLimit = XGTransactionGroupLimit
	}
	sizeBudget, writeCountBudget := DefaultSizeBudget, DefaultWriteCountBudget
	if parentState != nil {
		// TODO(riannucci): this is a bit wonky since it means that a child
		// transaction declaring XG=true will only get to modify 25 groups IF
		// they're the same groups affected by the parent transactions. So instead
		// of respecting opts.XG for inner transactions, we just dup everything
		// from the parent transaction.
		roots = parentState.roots.Dup()
		rootLimit = parentState.rootLimit

		sizeBudget = parentState.sizeBudget - parentState.entState.total
		writeCountBudget = parentState.writeCountBudget - parentState.entState.numWrites()
	}

	bufDS, err := memory.NewDatastore(inf.FullyQualifiedAppID(), ns)
	if err != nil {
		return err
	}

	state := &txnBufState{
		entState:         &sizeTracker{},
		bufDS:            bufDS.Raw(),
		roots:            roots,
		rootLimit:        rootLimit,
		ns:               ns,
		aid:              inf.AppID(),
		parentDS:         datastore.Get(context.WithValue(ctx, dsTxnBufHaveLock, true)).Raw(),
		sizeBudget:       sizeBudget,
		writeCountBudget: writeCountBudget,
	}

	if err = cb(context.WithValue(ctx, dsTxnBufParent, state)); err != nil {
		return err
	}

	// no reason to unlock this ever. At this point it's toast.
	state.Lock()

	if parentState == nil {
		return commitToReal(state)
	}

	if err = parentState.canApplyLocked(state); err != nil {
		return err
	}

	parentState.commitLocked(state)
	return nil
}
func TestRace(t *testing.T) {
	t.Parallel()

	c := FilterRDS(memory.Use(context.Background()))
	ds := datastore.Get(c)

	wg := sync.WaitGroup{}
	for i := 0; i < 100; i++ {
		id := int64(i + 1)
		wg.Add(1)
		go func() {
			defer wg.Done()
			err := ds.RunInTransaction(func(c context.Context) error {
				ds := datastore.Get(c)
				for i := 0; i < 100; i++ {
					err := ds.RunInTransaction(func(c context.Context) error {
						ds := datastore.Get(c)
						ctr := &Counter{ID: id}
						if err := ds.Get(ctr); err != nil && err != datastore.ErrNoSuchEntity {
							t.Fatal("bad Get", err)
						}
						ctr.Value++
						return ds.Put(ctr)
					}, nil)
					if err != nil {
						t.Fatal("bad inner RIT", err)
					}
				}
				return nil
			}, nil)
			if err != nil {
				t.Fatal("bad outer RIT", err)
			}
		}()
	}
	wg.Wait()
}
func TestCompoundIndexes(t *testing.T) {
	t.Parallel()

	idxKey := func(def dsS.IndexDefinition) string {
		So(def, ShouldNotBeNil)
		return "idx::" + string(serialize.ToBytes(*def.PrepForIdxTable()))
	}

	numItms := func(c *memCollection) uint64 {
		ret, _ := c.GetTotals()
		return ret
	}

	Convey("Test Compound indexes", t, func() {
		type Model struct {
			ID int64 `gae:"$id"`

			Field1 []string
			Field2 []int64
		}

		c := Use(context.Background())
		ds := dsS.Get(c)
		t := ds.Testable().(*dsImpl)
		head := t.data.head

		So(ds.Put(&Model{1, []string{"hello", "world"}, []int64{10, 11}}), ShouldBeNil)

		idx := dsS.IndexDefinition{
			Kind: "Model",
			SortBy: []dsS.IndexColumn{
				{Property: "Field2"},
			},
		}

		coll := head.GetCollection(idxKey(idx))
		So(coll, ShouldNotBeNil)
		So(numItms(coll), ShouldEqual, 2)

		idx.SortBy[0].Property = "Field1"
		coll = head.GetCollection(idxKey(idx))
		So(coll, ShouldNotBeNil)
		So(numItms(coll), ShouldEqual, 2)

		idx.SortBy = append(idx.SortBy, dsS.IndexColumn{Property: "Field1"})
		So(head.GetCollection(idxKey(idx)), ShouldBeNil)

		t.AddIndexes(&idx)
		coll = head.GetCollection(idxKey(idx))
		So(coll, ShouldNotBeNil)
		So(numItms(coll), ShouldEqual, 4)
	})
}
// NewDatastore creates a new standalone memory implementation of the
// datastore, suitable for embedding for doing in-memory data organization.
//
// It's configured by default with the following settings:
//   * AutoIndex(true)
//   * Consistent(true)
//   * DisableSpecialEntities(true)
//
// These settings can of course be changed by using the Testable() interface.
func NewDatastore(aid, ns string) (ds.Interface, error) {
	ctx := UseWithAppID(context.Background(), aid)

	ctx, err := info.Get(ctx).Namespace(ns)
	if err != nil {
		return nil, err
	}

	ret := ds.Get(ctx)
	t := ret.Testable()
	t.AutoIndex(true)
	t.Consistent(true)
	t.DisableSpecialEntities(true)

	return ret, nil
}
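// ExampleNewDatastore is a usage sketch added for illustration (it is not part
// of the original package): it shows NewDatastore being used as a standalone,
// fully consistent in-memory store. The Note type is invented for the example,
// "dev~app" / "" are an arbitrary app id and the default namespace, "fmt" is
// assumed to be imported, and the explicit ID is required because
// DisableSpecialEntities(true) turns off automatic ID allocation.
func ExampleNewDatastore() {
	type Note struct {
		ID   int64 `gae:"$id"`
		Text string
	}

	ds, err := NewDatastore("dev~app", "")
	if err != nil {
		panic(err)
	}

	if err := ds.Put(&Note{ID: 1, Text: "hello"}); err != nil {
		panic(err)
	}

	got := &Note{ID: 1}
	if err := ds.Get(got); err != nil {
		panic(err)
	}
	fmt.Println(got.Text)
	// Output:
	// hello
}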
// High level test for regression in how zero time is stored,
// see https://codereview.chromium.org/1334043003/
func TestDefaultTimeField(t *testing.T) {
	t.Parallel()

	Convey("Default time.Time{} can be stored", t, func() {
		type Model struct {
			ID   int64 `gae:"$id"`
			Time time.Time
		}
		ds := dsS.Get(Use(context.Background()))
		m := Model{ID: 1}
		So(ds.Put(&m), ShouldBeNil)

		// Reset to something non zero to ensure zero is fetched.
		m.Time = time.Now().UTC()
		So(ds.Get(&m), ShouldBeNil)
		So(m.Time.IsZero(), ShouldBeTrue)
	})
}
// IsGloballyEnabled checks to see if this filter is enabled globally.
//
// This checks InstanceEnabledStatic, and also polls the datastore entity
// /dscache,1 (a GlobalConfig instance) once every GlobalEnabledCheckInterval.
//
// For correctness, any error encountered returns true. If this assumed false,
// then Put operations might incorrectly invalidate the cache.
func IsGloballyEnabled(c context.Context) bool {
	if !InstanceEnabledStatic {
		return false
	}

	now := clock.Now(c)

	globalEnabledLock.RLock()
	nextCheck := globalEnabledNextCheck
	enabledVal := globalEnabled
	globalEnabledLock.RUnlock()

	if now.Before(nextCheck) {
		return enabledVal
	}

	globalEnabledLock.Lock()
	defer globalEnabledLock.Unlock()
	// just in case we raced
	if now.Before(globalEnabledNextCheck) {
		return globalEnabled
	}

	// always go to the default namespace
	c, err := info.Get(c).Namespace("")
	if err != nil {
		return true
	}

	cfg := &GlobalConfig{Enable: true}
	if err := datastore.Get(c).Get(cfg); err != nil && err != datastore.ErrNoSuchEntity {
		return true
	}

	globalEnabled = cfg.Enable
	globalEnabledNextCheck = now.Add(GlobalEnabledCheckInterval)
	return globalEnabled
}
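// For orientation, a sketch of the package-level state and config entity that
// IsGloballyEnabled and SetGlobalEnable rely on. The names come straight from
// the functions above; the entity kind/id follow the "/dscache,1" path in the
// doc comment and the 5-minute default matches the behavior exercised by the
// dscache tests later in this section, but treat the exact values and tags as
// illustrative rather than the package's literal source ("sync" and "time"
// imports assumed).
var (
	// InstanceEnabledStatic allows an individual instance to opt out of
	// dscache entirely, regardless of the datastore switch.
	InstanceEnabledStatic = true

	// GlobalEnabledCheckInterval is how often the cached GlobalConfig value is
	// re-read from the datastore.
	GlobalEnabledCheckInterval = 5 * time.Minute

	globalEnabled          = true
	globalEnabledNextCheck time.Time
	globalEnabledLock      sync.RWMutex
)

// GlobalConfig is the singleton entity (kind "dscache", id 1) that toggles the
// cache for the whole app.
type GlobalConfig struct {
	_kind string `gae:"$kind,dscache"`
	_id   int64  `gae:"$id,1"`

	Enable bool
}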
func TestContextAccess(t *testing.T) {
	t.Parallel()

	// p is a function which recovers an error and then immediately panics with
	// the contained string. It's defer'd in each test so that we can use the
	// ShouldPanicWith assertion (which does an == comparison and not
	// a reflect.DeepEquals comparison).
	p := func() { panic(recover().(error).Error()) }

	Convey("Context Access", t, func() {
		c := context.Background()

		Convey("blank", func() {
			So(dsS.GetRaw(c), ShouldBeNil)
			So(mcS.GetRaw(c), ShouldBeNil)
			So(tqS.GetRaw(c), ShouldBeNil)
			So(infoS.Get(c), ShouldBeNil)
		})

		// needed for everything else
		c = infoS.Set(c, Info())

		Convey("Info", func() {
			So(infoS.Get(c), ShouldNotBeNil)
			So(func() {
				defer p()
				infoS.Get(c).Datacenter()
			}, ShouldPanicWith, "dummy: method Info.Datacenter is not implemented")
		})

		Convey("Datastore", func() {
			c = dsS.SetRaw(c, Datastore())
			So(dsS.Get(c), ShouldNotBeNil)
			So(func() {
				defer p()
				_, _ = dsS.Get(c).DecodeCursor("wut")
			}, ShouldPanicWith, "dummy: method Datastore.DecodeCursor is not implemented")
		})

		Convey("Memcache", func() {
			c = mcS.SetRaw(c, Memcache())
			So(mcS.Get(c), ShouldNotBeNil)
			So(func() {
				defer p()
				_ = mcS.Get(c).Add(nil)
			}, ShouldPanicWith, "dummy: method Memcache.AddMulti is not implemented")
		})

		Convey("TaskQueue", func() {
			c = tqS.SetRaw(c, TaskQueue())
			So(tqS.Get(c), ShouldNotBeNil)
			So(func() {
				defer p()
				_ = tqS.Get(c).Purge("")
			}, ShouldPanicWith, "dummy: method TaskQueue.Purge is not implemented")
		})

		Convey("User", func() {
			c = userS.Set(c, User())
			So(userS.Get(c), ShouldNotBeNil)
			So(func() {
				defer p()
				_ = userS.Get(c).IsAdmin()
			}, ShouldPanicWith, "dummy: method User.IsAdmin is not implemented")
		})

		Convey("Mail", func() {
			c = mailS.Set(c, Mail())
			So(mailS.Get(c), ShouldNotBeNil)
			So(func() {
				defer p()
				_ = mailS.Get(c).Send(nil)
			}, ShouldPanicWith, "dummy: method Mail.Send is not implemented")
		})

		Convey("Module", func() {
			c = modS.Set(c, Module())
			So(modS.Get(c), ShouldNotBeNil)
			So(func() {
				defer p()
				modS.Get(c).List()
			}, ShouldPanicWith, "dummy: method Module.List is not implemented")
		})
	})
}
func TestQueryExecution(t *testing.T) { t.Parallel() Convey("Test query execution", t, func() { c, err := info.Get(Use(context.Background())).Namespace("ns") if err != nil { panic(err) } So(info.Get(c).FullyQualifiedAppID(), ShouldEqual, "dev~app") So(info.Get(c).GetNamespace(), ShouldEqual, "ns") data := ds.Get(c) testing := data.Testable() for _, tc := range queryExecutionTests { Convey(tc.name, func() { for i, stage := range tc.test { // outside of Convey, since these must always happen testing.CatchupIndexes() testing.AddIndexes(stage.addIdxs...) if err := data.PutMulti(stage.putEnts); err != nil { // prevent Convey from thinking this assertion should show up in // every test loop. panic(err) } if err := data.DeleteMulti(stage.delEnts); err != nil { panic(err) } Convey(fmt.Sprintf("stage %d", i), func() { for j, expect := range stage.expect { runner := func(f func(ic context.Context) error, _ *ds.TransactionOptions) error { return f(c) } if expect.inTxn { runner = data.RunInTransaction } if expect.count == 0 { if len(expect.keys) > 0 { expect.count = len(expect.keys) } else { expect.count = len(expect.get) } } if expect.keys != nil { Convey(fmt.Sprintf("expect %d (keys)", j), func() { err := runner(func(c context.Context) error { data := ds.Get(c) count, err := data.Count(expect.q) So(err, ShouldBeNil) So(count, ShouldEqual, expect.count) rslt := []*ds.Key(nil) So(data.GetAll(expect.q, &rslt), ShouldBeNil) So(len(rslt), ShouldEqual, len(expect.keys)) for i, r := range rslt { So(r, ShouldResemble, expect.keys[i]) } return nil }, &ds.TransactionOptions{XG: true}) So(err, ShouldBeNil) }) } if expect.get != nil { Convey(fmt.Sprintf("expect %d (data)", j), func() { err := runner(func(c context.Context) error { data := ds.Get(c) count, err := data.Count(expect.q) So(err, ShouldBeNil) So(count, ShouldEqual, expect.count) rslt := []ds.PropertyMap(nil) So(data.GetAll(expect.q, &rslt), ShouldBeNil) So(len(rslt), ShouldEqual, len(expect.get)) for i, r := range rslt { So(r, ShouldResemble, expect.get[i]) } return nil }, &ds.TransactionOptions{XG: true}) So(err, ShouldBeNil) }) } } for j, fn := range stage.extraFns { Convey(fmt.Sprintf("extraFn %d", j), func() { fn(c) }) } }) } }) } }) Convey("Test AutoIndex", t, func() { c, err := info.Get(Use(context.Background())).Namespace("ns") if err != nil { panic(err) } data := ds.Get(c) testing := data.Testable() testing.Consistent(true) So(data.Put(pmap("$key", key("Kind", 1), Next, "Val", 1, 2, 3, Next, "Extra", "hello", )), ShouldBeNil) So(data.Put(pmap("$key", key("Kind", 2), Next, "Val", 2, 3, 9, Next, "Extra", "ace", "hello", "there", )), ShouldBeNil) q := nq("Kind").Gt("Val", 2).Order("Val", "Extra") count, err := data.Count(q) So(err, ShouldErrLike, "Insufficient indexes") testing.AutoIndex(true) count, err = data.Count(q) So(err, ShouldBeNil) So(count, ShouldEqual, 2) }) }
func TestQuerySupport(t *testing.T) { t.Parallel() Convey("Queries", t, func() { Convey("Good", func() { q := datastore.NewQuery("Foo").Ancestor(root) Convey("normal", func() { _, _, ds := mkds(dataSingleRoot) ds.Testable().AddIndexes(&datastore.IndexDefinition{ Kind: "Foo", Ancestor: true, SortBy: []datastore.IndexColumn{ {Property: "Value"}, }, }) So(ds.RunInTransaction(func(c context.Context) error { ds := datastore.Get(c) q = q.Lt("Value", 400000000000000000) vals := []*Foo{} So(ds.GetAll(q, &vals), ShouldBeNil) So(len(vals), ShouldEqual, 8) count, err := ds.Count(q) So(err, ShouldBeNil) So(count, ShouldEqual, 8) f := &Foo{ID: 1, Parent: root} So(ds.Get(f), ShouldBeNil) f.Value = append(f.Value, 100) So(ds.Put(f), ShouldBeNil) // Wowee, zowee, merged queries! vals2 := []*Foo{} So(ds.GetAll(q, &vals2), ShouldBeNil) So(len(vals2), ShouldEqual, 9) So(vals2[0], ShouldResemble, f) vals2 = []*Foo{} So(ds.GetAll(q.Limit(2).Offset(1), &vals2), ShouldBeNil) So(len(vals2), ShouldEqual, 2) So(vals2, ShouldResemble, vals[:2]) return nil }, nil), ShouldBeNil) }) Convey("keysOnly", func() { _, _, ds := mkds([]*Foo{ {ID: 2, Parent: root, Value: []int64{1, 2, 3, 4, 5, 6, 7}}, {ID: 3, Parent: root, Value: []int64{3, 4, 5, 6, 7, 8, 9}}, {ID: 4, Parent: root, Value: []int64{3, 5, 7, 9, 11, 100, 1}}, {ID: 5, Parent: root, Value: []int64{1, 70, 101}}, }) So(ds.RunInTransaction(func(c context.Context) error { ds := datastore.Get(c) q = q.Eq("Value", 1).KeysOnly(true) vals := []*datastore.Key{} So(ds.GetAll(q, &vals), ShouldBeNil) So(len(vals), ShouldEqual, 3) So(vals[2], ShouldResemble, ds.MakeKey("Parent", 1, "Foo", 5)) // can remove keys So(ds.Delete(ds.MakeKey("Parent", 1, "Foo", 2)), ShouldBeNil) vals = []*datastore.Key{} So(ds.GetAll(q, &vals), ShouldBeNil) So(len(vals), ShouldEqual, 2) // and add new ones So(ds.Put(&Foo{ID: 1, Parent: root, Value: []int64{1, 7, 100}}), ShouldBeNil) So(ds.Put(&Foo{ID: 7, Parent: root, Value: []int64{20, 1}}), ShouldBeNil) vals = []*datastore.Key{} So(ds.GetAll(q, &vals), ShouldBeNil) So(len(vals), ShouldEqual, 4) So(vals[0].IntID(), ShouldEqual, 1) So(vals[1].IntID(), ShouldEqual, 4) So(vals[2].IntID(), ShouldEqual, 5) So(vals[3].IntID(), ShouldEqual, 7) return nil }, nil), ShouldBeNil) }) Convey("project", func() { _, _, ds := mkds([]*Foo{ {ID: 2, Parent: root, Value: []int64{1, 2, 3, 4, 5, 6, 7}}, {ID: 3, Parent: root, Value: []int64{3, 4, 5, 6, 7, 8, 9}}, {ID: 4, Parent: root, Value: []int64{3, 5, 7, 9, 11, 100, 1}}, {ID: 5, Parent: root, Value: []int64{1, 70, 101}}, }) ds.Testable().AddIndexes(&datastore.IndexDefinition{ Kind: "Foo", Ancestor: true, SortBy: []datastore.IndexColumn{ {Property: "Value"}, }, }) So(ds.RunInTransaction(func(c context.Context) error { ds := datastore.Get(c) count, err := ds.Count(q.Project("Value")) So(err, ShouldBeNil) So(count, ShouldEqual, 24) q = q.Project("Value").Offset(4).Limit(10) vals := []datastore.PropertyMap{} So(ds.GetAll(q, &vals), ShouldBeNil) So(len(vals), ShouldEqual, 10) expect := []struct { id int64 val int64 }{ {2, 3}, {3, 3}, {4, 3}, {2, 4}, {3, 4}, {2, 5}, {3, 5}, {4, 5}, {2, 6}, {3, 6}, } for i, pm := range vals { So(datastore.GetMetaDefault(pm, "key", nil), ShouldResemble, ds.MakeKey("Parent", 1, "Foo", expect[i].id)) So(pm["Value"][0].Value(), ShouldEqual, expect[i].val) } // should remove 4 entries, but there are plenty more to fill So(ds.Delete(ds.MakeKey("Parent", 1, "Foo", 2)), ShouldBeNil) vals = []datastore.PropertyMap{} So(ds.GetAll(q, &vals), ShouldBeNil) So(len(vals), ShouldEqual, 10) expect = []struct { 
id int64 val int64 }{ // note (3, 3) and (4, 3) are correctly missing because deleting // 2 removed two entries which are hidden by the Offset(4). {3, 4}, {3, 5}, {4, 5}, {3, 6}, {3, 7}, {4, 7}, {3, 8}, {3, 9}, {4, 9}, {4, 11}, } for i, pm := range vals { So(datastore.GetMetaDefault(pm, "key", nil), ShouldResemble, ds.MakeKey("Parent", 1, "Foo", expect[i].id)) So(pm["Value"][0].Value(), ShouldEqual, expect[i].val) } So(ds.Put(&Foo{ID: 1, Parent: root, Value: []int64{3, 9}}), ShouldBeNil) vals = []datastore.PropertyMap{} So(ds.GetAll(q, &vals), ShouldBeNil) So(len(vals), ShouldEqual, 10) expect = []struct { id int64 val int64 }{ // 'invisible' {1, 3} entry bumps the {4, 3} into view. {4, 3}, {3, 4}, {3, 5}, {4, 5}, {3, 6}, {3, 7}, {4, 7}, {3, 8}, {1, 9}, {3, 9}, {4, 9}, } for i, pm := range vals { So(datastore.GetMetaDefault(pm, "key", nil), ShouldResemble, ds.MakeKey("Parent", 1, "Foo", expect[i].id)) So(pm["Value"][0].Value(), ShouldEqual, expect[i].val) } return nil }, nil), ShouldBeNil) }) Convey("project+distinct", func() { _, _, ds := mkds([]*Foo{ {ID: 2, Parent: root, Value: []int64{1, 2, 3, 4, 5, 6, 7}}, {ID: 3, Parent: root, Value: []int64{3, 4, 5, 6, 7, 8, 9}}, {ID: 4, Parent: root, Value: []int64{3, 5, 7, 9, 11, 100, 1}}, {ID: 5, Parent: root, Value: []int64{1, 70, 101}}, }) ds.Testable().AddIndexes(&datastore.IndexDefinition{ Kind: "Foo", Ancestor: true, SortBy: []datastore.IndexColumn{ {Property: "Value"}, }, }) So(ds.RunInTransaction(func(c context.Context) error { ds := datastore.Get(c) q = q.Project("Value").Distinct(true) vals := []datastore.PropertyMap{} So(ds.GetAll(q, &vals), ShouldBeNil) So(len(vals), ShouldEqual, 13) expect := []struct { id int64 val int64 }{ {2, 1}, {2, 2}, {2, 3}, {2, 4}, {2, 5}, {2, 6}, {2, 7}, {3, 8}, {3, 9}, {4, 11}, {5, 70}, {4, 100}, {5, 101}, } for i, pm := range vals { So(pm["Value"][0].Value(), ShouldEqual, expect[i].val) So(datastore.GetMetaDefault(pm, "key", nil), ShouldResemble, ds.MakeKey("Parent", 1, "Foo", expect[i].id)) } return nil }, nil), ShouldBeNil) }) Convey("overwrite", func() { data := []*Foo{ {ID: 2, Parent: root, Value: []int64{1, 2, 3, 4, 5, 6, 7}}, {ID: 3, Parent: root, Value: []int64{3, 4, 5, 6, 7, 8, 9}}, {ID: 4, Parent: root, Value: []int64{3, 5, 7, 9, 11, 100, 1, 2}}, {ID: 5, Parent: root, Value: []int64{1, 70, 101}}, } _, _, ds := mkds(data) q = q.Eq("Value", 2, 3) So(ds.RunInTransaction(func(c context.Context) error { ds := datastore.Get(c) vals := []*Foo{} So(ds.GetAll(q, &vals), ShouldBeNil) So(len(vals), ShouldEqual, 2) So(vals[0], ShouldResemble, data[0]) So(vals[1], ShouldResemble, data[2]) foo2 := &Foo{ID: 2, Parent: root, Value: []int64{2, 3}} So(ds.Put(foo2), ShouldBeNil) vals = []*Foo{} So(ds.GetAll(q, &vals), ShouldBeNil) So(len(vals), ShouldEqual, 2) So(vals[0], ShouldResemble, foo2) So(vals[1], ShouldResemble, data[2]) foo1 := &Foo{ID: 1, Parent: root, Value: []int64{2, 3}} So(ds.Put(foo1), ShouldBeNil) vals = []*Foo{} So(ds.GetAll(q, &vals), ShouldBeNil) So(len(vals), ShouldEqual, 3) So(vals[0], ShouldResemble, foo1) So(vals[1], ShouldResemble, foo2) So(vals[2], ShouldResemble, data[2]) return nil }, nil), ShouldBeNil) }) projectData := []*Foo{ {ID: 2, Parent: root, Value: []int64{1, 2, 3, 4, 5, 6, 7}, Sort: []string{"x", "z"}}, {ID: 3, Parent: root, Value: []int64{3, 4, 5, 6, 7, 8, 9}, Sort: []string{"b"}}, {ID: 4, Parent: root, Value: []int64{3, 5, 7, 9, 11, 100, 1, 2}, Sort: []string{"aa", "a"}}, {ID: 5, Parent: root, Value: []int64{1, 70, 101}, Sort: []string{"c"}}, } Convey("project+extra orders", 
func() { _, _, ds := mkds(projectData) ds.Testable().AddIndexes(&datastore.IndexDefinition{ Kind: "Foo", Ancestor: true, SortBy: []datastore.IndexColumn{ {Property: "Sort", Descending: true}, {Property: "Value", Descending: true}, }, }) q = q.Project("Value").Order("-Sort", "-Value").Distinct(true) So(ds.RunInTransaction(func(c context.Context) error { ds = datastore.Get(c) So(ds.Put(&Foo{ ID: 1, Parent: root, Value: []int64{0, 1, 1000}, Sort: []string{"zz"}}), ShouldBeNil) vals := []datastore.PropertyMap{} So(ds.GetAll(q, &vals), ShouldBeNil) expect := []struct { id int64 val int64 }{ {1, 1000}, {1, 1}, {1, 0}, {2, 7}, {2, 6}, {2, 5}, {2, 4}, {2, 3}, {2, 2}, {5, 101}, {5, 70}, {3, 9}, {3, 8}, {4, 100}, {4, 11}, } for i, pm := range vals { So(pm["Value"][0].Value(), ShouldEqual, expect[i].val) So(datastore.GetMetaDefault(pm, "key", nil), ShouldResemble, ds.MakeKey("Parent", 1, "Foo", expect[i].id)) } return nil }, nil), ShouldBeNil) }) Convey("buffered entity sorts before ineq, but after first parent entity", func() { // If we got this wrong, we'd see Foo,3 come before Foo,2. This might // happen because we calculate the comparison string for each entity // based on the whole entity, but we forgot to limit the comparison // string generation by the inequality criteria. data := []*Foo{ {ID: 2, Parent: root, Value: []int64{2, 3, 5, 6}, Sort: []string{"z"}}, } _, _, ds := mkds(data) ds.Testable().AddIndexes(&datastore.IndexDefinition{ Kind: "Foo", Ancestor: true, SortBy: []datastore.IndexColumn{ {Property: "Value"}, }, }) q = q.Gt("Value", 2).Limit(2) So(ds.RunInTransaction(func(c context.Context) error { ds = datastore.Get(c) foo1 := &Foo{ID: 3, Parent: root, Value: []int64{0, 2, 3, 4}} So(ds.Put(foo1), ShouldBeNil) vals := []*Foo{} So(ds.GetAll(q, &vals), ShouldBeNil) So(len(vals), ShouldEqual, 2) So(vals[0], ShouldResemble, data[0]) So(vals[1], ShouldResemble, foo1) return nil }, nil), ShouldBeNil) }) Convey("keysOnly+extra orders", func() { _, _, ds := mkds(projectData) ds.Testable().AddIndexes(&datastore.IndexDefinition{ Kind: "Foo", Ancestor: true, SortBy: []datastore.IndexColumn{ {Property: "Sort"}, }, }) q = q.Order("Sort").KeysOnly(true) So(ds.RunInTransaction(func(c context.Context) error { ds = datastore.Get(c) So(ds.Put(&Foo{ ID: 1, Parent: root, Value: []int64{0, 1, 1000}, Sort: []string{"x", "zz"}}), ShouldBeNil) So(ds.Put(&Foo{ ID: 2, Parent: root, Value: []int64{0, 1, 1000}, Sort: []string{"zz", "zzz", "zzzz"}}), ShouldBeNil) vals := []*datastore.Key{} So(ds.GetAll(q, &vals), ShouldBeNil) So(len(vals), ShouldEqual, 5) So(vals, ShouldResemble, []*datastore.Key{ ds.MakeKey("Parent", 1, "Foo", 4), ds.MakeKey("Parent", 1, "Foo", 3), ds.MakeKey("Parent", 1, "Foo", 5), ds.MakeKey("Parent", 1, "Foo", 1), ds.MakeKey("Parent", 1, "Foo", 2), }) return nil }, nil), ShouldBeNil) }) Convey("query accross nested transactions", func() { _, _, ds := mkds(projectData) q = q.Eq("Value", 2, 3) foo1 := &Foo{ID: 1, Parent: root, Value: []int64{2, 3}} foo7 := &Foo{ID: 7, Parent: root, Value: []int64{2, 3}} So(ds.RunInTransaction(func(c context.Context) error { ds := datastore.Get(c) So(ds.Put(foo1), ShouldBeNil) vals := []*Foo{} So(ds.GetAll(q, &vals), ShouldBeNil) So(vals, ShouldResemble, []*Foo{foo1, projectData[0], projectData[2]}) So(ds.RunInTransaction(func(c context.Context) error { ds := datastore.Get(c) vals := []*Foo{} So(ds.GetAll(q, &vals), ShouldBeNil) So(vals, ShouldResemble, []*Foo{foo1, projectData[0], projectData[2]}) So(ds.Delete(ds.MakeKey("Parent", 1, "Foo", 4)), ShouldBeNil) 
So(ds.Put(foo7), ShouldBeNil) vals = []*Foo{} So(ds.GetAll(q, &vals), ShouldBeNil) So(vals, ShouldResemble, []*Foo{foo1, projectData[0], foo7}) return nil }, nil), ShouldBeNil) vals = []*Foo{} So(ds.GetAll(q, &vals), ShouldBeNil) So(vals, ShouldResemble, []*Foo{foo1, projectData[0], foo7}) return nil }, nil), ShouldBeNil) vals := []*Foo{} So(ds.GetAll(q, &vals), ShouldBeNil) So(vals, ShouldResemble, []*Foo{foo1, projectData[0], foo7}) }) Convey("start transaction from inside query", func() { _, _, ds := mkds(projectData) So(ds.RunInTransaction(func(c context.Context) error { ds := datastore.Get(c) q := datastore.NewQuery("Foo").Ancestor(root) return ds.Run(q, func(pm datastore.PropertyMap) { So(ds.RunInTransaction(func(c context.Context) error { ds := datastore.Get(c) pm["Value"] = append(pm["Value"], datastore.MkProperty("wat")) return ds.Put(pm) }, nil), ShouldBeNil) }) }, &datastore.TransactionOptions{XG: true}), ShouldBeNil) So(ds.Run(datastore.NewQuery("Foo"), func(pm datastore.PropertyMap) { val := pm["Value"] So(val[len(val)-1].Value(), ShouldResemble, "wat") }), ShouldBeNil) }) }) }) }
func TestTransactionBuffers(t *testing.T) { t.Parallel() Convey("Get/Put/Delete", t, func() { under, over, ds := mkds(dataMultiRoot) ds.Testable().SetTransactionRetryCount(1) So(under.PutMulti.Total(), ShouldEqual, 0) So(over.PutMulti.Total(), ShouldEqual, 0) Convey("Good", func() { Convey("read-only", func() { So(ds.RunInTransaction(func(c context.Context) error { ds := datastore.Get(c) So(4, fooShouldHave(ds), dataMultiRoot[3].Value) return nil }, nil), ShouldBeNil) }) Convey("single-level read/write", func() { So(ds.RunInTransaction(func(c context.Context) error { ds := datastore.Get(c) So(4, fooShouldHave(ds), dataMultiRoot[3].Value) So(4, fooSetTo(ds), 1, 2, 3, 4) So(3, fooSetTo(ds), 1, 2, 3, 4) // look! it remembers :) So(4, fooShouldHave(ds), 1, 2, 3, 4) return nil }, &datastore.TransactionOptions{XG: true}), ShouldBeNil) // 2 because we are simulating a transaction failure So(under.PutMulti.Total(), ShouldEqual, 2) So(3, fooShouldHave(ds), 1, 2, 3, 4) So(4, fooShouldHave(ds), 1, 2, 3, 4) }) Convey("multi-level read/write", func() { So(ds.RunInTransaction(func(c context.Context) error { ds := datastore.Get(c) So(3, fooShouldHave(ds), dataMultiRoot[2].Value) So(3, fooSetTo(ds), 1, 2, 3, 4) So(7, fooSetTo(ds)) vals := []*Foo{ {ID: 793}, {ID: 7}, {ID: 3}, {ID: 4}, } So(ds.GetMulti(vals), ShouldResemble, errors.NewMultiError( datastore.ErrNoSuchEntity, datastore.ErrNoSuchEntity, nil, nil, )) So(vals[0].Value, ShouldBeNil) So(vals[1].Value, ShouldBeNil) So(vals[2].Value, ShouldResemble, []int64{1, 2, 3, 4}) So(vals[3].Value, ShouldResemble, dataSingleRoot[3].Value) // inner, failing, transaction So(ds.RunInTransaction(func(c context.Context) error { ds := datastore.Get(c) // we can see stuff written in the outer txn So(7, fooShouldHave(ds)) So(3, fooShouldHave(ds), 1, 2, 3, 4) So(3, fooSetTo(ds), 10, 20, 30, 40) // disaster strikes! 
return errors.New("whaaaa") }, nil), ShouldErrLike, "whaaaa") So(3, fooShouldHave(ds), 1, 2, 3, 4) // inner, successful, transaction So(ds.RunInTransaction(func(c context.Context) error { ds := datastore.Get(c) So(3, fooShouldHave(ds), 1, 2, 3, 4) So(3, fooSetTo(ds), 10, 20, 30, 40) return nil }, nil), ShouldBeNil) // now we see it So(3, fooShouldHave(ds), 10, 20, 30, 40) return nil }, &datastore.TransactionOptions{XG: true}), ShouldBeNil) // 2 because we are simulating a transaction failure So(under.PutMulti.Total(), ShouldEqual, 2) So(under.DeleteMulti.Total(), ShouldEqual, 2) So(over.PutMulti.Total(), ShouldEqual, 8) So(7, fooShouldHave(ds)) So(3, fooShouldHave(ds), 10, 20, 30, 40) }) Convey("can allocate IDs from an inner transaction", func() { nums := []int64{4, 8, 15, 16, 23, 42} k := (*datastore.Key)(nil) So(ds.RunInTransaction(func(c context.Context) error { ds := datastore.Get(c) So(ds.RunInTransaction(func(c context.Context) error { ds := datastore.Get(c) f := &Foo{Value: nums} So(ds.Put(f), ShouldBeNil) k = ds.KeyForObj(f) return nil }, nil), ShouldBeNil) So(k.IntID(), fooShouldHave(ds), nums) return nil }, nil), ShouldBeNil) So(k.IntID(), fooShouldHave(ds), nums) }) }) Convey("Bad", func() { Convey("too many roots", func() { So(ds.RunInTransaction(func(c context.Context) error { ds := datastore.Get(c) f := &Foo{ID: 7} So(ds.Get(f), ShouldBeNil) So(f, ShouldResemble, dataMultiRoot[6]) So(ds.RunInTransaction(func(c context.Context) error { return datastore.Get(c).Get(&Foo{ID: 6}) }, nil), ShouldErrLike, "too many entity groups") f.Value = []int64{9} So(ds.Put(f), ShouldBeNil) return nil }, nil), ShouldBeNil) f := &Foo{ID: 7} So(ds.Get(f), ShouldBeNil) So(f.Value, ShouldResemble, []int64{9}) }) Convey("buffered errors never reach the datastore", func() { So(ds.RunInTransaction(func(c context.Context) error { ds := datastore.Get(c) So(ds.Put(&Foo{ID: 1, Value: []int64{1, 2, 3, 4}}), ShouldBeNil) return errors.New("boop") }, nil), ShouldErrLike, "boop") So(under.PutMulti.Total(), ShouldEqual, 0) So(over.PutMulti.Successes(), ShouldEqual, 1) }) }) }) }
func TestCount(t *testing.T) { t.Parallel() Convey("Test Count filter", t, func() { c, fb := featureBreaker.FilterRDS(memory.Use(context.Background()), nil) c, ctr := FilterRDS(c) So(c, ShouldNotBeNil) So(ctr, ShouldNotBeNil) ds := datastore.Get(c) vals := []datastore.PropertyMap{{ "Val": {datastore.MkProperty(100)}, "$key": {datastore.MkPropertyNI(ds.NewKey("Kind", "", 1, nil))}, }} Convey("Calling a ds function should reflect in counter", func() { So(ds.PutMulti(vals), ShouldBeNil) So(ctr.PutMulti.Successes(), ShouldEqual, 1) Convey("effects are cumulative", func() { So(ds.PutMulti(vals), ShouldBeNil) So(ctr.PutMulti.Successes(), ShouldEqual, 2) Convey("even within transactions", func() { die(ds.RunInTransaction(func(c context.Context) error { ds := datastore.Get(c) So(ds.PutMulti(append(vals, vals[0])), ShouldBeNil) return nil }, nil)) }) }) }) Convey("errors count against errors", func() { fb.BreakFeatures(nil, "GetMulti") So(ds.GetMulti(vals), ShouldErrLike, `"GetMulti" is broken`) So(ctr.GetMulti.Errors(), ShouldEqual, 1) fb.UnbreakFeatures("GetMulti") So(ds.PutMulti(vals), ShouldBeNil) die(ds.GetMulti(vals)) So(ctr.GetMulti.Errors(), ShouldEqual, 1) So(ctr.GetMulti.Successes(), ShouldEqual, 1) So(ctr.GetMulti.Total(), ShouldEqual, 2) }) }) Convey("works for memcache", t, func() { c, ctr := FilterMC(memory.Use(context.Background())) So(c, ShouldNotBeNil) So(ctr, ShouldNotBeNil) mc := memcache.Get(c) die(mc.Set(mc.NewItem("hello").SetValue([]byte("sup")))) _, err := mc.Get("Wat") So(err, ShouldNotBeNil) _, err = mc.Get("hello") die(err) So(ctr.SetMulti, shouldHaveSuccessesAndErrors, 1, 0) So(ctr.GetMulti, shouldHaveSuccessesAndErrors, 2, 0) So(ctr.NewItem, shouldHaveSuccessesAndErrors, 3, 0) }) Convey("works for taskqueue", t, func() { c, ctr := FilterTQ(memory.Use(context.Background())) So(c, ShouldNotBeNil) So(ctr, ShouldNotBeNil) tq := taskqueue.Get(c) die(tq.Add(&taskqueue.Task{Name: "wat"}, "")) So(tq.Add(&taskqueue.Task{Name: "wat"}, "DNE_QUEUE"), ShouldErrLike, "UNKNOWN_QUEUE") So(ctr.AddMulti, shouldHaveSuccessesAndErrors, 1, 1) }) Convey("works for global info", t, func() { c, fb := featureBreaker.FilterGI(memory.Use(context.Background()), nil) c, ctr := FilterGI(c) So(c, ShouldNotBeNil) So(ctr, ShouldNotBeNil) gi := info.Get(c) _, err := gi.Namespace("foo") die(err) fb.BreakFeatures(nil, "Namespace") _, err = gi.Namespace("boom") So(err, ShouldErrLike, `"Namespace" is broken`) So(ctr.Namespace, shouldHaveSuccessesAndErrors, 1, 1) }) Convey("works for user", t, func() { c, fb := featureBreaker.FilterUser(memory.Use(context.Background()), nil) c, ctr := FilterUser(c) So(c, ShouldNotBeNil) So(ctr, ShouldNotBeNil) u := user.Get(c) _, err := u.CurrentOAuth("foo") die(err) fb.BreakFeatures(nil, "CurrentOAuth") _, err = u.CurrentOAuth("foo") So(err, ShouldErrLike, `"CurrentOAuth" is broken`) So(ctr.CurrentOAuth, shouldHaveSuccessesAndErrors, 1, 1) }) Convey("works for mail", t, func() { c, fb := featureBreaker.FilterMail(memory.Use(context.Background()), nil) c, ctr := FilterMail(c) So(c, ShouldNotBeNil) So(ctr, ShouldNotBeNil) m := mail.Get(c) err := m.Send(&mail.Message{ Sender: "*****@*****.**", To: []string{"*****@*****.**"}, Body: "hi", }) die(err) fb.BreakFeatures(nil, "Send") err = m.Send(&mail.Message{ Sender: "*****@*****.**", To: []string{"*****@*****.**"}, Body: "hi", }) So(err, ShouldErrLike, `"Send" is broken`) So(ctr.Send, shouldHaveSuccessesAndErrors, 1, 1) }) }
func TestDatastoreSingleReadWriter(t *testing.T) { t.Parallel() Convey("Datastore single reads and writes", t, func() { c := Use(context.Background()) ds := dsS.Get(c) So(ds, ShouldNotBeNil) Convey("getting objects that DNE is an error", func() { So(ds.Get(&Foo{ID: 1}), ShouldEqual, dsS.ErrNoSuchEntity) }) Convey("bad namespaces fail", func() { _, err := infoS.Get(c).Namespace("$$blzyall") So(err.Error(), ShouldContainSubstring, "namespace \"$$blzyall\" does not match") }) Convey("Can Put stuff", func() { // with an incomplete key! f := &Foo{Val: 10} So(ds.Put(f), ShouldBeNil) k := ds.KeyForObj(f) So(k.String(), ShouldEqual, "dev~app::/Foo,1") Convey("and Get it back", func() { newFoo := &Foo{ID: 1} So(ds.Get(newFoo), ShouldBeNil) So(newFoo, ShouldResemble, f) Convey("but it's hidden from a different namespace", func() { c, err := infoS.Get(c).Namespace("whombat") So(err, ShouldBeNil) ds = dsS.Get(c) So(ds.Get(f), ShouldEqual, dsS.ErrNoSuchEntity) }) Convey("and we can Delete it", func() { So(ds.Delete(k), ShouldBeNil) So(ds.Get(newFoo), ShouldEqual, dsS.ErrNoSuchEntity) }) }) Convey("Deleteing with a bogus key is bad", func() { So(ds.Delete(ds.NewKey("Foo", "wat", 100, nil)), ShouldEqual, dsS.ErrInvalidKey) }) Convey("Deleteing a DNE entity is fine", func() { So(ds.Delete(ds.NewKey("Foo", "wat", 0, nil)), ShouldBeNil) }) Convey("Deleting entities from a nonexistant namespace works", func() { aid := infoS.Get(c).FullyQualifiedAppID() keys := make([]*dsS.Key, 10) for i := range keys { keys[i] = ds.MakeKey(aid, "noexist", "Kind", i+1) } So(ds.DeleteMulti(keys), ShouldBeNil) count := 0 So(ds.Raw().DeleteMulti(keys, func(err error) error { count++ So(err, ShouldBeNil) return nil }), ShouldBeNil) So(count, ShouldEqual, len(keys)) }) Convey("with multiple puts", func() { So(testGetMeta(c, k), ShouldEqual, 1) foos := make([]Foo, 10) for i := range foos { foos[i].Val = 10 foos[i].Parent = k } So(ds.PutMulti(foos), ShouldBeNil) So(testGetMeta(c, k), ShouldEqual, 11) keys := make([]*dsS.Key, len(foos)) for i, f := range foos { keys[i] = ds.KeyForObj(&f) } Convey("ensure that group versions persist across deletes", func() { So(ds.DeleteMulti(append(keys, k)), ShouldBeNil) ds.Testable().CatchupIndexes() count := 0 So(ds.Run(dsS.NewQuery(""), func(_ *dsS.Key) { count++ }), ShouldBeNil) So(count, ShouldEqual, 3) So(testGetMeta(c, k), ShouldEqual, 22) So(ds.Put(&Foo{ID: 1}), ShouldBeNil) So(testGetMeta(c, k), ShouldEqual, 23) }) Convey("can Get", func() { vals := make([]dsS.PropertyMap, len(keys)) for i := range vals { vals[i] = dsS.PropertyMap{} So(vals[i].SetMeta("key", keys[i]), ShouldBeTrue) } So(ds.GetMulti(vals), ShouldBeNil) for i, val := range vals { So(val, ShouldResemble, dsS.PropertyMap{ "Val": {dsS.MkProperty(10)}, "$key": {dsS.MkPropertyNI(keys[i])}, }) } }) }) Convey("allocating ids prevents their use", func() { start, err := ds.AllocateIDs(ds.MakeKey("Foo", 0), 100) So(err, ShouldBeNil) So(start, ShouldEqual, 2) f := &Foo{Val: 10} So(ds.Put(f), ShouldBeNil) k := ds.KeyForObj(f) So(k.String(), ShouldEqual, "dev~app::/Foo,102") }) }) Convey("implements DSTransactioner", func() { Convey("Put", func() { f := &Foo{Val: 10} So(ds.Put(f), ShouldBeNil) k := ds.KeyForObj(f) So(k.String(), ShouldEqual, "dev~app::/Foo,1") Convey("can Put new entity groups", func() { err := ds.RunInTransaction(func(c context.Context) error { ds := dsS.Get(c) f := &Foo{Val: 100} So(ds.Put(f), ShouldBeNil) So(f.ID, ShouldEqual, 2) f.ID = 0 f.Val = 200 So(ds.Put(f), ShouldBeNil) So(f.ID, ShouldEqual, 3) return nil }, 
&dsS.TransactionOptions{XG: true}) So(err, ShouldBeNil) f := &Foo{ID: 2} So(ds.Get(f), ShouldBeNil) So(f.Val, ShouldEqual, 100) f.ID = 3 So(ds.Get(f), ShouldBeNil) So(f.Val, ShouldEqual, 200) }) Convey("can Put new entities in a current group", func() { err := ds.RunInTransaction(func(c context.Context) error { ds := dsS.Get(c) f := &Foo{Val: 100, Parent: k} So(ds.Put(f), ShouldBeNil) So(ds.KeyForObj(f).String(), ShouldEqual, "dev~app::/Foo,1/Foo,1") f.ID = 0 f.Val = 200 So(ds.Put(f), ShouldBeNil) So(ds.KeyForObj(f).String(), ShouldEqual, "dev~app::/Foo,1/Foo,2") return nil }, nil) So(err, ShouldBeNil) f := &Foo{ID: 1, Parent: k} So(ds.Get(f), ShouldBeNil) So(f.Val, ShouldEqual, 100) f.ID = 2 So(ds.Get(f), ShouldBeNil) So(f.Val, ShouldEqual, 200) }) Convey("Deletes work too", func() { err := ds.RunInTransaction(func(c context.Context) error { return dsS.Get(c).Delete(k) }, nil) So(err, ShouldBeNil) So(ds.Get(&Foo{ID: 1}), ShouldEqual, dsS.ErrNoSuchEntity) }) Convey("A Get counts against your group count", func() { err := ds.RunInTransaction(func(c context.Context) error { ds := dsS.Get(c) pm := dsS.PropertyMap{} So(pm.SetMeta("key", ds.NewKey("Foo", "", 20, nil)), ShouldBeTrue) So(ds.Get(pm), ShouldEqual, dsS.ErrNoSuchEntity) So(pm.SetMeta("key", k), ShouldBeTrue) So(ds.Get(pm).Error(), ShouldContainSubstring, "cross-group") return nil }, nil) So(err, ShouldBeNil) }) Convey("Get takes a snapshot", func() { err := ds.RunInTransaction(func(c context.Context) error { ds := dsS.Get(c) So(ds.Get(f), ShouldBeNil) So(f.Val, ShouldEqual, 10) // Don't ever do this in a real program unless you want to guarantee // a failed transaction :) f.Val = 11 So(dsS.GetNoTxn(c).Put(f), ShouldBeNil) So(ds.Get(f), ShouldBeNil) So(f.Val, ShouldEqual, 10) return nil }, nil) So(err, ShouldBeNil) f := &Foo{ID: 1} So(ds.Get(f), ShouldBeNil) So(f.Val, ShouldEqual, 11) }) Convey("and snapshots are consistent even after Puts", func() { err := ds.RunInTransaction(func(c context.Context) error { ds := dsS.Get(c) f := &Foo{ID: 1} So(ds.Get(f), ShouldBeNil) So(f.Val, ShouldEqual, 10) // Don't ever do this in a real program unless you want to guarantee // a failed transaction :) f.Val = 11 So(dsS.GetNoTxn(c).Put(f), ShouldBeNil) So(ds.Get(f), ShouldBeNil) So(f.Val, ShouldEqual, 10) f.Val = 20 So(ds.Put(f), ShouldBeNil) So(ds.Get(f), ShouldBeNil) So(f.Val, ShouldEqual, 10) // still gets 10 return nil }, &dsS.TransactionOptions{Attempts: 1}) So(err.Error(), ShouldContainSubstring, "concurrent") f := &Foo{ID: 1} So(ds.Get(f), ShouldBeNil) So(f.Val, ShouldEqual, 11) }) Convey("Reusing a transaction context is bad news", func() { txnDS := dsS.Interface(nil) err := ds.RunInTransaction(func(c context.Context) error { txnDS = dsS.Get(c) So(txnDS.Get(f), ShouldBeNil) return nil }, nil) So(err, ShouldBeNil) So(txnDS.Get(f).Error(), ShouldContainSubstring, "expired") }) Convey("Nested transactions are rejected", func() { err := ds.RunInTransaction(func(c context.Context) error { err := dsS.Get(c).RunInTransaction(func(c context.Context) error { panic("noooo") }, nil) So(err.Error(), ShouldContainSubstring, "nested transactions") return nil }, nil) So(err, ShouldBeNil) }) Convey("Concurrent transactions only accept one set of changes", func() { // Note: I think this implementation is actually /slightly/ wrong. // According to my read of the docs for appengine, when you open a // transaction it actually (essentially) holds a reference to the // entire datastore. 
Our implementation takes a snapshot of the // entity group as soon as something observes/affects it. // // That said... I'm not sure if there's really a semantic difference. err := ds.RunInTransaction(func(c context.Context) error { So(dsS.Get(c).Put(&Foo{ID: 1, Val: 21}), ShouldBeNil) err := dsS.GetNoTxn(c).RunInTransaction(func(c context.Context) error { So(dsS.Get(c).Put(&Foo{ID: 1, Val: 27}), ShouldBeNil) return nil }, nil) So(err, ShouldBeNil) return nil }, nil) So(err.Error(), ShouldContainSubstring, "concurrent") f := &Foo{ID: 1} So(ds.Get(f), ShouldBeNil) So(f.Val, ShouldEqual, 27) }) Convey("XG", func() { Convey("Modifying two groups with XG=false is invalid", func() { err := ds.RunInTransaction(func(c context.Context) error { ds := dsS.Get(c) f := &Foo{ID: 1, Val: 200} So(ds.Put(f), ShouldBeNil) f.ID = 2 err := ds.Put(f) So(err.Error(), ShouldContainSubstring, "cross-group") return err }, nil) So(err.Error(), ShouldContainSubstring, "cross-group") }) Convey("Modifying >25 groups with XG=true is invald", func() { err := ds.RunInTransaction(func(c context.Context) error { ds := dsS.Get(c) foos := make([]Foo, 25) for i := int64(1); i < 26; i++ { foos[i-1].ID = i foos[i-1].Val = 200 } So(ds.PutMulti(foos), ShouldBeNil) err := ds.Put(&Foo{ID: 26}) So(err.Error(), ShouldContainSubstring, "too many entity groups") return err }, &dsS.TransactionOptions{XG: true}) So(err.Error(), ShouldContainSubstring, "too many entity groups") }) }) Convey("Errors and panics", func() { Convey("returning an error aborts", func() { err := ds.RunInTransaction(func(c context.Context) error { ds := dsS.Get(c) So(ds.Put(&Foo{ID: 1, Val: 200}), ShouldBeNil) return fmt.Errorf("thingy") }, nil) So(err.Error(), ShouldEqual, "thingy") f := &Foo{ID: 1} So(ds.Get(f), ShouldBeNil) So(f.Val, ShouldEqual, 10) }) Convey("panicing aborts", func() { So(func() { So(ds.RunInTransaction(func(c context.Context) error { ds := dsS.Get(c) So(ds.Put(&Foo{Val: 200}), ShouldBeNil) panic("wheeeeee") }, nil), ShouldBeNil) }, ShouldPanic) f := &Foo{ID: 1} So(ds.Get(f), ShouldBeNil) So(f.Val, ShouldEqual, 10) }) }) Convey("Transaction retries", func() { tst := ds.Testable() Reset(func() { tst.SetTransactionRetryCount(0) }) Convey("SetTransactionRetryCount set to zero", func() { tst.SetTransactionRetryCount(0) calls := 0 So(ds.RunInTransaction(func(c context.Context) error { calls++ return nil }, nil), ShouldBeNil) So(calls, ShouldEqual, 1) }) Convey("default TransactionOptions is 3 attempts", func() { tst.SetTransactionRetryCount(100) // more than 3 calls := 0 So(ds.RunInTransaction(func(c context.Context) error { calls++ return nil }, nil), ShouldEqual, dsS.ErrConcurrentTransaction) So(calls, ShouldEqual, 3) }) Convey("non-default TransactionOptions ", func() { tst.SetTransactionRetryCount(100) // more than 20 calls := 0 So(ds.RunInTransaction(func(c context.Context) error { calls++ return nil }, &dsS.TransactionOptions{Attempts: 20}), ShouldEqual, dsS.ErrConcurrentTransaction) So(calls, ShouldEqual, 20) }) Convey("SetTransactionRetryCount is respected", func() { tst.SetTransactionRetryCount(1) // less than 3 calls := 0 So(ds.RunInTransaction(func(c context.Context) error { calls++ return nil }, nil), ShouldBeNil) So(calls, ShouldEqual, 2) }) Convey("fatal errors are not retried", func() { tst.SetTransactionRetryCount(1) calls := 0 So(ds.RunInTransaction(func(c context.Context) error { calls++ return fmt.Errorf("omg") }, nil).Error(), ShouldEqual, "omg") So(calls, ShouldEqual, 1) }) }) }) }) Convey("Testable.Consistent", func() { 
Convey("false", func() { ds.Testable().Consistent(false) // the default for i := 0; i < 10; i++ { So(ds.Put(&Foo{ID: int64(i + 1), Val: i + 1}), ShouldBeNil) } q := dsS.NewQuery("Foo").Gt("Val", 3) count, err := ds.Count(q) So(err, ShouldBeNil) So(count, ShouldEqual, 0) So(ds.Delete(ds.MakeKey("Foo", 4)), ShouldBeNil) count, err = ds.Count(q) So(err, ShouldBeNil) So(count, ShouldEqual, 0) ds.Testable().Consistent(true) count, err = ds.Count(q) So(err, ShouldBeNil) So(count, ShouldEqual, 6) }) Convey("true", func() { ds.Testable().Consistent(true) for i := 0; i < 10; i++ { So(ds.Put(&Foo{ID: int64(i + 1), Val: i + 1}), ShouldBeNil) } q := dsS.NewQuery("Foo").Gt("Val", 3) count, err := ds.Count(q) So(err, ShouldBeNil) So(count, ShouldEqual, 7) So(ds.Delete(ds.MakeKey("Foo", 4)), ShouldBeNil) count, err = ds.Count(q) So(err, ShouldBeNil) So(count, ShouldEqual, 6) }) }) Convey("Testable.DisableSpecialEntities", func() { ds.Testable().DisableSpecialEntities(true) So(ds.Put(&Foo{}), ShouldErrLike, "allocateIDs is disabled") So(ds.Put(&Foo{ID: 1}), ShouldBeNil) ds.Testable().CatchupIndexes() count, err := ds.Count(dsS.NewQuery("")) So(err, ShouldBeNil) So(count, ShouldEqual, 1) // normally this would include __entity_group__ }) }) }
func TestDSCache(t *testing.T) { t.Parallel() zeroTime, err := time.Parse("2006-01-02T15:04:05.999999999Z", "2006-01-02T15:04:05.999999999Z") if err != nil { panic(err) } Convey("Test dscache", t, func() { c := mathrand.Set(context.Background(), rand.New(rand.NewSource(1))) clk := testclock.New(zeroTime) c = clock.Set(c, clk) c = memory.Use(c) dsUnder := datastore.Get(c) mc := memcache.Get(c) shardsForKey := func(k *datastore.Key) int { last := k.LastTok() if last.Kind == "shardObj" { return int(last.IntID) } if last.Kind == "noCacheObj" { return 0 } return DefaultShards } numMemcacheItems := func() uint64 { stats, err := mc.Stats() So(err, ShouldBeNil) return stats.Items } Convey("enabled cases", func() { c = FilterRDS(c, shardsForKey) ds := datastore.Get(c) So(dsUnder, ShouldNotBeNil) So(ds, ShouldNotBeNil) So(mc, ShouldNotBeNil) Convey("basically works", func() { pm := datastore.PropertyMap{ "BigData": {datastore.MkProperty([]byte(""))}, "Value": {datastore.MkProperty("hi")}, } encoded := append([]byte{0}, serialize.ToBytes(pm)...) o := object{ID: 1, Value: "hi"} So(ds.Put(&o), ShouldBeNil) o = object{ID: 1} So(dsUnder.Get(&o), ShouldBeNil) So(o.Value, ShouldEqual, "hi") itm, err := mc.Get(MakeMemcacheKey(0, ds.KeyForObj(&o))) So(err, ShouldEqual, memcache.ErrCacheMiss) o = object{ID: 1} So(ds.Get(&o), ShouldBeNil) So(o.Value, ShouldEqual, "hi") itm, err = mc.Get(itm.Key()) So(err, ShouldBeNil) So(itm.Value(), ShouldResemble, encoded) Convey("now we don't need the datastore!", func() { o := object{ID: 1} // delete it, bypassing the cache filter. Don't do this in production // unless you want a crappy cache. So(dsUnder.Delete(ds.KeyForObj(&o)), ShouldBeNil) itm, err := mc.Get(MakeMemcacheKey(0, ds.KeyForObj(&o))) So(err, ShouldBeNil) So(itm.Value(), ShouldResemble, encoded) So(ds.Get(&o), ShouldBeNil) So(o.Value, ShouldEqual, "hi") }) Convey("deleting it properly records that fact, however", func() { o := object{ID: 1} So(ds.Delete(ds.KeyForObj(&o)), ShouldBeNil) itm, err := mc.Get(MakeMemcacheKey(0, ds.KeyForObj(&o))) So(err, ShouldEqual, memcache.ErrCacheMiss) So(ds.Get(&o), ShouldEqual, datastore.ErrNoSuchEntity) itm, err = mc.Get(itm.Key()) So(err, ShouldBeNil) So(itm.Value(), ShouldResemble, []byte{}) // this one hits memcache So(ds.Get(&o), ShouldEqual, datastore.ErrNoSuchEntity) }) }) Convey("compression works", func() { o := object{ID: 2, Value: `¯\_(ツ)_/¯`} data := make([]byte, 4000) for i := range data { const alpha = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*()" data[i] = alpha[i%len(alpha)] } o.BigData = data So(ds.Put(&o), ShouldBeNil) So(ds.Get(&o), ShouldBeNil) itm, err := mc.Get(MakeMemcacheKey(0, ds.KeyForObj(&o))) So(err, ShouldBeNil) So(itm.Value()[0], ShouldEqual, ZlibCompression) So(len(itm.Value()), ShouldEqual, 653) // a bit smaller than 4k // ensure the next Get comes from the cache So(dsUnder.Delete(ds.KeyForObj(&o)), ShouldBeNil) o = object{ID: 2} So(ds.Get(&o), ShouldBeNil) So(o.Value, ShouldEqual, `¯\_(ツ)_/¯`) So(o.BigData, ShouldResemble, data) }) Convey("transactions", func() { Convey("work", func() { // populate an object @ ID1 So(ds.Put(&object{ID: 1, Value: "something"}), ShouldBeNil) So(ds.Get(&object{ID: 1}), ShouldBeNil) So(ds.Put(&object{ID: 2, Value: "nurbs"}), ShouldBeNil) So(ds.Get(&object{ID: 2}), ShouldBeNil) // memcache now has the wrong value (simulated race) So(dsUnder.Put(&object{ID: 1, Value: "else"}), ShouldBeNil) So(ds.RunInTransaction(func(c context.Context) error { ds := datastore.Get(c) o := &object{ID: 1} 
So(ds.Get(o), ShouldBeNil) So(o.Value, ShouldEqual, "else") o.Value = "txn" So(ds.Put(o), ShouldBeNil) So(ds.Delete(ds.KeyForObj(&object{ID: 2})), ShouldBeNil) return nil }, &datastore.TransactionOptions{XG: true}), ShouldBeNil) _, err := mc.Get(MakeMemcacheKey(0, ds.KeyForObj(&object{ID: 1}))) So(err, ShouldEqual, memcache.ErrCacheMiss) _, err = mc.Get(MakeMemcacheKey(0, ds.KeyForObj(&object{ID: 2}))) So(err, ShouldEqual, memcache.ErrCacheMiss) o := &object{ID: 1} So(ds.Get(o), ShouldBeNil) So(o.Value, ShouldEqual, "txn") }) Convey("errors don't invalidate", func() { // populate an object @ ID1 So(ds.Put(&object{ID: 1, Value: "something"}), ShouldBeNil) So(ds.Get(&object{ID: 1}), ShouldBeNil) So(numMemcacheItems(), ShouldEqual, 1) So(ds.RunInTransaction(func(c context.Context) error { ds := datastore.Get(c) o := &object{ID: 1} So(ds.Get(o), ShouldBeNil) So(o.Value, ShouldEqual, "something") o.Value = "txn" So(ds.Put(o), ShouldBeNil) return errors.New("OH NOES") }, nil).Error(), ShouldContainSubstring, "OH NOES") // memcache still has the original So(numMemcacheItems(), ShouldEqual, 1) So(dsUnder.Delete(ds.KeyForObj(&object{ID: 1})), ShouldBeNil) o := &object{ID: 1} So(ds.Get(o), ShouldBeNil) So(o.Value, ShouldEqual, "something") }) }) Convey("control", func() { Convey("per-model bypass", func() { type model struct { ID string `gae:"$id"` UseDSCache datastore.Toggle `gae:"$dscache.enable,false"` Value string } itms := []model{ {ID: "hi", Value: "something"}, {ID: "there", Value: "else", UseDSCache: datastore.On}, } So(ds.PutMulti(itms), ShouldBeNil) So(ds.GetMulti(itms), ShouldBeNil) So(numMemcacheItems(), ShouldEqual, 1) }) Convey("per-key shard count", func() { s := &shardObj{ID: 4, Value: "hi"} So(ds.Put(s), ShouldBeNil) So(ds.Get(s), ShouldBeNil) So(numMemcacheItems(), ShouldEqual, 1) for i := 0; i < 20; i++ { So(ds.Get(s), ShouldBeNil) } So(numMemcacheItems(), ShouldEqual, 4) }) Convey("per-key cache disablement", func() { n := &noCacheObj{ID: "nurbs", Value: true} So(ds.Put(n), ShouldBeNil) So(ds.Get(n), ShouldBeNil) So(numMemcacheItems(), ShouldEqual, 0) }) Convey("per-model expiration", func() { type model struct { ID int64 `gae:"$id"` DSCacheExp int64 `gae:"$dscache.expiration,7"` Value string } So(ds.Put(&model{ID: 1, Value: "mooo"}), ShouldBeNil) So(ds.Get(&model{ID: 1}), ShouldBeNil) itm, err := mc.Get(MakeMemcacheKey(0, ds.KeyForObj(&model{ID: 1}))) So(err, ShouldBeNil) clk.Add(10 * time.Second) _, err = mc.Get(itm.Key()) So(err, ShouldEqual, memcache.ErrCacheMiss) }) }) Convey("screw cases", func() { Convey("memcache contains bogus value (simulated failed AddMulti)", func() { o := &object{ID: 1, Value: "spleen"} So(ds.Put(o), ShouldBeNil) sekret := []byte("I am a banana") itm := mc.NewItem(MakeMemcacheKey(0, ds.KeyForObj(o))).SetValue(sekret) So(mc.Set(itm), ShouldBeNil) o = &object{ID: 1} So(ds.Get(o), ShouldBeNil) So(o.Value, ShouldEqual, "spleen") itm, err := mc.Get(itm.Key()) So(err, ShouldBeNil) So(itm.Flags(), ShouldEqual, ItemUKNONWN) So(itm.Value(), ShouldResemble, sekret) }) Convey("memcache contains bogus value (corrupt entry)", func() { o := &object{ID: 1, Value: "spleen"} So(ds.Put(o), ShouldBeNil) sekret := []byte("I am a banana") itm := (mc.NewItem(MakeMemcacheKey(0, ds.KeyForObj(o))). SetValue(sekret). 
SetFlags(uint32(ItemHasData))) So(mc.Set(itm), ShouldBeNil) o = &object{ID: 1} So(ds.Get(o), ShouldBeNil) So(o.Value, ShouldEqual, "spleen") itm, err := mc.Get(itm.Key()) So(err, ShouldBeNil) So(itm.Flags(), ShouldEqual, ItemHasData) So(itm.Value(), ShouldResemble, sekret) }) Convey("other entity has the lock", func() { o := &object{ID: 1, Value: "spleen"} So(ds.Put(o), ShouldBeNil) sekret := []byte("r@vmarod!#)%9T") itm := (mc.NewItem(MakeMemcacheKey(0, ds.KeyForObj(o))). SetValue(sekret). SetFlags(uint32(ItemHasLock))) So(mc.Set(itm), ShouldBeNil) o = &object{ID: 1} So(ds.Get(o), ShouldBeNil) So(o.Value, ShouldEqual, "spleen") itm, err := mc.Get(itm.Key()) So(err, ShouldBeNil) So(itm.Flags(), ShouldEqual, ItemHasLock) So(itm.Value(), ShouldResemble, sekret) }) Convey("massive entities can't be cached", func() { o := &object{ID: 1, Value: "spleen"} mr := mathrand.Get(c) numRounds := (internalValueSizeLimit / 8) * 2 buf := bytes.Buffer{} for i := 0; i < numRounds; i++ { So(binary.Write(&buf, binary.LittleEndian, mr.Int63()), ShouldBeNil) } o.BigData = buf.Bytes() So(ds.Put(o), ShouldBeNil) o.BigData = nil So(ds.Get(o), ShouldBeNil) itm, err := mc.Get(MakeMemcacheKey(0, ds.KeyForObj(o))) So(err, ShouldBeNil) // Is locked until the next put, forcing all access to the datastore. So(itm.Value(), ShouldResemble, []byte{}) So(itm.Flags(), ShouldEqual, ItemHasLock) o.BigData = []byte("hi :)") So(ds.Put(o), ShouldBeNil) So(ds.Get(o), ShouldBeNil) itm, err = mc.Get(itm.Key()) So(err, ShouldBeNil) So(itm.Flags(), ShouldEqual, ItemHasData) }) Convey("failure on Setting memcache locks is a hard stop", func() { c, fb := featureBreaker.FilterMC(c, nil) fb.BreakFeatures(nil, "SetMulti") ds := datastore.Get(c) So(ds.Put(&object{ID: 1}).Error(), ShouldContainSubstring, "SetMulti") }) Convey("failure on Setting memcache locks in a transaction is a hard stop", func() { c, fb := featureBreaker.FilterMC(c, nil) fb.BreakFeatures(nil, "SetMulti") ds := datastore.Get(c) So(ds.RunInTransaction(func(c context.Context) error { So(datastore.Get(c).Put(&object{ID: 1}), ShouldBeNil) // no problems here... memcache operations happen after the function // body quits. return nil }, nil).Error(), ShouldContainSubstring, "SetMulti") }) }) Convey("misc", func() { Convey("verify numShards caps at MaxShards", func() { sc := supportContext{shardsForKey: shardsForKey} So(sc.numShards(ds.KeyForObj(&shardObj{ID: 9001})), ShouldEqual, MaxShards) }) Convey("CompressionType.String", func() { So(NoCompression.String(), ShouldEqual, "NoCompression") So(ZlibCompression.String(), ShouldEqual, "ZlibCompression") So(CompressionType(100).String(), ShouldEqual, "UNKNOWN_CompressionType(100)") }) }) }) Convey("disabled cases", func() { defer func() { globalEnabled = true }() So(IsGloballyEnabled(c), ShouldBeTrue) So(SetGlobalEnable(c, false), ShouldBeNil) // twice is a nop So(SetGlobalEnable(c, false), ShouldBeNil) // but it takes 5 minutes to kick in So(IsGloballyEnabled(c), ShouldBeTrue) clk.Add(time.Minute*5 + time.Second) So(IsGloballyEnabled(c), ShouldBeFalse) So(mc.Set(mc.NewItem("test").SetValue([]byte("hi"))), ShouldBeNil) So(numMemcacheItems(), ShouldEqual, 1) So(SetGlobalEnable(c, true), ShouldBeNil) // memcache gets flushed as a side effect So(numMemcacheItems(), ShouldEqual, 0) // Still takes 5 minutes to kick in So(IsGloballyEnabled(c), ShouldBeFalse) clk.Add(time.Minute*5 + time.Second) So(IsGloballyEnabled(c), ShouldBeTrue) }) }) }
func TestBasicDatastore(t *testing.T) { t.Parallel() Convey("basic", t, func() { inst, err := aetest.NewInstance(&aetest.Options{ StronglyConsistentDatastore: true, }) So(err, ShouldBeNil) defer inst.Close() req, err := inst.NewRequest("GET", "/", nil) So(err, ShouldBeNil) ctx := Use(context.Background(), req) ds := datastore.Get(ctx) mc := memcache.Get(ctx) inf := info.Get(ctx) Convey("logging allows you to tweak the level", func() { // You have to visually confirm that this actually happens in the stdout // of the test... yeah I know. logging.Debugf(ctx, "SHOULD NOT SEE") logging.Infof(ctx, "SHOULD SEE") ctx = logging.SetLevel(ctx, logging.Debug) logging.Debugf(ctx, "SHOULD SEE") logging.Infof(ctx, "SHOULD SEE (2)") }) Convey("Can probe/change Namespace", func() { So(inf.GetNamespace(), ShouldEqual, "") ctx, err = inf.Namespace("wat") So(err, ShouldBeNil) inf = info.Get(ctx) So(inf.GetNamespace(), ShouldEqual, "wat") ds = datastore.Get(ctx) So(ds.MakeKey("Hello", "world").Namespace(), ShouldEqual, "wat") }) Convey("Can get non-transactional context", func() { ctx, err := inf.Namespace("foo") So(err, ShouldBeNil) ds = datastore.Get(ctx) inf = info.Get(ctx) ds.RunInTransaction(func(ctx context.Context) error { So(ds.MakeKey("Foo", "bar").Namespace(), ShouldEqual, "foo") So(ds.Put(&TestStruct{ValueI: []int64{100}}), ShouldBeNil) err = datastore.GetNoTxn(ctx).RunInTransaction(func(ctx context.Context) error { ds = datastore.Get(ctx) So(ds.MakeKey("Foo", "bar").Namespace(), ShouldEqual, "foo") So(ds.Put(&TestStruct{ValueI: []int64{100}}), ShouldBeNil) return nil }, nil) So(err, ShouldBeNil) return nil }, nil) }) Convey("Can Put/Get", func() { orig := TestStruct{ ValueI: []int64{1, 7, 946688461000000, 996688461000000}, ValueB: []bool{true, false}, ValueS: []string{"hello", "world"}, ValueF: []float64{1.0, 7.0, 946688461000000.0, 996688461000000.0}, ValueBS: [][]byte{ []byte("allo"), []byte("hello"), []byte("world"), []byte("zurple"), }, ValueK: []*datastore.Key{ ds.NewKey("Something", "Cool", 0, nil), ds.NewKey("Something", "", 1, nil), ds.NewKey("Something", "Recursive", 0, ds.NewKey("Parent", "", 2, nil)), }, ValueBK: []blobstore.Key{"bellow", "hello"}, ValueGP: []datastore.GeoPoint{ {Lat: 120.7, Lng: 95.5}, }, } So(ds.Put(&orig), ShouldBeNil) ret := TestStruct{ID: orig.ID} So(ds.Get(&ret), ShouldBeNil) So(ret, ShouldResemble, orig) // can't be sure the indexes have caught up... 
so sleep time.Sleep(time.Second) Convey("Can query", func() { q := datastore.NewQuery("TestStruct") ds.Run(q, func(ts *TestStruct) { So(*ts, ShouldResemble, orig) }) count, err := ds.Count(q) So(err, ShouldBeNil) So(count, ShouldEqual, 1) }) Convey("Can project", func() { q := datastore.NewQuery("TestStruct").Project("ValueS") rslts := []datastore.PropertyMap{} So(ds.GetAll(q, &rslts), ShouldBeNil) So(rslts, ShouldResemble, []datastore.PropertyMap{ { "$key": {mpNI(ds.KeyForObj(&orig))}, "ValueS": {mp("hello")}, }, { "$key": {mpNI(ds.KeyForObj(&orig))}, "ValueS": {mp("world")}, }, }) q = datastore.NewQuery("TestStruct").Project("ValueBS") rslts = []datastore.PropertyMap{} So(ds.GetAll(q, &rslts), ShouldBeNil) So(rslts, ShouldResemble, []datastore.PropertyMap{ { "$key": {mpNI(ds.KeyForObj(&orig))}, "ValueBS": {mp("allo")}, }, { "$key": {mpNI(ds.KeyForObj(&orig))}, "ValueBS": {mp("hello")}, }, { "$key": {mpNI(ds.KeyForObj(&orig))}, "ValueBS": {mp("world")}, }, { "$key": {mpNI(ds.KeyForObj(&orig))}, "ValueBS": {mp("zurple")}, }, }) count, err := ds.Count(q) So(err, ShouldBeNil) So(count, ShouldEqual, 4) q = datastore.NewQuery("TestStruct").Lte("ValueI", 7).Project("ValueS").Distinct(true) rslts = []datastore.PropertyMap{} So(ds.GetAll(q, &rslts), ShouldBeNil) So(rslts, ShouldResemble, []datastore.PropertyMap{ { "$key": {mpNI(ds.KeyForObj(&orig))}, "ValueI": {mp(1)}, "ValueS": {mp("hello")}, }, { "$key": {mpNI(ds.KeyForObj(&orig))}, "ValueI": {mp(1)}, "ValueS": {mp("world")}, }, { "$key": {mpNI(ds.KeyForObj(&orig))}, "ValueI": {mp(7)}, "ValueS": {mp("hello")}, }, { "$key": {mpNI(ds.KeyForObj(&orig))}, "ValueI": {mp(7)}, "ValueS": {mp("world")}, }, }) count, err = ds.Count(q) So(err, ShouldBeNil) So(count, ShouldEqual, 4) }) }) Convey("Can Put/Get (time)", func() { // time comparisons in Go are wonky, so this is pulled out pm := datastore.PropertyMap{ "$key": {mpNI(ds.NewKey("Something", "value", 0, nil))}, "Time": { mp(time.Date(1938, time.January, 1, 1, 1, 1, 1, time.UTC)), mp(time.Time{}), }, } So(ds.Put(&pm), ShouldBeNil) rslt := datastore.PropertyMap{} rslt.SetMeta("key", ds.KeyForObj(pm)) So(ds.Get(&rslt), ShouldBeNil) So(pm["Time"][0].Value(), ShouldResemble, rslt["Time"][0].Value()) q := datastore.NewQuery("Something").Project("Time") all := []datastore.PropertyMap{} So(ds.GetAll(q, &all), ShouldBeNil) So(len(all), ShouldEqual, 2) prop := all[0]["Time"][0] So(prop.Type(), ShouldEqual, datastore.PTInt) tval, err := prop.Project(datastore.PTTime) So(err, ShouldBeNil) So(tval, ShouldResemble, time.Time{}.UTC()) tval, err = all[1]["Time"][0].Project(datastore.PTTime) So(err, ShouldBeNil) So(tval, ShouldResemble, pm["Time"][0].Value()) ent := datastore.PropertyMap{ "$key": {mpNI(ds.MakeKey("Something", "value"))}, } So(ds.Get(&ent), ShouldBeNil) So(ent["Time"], ShouldResemble, pm["Time"]) }) Convey("memcache: Set (nil) is the same as Set ([]byte{})", func() { So(mc.Set(mc.NewItem("bob")), ShouldBeNil) // normally would panic because Value is nil bob, err := mc.Get("bob") So(err, ShouldBeNil) So(bob.Value(), ShouldResemble, []byte{}) }) }) }
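// Hedged sketch (illustration only, not part of the original suite): the
// minimal Put / query round trip that TestBasicDatastore walks through above.
// The sketchWidget type and putAndListWidgets are invented for the example;
// the datastore calls (Get, Put, NewQuery, GetAll) mirror the ones in the test.
type sketchWidget struct {
	ID   int64 `gae:"$id"`
	Name string
}

func putAndListWidgets(ctx context.Context) ([]*sketchWidget, error) {
	ds := datastore.Get(ctx)
	// Put assigns an ID when one is not already set on the struct.
	if err := ds.Put(&sketchWidget{Name: "first"}); err != nil {
		return nil, err
	}
	// As the test notes, non-ancestor queries may lag the write unless the
	// instance is strongly consistent, so callers may need to tolerate
	// slightly stale results here.
	out := []*sketchWidget{}
	if err := ds.GetAll(datastore.NewQuery("sketchWidget"), &out); err != nil {
		return nil, err
	}
	return out, nil
}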
"When", 946688461000000), pmap("$key", key("Kind", 3), Next, "When", 996688461000000), }}, // Original (complex) types are retained when getting the full value. {q: nq("Kind").Order("When"), get: []ds.PropertyMap{ stage1Data[1], stage1Data[3], stage1Data[2], }}, }, extraFns: []func(context.Context){ func(c context.Context) { data := ds.Get(c) curs := ds.Cursor(nil) q := nq("").Gt("__key__", key("Kind", 2)) err := data.Run(q, func(pm ds.PropertyMap, gc ds.CursorCB) error { So(pm, ShouldResemble, pmap( "$key", key("Kind", 2, "__entity_group__", 1), Next, "__version__", 1)) err := error(nil) curs, err = gc() So(err, ShouldBeNil) return ds.Stop }) So(err, ShouldBeNil)
func TestTaskQueue(t *testing.T) { t.Parallel() Convey("TaskQueue", t, func() { now := time.Date(2000, time.January, 1, 1, 1, 1, 1, time.UTC) c, tc := testclock.UseTime(context.Background(), now) c = mathrand.Set(c, rand.New(rand.NewSource(clock.Now(c).UnixNano()))) c = Use(c) tq := tqS.Get(c) tqt := tq.Testable() So(tqt, ShouldNotBeNil) So(tq, ShouldNotBeNil) Convey("implements TQMultiReadWriter", func() { Convey("Add", func() { t := tq.NewTask("/hello/world") Convey("works", func() { t.Delay = 4 * time.Second t.Header = http.Header{} t.Header.Add("Cat", "tabby") t.Payload = []byte("watwatwat") t.RetryOptions = &tqS.RetryOptions{AgeLimit: 7 * time.Second} So(tq.Add(t, ""), ShouldBeNil) name := "Z_UjshxM9ecyMQfGbZmUGOEcgxWU0_5CGLl_-RntudwAw2DqQ5-58bzJiWQN4OKzeuUb9O4JrPkUw2rOvk2Ax46THojnQ6avBQgZdrKcJmrwQ6o4qKfJdiyUbGXvy691yRfzLeQhs6cBhWrgf3wH-VPMcA4SC-zlbJ2U8An7I0zJQA5nBFnMNoMgT-2peGoay3rCSbj4z9VFFm9kS_i6JCaQH518ujLDSNCYdjTq6B6lcWrZAh0U_q3a1S2nXEwrKiw_t9MTNQFgAQZWyGBbvZQPmeRYtu8SPaWzTfd25v_YWgBuVL2rRSPSMvlDwE04nNdtvVzE8vNNiA1zRimmdzKeqATQF9_ReUvj4D7U8dcS703DZWfKMBLgBffY9jqCassOOOw77V72Oq5EVauUw3Qw0L6bBsfM9FtahTKUdabzRZjXUoze3EK4KXPt3-wdidau-8JrVf2XFocjjZbwHoxcGvbtT3b4nGLDlgwdC00bwaFBZWff" So(tqt.GetScheduledTasks()["default"][name], ShouldResemble, &tqS.Task{ ETA: now.Add(4 * time.Second), Header: http.Header{"Cat": []string{"tabby"}}, Method: "POST", Name: name, Path: "/hello/world", Payload: []byte("watwatwat"), RetryOptions: &tqS.RetryOptions{AgeLimit: 7 * time.Second}, }) }) Convey("picks up namespace", func() { c, err := info.Get(c).Namespace("coolNamespace") So(err, ShouldBeNil) tq = tqS.Get(c) t := tq.NewTask("") So(tq.Add(t, ""), ShouldBeNil) So(t.Header, ShouldResemble, http.Header{ "X-Appengine-Current-Namespace": {"coolNamespace"}, }) }) Convey("cannot add to bad queues", func() { So(tq.Add(nil, "waaat").Error(), ShouldContainSubstring, "UNKNOWN_QUEUE") Convey("but you can add Queues when testing", func() { tqt.CreateQueue("waaat") So(tq.Add(t, "waaat"), ShouldBeNil) Convey("you just can't add them twice", func() { So(func() { tqt.CreateQueue("waaat") }, ShouldPanic) }) }) }) Convey("supplies a URL if it's missing", func() { t.Path = "" So(tq.Add(t, ""), ShouldBeNil) So(t.Path, ShouldEqual, "/_ah/queue/default") }) Convey("cannot add twice", func() { t.Name = "bob" So(tq.Add(t, ""), ShouldBeNil) // can't add the same one twice! So(tq.Add(t, ""), ShouldEqual, tqS.ErrTaskAlreadyAdded) }) Convey("cannot add deleted task", func() { t.Name = "bob" So(tq.Add(t, ""), ShouldBeNil) So(tq.Delete(t, ""), ShouldBeNil) // can't add a deleted task! 
So(tq.Add(t, ""), ShouldEqual, tqS.ErrTaskAlreadyAdded) }) Convey("cannot set ETA+Delay", func() { t.ETA = clock.Now(c).Add(time.Hour) tc.Add(time.Second) t.Delay = time.Hour So(func() { So(tq.Add(t, ""), ShouldBeNil) }, ShouldPanic) }) Convey("must use a reasonable method", func() { t.Method = "Crystal" So(tq.Add(t, "").Error(), ShouldContainSubstring, "bad method") }) Convey("payload gets dumped for non POST/PUT methods", func() { t.Method = "HEAD" t.Payload = []byte("coool") So(tq.Add(t, ""), ShouldBeNil) So(t.Payload, ShouldBeNil) }) Convey("invalid names are rejected", func() { t.Name = "happy times" So(tq.Add(t, "").Error(), ShouldContainSubstring, "INVALID_TASK_NAME") }) Convey("AddMulti also works", func() { t2 := t.Duplicate() t2.Path = "/hi/city" expect := []*tqS.Task{t, t2} So(tq.AddMulti(expect, "default"), ShouldBeNil) So(len(expect), ShouldEqual, 2) So(len(tqt.GetScheduledTasks()["default"]), ShouldEqual, 2) for i := range expect { Convey(fmt.Sprintf("task %d: %s", i, expect[i].Path), func() { So(expect[i].Method, ShouldEqual, "POST") So(expect[i].ETA, ShouldHappenOnOrBefore, now) So(len(expect[i].Name), ShouldEqual, 500) }) } Convey("stats work too", func() { delay := -time.Second * 400 t := tq.NewTask("/somewhere") t.Delay = delay So(tq.Add(t, ""), ShouldBeNil) stats, err := tq.Stats("") So(err, ShouldBeNil) So(stats[0].Tasks, ShouldEqual, 3) So(stats[0].OldestETA, ShouldHappenOnOrBefore, clock.Now(c).Add(delay)) _, err = tq.Stats("noexist") So(err.Error(), ShouldContainSubstring, "UNKNOWN_QUEUE") }) Convey("can purge all tasks", func() { So(tq.Add(&tqS.Task{Path: "/wut/nerbs"}, ""), ShouldBeNil) So(tq.Purge(""), ShouldBeNil) So(len(tqt.GetScheduledTasks()["default"]), ShouldEqual, 0) So(len(tqt.GetTombstonedTasks()["default"]), ShouldEqual, 0) So(len(tqt.GetTransactionTasks()["default"]), ShouldEqual, 0) Convey("purging a queue which DNE fails", func() { So(tq.Purge("noexist").Error(), ShouldContainSubstring, "UNKNOWN_QUEUE") }) }) }) }) Convey("Delete", func() { t := &tqS.Task{Path: "/hello/world"} So(tq.Add(t, ""), ShouldBeNil) Convey("works", func() { err := tq.Delete(t, "") So(err, ShouldBeNil) So(len(tqt.GetScheduledTasks()["default"]), ShouldEqual, 0) So(len(tqt.GetTombstonedTasks()["default"]), ShouldEqual, 1) So(tqt.GetTombstonedTasks()["default"][t.Name], ShouldResemble, t) }) Convey("cannot delete a task twice", func() { So(tq.Delete(t, ""), ShouldBeNil) So(tq.Delete(t, "").Error(), ShouldContainSubstring, "TOMBSTONED_TASK") Convey("but you can if you do a reset", func() { tqt.ResetTasks() So(tq.Add(t, ""), ShouldBeNil) So(tq.Delete(t, ""), ShouldBeNil) }) }) Convey("cannot delete from bogus queues", func() { err := tq.Delete(t, "wat") So(err.Error(), ShouldContainSubstring, "UNKNOWN_QUEUE") }) Convey("cannot delete a missing task", func() { t.Name = "tarntioarenstyw" err := tq.Delete(t, "") So(err.Error(), ShouldContainSubstring, "UNKNOWN_TASK") }) Convey("DeleteMulti also works", func() { t2 := t.Duplicate() t2.Name = "" t2.Path = "/hi/city" So(tq.Add(t2, ""), ShouldBeNil) Convey("usually works", func() { So(tq.DeleteMulti([]*tqS.Task{t, t2}, ""), ShouldBeNil) So(len(tqt.GetScheduledTasks()["default"]), ShouldEqual, 0) So(len(tqt.GetTombstonedTasks()["default"]), ShouldEqual, 2) }) }) }) }) Convey("works with transactions", func() { t := &tqS.Task{Path: "/hello/world"} So(tq.Add(t, ""), ShouldBeNil) t2 := &tqS.Task{Path: "/hi/city"} So(tq.Add(t2, ""), ShouldBeNil) So(tq.Delete(t2, ""), ShouldBeNil) Convey("can view regular tasks", func() { 
So(dsS.Get(c).RunInTransaction(func(c context.Context) error { tqt := tqS.GetRaw(c).Testable() So(tqt.GetScheduledTasks()["default"][t.Name], ShouldResemble, t) So(tqt.GetTombstonedTasks()["default"][t2.Name], ShouldResemble, t2) So(tqt.GetTransactionTasks()["default"], ShouldBeNil) return nil }, nil), ShouldBeNil) }) Convey("can add a new task", func() { t3 := &tqS.Task{Path: "/sandwitch/victory"} err := dsS.Get(c).RunInTransaction(func(c context.Context) error { tq := tqS.Get(c) tqt := tq.Testable() So(tq.Add(t3, ""), ShouldBeNil) So(t3.Name, ShouldEqual, "") So(tqt.GetScheduledTasks()["default"][t.Name], ShouldResemble, t) So(tqt.GetTombstonedTasks()["default"][t2.Name], ShouldResemble, t2) So(tqt.GetTransactionTasks()["default"][0], ShouldResemble, t3) return nil }, nil) So(err, ShouldBeNil) for _, tsk := range tqt.GetScheduledTasks()["default"] { if tsk.Name == t.Name { So(tsk, ShouldResemble, t) } else { tsk.Name = "" So(tsk, ShouldResemble, t3) } } So(tqt.GetTombstonedTasks()["default"][t2.Name], ShouldResemble, t2) So(tqt.GetTransactionTasks()["default"], ShouldBeNil) }) Convey("can add a new task (but reset the state in a test)", func() { t3 := &tqS.Task{Path: "/sandwitch/victory"} ttq := tqS.Interface(nil) So(dsS.Get(c).RunInTransaction(func(c context.Context) error { ttq = tqS.Get(c) tqt := ttq.Testable() So(ttq.Add(t3, ""), ShouldBeNil) So(tqt.GetScheduledTasks()["default"][t.Name], ShouldResemble, t) So(tqt.GetTombstonedTasks()["default"][t2.Name], ShouldResemble, t2) So(tqt.GetTransactionTasks()["default"][0], ShouldResemble, t3) tqt.ResetTasks() So(len(tqt.GetScheduledTasks()["default"]), ShouldEqual, 0) So(len(tqt.GetTombstonedTasks()["default"]), ShouldEqual, 0) So(len(tqt.GetTransactionTasks()["default"]), ShouldEqual, 0) return nil }, nil), ShouldBeNil) So(len(tqt.GetScheduledTasks()["default"]), ShouldEqual, 0) So(len(tqt.GetTombstonedTasks()["default"]), ShouldEqual, 0) So(len(tqt.GetTransactionTasks()["default"]), ShouldEqual, 0) Convey("and reusing a closed context is bad times", func() { So(ttq.Add(nil, "").Error(), ShouldContainSubstring, "expired") }) }) Convey("you can AddMulti as well", func() { So(dsS.Get(c).RunInTransaction(func(c context.Context) error { tq := tqS.Get(c) tqt := tq.Testable() t.Name = "" tasks := []*tqS.Task{t.Duplicate(), t.Duplicate(), t.Duplicate()} So(tq.AddMulti(tasks, ""), ShouldBeNil) So(len(tqt.GetScheduledTasks()["default"]), ShouldEqual, 1) So(len(tqt.GetTransactionTasks()["default"]), ShouldEqual, 3) return nil }, nil), ShouldBeNil) So(len(tqt.GetScheduledTasks()["default"]), ShouldEqual, 4) So(len(tqt.GetTransactionTasks()["default"]), ShouldEqual, 0) }) Convey("unless you add too many things", func() { So(dsS.Get(c).RunInTransaction(func(c context.Context) error { for i := 0; i < 5; i++ { So(tqS.Get(c).Add(t.Duplicate(), ""), ShouldBeNil) } So(tqS.Get(c).Add(t, "").Error(), ShouldContainSubstring, "BAD_REQUEST") return nil }, nil), ShouldBeNil) }) Convey("unless you Add to a bad queue", func() { So(dsS.Get(c).RunInTransaction(func(c context.Context) error { So(tqS.Get(c).Add(t, "meat").Error(), ShouldContainSubstring, "UNKNOWN_QUEUE") Convey("unless you add it!", func() { tqS.GetRaw(c).Testable().CreateQueue("meat") So(tqS.Get(c).Add(t, "meat"), ShouldBeNil) }) return nil }, nil), ShouldBeNil) }) Convey("No other features are available, however", func() { So(dsS.Get(c).RunInTransaction(func(c context.Context) error { So(tqS.Get(c).Delete(t, "").Error(), ShouldContainSubstring, "cannot DeleteMulti from a transaction") 
So(tqS.Get(c).Purge("").Error(), ShouldContainSubstring, "cannot Purge from a transaction") _, err := tqS.Get(c).Stats("") So(err.Error(), ShouldContainSubstring, "cannot Stats from a transaction") return nil }, nil), ShouldBeNil) }) Convey("can get the non-transactional taskqueue context though", func() { So(dsS.Get(c).RunInTransaction(func(c context.Context) error { So(tqS.GetNoTxn(c).Delete(t, ""), ShouldBeNil) So(tqS.GetNoTxn(c).Purge(""), ShouldBeNil) _, err := tqS.GetNoTxn(c).Stats("") So(err, ShouldBeNil) return nil }, nil), ShouldBeNil) }) Convey("adding a new task only happens if we don't errout", func() { So(dsS.Get(c).RunInTransaction(func(c context.Context) error { t3 := tq.NewTask("/sandwitch/victory") So(tqS.Get(c).Add(t3, ""), ShouldBeNil) return fmt.Errorf("nooooo") }, nil), ShouldErrLike, "nooooo") So(tqt.GetScheduledTasks()["default"][t.Name], ShouldResemble, t) So(tqt.GetTombstonedTasks()["default"][t2.Name], ShouldResemble, t2) So(tqt.GetTransactionTasks()["default"], ShouldBeNil) }) Convey("likewise, a panic doesn't schedule anything", func() { func() { defer func() { _ = recover() }() So(dsS.Get(c).RunInTransaction(func(c context.Context) error { tq := tqS.Get(c) So(tq.Add(tq.NewTask("/sandwitch/victory"), ""), ShouldBeNil) panic(fmt.Errorf("nooooo")) }, nil), ShouldBeNil) }() So(tqt.GetScheduledTasks()["default"][t.Name], ShouldResemble, t) So(tqt.GetTombstonedTasks()["default"][t2.Name], ShouldResemble, t2) So(tqt.GetTransactionTasks()["default"], ShouldBeNil) }) }) }) }
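// Hedged sketch (not part of the original tests): the transactional
// "write an entity, then enqueue follow-up work" pattern exercised above.
// saveAndNotify, its obj argument, and the task path are invented; the
// dsS/tqS aliases and the RunInTransaction/NewTask/Add calls match the ones
// used in this file.
func saveAndNotify(c context.Context, obj interface{}) error {
	return dsS.Get(c).RunInTransaction(func(c context.Context) error {
		if err := dsS.Get(c).Put(obj); err != nil {
			return err
		}
		// Transactional tasks are only dispatched if the transaction
		// commits, and (per the tests above) at most five may be added
		// from a single transaction before Add fails with BAD_REQUEST.
		tq := tqS.Get(c)
		return tq.Add(tq.NewTask("/internal/notify"), "")
	}, nil)
}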
func TestHuge(t *testing.T) { t.Parallel() Convey("testing datastore enforces thresholds", t, func() { _, _, ds := mkds(dataMultiRoot) Convey("exceeding inner txn size threshold still allows outer", func() { So(ds.RunInTransaction(func(c context.Context) error { ds := datastore.Get(c) So(18, fooSetTo(ds), hugeField) So(ds.RunInTransaction(func(c context.Context) error { ds := datastore.Get(c) So(ds.PutMulti(hugeData), ShouldBeNil) return nil }, nil), ShouldErrLike, ErrTransactionTooLarge) return nil }, &datastore.TransactionOptions{XG: true}), ShouldBeNil) So(18, fooShouldHave(ds), hugeField) }) Convey("exceeding inner txn count threshold still allows outer", func() { So(ds.RunInTransaction(func(c context.Context) error { ds := datastore.Get(c) So(18, fooSetTo(ds), hugeField) So(ds.RunInTransaction(func(c context.Context) error { ds := datastore.Get(c) p := ds.MakeKey("mom", 1) // This will exceed the budget, since we've already done one write in // the parent. for i := 1; i <= DefaultWriteCountBudget; i++ { So(ds.Put(&Foo{ID: int64(i), Parent: p}), ShouldBeNil) } return nil }, nil), ShouldErrLike, ErrTransactionTooLarge) return nil }, &datastore.TransactionOptions{XG: true}), ShouldBeNil) So(18, fooShouldHave(ds), hugeField) }) Convey("exceeding threshold in the parent, then retreating in the child is okay", func() { So(ds.RunInTransaction(func(c context.Context) error { ds := datastore.Get(c) So(ds.PutMulti(hugeData), ShouldBeNil) So(18, fooSetTo(ds), hugeField) // We're over threshold! But the child will delete most of this and // bring us back to normal. So(ds.RunInTransaction(func(c context.Context) error { ds := datastore.Get(c) keys := make([]*datastore.Key, len(hugeData)) for i, d := range hugeData { keys[i] = ds.KeyForObj(d) } return ds.DeleteMulti(keys) }, nil), ShouldBeNil) return nil }, &datastore.TransactionOptions{XG: true}), ShouldBeNil) So(18, fooShouldHave(ds), hugeField) }) }) }
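// Hedged sketch (illustration only): one way callers could stay under the
// write-count budget that TestHuge probes above, by splitting a large write
// into smaller non-transactional batches. putInChunks and the chunk size are
// invented for the example; Foo, datastore.Get, and PutMulti come from the
// tests themselves.
func putInChunks(c context.Context, objs []*Foo, chunk int) error {
	if chunk <= 0 {
		chunk = 1 // guard against a zero or negative batch size
	}
	ds := datastore.Get(c)
	for len(objs) > 0 {
		n := chunk
		if n > len(objs) {
			n = len(objs)
		}
		if err := ds.PutMulti(objs[:n]); err != nil {
			return err
		}
		objs = objs[n:]
	}
	return nil
}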