func ExampleFilterRDS() {
	// Set up your context using a base service implementation (memory or prod)
	c := memory.Use(context.Background())

	// Apply the counter.FilterRDS
	c, counter := FilterRDS(c)

	// functions use ds from the context like normal... they don't need to know
	// that there are any filters at all.
	someCalledFunc := func(c context.Context) {
		ds := datastore.Get(c)
		vals := []datastore.PropertyMap{{
			"FieldName": {datastore.MkProperty(100)},
			"$key":      {datastore.MkProperty(ds.NewKey("Kind", "", 1, nil))}},
		}
		if err := ds.PutMulti(vals); err != nil {
			panic(err)
		}
	}

	// Using the other function.
	someCalledFunc(c)
	someCalledFunc(c)

	// Then we can see what happened!
	fmt.Printf("%d\n", counter.PutMulti.Successes())
	// Output:
	// 2
}
func toBytesErr(i interface{}, ctx KeyContext) (ret []byte, err error) {
	buf := bytes.Buffer{}

	switch t := i.(type) {
	case ds.IndexColumn:
		err = WriteIndexColumn(&buf, t)

	case ds.IndexDefinition:
		err = WriteIndexDefinition(&buf, t)

	case ds.KeyTok:
		err = WriteKeyTok(&buf, t)

	case ds.Property:
		err = WriteIndexProperty(&buf, ctx, t)

	case ds.PropertyMap:
		err = WritePropertyMap(&buf, ctx, t)

	default:
		_, v := ds.MkProperty(i).IndexTypeAndValue()
		err = writeIndexValue(&buf, ctx, v)
	}

	if err == nil {
		ret = buf.Bytes()
	}
	return
}
func (q *queryImpl) Ancestor(k ds.Key) ds.Query {
	return q.checkMutateClone(
		func() error {
			if k == nil {
				// SDK has an explicit nil-check
				return errors.New("datastore: nil query ancestor")
			}
			if k.Namespace() != q.ns {
				return fmt.Errorf("bad namespace: %q (expected %q)", k.Namespace(), q.ns)
			}
			if !k.Valid(false, globalAppID, q.ns) {
				// technically the SDK implementation does a Weird Thing (tm) if both the
				// stringID and intID are set on a key; it only serializes the stringID in
				// the proto. This means that if you set the Ancestor to an invalid key,
				// you'll never actually hear about it. Instead of doing that insanity, we
				// just swap to an error here.
				return ds.ErrInvalidKey
			}
			if q.eqFilters["__ancestor__"] != nil {
				return errors.New("cannot have more than one ancestor")
			}
			return nil
		},
		func(q *queryImpl) {
			q.addEqFilt("__ancestor__", ds.MkProperty(k))
		})
}
// PropertyMapPartially turns a regular PropertyMap into a SerializedPmap.
// Essentially all the []Property's become SerializedPslice, using cmpbin and
// datastore/serialize's encodings.
func PropertyMapPartially(k *ds.Key, pm ds.PropertyMap) (ret SerializedPmap) {
	ret = make(SerializedPmap, len(pm)+2)
	if k != nil {
		ret["__key__"] = [][]byte{ToBytes(ds.MkProperty(k))}
		for k != nil {
			ret["__ancestor__"] = append(ret["__ancestor__"], ToBytes(ds.MkProperty(k)))
			k = k.Parent()
		}
	}
	for k, vals := range pm {
		newVals := PropertySlice(vals)
		if len(newVals) > 0 {
			ret[k] = newVals
		}
	}
	return
}
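// Illustrative sketch (not part of the original source): how a caller might
// feed PropertyMapPartially when pre-serializing an entity for index
// generation. The helper name below is hypothetical; the key and PropertyMap
// are assumed to come from surrounding datastore code.
func examplePropertyMapPartially(k *ds.Key, pm ds.PropertyMap) SerializedPmap {
	sp := PropertyMapPartially(k, pm)
	// sp["__key__"] now holds the serialized key, sp["__ancestor__"] holds one
	// serialized entry per key in the ancestor chain (including k itself), and
	// each remaining entry is the PropertySlice encoding of the corresponding
	// []Property from pm.
	return sp
}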
func partiallySerialize(k ds.Key, pm ds.PropertyMap) (ret serializedIndexablePmap) {
	ret = make(serializedIndexablePmap, len(pm)+2)
	if k == nil {
		impossible(fmt.Errorf("key to partiallySerialize is nil"))
	}
	ret["__key__"] = [][]byte{serialize.ToBytes(ds.MkProperty(k))}
	for k != nil {
		ret["__ancestor__"] = append(ret["__ancestor__"], serialize.ToBytes(ds.MkProperty(k)))
		k = k.Parent()
	}
	for k, vals := range pm {
		newVals := serializeRow(vals)
		if len(newVals) > 0 {
			ret[k] = newVals
		}
	}
	return
}
func TestGetEntityGroupVersion(t *testing.T) {
	t.Parallel()

	Convey("GetEntityGroupVersion", t, func() {
		c := memory.Use(context.Background())
		c, fb := featureBreaker.FilterRDS(c, errors.New("INTERNAL_ERROR"))
		ds := dstore.Get(c)

		pm := dstore.PropertyMap{
			"$key": {dstore.MkPropertyNI(ds.NewKey("A", "", 0, nil))},
			"Val":  {dstore.MkProperty(10)},
		}
		So(ds.Put(pm), ShouldBeNil)
		aKey, ok := pm.GetMetaDefault("key", nil).(dstore.Key)
		So(ok, ShouldBeTrue)
		So(aKey, ShouldNotBeNil)

		v, err := GetEntityGroupVersion(c, aKey)
		So(err, ShouldBeNil)
		So(v, ShouldEqual, 1)

		So(ds.Delete(aKey), ShouldBeNil)

		v, err = GetEntityGroupVersion(c, ds.NewKey("madeUp", "thing", 0, aKey))
		So(err, ShouldBeNil)
		So(v, ShouldEqual, 2)

		v, err = GetEntityGroupVersion(c, ds.NewKey("madeUp", "thing", 0, nil))
		So(err, ShouldBeNil)
		So(v, ShouldEqual, 0)

		fb.BreakFeatures(nil, "GetMulti")

		v, err = GetEntityGroupVersion(c, aKey)
		So(err.Error(), ShouldContainSubstring, "INTERNAL_ERROR")
	})
}
{"silly inequality (=> v <=)", nq().Gte("bob", 10).Lte("bob", 10), nil, nil}, {"cursors get smooshed into the inquality range", (nq().Gt("Foo", 3).Lt("Foo", 10). Start(curs("Foo", 2, "__key__", key("Something", 1))). End(curs("Foo", 20, "__key__", key("Something", 20)))), nil, &reducedQuery{ "dev~app", "ns", "Foo", map[string]stringset.Set{}, []dstore.IndexColumn{ {Property: "Foo"}, {Property: "__key__"}, }, increment(serialize.ToBytes(dstore.MkProperty(3))), serialize.ToBytes(dstore.MkProperty(10)), 2, }}, {"cursors could cause the whole query to be useless", (nq().Gt("Foo", 3).Lt("Foo", 10). Start(curs("Foo", 200, "__key__", key("Something", 1))). End(curs("Foo", 1, "__key__", key("Something", 20)))), dstore.ErrNullQuery, nil}, } func TestQueries(t *testing.T) { t.Parallel()
func keyBytes(key *ds.Key) []byte {
	return serialize.ToBytes(ds.MkProperty(key))
}
func TestCount(t *testing.T) {
	t.Parallel()

	Convey("Test Count filter", t, func() {
		c, fb := featureBreaker.FilterRDS(memory.Use(context.Background()), nil)
		c, ctr := FilterRDS(c)

		So(c, ShouldNotBeNil)
		So(ctr, ShouldNotBeNil)

		ds := datastore.Get(c)
		vals := []datastore.PropertyMap{{
			"Val":  {datastore.MkProperty(100)},
			"$key": {datastore.MkPropertyNI(ds.NewKey("Kind", "", 1, nil))},
		}}

		Convey("Calling a ds function should reflect in counter", func() {
			So(ds.PutMulti(vals), ShouldBeNil)
			So(ctr.PutMulti.Successes(), ShouldEqual, 1)

			Convey("effects are cumulative", func() {
				So(ds.PutMulti(vals), ShouldBeNil)
				So(ctr.PutMulti.Successes(), ShouldEqual, 2)

				Convey("even within transactions", func() {
					die(ds.RunInTransaction(func(c context.Context) error {
						ds := datastore.Get(c)
						So(ds.PutMulti(append(vals, vals[0])), ShouldBeNil)
						return nil
					}, nil))
				})
			})
		})

		Convey("errors count against errors", func() {
			fb.BreakFeatures(nil, "GetMulti")

			So(ds.GetMulti(vals), ShouldErrLike, `"GetMulti" is broken`)
			So(ctr.GetMulti.Errors(), ShouldEqual, 1)

			fb.UnbreakFeatures("GetMulti")

			So(ds.PutMulti(vals), ShouldBeNil)

			die(ds.GetMulti(vals))
			So(ctr.GetMulti.Errors(), ShouldEqual, 1)
			So(ctr.GetMulti.Successes(), ShouldEqual, 1)
			So(ctr.GetMulti.Total(), ShouldEqual, 2)
		})
	})

	Convey("works for memcache", t, func() {
		c, ctr := FilterMC(memory.Use(context.Background()))
		So(c, ShouldNotBeNil)
		So(ctr, ShouldNotBeNil)
		mc := memcache.Get(c)

		die(mc.Set(mc.NewItem("hello").SetValue([]byte("sup"))))

		_, err := mc.Get("Wat")
		So(err, ShouldNotBeNil)

		_, err = mc.Get("hello")
		die(err)

		So(ctr.SetMulti, shouldHaveSuccessesAndErrors, 1, 0)
		So(ctr.GetMulti, shouldHaveSuccessesAndErrors, 2, 0)
		So(ctr.NewItem, shouldHaveSuccessesAndErrors, 3, 0)
	})

	Convey("works for taskqueue", t, func() {
		c, ctr := FilterTQ(memory.Use(context.Background()))
		So(c, ShouldNotBeNil)
		So(ctr, ShouldNotBeNil)
		tq := taskqueue.Get(c)

		die(tq.Add(&taskqueue.Task{Name: "wat"}, ""))
		So(tq.Add(&taskqueue.Task{Name: "wat"}, "DNE_QUEUE"), ShouldErrLike, "UNKNOWN_QUEUE")

		So(ctr.AddMulti, shouldHaveSuccessesAndErrors, 1, 1)
	})

	Convey("works for global info", t, func() {
		c, fb := featureBreaker.FilterGI(memory.Use(context.Background()), nil)
		c, ctr := FilterGI(c)
		So(c, ShouldNotBeNil)
		So(ctr, ShouldNotBeNil)

		gi := info.Get(c)

		_, err := gi.Namespace("foo")
		die(err)

		fb.BreakFeatures(nil, "Namespace")

		_, err = gi.Namespace("boom")
		So(err, ShouldErrLike, `"Namespace" is broken`)

		So(ctr.Namespace, shouldHaveSuccessesAndErrors, 1, 1)
	})

	Convey("works for user", t, func() {
		c, fb := featureBreaker.FilterUser(memory.Use(context.Background()), nil)
		c, ctr := FilterUser(c)
		So(c, ShouldNotBeNil)
		So(ctr, ShouldNotBeNil)

		u := user.Get(c)

		_, err := u.CurrentOAuth("foo")
		die(err)

		fb.BreakFeatures(nil, "CurrentOAuth")

		_, err = u.CurrentOAuth("foo")
		So(err, ShouldErrLike, `"CurrentOAuth" is broken`)

		So(ctr.CurrentOAuth, shouldHaveSuccessesAndErrors, 1, 1)
	})

	Convey("works for mail", t, func() {
		c, fb := featureBreaker.FilterMail(memory.Use(context.Background()), nil)
		c, ctr := FilterMail(c)
		So(c, ShouldNotBeNil)
		So(ctr, ShouldNotBeNil)

		m := mail.Get(c)

		err := m.Send(&mail.Message{
			Sender: "*****@*****.**",
			To:     []string{"*****@*****.**"},
			Body:   "hi",
		})
		die(err)

		fb.BreakFeatures(nil, "Send")

		err = m.Send(&mail.Message{
			Sender: "*****@*****.**",
			To:     []string{"*****@*****.**"},
			Body:   "hi",
		})
		So(err, ShouldErrLike, `"Send" is broken`)

		So(ctr.Send, shouldHaveSuccessesAndErrors, 1, 1)
	})
}
func TestDSCache(t *testing.T) {
	t.Parallel()

	zeroTime, err := time.Parse("2006-01-02T15:04:05.999999999Z", "2006-01-02T15:04:05.999999999Z")
	if err != nil {
		panic(err)
	}

	Convey("Test dscache", t, func() {
		c := mathrand.Set(context.Background(), rand.New(rand.NewSource(1)))
		clk := testclock.New(zeroTime)
		c = clock.Set(c, clk)
		c = memory.Use(c)

		dsUnder := datastore.Get(c)
		mc := memcache.Get(c)

		shardsForKey := func(k *datastore.Key) int {
			last := k.LastTok()
			if last.Kind == "shardObj" {
				return int(last.IntID)
			}
			if last.Kind == "noCacheObj" {
				return 0
			}
			return DefaultShards
		}

		numMemcacheItems := func() uint64 {
			stats, err := mc.Stats()
			So(err, ShouldBeNil)
			return stats.Items
		}

		Convey("enabled cases", func() {
			c = FilterRDS(c, shardsForKey)
			ds := datastore.Get(c)
			So(dsUnder, ShouldNotBeNil)
			So(ds, ShouldNotBeNil)
			So(mc, ShouldNotBeNil)

			Convey("basically works", func() {
				pm := datastore.PropertyMap{
					"BigData": {datastore.MkProperty([]byte(""))},
					"Value":   {datastore.MkProperty("hi")},
				}
				encoded := append([]byte{0}, serialize.ToBytes(pm)...)

				o := object{ID: 1, Value: "hi"}
				So(ds.Put(&o), ShouldBeNil)

				o = object{ID: 1}
				So(dsUnder.Get(&o), ShouldBeNil)
				So(o.Value, ShouldEqual, "hi")

				itm, err := mc.Get(MakeMemcacheKey(0, ds.KeyForObj(&o)))
				So(err, ShouldEqual, memcache.ErrCacheMiss)

				o = object{ID: 1}
				So(ds.Get(&o), ShouldBeNil)
				So(o.Value, ShouldEqual, "hi")

				itm, err = mc.Get(itm.Key())
				So(err, ShouldBeNil)
				So(itm.Value(), ShouldResemble, encoded)

				Convey("now we don't need the datastore!", func() {
					o := object{ID: 1}

					// delete it, bypassing the cache filter. Don't do this in production
					// unless you want a crappy cache.
					So(dsUnder.Delete(ds.KeyForObj(&o)), ShouldBeNil)

					itm, err := mc.Get(MakeMemcacheKey(0, ds.KeyForObj(&o)))
					So(err, ShouldBeNil)
					So(itm.Value(), ShouldResemble, encoded)

					So(ds.Get(&o), ShouldBeNil)
					So(o.Value, ShouldEqual, "hi")
				})

				Convey("deleting it properly records that fact, however", func() {
					o := object{ID: 1}

					So(ds.Delete(ds.KeyForObj(&o)), ShouldBeNil)
					itm, err := mc.Get(MakeMemcacheKey(0, ds.KeyForObj(&o)))
					So(err, ShouldEqual, memcache.ErrCacheMiss)
					So(ds.Get(&o), ShouldEqual, datastore.ErrNoSuchEntity)

					itm, err = mc.Get(itm.Key())
					So(err, ShouldBeNil)
					So(itm.Value(), ShouldResemble, []byte{})

					// this one hits memcache
					So(ds.Get(&o), ShouldEqual, datastore.ErrNoSuchEntity)
				})
			})

			Convey("compression works", func() {
				o := object{ID: 2, Value: `¯\_(ツ)_/¯`}
				data := make([]byte, 4000)
				for i := range data {
					const alpha = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*()"
					data[i] = alpha[i%len(alpha)]
				}
				o.BigData = data

				So(ds.Put(&o), ShouldBeNil)
				So(ds.Get(&o), ShouldBeNil)

				itm, err := mc.Get(MakeMemcacheKey(0, ds.KeyForObj(&o)))
				So(err, ShouldBeNil)
				So(itm.Value()[0], ShouldEqual, ZlibCompression)
				So(len(itm.Value()), ShouldEqual, 653) // a bit smaller than 4k

				// ensure the next Get comes from the cache
				So(dsUnder.Delete(ds.KeyForObj(&o)), ShouldBeNil)

				o = object{ID: 2}
				So(ds.Get(&o), ShouldBeNil)
				So(o.Value, ShouldEqual, `¯\_(ツ)_/¯`)
				So(o.BigData, ShouldResemble, data)
			})

			Convey("transactions", func() {
				Convey("work", func() {
					// populate an object @ ID1
					So(ds.Put(&object{ID: 1, Value: "something"}), ShouldBeNil)
					So(ds.Get(&object{ID: 1}), ShouldBeNil)

					So(ds.Put(&object{ID: 2, Value: "nurbs"}), ShouldBeNil)
					So(ds.Get(&object{ID: 2}), ShouldBeNil)

					// memcache now has the wrong value (simulated race)
					So(dsUnder.Put(&object{ID: 1, Value: "else"}), ShouldBeNil)
					So(ds.RunInTransaction(func(c context.Context) error {
						ds := datastore.Get(c)
						o := &object{ID: 1}
						So(ds.Get(o), ShouldBeNil)
						So(o.Value, ShouldEqual, "else")
						o.Value = "txn"
						So(ds.Put(o), ShouldBeNil)

						So(ds.Delete(ds.KeyForObj(&object{ID: 2})), ShouldBeNil)
						return nil
					}, &datastore.TransactionOptions{XG: true}), ShouldBeNil)

					_, err := mc.Get(MakeMemcacheKey(0, ds.KeyForObj(&object{ID: 1})))
					So(err, ShouldEqual, memcache.ErrCacheMiss)
					_, err = mc.Get(MakeMemcacheKey(0, ds.KeyForObj(&object{ID: 2})))
					So(err, ShouldEqual, memcache.ErrCacheMiss)
					o := &object{ID: 1}
					So(ds.Get(o), ShouldBeNil)
					So(o.Value, ShouldEqual, "txn")
				})

				Convey("errors don't invalidate", func() {
					// populate an object @ ID1
					So(ds.Put(&object{ID: 1, Value: "something"}), ShouldBeNil)
					So(ds.Get(&object{ID: 1}), ShouldBeNil)
					So(numMemcacheItems(), ShouldEqual, 1)

					So(ds.RunInTransaction(func(c context.Context) error {
						ds := datastore.Get(c)
						o := &object{ID: 1}
						So(ds.Get(o), ShouldBeNil)
						So(o.Value, ShouldEqual, "something")
						o.Value = "txn"
						So(ds.Put(o), ShouldBeNil)
						return errors.New("OH NOES")
					}, nil).Error(), ShouldContainSubstring, "OH NOES")

					// memcache still has the original
					So(numMemcacheItems(), ShouldEqual, 1)
					So(dsUnder.Delete(ds.KeyForObj(&object{ID: 1})), ShouldBeNil)
					o := &object{ID: 1}
					So(ds.Get(o), ShouldBeNil)
					So(o.Value, ShouldEqual, "something")
				})
			})

			Convey("control", func() {
				Convey("per-model bypass", func() {
					type model struct {
						ID         string           `gae:"$id"`
						UseDSCache datastore.Toggle `gae:"$dscache.enable,false"`
						Value      string
					}

					itms := []model{
						{ID: "hi", Value: "something"},
						{ID: "there", Value: "else", UseDSCache: datastore.On},
					}

					So(ds.PutMulti(itms), ShouldBeNil)
					So(ds.GetMulti(itms), ShouldBeNil)

					So(numMemcacheItems(), ShouldEqual, 1)
				})

				Convey("per-key shard count", func() {
					s := &shardObj{ID: 4, Value: "hi"}
					So(ds.Put(s), ShouldBeNil)
					So(ds.Get(s), ShouldBeNil)

					So(numMemcacheItems(), ShouldEqual, 1)
					for i := 0; i < 20; i++ {
						So(ds.Get(s), ShouldBeNil)
					}
					So(numMemcacheItems(), ShouldEqual, 4)
				})

				Convey("per-key cache disablement", func() {
					n := &noCacheObj{ID: "nurbs", Value: true}
					So(ds.Put(n), ShouldBeNil)
					So(ds.Get(n), ShouldBeNil)
					So(numMemcacheItems(), ShouldEqual, 0)
				})

				Convey("per-model expiration", func() {
					type model struct {
						ID         int64 `gae:"$id"`
						DSCacheExp int64 `gae:"$dscache.expiration,7"`
						Value      string
					}

					So(ds.Put(&model{ID: 1, Value: "mooo"}), ShouldBeNil)
					So(ds.Get(&model{ID: 1}), ShouldBeNil)

					itm, err := mc.Get(MakeMemcacheKey(0, ds.KeyForObj(&model{ID: 1})))
					So(err, ShouldBeNil)

					clk.Add(10 * time.Second)

					_, err = mc.Get(itm.Key())
					So(err, ShouldEqual, memcache.ErrCacheMiss)
				})
			})

			Convey("screw cases", func() {
				Convey("memcache contains bogus value (simulated failed AddMulti)", func() {
					o := &object{ID: 1, Value: "spleen"}
					So(ds.Put(o), ShouldBeNil)

					sekret := []byte("I am a banana")
					itm := mc.NewItem(MakeMemcacheKey(0, ds.KeyForObj(o))).SetValue(sekret)
					So(mc.Set(itm), ShouldBeNil)

					o = &object{ID: 1}
					So(ds.Get(o), ShouldBeNil)
					So(o.Value, ShouldEqual, "spleen")

					itm, err := mc.Get(itm.Key())
					So(err, ShouldBeNil)
					So(itm.Flags(), ShouldEqual, ItemUKNONWN)
					So(itm.Value(), ShouldResemble, sekret)
				})

				Convey("memcache contains bogus value (corrupt entry)", func() {
					o := &object{ID: 1, Value: "spleen"}
					So(ds.Put(o), ShouldBeNil)

					sekret := []byte("I am a banana")
					itm := (mc.NewItem(MakeMemcacheKey(0, ds.KeyForObj(o))).
						SetValue(sekret).
						SetFlags(uint32(ItemHasData)))
					So(mc.Set(itm), ShouldBeNil)

					o = &object{ID: 1}
					So(ds.Get(o), ShouldBeNil)
					So(o.Value, ShouldEqual, "spleen")

					itm, err := mc.Get(itm.Key())
					So(err, ShouldBeNil)
					So(itm.Flags(), ShouldEqual, ItemHasData)
					So(itm.Value(), ShouldResemble, sekret)
				})

				Convey("other entity has the lock", func() {
					o := &object{ID: 1, Value: "spleen"}
					So(ds.Put(o), ShouldBeNil)

					sekret := []byte("r@vmarod!#)%9T")
					itm := (mc.NewItem(MakeMemcacheKey(0, ds.KeyForObj(o))).
						SetValue(sekret).
						SetFlags(uint32(ItemHasLock)))
					So(mc.Set(itm), ShouldBeNil)

					o = &object{ID: 1}
					So(ds.Get(o), ShouldBeNil)
					So(o.Value, ShouldEqual, "spleen")

					itm, err := mc.Get(itm.Key())
					So(err, ShouldBeNil)
					So(itm.Flags(), ShouldEqual, ItemHasLock)
					So(itm.Value(), ShouldResemble, sekret)
				})

				Convey("massive entities can't be cached", func() {
					o := &object{ID: 1, Value: "spleen"}
					mr := mathrand.Get(c)
					numRounds := (internalValueSizeLimit / 8) * 2
					buf := bytes.Buffer{}
					for i := 0; i < numRounds; i++ {
						So(binary.Write(&buf, binary.LittleEndian, mr.Int63()), ShouldBeNil)
					}
					o.BigData = buf.Bytes()
					So(ds.Put(o), ShouldBeNil)

					o.BigData = nil
					So(ds.Get(o), ShouldBeNil)

					itm, err := mc.Get(MakeMemcacheKey(0, ds.KeyForObj(o)))
					So(err, ShouldBeNil)

					// Is locked until the next put, forcing all access to the datastore.
					So(itm.Value(), ShouldResemble, []byte{})
					So(itm.Flags(), ShouldEqual, ItemHasLock)

					o.BigData = []byte("hi :)")
					So(ds.Put(o), ShouldBeNil)
					So(ds.Get(o), ShouldBeNil)

					itm, err = mc.Get(itm.Key())
					So(err, ShouldBeNil)
					So(itm.Flags(), ShouldEqual, ItemHasData)
				})

				Convey("failure on Setting memcache locks is a hard stop", func() {
					c, fb := featureBreaker.FilterMC(c, nil)
					fb.BreakFeatures(nil, "SetMulti")
					ds := datastore.Get(c)
					So(ds.Put(&object{ID: 1}).Error(), ShouldContainSubstring, "SetMulti")
				})

				Convey("failure on Setting memcache locks in a transaction is a hard stop", func() {
					c, fb := featureBreaker.FilterMC(c, nil)
					fb.BreakFeatures(nil, "SetMulti")
					ds := datastore.Get(c)
					So(ds.RunInTransaction(func(c context.Context) error {
						So(datastore.Get(c).Put(&object{ID: 1}), ShouldBeNil)
						// no problems here... memcache operations happen after the function
						// body quits.
						return nil
					}, nil).Error(), ShouldContainSubstring, "SetMulti")
				})
			})

			Convey("misc", func() {
				Convey("verify numShards caps at MaxShards", func() {
					sc := supportContext{shardsForKey: shardsForKey}
					So(sc.numShards(ds.KeyForObj(&shardObj{ID: 9001})), ShouldEqual, MaxShards)
				})

				Convey("CompressionType.String", func() {
					So(NoCompression.String(), ShouldEqual, "NoCompression")
					So(ZlibCompression.String(), ShouldEqual, "ZlibCompression")
					So(CompressionType(100).String(), ShouldEqual, "UNKNOWN_CompressionType(100)")
				})
			})
		})

		Convey("disabled cases", func() {
			defer func() {
				globalEnabled = true
			}()

			So(IsGloballyEnabled(c), ShouldBeTrue)

			So(SetGlobalEnable(c, false), ShouldBeNil)
			// twice is a nop
			So(SetGlobalEnable(c, false), ShouldBeNil)

			// but it takes 5 minutes to kick in
			So(IsGloballyEnabled(c), ShouldBeTrue)
			clk.Add(time.Minute*5 + time.Second)
			So(IsGloballyEnabled(c), ShouldBeFalse)

			So(mc.Set(mc.NewItem("test").SetValue([]byte("hi"))), ShouldBeNil)
			So(numMemcacheItems(), ShouldEqual, 1)
			So(SetGlobalEnable(c, true), ShouldBeNil)
			// memcache gets flushed as a side effect
			So(numMemcacheItems(), ShouldEqual, 0)

			// Still takes 5 minutes to kick in
			So(IsGloballyEnabled(c), ShouldBeFalse)
			clk.Add(time.Minute*5 + time.Second)
			So(IsGloballyEnabled(c), ShouldBeTrue)
		})
	})
}
func TestQuerySupport(t *testing.T) {
	t.Parallel()

	Convey("Queries", t, func() {
		Convey("Good", func() {
			q := datastore.NewQuery("Foo").Ancestor(root)

			Convey("normal", func() {
				_, _, ds := mkds(dataSingleRoot)
				ds.Testable().AddIndexes(&datastore.IndexDefinition{
					Kind:     "Foo",
					Ancestor: true,
					SortBy: []datastore.IndexColumn{
						{Property: "Value"},
					},
				})

				So(ds.RunInTransaction(func(c context.Context) error {
					ds := datastore.Get(c)

					q = q.Lt("Value", 400000000000000000)

					vals := []*Foo{}
					So(ds.GetAll(q, &vals), ShouldBeNil)
					So(len(vals), ShouldEqual, 8)

					count, err := ds.Count(q)
					So(err, ShouldBeNil)
					So(count, ShouldEqual, 8)

					f := &Foo{ID: 1, Parent: root}
					So(ds.Get(f), ShouldBeNil)
					f.Value = append(f.Value, 100)
					So(ds.Put(f), ShouldBeNil)

					// Wowee, zowee, merged queries!
					vals2 := []*Foo{}
					So(ds.GetAll(q, &vals2), ShouldBeNil)
					So(len(vals2), ShouldEqual, 9)
					So(vals2[0], ShouldResemble, f)

					vals2 = []*Foo{}
					So(ds.GetAll(q.Limit(2).Offset(1), &vals2), ShouldBeNil)
					So(len(vals2), ShouldEqual, 2)
					So(vals2, ShouldResemble, vals[:2])

					return nil
				}, nil), ShouldBeNil)
			})

			Convey("keysOnly", func() {
				_, _, ds := mkds([]*Foo{
					{ID: 2, Parent: root, Value: []int64{1, 2, 3, 4, 5, 6, 7}},
					{ID: 3, Parent: root, Value: []int64{3, 4, 5, 6, 7, 8, 9}},
					{ID: 4, Parent: root, Value: []int64{3, 5, 7, 9, 11, 100, 1}},
					{ID: 5, Parent: root, Value: []int64{1, 70, 101}},
				})

				So(ds.RunInTransaction(func(c context.Context) error {
					ds := datastore.Get(c)

					q = q.Eq("Value", 1).KeysOnly(true)
					vals := []*datastore.Key{}
					So(ds.GetAll(q, &vals), ShouldBeNil)
					So(len(vals), ShouldEqual, 3)
					So(vals[2], ShouldResemble, ds.MakeKey("Parent", 1, "Foo", 5))

					// can remove keys
					So(ds.Delete(ds.MakeKey("Parent", 1, "Foo", 2)), ShouldBeNil)
					vals = []*datastore.Key{}
					So(ds.GetAll(q, &vals), ShouldBeNil)
					So(len(vals), ShouldEqual, 2)

					// and add new ones
					So(ds.Put(&Foo{ID: 1, Parent: root, Value: []int64{1, 7, 100}}), ShouldBeNil)
					So(ds.Put(&Foo{ID: 7, Parent: root, Value: []int64{20, 1}}), ShouldBeNil)
					vals = []*datastore.Key{}
					So(ds.GetAll(q, &vals), ShouldBeNil)
					So(len(vals), ShouldEqual, 4)

					So(vals[0].IntID(), ShouldEqual, 1)
					So(vals[1].IntID(), ShouldEqual, 4)
					So(vals[2].IntID(), ShouldEqual, 5)
					So(vals[3].IntID(), ShouldEqual, 7)

					return nil
				}, nil), ShouldBeNil)
			})

			Convey("project", func() {
				_, _, ds := mkds([]*Foo{
					{ID: 2, Parent: root, Value: []int64{1, 2, 3, 4, 5, 6, 7}},
					{ID: 3, Parent: root, Value: []int64{3, 4, 5, 6, 7, 8, 9}},
					{ID: 4, Parent: root, Value: []int64{3, 5, 7, 9, 11, 100, 1}},
					{ID: 5, Parent: root, Value: []int64{1, 70, 101}},
				})
				ds.Testable().AddIndexes(&datastore.IndexDefinition{
					Kind:     "Foo",
					Ancestor: true,
					SortBy: []datastore.IndexColumn{
						{Property: "Value"},
					},
				})

				So(ds.RunInTransaction(func(c context.Context) error {
					ds := datastore.Get(c)

					count, err := ds.Count(q.Project("Value"))
					So(err, ShouldBeNil)
					So(count, ShouldEqual, 24)

					q = q.Project("Value").Offset(4).Limit(10)

					vals := []datastore.PropertyMap{}
					So(ds.GetAll(q, &vals), ShouldBeNil)
					So(len(vals), ShouldEqual, 10)

					expect := []struct {
						id  int64
						val int64
					}{
						{2, 3}, {3, 3}, {4, 3},
						{2, 4}, {3, 4},
						{2, 5}, {3, 5}, {4, 5},
						{2, 6}, {3, 6},
					}

					for i, pm := range vals {
						So(datastore.GetMetaDefault(pm, "key", nil), ShouldResemble,
							ds.MakeKey("Parent", 1, "Foo", expect[i].id))
						So(pm["Value"][0].Value(), ShouldEqual, expect[i].val)
					}

					// should remove 4 entries, but there are plenty more to fill
					So(ds.Delete(ds.MakeKey("Parent", 1, "Foo", 2)), ShouldBeNil)

					vals = []datastore.PropertyMap{}
					So(ds.GetAll(q, &vals), ShouldBeNil)
					So(len(vals), ShouldEqual, 10)

					expect = []struct {
						id  int64
						val int64
					}{
						// note (3, 3) and (4, 3) are correctly missing because deleting
						// 2 removed two entries which are hidden by the Offset(4).
						{3, 4},
						{3, 5}, {4, 5},
						{3, 6},
						{3, 7}, {4, 7},
						{3, 8},
						{3, 9}, {4, 9},
						{4, 11},
					}

					for i, pm := range vals {
						So(datastore.GetMetaDefault(pm, "key", nil), ShouldResemble,
							ds.MakeKey("Parent", 1, "Foo", expect[i].id))
						So(pm["Value"][0].Value(), ShouldEqual, expect[i].val)
					}

					So(ds.Put(&Foo{ID: 1, Parent: root, Value: []int64{3, 9}}), ShouldBeNil)

					vals = []datastore.PropertyMap{}
					So(ds.GetAll(q, &vals), ShouldBeNil)
					So(len(vals), ShouldEqual, 10)

					expect = []struct {
						id  int64
						val int64
					}{
						// 'invisible' {1, 3} entry bumps the {4, 3} into view.
						{4, 3},
						{3, 4},
						{3, 5}, {4, 5},
						{3, 6},
						{3, 7}, {4, 7},
						{3, 8},
						{1, 9}, {3, 9}, {4, 9},
					}

					for i, pm := range vals {
						So(datastore.GetMetaDefault(pm, "key", nil), ShouldResemble,
							ds.MakeKey("Parent", 1, "Foo", expect[i].id))
						So(pm["Value"][0].Value(), ShouldEqual, expect[i].val)
					}

					return nil
				}, nil), ShouldBeNil)
			})

			Convey("project+distinct", func() {
				_, _, ds := mkds([]*Foo{
					{ID: 2, Parent: root, Value: []int64{1, 2, 3, 4, 5, 6, 7}},
					{ID: 3, Parent: root, Value: []int64{3, 4, 5, 6, 7, 8, 9}},
					{ID: 4, Parent: root, Value: []int64{3, 5, 7, 9, 11, 100, 1}},
					{ID: 5, Parent: root, Value: []int64{1, 70, 101}},
				})
				ds.Testable().AddIndexes(&datastore.IndexDefinition{
					Kind:     "Foo",
					Ancestor: true,
					SortBy: []datastore.IndexColumn{
						{Property: "Value"},
					},
				})

				So(ds.RunInTransaction(func(c context.Context) error {
					ds := datastore.Get(c)

					q = q.Project("Value").Distinct(true)

					vals := []datastore.PropertyMap{}
					So(ds.GetAll(q, &vals), ShouldBeNil)
					So(len(vals), ShouldEqual, 13)

					expect := []struct {
						id  int64
						val int64
					}{
						{2, 1}, {2, 2}, {2, 3}, {2, 4}, {2, 5}, {2, 6}, {2, 7},
						{3, 8}, {3, 9},
						{4, 11},
						{5, 70},
						{4, 100},
						{5, 101},
					}

					for i, pm := range vals {
						So(pm["Value"][0].Value(), ShouldEqual, expect[i].val)
						So(datastore.GetMetaDefault(pm, "key", nil), ShouldResemble,
							ds.MakeKey("Parent", 1, "Foo", expect[i].id))
					}

					return nil
				}, nil), ShouldBeNil)
			})

			Convey("overwrite", func() {
				data := []*Foo{
					{ID: 2, Parent: root, Value: []int64{1, 2, 3, 4, 5, 6, 7}},
					{ID: 3, Parent: root, Value: []int64{3, 4, 5, 6, 7, 8, 9}},
					{ID: 4, Parent: root, Value: []int64{3, 5, 7, 9, 11, 100, 1, 2}},
					{ID: 5, Parent: root, Value: []int64{1, 70, 101}},
				}

				_, _, ds := mkds(data)

				q = q.Eq("Value", 2, 3)

				So(ds.RunInTransaction(func(c context.Context) error {
					ds := datastore.Get(c)

					vals := []*Foo{}
					So(ds.GetAll(q, &vals), ShouldBeNil)
					So(len(vals), ShouldEqual, 2)
					So(vals[0], ShouldResemble, data[0])
					So(vals[1], ShouldResemble, data[2])

					foo2 := &Foo{ID: 2, Parent: root, Value: []int64{2, 3}}
					So(ds.Put(foo2), ShouldBeNil)

					vals = []*Foo{}
					So(ds.GetAll(q, &vals), ShouldBeNil)
					So(len(vals), ShouldEqual, 2)
					So(vals[0], ShouldResemble, foo2)
					So(vals[1], ShouldResemble, data[2])

					foo1 := &Foo{ID: 1, Parent: root, Value: []int64{2, 3}}
					So(ds.Put(foo1), ShouldBeNil)

					vals = []*Foo{}
					So(ds.GetAll(q, &vals), ShouldBeNil)
					So(len(vals), ShouldEqual, 3)
					So(vals[0], ShouldResemble, foo1)
					So(vals[1], ShouldResemble, foo2)
					So(vals[2], ShouldResemble, data[2])

					return nil
				}, nil), ShouldBeNil)
			})

			projectData := []*Foo{
				{ID: 2, Parent: root, Value: []int64{1, 2, 3, 4, 5, 6, 7}, Sort: []string{"x", "z"}},
				{ID: 3, Parent: root, Value: []int64{3, 4, 5, 6, 7, 8, 9}, Sort: []string{"b"}},
				{ID: 4, Parent: root, Value: []int64{3, 5, 7, 9, 11, 100, 1, 2}, Sort: []string{"aa", "a"}},
				{ID: 5, Parent: root, Value: []int64{1, 70, 101}, Sort: []string{"c"}},
			}

			Convey("project+extra orders", func() {
				_, _, ds := mkds(projectData)
				ds.Testable().AddIndexes(&datastore.IndexDefinition{
					Kind:     "Foo",
					Ancestor: true,
					SortBy: []datastore.IndexColumn{
						{Property: "Sort", Descending: true},
						{Property: "Value", Descending: true},
					},
				})

				q = q.Project("Value").Order("-Sort", "-Value").Distinct(true)
				So(ds.RunInTransaction(func(c context.Context) error {
					ds = datastore.Get(c)

					So(ds.Put(&Foo{
						ID: 1, Parent: root, Value: []int64{0, 1, 1000},
						Sort: []string{"zz"}}), ShouldBeNil)

					vals := []datastore.PropertyMap{}
					So(ds.GetAll(q, &vals), ShouldBeNil)

					expect := []struct {
						id  int64
						val int64
					}{
						{1, 1000}, {1, 1}, {1, 0},
						{2, 7}, {2, 6}, {2, 5}, {2, 4}, {2, 3}, {2, 2},
						{5, 101}, {5, 70},
						{3, 9}, {3, 8},
						{4, 100}, {4, 11},
					}

					for i, pm := range vals {
						So(pm["Value"][0].Value(), ShouldEqual, expect[i].val)
						So(datastore.GetMetaDefault(pm, "key", nil), ShouldResemble,
							ds.MakeKey("Parent", 1, "Foo", expect[i].id))
					}

					return nil
				}, nil), ShouldBeNil)
			})

			Convey("buffered entity sorts before ineq, but after first parent entity", func() {
				// If we got this wrong, we'd see Foo,3 come before Foo,2. This might
				// happen because we calculate the comparison string for each entity
				// based on the whole entity, but we forgot to limit the comparison
				// string generation by the inequality criteria.
				data := []*Foo{
					{ID: 2, Parent: root, Value: []int64{2, 3, 5, 6}, Sort: []string{"z"}},
				}

				_, _, ds := mkds(data)
				ds.Testable().AddIndexes(&datastore.IndexDefinition{
					Kind:     "Foo",
					Ancestor: true,
					SortBy: []datastore.IndexColumn{
						{Property: "Value"},
					},
				})

				q = q.Gt("Value", 2).Limit(2)

				So(ds.RunInTransaction(func(c context.Context) error {
					ds = datastore.Get(c)

					foo1 := &Foo{ID: 3, Parent: root, Value: []int64{0, 2, 3, 4}}
					So(ds.Put(foo1), ShouldBeNil)

					vals := []*Foo{}
					So(ds.GetAll(q, &vals), ShouldBeNil)
					So(len(vals), ShouldEqual, 2)

					So(vals[0], ShouldResemble, data[0])
					So(vals[1], ShouldResemble, foo1)

					return nil
				}, nil), ShouldBeNil)
			})

			Convey("keysOnly+extra orders", func() {
				_, _, ds := mkds(projectData)
				ds.Testable().AddIndexes(&datastore.IndexDefinition{
					Kind:     "Foo",
					Ancestor: true,
					SortBy: []datastore.IndexColumn{
						{Property: "Sort"},
					},
				})

				q = q.Order("Sort").KeysOnly(true)

				So(ds.RunInTransaction(func(c context.Context) error {
					ds = datastore.Get(c)

					So(ds.Put(&Foo{
						ID: 1, Parent: root, Value: []int64{0, 1, 1000},
						Sort: []string{"x", "zz"}}), ShouldBeNil)

					So(ds.Put(&Foo{
						ID: 2, Parent: root, Value: []int64{0, 1, 1000},
						Sort: []string{"zz", "zzz", "zzzz"}}), ShouldBeNil)

					vals := []*datastore.Key{}
					So(ds.GetAll(q, &vals), ShouldBeNil)
					So(len(vals), ShouldEqual, 5)

					So(vals, ShouldResemble, []*datastore.Key{
						ds.MakeKey("Parent", 1, "Foo", 4),
						ds.MakeKey("Parent", 1, "Foo", 3),
						ds.MakeKey("Parent", 1, "Foo", 5),
						ds.MakeKey("Parent", 1, "Foo", 1),
						ds.MakeKey("Parent", 1, "Foo", 2),
					})

					return nil
				}, nil), ShouldBeNil)
			})

			Convey("query across nested transactions", func() {
				_, _, ds := mkds(projectData)
				q = q.Eq("Value", 2, 3)

				foo1 := &Foo{ID: 1, Parent: root, Value: []int64{2, 3}}
				foo7 := &Foo{ID: 7, Parent: root, Value: []int64{2, 3}}

				So(ds.RunInTransaction(func(c context.Context) error {
					ds := datastore.Get(c)

					So(ds.Put(foo1), ShouldBeNil)

					vals := []*Foo{}
					So(ds.GetAll(q, &vals), ShouldBeNil)
					So(vals, ShouldResemble, []*Foo{foo1, projectData[0], projectData[2]})

					So(ds.RunInTransaction(func(c context.Context) error {
						ds := datastore.Get(c)

						vals := []*Foo{}
						So(ds.GetAll(q, &vals), ShouldBeNil)
						So(vals, ShouldResemble, []*Foo{foo1, projectData[0], projectData[2]})

						So(ds.Delete(ds.MakeKey("Parent", 1, "Foo", 4)), ShouldBeNil)
						So(ds.Put(foo7), ShouldBeNil)

						vals = []*Foo{}
						So(ds.GetAll(q, &vals), ShouldBeNil)
						So(vals, ShouldResemble, []*Foo{foo1, projectData[0], foo7})

						return nil
					}, nil), ShouldBeNil)

					vals = []*Foo{}
					So(ds.GetAll(q, &vals), ShouldBeNil)
					So(vals, ShouldResembleV, []*Foo{foo1, projectData[0], foo7})

					return nil
				}, nil), ShouldBeNil)

				vals := []*Foo{}
				So(ds.GetAll(q, &vals), ShouldBeNil)
				So(vals, ShouldResemble, []*Foo{foo1, projectData[0], foo7})
			})

			Convey("start transaction from inside query", func() {
				_, _, ds := mkds(projectData)
				So(ds.RunInTransaction(func(c context.Context) error {
					ds := datastore.Get(c)

					q := datastore.NewQuery("Foo").Ancestor(root)
					return ds.Run(q, func(pm datastore.PropertyMap) {
						So(ds.RunInTransaction(func(c context.Context) error {
							ds := datastore.Get(c)
							pm["Value"] = append(pm["Value"], datastore.MkProperty("wat"))
							return ds.Put(pm)
						}, nil), ShouldBeNil)
					})
				}, &datastore.TransactionOptions{XG: true}), ShouldBeNil)

				So(ds.Run(datastore.NewQuery("Foo"), func(pm datastore.PropertyMap) {
					val := pm["Value"]
					So(val[len(val)-1].Value(), ShouldResemble, "wat")
				}), ShouldBeNil)
			})
		})
	})
}
func TestDatastoreSingleReadWriter(t *testing.T) {
	t.Parallel()

	Convey("Datastore single reads and writes", t, func() {
		c := Use(context.Background())
		ds := dsS.Get(c)
		So(ds, ShouldNotBeNil)

		Convey("getting objects that DNE is an error", func() {
			So(ds.Get(&Foo{ID: 1}), ShouldEqual, dsS.ErrNoSuchEntity)
		})

		Convey("bad namespaces fail", func() {
			_, err := infoS.Get(c).Namespace("$$blzyall")
			So(err.Error(), ShouldContainSubstring, "namespace \"$$blzyall\" does not match")
		})

		Convey("Can Put stuff", func() {
			// with an incomplete key!
			f := &Foo{Val: 10}
			So(ds.Put(f), ShouldBeNil)
			k := ds.KeyForObj(f)
			So(k.String(), ShouldEqual, "dev~app::/Foo,1")

			Convey("and Get it back", func() {
				newFoo := &Foo{ID: 1}
				So(ds.Get(newFoo), ShouldBeNil)
				So(newFoo, ShouldResemble, f)

				Convey("but it's hidden from a different namespace", func() {
					c, err := infoS.Get(c).Namespace("whombat")
					So(err, ShouldBeNil)
					ds = dsS.Get(c)
					So(ds.Get(f), ShouldEqual, dsS.ErrNoSuchEntity)
				})

				Convey("and we can Delete it", func() {
					So(ds.Delete(k), ShouldBeNil)
					So(ds.Get(newFoo), ShouldEqual, dsS.ErrNoSuchEntity)
				})
			})

			Convey("Deleting with a bogus key is bad", func() {
				So(ds.Delete(ds.NewKey("Foo", "wat", 100, nil)), ShouldEqual, dsS.ErrInvalidKey)
			})

			Convey("Deleting a DNE entity is fine", func() {
				So(ds.Delete(ds.NewKey("Foo", "wat", 0, nil)), ShouldBeNil)
			})

			Convey("Deleting entities from a nonexistent namespace works", func() {
				aid := infoS.Get(c).FullyQualifiedAppID()
				keys := make([]*dsS.Key, 10)
				for i := range keys {
					keys[i] = ds.MakeKey(aid, "noexist", "Kind", i+1)
				}
				So(ds.DeleteMulti(keys), ShouldBeNil)
				count := 0
				So(ds.Raw().DeleteMulti(keys, func(err error) error {
					count++
					So(err, ShouldBeNil)
					return nil
				}), ShouldBeNil)
				So(count, ShouldEqual, len(keys))
			})

			Convey("with multiple puts", func() {
				So(testGetMeta(c, k), ShouldEqual, 1)

				foos := make([]Foo, 10)
				for i := range foos {
					foos[i].Val = 10
					foos[i].Parent = k
				}
				So(ds.PutMulti(foos), ShouldBeNil)
				So(testGetMeta(c, k), ShouldEqual, 11)

				keys := make([]*dsS.Key, len(foos))
				for i, f := range foos {
					keys[i] = ds.KeyForObj(&f)
				}

				Convey("ensure that group versions persist across deletes", func() {
					So(ds.DeleteMulti(append(keys, k)), ShouldBeNil)

					ds.Testable().CatchupIndexes()

					count := 0
					So(ds.Run(dsS.NewQuery(""), func(_ *dsS.Key) {
						count++
					}), ShouldBeNil)
					So(count, ShouldEqual, 3)

					So(testGetMeta(c, k), ShouldEqual, 22)

					So(ds.Put(&Foo{ID: 1}), ShouldBeNil)
					So(testGetMeta(c, k), ShouldEqual, 23)
				})

				Convey("can Get", func() {
					vals := make([]dsS.PropertyMap, len(keys))
					for i := range vals {
						vals[i] = dsS.PropertyMap{}
						So(vals[i].SetMeta("key", keys[i]), ShouldBeTrue)
					}
					So(ds.GetMulti(vals), ShouldBeNil)

					for i, val := range vals {
						So(val, ShouldResemble, dsS.PropertyMap{
							"Val":  {dsS.MkProperty(10)},
							"$key": {dsS.MkPropertyNI(keys[i])},
						})
					}
				})
			})

			Convey("allocating ids prevents their use", func() {
				start, err := ds.AllocateIDs(ds.MakeKey("Foo", 0), 100)
				So(err, ShouldBeNil)
				So(start, ShouldEqual, 2)

				f := &Foo{Val: 10}
				So(ds.Put(f), ShouldBeNil)
				k := ds.KeyForObj(f)
				So(k.String(), ShouldEqual, "dev~app::/Foo,102")
			})
		})

		Convey("implements DSTransactioner", func() {
			Convey("Put", func() {
				f := &Foo{Val: 10}
				So(ds.Put(f), ShouldBeNil)
				k := ds.KeyForObj(f)
				So(k.String(), ShouldEqual, "dev~app::/Foo,1")

				Convey("can Put new entity groups", func() {
					err := ds.RunInTransaction(func(c context.Context) error {
						ds := dsS.Get(c)

						f := &Foo{Val: 100}
						So(ds.Put(f), ShouldBeNil)
						So(f.ID, ShouldEqual, 2)

						f.ID = 0
						f.Val = 200
						So(ds.Put(f), ShouldBeNil)
						So(f.ID, ShouldEqual, 3)

						return nil
					}, &dsS.TransactionOptions{XG: true})
					So(err, ShouldBeNil)

					f := &Foo{ID: 2}
					So(ds.Get(f), ShouldBeNil)
					So(f.Val, ShouldEqual, 100)

					f.ID = 3
					So(ds.Get(f), ShouldBeNil)
					So(f.Val, ShouldEqual, 200)
				})

				Convey("can Put new entities in a current group", func() {
					err := ds.RunInTransaction(func(c context.Context) error {
						ds := dsS.Get(c)

						f := &Foo{Val: 100, Parent: k}
						So(ds.Put(f), ShouldBeNil)
						So(ds.KeyForObj(f).String(), ShouldEqual, "dev~app::/Foo,1/Foo,1")

						f.ID = 0
						f.Val = 200
						So(ds.Put(f), ShouldBeNil)
						So(ds.KeyForObj(f).String(), ShouldEqual, "dev~app::/Foo,1/Foo,2")

						return nil
					}, nil)
					So(err, ShouldBeNil)

					f := &Foo{ID: 1, Parent: k}
					So(ds.Get(f), ShouldBeNil)
					So(f.Val, ShouldEqual, 100)

					f.ID = 2
					So(ds.Get(f), ShouldBeNil)
					So(f.Val, ShouldEqual, 200)
				})

				Convey("Deletes work too", func() {
					err := ds.RunInTransaction(func(c context.Context) error {
						return dsS.Get(c).Delete(k)
					}, nil)
					So(err, ShouldBeNil)
					So(ds.Get(&Foo{ID: 1}), ShouldEqual, dsS.ErrNoSuchEntity)
				})

				Convey("A Get counts against your group count", func() {
					err := ds.RunInTransaction(func(c context.Context) error {
						ds := dsS.Get(c)

						pm := dsS.PropertyMap{}
						So(pm.SetMeta("key", ds.NewKey("Foo", "", 20, nil)), ShouldBeTrue)
						So(ds.Get(pm), ShouldEqual, dsS.ErrNoSuchEntity)

						So(pm.SetMeta("key", k), ShouldBeTrue)
						So(ds.Get(pm).Error(), ShouldContainSubstring, "cross-group")
						return nil
					}, nil)
					So(err, ShouldBeNil)
				})

				Convey("Get takes a snapshot", func() {
					err := ds.RunInTransaction(func(c context.Context) error {
						ds := dsS.Get(c)

						So(ds.Get(f), ShouldBeNil)
						So(f.Val, ShouldEqual, 10)

						// Don't ever do this in a real program unless you want to guarantee
						// a failed transaction :)
						f.Val = 11
						So(dsS.GetNoTxn(c).Put(f), ShouldBeNil)

						So(ds.Get(f), ShouldBeNil)
						So(f.Val, ShouldEqual, 10)

						return nil
					}, nil)
					So(err, ShouldBeNil)

					f := &Foo{ID: 1}
					So(ds.Get(f), ShouldBeNil)
					So(f.Val, ShouldEqual, 11)
				})

				Convey("and snapshots are consistent even after Puts", func() {
					err := ds.RunInTransaction(func(c context.Context) error {
						ds := dsS.Get(c)

						f := &Foo{ID: 1}
						So(ds.Get(f), ShouldBeNil)
						So(f.Val, ShouldEqual, 10)

						// Don't ever do this in a real program unless you want to guarantee
						// a failed transaction :)
						f.Val = 11
						So(dsS.GetNoTxn(c).Put(f), ShouldBeNil)

						So(ds.Get(f), ShouldBeNil)
						So(f.Val, ShouldEqual, 10)

						f.Val = 20
						So(ds.Put(f), ShouldBeNil)

						So(ds.Get(f), ShouldBeNil)
						So(f.Val, ShouldEqual, 10) // still gets 10

						return nil
					}, &dsS.TransactionOptions{Attempts: 1})
					So(err.Error(), ShouldContainSubstring, "concurrent")

					f := &Foo{ID: 1}
					So(ds.Get(f), ShouldBeNil)
					So(f.Val, ShouldEqual, 11)
				})

				Convey("Reusing a transaction context is bad news", func() {
					txnDS := dsS.Interface(nil)
					err := ds.RunInTransaction(func(c context.Context) error {
						txnDS = dsS.Get(c)
						So(txnDS.Get(f), ShouldBeNil)
						return nil
					}, nil)
					So(err, ShouldBeNil)
					So(txnDS.Get(f).Error(), ShouldContainSubstring, "expired")
				})

				Convey("Nested transactions are rejected", func() {
					err := ds.RunInTransaction(func(c context.Context) error {
						err := dsS.Get(c).RunInTransaction(func(c context.Context) error {
							panic("noooo")
						}, nil)
						So(err.Error(), ShouldContainSubstring, "nested transactions")
						return nil
					}, nil)
					So(err, ShouldBeNil)
				})

				Convey("Concurrent transactions only accept one set of changes", func() {
					// Note: I think this implementation is actually /slightly/ wrong.
					// According to my read of the docs for appengine, when you open a
					// transaction it actually (essentially) holds a reference to the
					// entire datastore. Our implementation takes a snapshot of the
					// entity group as soon as something observes/affects it.
					//
					// That said... I'm not sure if there's really a semantic difference.
					err := ds.RunInTransaction(func(c context.Context) error {
						So(dsS.Get(c).Put(&Foo{ID: 1, Val: 21}), ShouldBeNil)

						err := dsS.GetNoTxn(c).RunInTransaction(func(c context.Context) error {
							So(dsS.Get(c).Put(&Foo{ID: 1, Val: 27}), ShouldBeNil)
							return nil
						}, nil)
						So(err, ShouldBeNil)

						return nil
					}, nil)
					So(err.Error(), ShouldContainSubstring, "concurrent")

					f := &Foo{ID: 1}
					So(ds.Get(f), ShouldBeNil)
					So(f.Val, ShouldEqual, 27)
				})

				Convey("XG", func() {
					Convey("Modifying two groups with XG=false is invalid", func() {
						err := ds.RunInTransaction(func(c context.Context) error {
							ds := dsS.Get(c)
							f := &Foo{ID: 1, Val: 200}
							So(ds.Put(f), ShouldBeNil)

							f.ID = 2
							err := ds.Put(f)
							So(err.Error(), ShouldContainSubstring, "cross-group")
							return err
						}, nil)
						So(err.Error(), ShouldContainSubstring, "cross-group")
					})

					Convey("Modifying >25 groups with XG=true is invalid", func() {
						err := ds.RunInTransaction(func(c context.Context) error {
							ds := dsS.Get(c)
							foos := make([]Foo, 25)
							for i := int64(1); i < 26; i++ {
								foos[i-1].ID = i
								foos[i-1].Val = 200
							}
							So(ds.PutMulti(foos), ShouldBeNil)

							err := ds.Put(&Foo{ID: 26})
							So(err.Error(), ShouldContainSubstring, "too many entity groups")
							return err
						}, &dsS.TransactionOptions{XG: true})
						So(err.Error(), ShouldContainSubstring, "too many entity groups")
					})
				})

				Convey("Errors and panics", func() {
					Convey("returning an error aborts", func() {
						err := ds.RunInTransaction(func(c context.Context) error {
							ds := dsS.Get(c)
							So(ds.Put(&Foo{ID: 1, Val: 200}), ShouldBeNil)
							return fmt.Errorf("thingy")
						}, nil)
						So(err.Error(), ShouldEqual, "thingy")

						f := &Foo{ID: 1}
						So(ds.Get(f), ShouldBeNil)
						So(f.Val, ShouldEqual, 10)
					})

					Convey("panicking aborts", func() {
						So(func() {
							So(ds.RunInTransaction(func(c context.Context) error {
								ds := dsS.Get(c)
								So(ds.Put(&Foo{Val: 200}), ShouldBeNil)
								panic("wheeeeee")
							}, nil), ShouldBeNil)
						}, ShouldPanic)

						f := &Foo{ID: 1}
						So(ds.Get(f), ShouldBeNil)
						So(f.Val, ShouldEqual, 10)
					})
				})

				Convey("Transaction retries", func() {
					tst := ds.Testable()
					Reset(func() { tst.SetTransactionRetryCount(0) })

					Convey("SetTransactionRetryCount set to zero", func() {
						tst.SetTransactionRetryCount(0)
						calls := 0
						So(ds.RunInTransaction(func(c context.Context) error {
							calls++
							return nil
						}, nil), ShouldBeNil)
						So(calls, ShouldEqual, 1)
					})

					Convey("default TransactionOptions is 3 attempts", func() {
						tst.SetTransactionRetryCount(100) // more than 3
						calls := 0
						So(ds.RunInTransaction(func(c context.Context) error {
							calls++
							return nil
						}, nil), ShouldEqual, dsS.ErrConcurrentTransaction)
						So(calls, ShouldEqual, 3)
					})

					Convey("non-default TransactionOptions ", func() {
						tst.SetTransactionRetryCount(100) // more than 20
						calls := 0
						So(ds.RunInTransaction(func(c context.Context) error {
							calls++
							return nil
						}, &dsS.TransactionOptions{Attempts: 20}), ShouldEqual, dsS.ErrConcurrentTransaction)
						So(calls, ShouldEqual, 20)
					})

					Convey("SetTransactionRetryCount is respected", func() {
						tst.SetTransactionRetryCount(1) // less than 3
						calls := 0
						So(ds.RunInTransaction(func(c context.Context) error {
							calls++
							return nil
						}, nil), ShouldBeNil)
						So(calls, ShouldEqual, 2)
					})

					Convey("fatal errors are not retried", func() {
						tst.SetTransactionRetryCount(1)
						calls := 0
						So(ds.RunInTransaction(func(c context.Context) error {
							calls++
							return fmt.Errorf("omg")
						}, nil).Error(), ShouldEqual, "omg")
						So(calls, ShouldEqual, 1)
					})
				})
			})
		})

		Convey("Testable.Consistent", func() {
			Convey("false", func() {
				ds.Testable().Consistent(false) // the default
				for i := 0; i < 10; i++ {
					So(ds.Put(&Foo{ID: int64(i + 1), Val: i + 1}), ShouldBeNil)
				}
				q := dsS.NewQuery("Foo").Gt("Val", 3)

				count, err := ds.Count(q)
				So(err, ShouldBeNil)
				So(count, ShouldEqual, 0)

				So(ds.Delete(ds.MakeKey("Foo", 4)), ShouldBeNil)

				count, err = ds.Count(q)
				So(err, ShouldBeNil)
				So(count, ShouldEqual, 0)

				ds.Testable().Consistent(true)
				count, err = ds.Count(q)
				So(err, ShouldBeNil)
				So(count, ShouldEqual, 6)
			})

			Convey("true", func() {
				ds.Testable().Consistent(true)
				for i := 0; i < 10; i++ {
					So(ds.Put(&Foo{ID: int64(i + 1), Val: i + 1}), ShouldBeNil)
				}
				q := dsS.NewQuery("Foo").Gt("Val", 3)

				count, err := ds.Count(q)
				So(err, ShouldBeNil)
				So(count, ShouldEqual, 7)

				So(ds.Delete(ds.MakeKey("Foo", 4)), ShouldBeNil)

				count, err = ds.Count(q)
				So(err, ShouldBeNil)
				So(count, ShouldEqual, 6)
			})
		})

		Convey("Testable.DisableSpecialEntities", func() {
			ds.Testable().DisableSpecialEntities(true)

			So(ds.Put(&Foo{}), ShouldErrLike, "allocateIDs is disabled")

			So(ds.Put(&Foo{ID: 1}), ShouldBeNil)

			ds.Testable().CatchupIndexes()

			count, err := ds.Count(dsS.NewQuery(""))
			So(err, ShouldBeNil)
			So(count, ShouldEqual, 1) // normally this would include __entity_group__
		})
	})
}
func TestDatastoreSingleReadWriter(t *testing.T) {
	t.Parallel()

	Convey("Datastore single reads and writes", t, func() {
		c := Use(context.Background())
		ds := dsS.Get(c)
		So(ds, ShouldNotBeNil)

		Convey("getting objects that DNE is an error", func() {
			So(ds.Get(&Foo{Id: 1}), ShouldEqual, dsS.ErrNoSuchEntity)
		})

		Convey("bad namespaces fail", func() {
			_, err := infoS.Get(c).Namespace("$$blzyall")
			So(err.Error(), ShouldContainSubstring, "namespace \"$$blzyall\" does not match")
		})

		Convey("Can Put stuff", func() {
			// with an incomplete key!
			f := &Foo{Val: 10}
			So(ds.Put(f), ShouldBeNil)
			k := ds.KeyForObj(f)
			So(k.String(), ShouldEqual, "/Foo,1")

			Convey("and Get it back", func() {
				newFoo := &Foo{Id: 1}
				So(ds.Get(newFoo), ShouldBeNil)
				So(newFoo, ShouldResemble, f)

				Convey("but it's hidden from a different namespace", func() {
					c, err := infoS.Get(c).Namespace("whombat")
					So(err, ShouldBeNil)
					ds = dsS.Get(c)
					So(ds.Get(f), ShouldEqual, dsS.ErrNoSuchEntity)
				})

				Convey("and we can Delete it", func() {
					So(ds.Delete(k), ShouldBeNil)
					So(ds.Get(newFoo), ShouldEqual, dsS.ErrNoSuchEntity)
				})
			})

			Convey("Deleting with a bogus key is bad", func() {
				So(ds.Delete(ds.NewKey("Foo", "wat", 100, nil)), ShouldEqual, dsS.ErrInvalidKey)
			})

			Convey("Deleting a DNE entity is fine", func() {
				So(ds.Delete(ds.NewKey("Foo", "wat", 0, nil)), ShouldBeNil)
			})

			Convey("with multiple puts", func() {
				So(testGetMeta(c, k), ShouldEqual, 1)

				foos := make([]Foo, 10)
				for i := range foos {
					foos[i].Val = 10
					foos[i].Parent = k
				}
				So(ds.PutMulti(foos), ShouldBeNil)
				So(testGetMeta(c, k), ShouldEqual, 11)

				keys := make([]dsS.Key, len(foos))
				for i, f := range foos {
					keys[i] = ds.KeyForObj(&f)
				}

				Convey("ensure that group versions persist across deletes", func() {
					So(ds.DeleteMulti(append(keys, k)), ShouldBeNil)

					// TODO(riannucci): replace with a Count query instead of this cast
					/*
						ents := ds.(*dsImpl).data.head.GetCollection("ents:")
						num, _ := ents.GetTotals()
						// /__entity_root_ids__,Foo
						// /Foo,1/__entity_group__,1
						// /Foo,1/__entity_group_ids__,1
						So(num, ShouldEqual, 3)
					*/

					So(testGetMeta(c, k), ShouldEqual, 22)

					So(ds.Put(&Foo{Id: 1}), ShouldBeNil)
					So(testGetMeta(c, k), ShouldEqual, 23)
				})

				Convey("can Get", func() {
					vals := make([]dsS.PropertyMap, len(keys))
					for i := range vals {
						vals[i] = dsS.PropertyMap{}
						vals[i].SetMeta("key", keys[i])
					}
					So(ds.GetMulti(vals), ShouldBeNil)

					for i, val := range vals {
						So(val, ShouldResemble, dsS.PropertyMap{
							"Val":  {dsS.MkProperty(10)},
							"$key": {dsS.MkPropertyNI(keys[i])},
						})
					}
				})
			})
		})

		Convey("implements DSTransactioner", func() {
			Convey("Put", func() {
				f := &Foo{Val: 10}
				So(ds.Put(f), ShouldBeNil)
				k := ds.KeyForObj(f)
				So(k.String(), ShouldEqual, "/Foo,1")

				Convey("can Put new entity groups", func() {
					err := ds.RunInTransaction(func(c context.Context) error {
						ds := dsS.Get(c)

						f := &Foo{Val: 100}
						So(ds.Put(f), ShouldBeNil)
						So(f.Id, ShouldEqual, 2)

						f.Id = 0
						f.Val = 200
						So(ds.Put(f), ShouldBeNil)
						So(f.Id, ShouldEqual, 3)

						return nil
					}, &dsS.TransactionOptions{XG: true})
					So(err, ShouldBeNil)

					f := &Foo{Id: 2}
					So(ds.Get(f), ShouldBeNil)
					So(f.Val, ShouldEqual, 100)

					f.Id = 3
					So(ds.Get(f), ShouldBeNil)
					So(f.Val, ShouldEqual, 200)
				})

				Convey("can Put new entities in a current group", func() {
					err := ds.RunInTransaction(func(c context.Context) error {
						ds := dsS.Get(c)

						f := &Foo{Val: 100, Parent: k}
						So(ds.Put(f), ShouldBeNil)
						So(ds.KeyForObj(f).String(), ShouldEqual, "/Foo,1/Foo,1")

						f.Id = 0
						f.Val = 200
						So(ds.Put(f), ShouldBeNil)
						So(ds.KeyForObj(f).String(), ShouldEqual, "/Foo,1/Foo,2")

						return nil
					}, nil)
					So(err, ShouldBeNil)

					f := &Foo{Id: 1, Parent: k}
					So(ds.Get(f), ShouldBeNil)
					So(f.Val, ShouldEqual, 100)

					f.Id = 2
					So(ds.Get(f), ShouldBeNil)
					So(f.Val, ShouldEqual, 200)
				})

				Convey("Deletes work too", func() {
					err := ds.RunInTransaction(func(c context.Context) error {
						return dsS.Get(c).Delete(k)
					}, nil)
					So(err, ShouldBeNil)
					So(ds.Get(&Foo{Id: 1}), ShouldEqual, dsS.ErrNoSuchEntity)
				})

				Convey("A Get counts against your group count", func() {
					err := ds.RunInTransaction(func(c context.Context) error {
						ds := dsS.Get(c)

						pm := dsS.PropertyMap{}
						pm.SetMeta("key", ds.NewKey("Foo", "", 20, nil))
						So(ds.Get(pm), ShouldEqual, dsS.ErrNoSuchEntity)

						pm.SetMeta("key", k)
						So(ds.Get(pm).Error(), ShouldContainSubstring, "cross-group")
						return nil
					}, nil)
					So(err, ShouldBeNil)
				})

				Convey("Get takes a snapshot", func() {
					err := ds.RunInTransaction(func(c context.Context) error {
						txnDS := dsS.Get(c)

						So(txnDS.Get(f), ShouldBeNil)
						So(f.Val, ShouldEqual, 10)

						// Don't ever do this in a real program unless you want to guarantee
						// a failed transaction :)
						f.Val = 11
						So(ds.Put(f), ShouldBeNil)

						So(txnDS.Get(f), ShouldBeNil)
						So(f.Val, ShouldEqual, 10)

						return nil
					}, nil)
					So(err, ShouldBeNil)

					f := &Foo{Id: 1}
					So(ds.Get(f), ShouldBeNil)
					So(f.Val, ShouldEqual, 11)
				})

				Convey("and snapshots are consistent even after Puts", func() {
					err := ds.RunInTransaction(func(c context.Context) error {
						txnDS := dsS.Get(c)

						f := &Foo{Id: 1}
						So(txnDS.Get(f), ShouldBeNil)
						So(f.Val, ShouldEqual, 10)

						// Don't ever do this in a real program unless you want to guarantee
						// a failed transaction :)
						f.Val = 11
						So(ds.Put(f), ShouldBeNil)

						So(txnDS.Get(f), ShouldBeNil)
						So(f.Val, ShouldEqual, 10)

						f.Val = 20
						So(txnDS.Put(f), ShouldBeNil)

						So(txnDS.Get(f), ShouldBeNil)
						So(f.Val, ShouldEqual, 10) // still gets 10

						return nil
					}, nil)
					So(err.Error(), ShouldContainSubstring, "concurrent")

					f := &Foo{Id: 1}
					So(ds.Get(f), ShouldBeNil)
					So(f.Val, ShouldEqual, 11)
				})

				Convey("Reusing a transaction context is bad news", func() {
					txnDS := dsS.Interface(nil)
					err := ds.RunInTransaction(func(c context.Context) error {
						txnDS = dsS.Get(c)
						So(txnDS.Get(f), ShouldBeNil)
						return nil
					}, nil)
					So(err, ShouldBeNil)
					So(txnDS.Get(f).Error(), ShouldContainSubstring, "expired")
				})

				Convey("Nested transactions are rejected", func() {
					err := ds.RunInTransaction(func(c context.Context) error {
						err := dsS.Get(c).RunInTransaction(func(c context.Context) error {
							panic("noooo")
						}, nil)
						So(err.Error(), ShouldContainSubstring, "nested transactions")
						return nil
					}, nil)
					So(err, ShouldBeNil)
				})

				Convey("Concurrent transactions only accept one set of changes", func() {
					// Note: I think this implementation is actually /slightly/ wrong.
					// According to my read of the docs for appengine, when you open a
					// transaction it actually (essentially) holds a reference to the
					// entire datastore. Our implementation takes a snapshot of the
					// entity group as soon as something observes/affects it.
					//
					// That said... I'm not sure if there's really a semantic difference.
					err := ds.RunInTransaction(func(c context.Context) error {
						So(dsS.Get(c).Put(&Foo{Id: 1, Val: 21}), ShouldBeNil)

						err := ds.RunInTransaction(func(c context.Context) error {
							So(dsS.Get(c).Put(&Foo{Id: 1, Val: 27}), ShouldBeNil)
							return nil
						}, nil)
						So(err, ShouldBeNil)

						return nil
					}, nil)
					So(err.Error(), ShouldContainSubstring, "concurrent")

					f := &Foo{Id: 1}
					So(ds.Get(f), ShouldBeNil)
					So(f.Val, ShouldEqual, 27)
				})

				Convey("XG", func() {
					Convey("Modifying two groups with XG=false is invalid", func() {
						err := ds.RunInTransaction(func(c context.Context) error {
							ds := dsS.Get(c)
							f := &Foo{Id: 1, Val: 200}
							So(ds.Put(f), ShouldBeNil)

							f.Id = 2
							err := ds.Put(f)
							So(err.Error(), ShouldContainSubstring, "cross-group")
							return err
						}, nil)
						So(err.Error(), ShouldContainSubstring, "cross-group")
					})

					Convey("Modifying >25 groups with XG=true is invalid", func() {
						err := ds.RunInTransaction(func(c context.Context) error {
							ds := dsS.Get(c)
							foos := make([]Foo, 25)
							for i := int64(1); i < 26; i++ {
								foos[i-1].Id = i
								foos[i-1].Val = 200
							}
							So(ds.PutMulti(foos), ShouldBeNil)

							err := ds.Put(&Foo{Id: 26})
							So(err.Error(), ShouldContainSubstring, "too many entity groups")
							return err
						}, &dsS.TransactionOptions{XG: true})
						So(err.Error(), ShouldContainSubstring, "too many entity groups")
					})
				})

				Convey("Errors and panics", func() {
					Convey("returning an error aborts", func() {
						err := ds.RunInTransaction(func(c context.Context) error {
							ds := dsS.Get(c)
							So(ds.Put(&Foo{Id: 1, Val: 200}), ShouldBeNil)
							return fmt.Errorf("thingy")
						}, nil)
						So(err.Error(), ShouldEqual, "thingy")

						f := &Foo{Id: 1}
						So(ds.Get(f), ShouldBeNil)
						So(f.Val, ShouldEqual, 10)
					})

					Convey("panicking aborts", func() {
						So(func() {
							ds.RunInTransaction(func(c context.Context) error {
								ds := dsS.Get(c)
								So(ds.Put(&Foo{Val: 200}), ShouldBeNil)
								panic("wheeeeee")
							}, nil)
						}, ShouldPanic)

						f := &Foo{Id: 1}
						So(ds.Get(f), ShouldBeNil)
						So(f.Val, ShouldEqual, 10)
					})
				})
			})
		})
	})
}
func TestCount(t *testing.T) {
	t.Parallel()

	Convey("Test Count filter", t, func() {
		c, fb := featureBreaker.FilterRDS(memory.Use(context.Background()), nil)
		c, ctr := FilterRDS(c)

		So(c, ShouldNotBeNil)
		So(ctr, ShouldNotBeNil)

		ds := datastore.Get(c)
		vals := []datastore.PropertyMap{{
			"Val":  {datastore.MkProperty(100)},
			"$key": {datastore.MkPropertyNI(ds.NewKey("Kind", "", 1, nil))},
		}}

		Convey("Calling a ds function should reflect in counter", func() {
			So(ds.PutMulti(vals), ShouldBeNil)
			So(ctr.NewKey.Successes(), ShouldEqual, 1)
			So(ctr.PutMulti.Successes(), ShouldEqual, 1)

			Convey("effects are cumulative", func() {
				So(ds.PutMulti(vals), ShouldBeNil)
				So(ctr.PutMulti.Successes(), ShouldEqual, 2)

				Convey("even within transactions", func() {
					ds.RunInTransaction(func(c context.Context) error {
						ds := datastore.Get(c)
						So(ds.PutMulti(append(vals, vals[0])), ShouldBeNil)
						return nil
					}, nil)
				})
			})
		})

		Convey("errors count against errors", func() {
			fb.BreakFeatures(nil, "GetMulti")

			ds.GetMulti(vals)
			So(ctr.GetMulti.Errors(), ShouldEqual, 1)

			fb.UnbreakFeatures("GetMulti")

			So(ds.PutMulti(vals), ShouldBeNil)

			ds.GetMulti(vals)
			So(ctr.GetMulti.Errors(), ShouldEqual, 1)
			So(ctr.GetMulti.Successes(), ShouldEqual, 1)
			So(ctr.GetMulti.Total(), ShouldEqual, 2)
		})
	})

	Convey("works for memcache", t, func() {
		c, ctr := FilterMC(memory.Use(context.Background()))
		So(c, ShouldNotBeNil)
		So(ctr, ShouldNotBeNil)
		mc := memcache.Get(c)

		mc.Set(mc.NewItem("hello").SetValue([]byte("sup")))
		So(mc.Get(mc.NewItem("Wat")), ShouldNotBeNil)
		mc.Get(mc.NewItem("hello"))

		So(ctr.SetMulti, shouldHaveSuccessesAndErrors, 1, 0)
		So(ctr.GetMulti, shouldHaveSuccessesAndErrors, 2, 0)
		So(ctr.NewItem, shouldHaveSuccessesAndErrors, 3, 0)
	})

	Convey("works for taskqueue", t, func() {
		c, ctr := FilterTQ(memory.Use(context.Background()))
		So(c, ShouldNotBeNil)
		So(ctr, ShouldNotBeNil)
		tq := taskqueue.Get(c)

		tq.Add(&taskqueue.Task{Name: "wat"}, "")
		tq.Add(&taskqueue.Task{Name: "wat"}, "DNE_QUEUE")

		So(ctr.AddMulti, shouldHaveSuccessesAndErrors, 1, 1)
	})

	Convey("works for global info", t, func() {
		c, fb := featureBreaker.FilterGI(memory.Use(context.Background()), nil)
		c, ctr := FilterGI(c)
		So(c, ShouldNotBeNil)
		So(ctr, ShouldNotBeNil)

		gi := info.Get(c)

		gi.Namespace("foo")
		fb.BreakFeatures(nil, "Namespace")
		gi.Namespace("boom")

		So(ctr.Namespace, shouldHaveSuccessesAndErrors, 1, 1)
	})
}