Code Example #1
File: batch_cache_test.go  Project: drborges/appx
func TestBatchCacheLoader(t *testing.T) {
	Convey("Given I have an empty batch of size 2", t, func() {
		batch := &appx.MemcacheLoadBatch{Size: 2}
		So(batch.Empty(), ShouldBeTrue)
		So(batch.Full(), ShouldBeFalse)

		Convey("When I add an entity to the batch", func() {
			batch.Add(NewUserWithFakeKey(User{Name: "borges"}))

			Convey("Then the batch is no longer empty", func() {
				So(batch.Empty(), ShouldBeFalse)

				Convey("And it is not yet full", func() {
					So(batch.Full(), ShouldBeFalse)
				})
			})
		})

		Convey("When I add enough entities", func() {
			batch.Add(NewUserWithFakeKey(User{Name: "borges"}))
			batch.Add(NewUserWithFakeKey(User{Name: "diego"}))

			Convey("Then the batch is full", func() {
				So(batch.Full(), ShouldBeTrue)
			})
		})

		Convey("When I commit the batch", func() {
			in, out := stream.New(1)

			entity1 := NewUserWithFakeKey(User{
				Name: "entity1",
				SSN:  "123123",
			})

			entity2 := NewUserWithFakeKey(User{
				Name: "entity2",
				SSN:  "321321",
			})

			batch.Add(entity1)
			batch.Add(entity2)
			batch.Commit(stream.NewEmitter(rivers.NewContext(), out))
			close(out)

			Convey("Then a copy of the batch is sent to the output stream", func() {
				committedBatch := (<-in).(*appx.MemcacheLoadBatch)
				So(committedBatch.Size, ShouldEqual, 2)
				So(committedBatch.Keys[0], ShouldEqual, entity1.CacheID())
				So(committedBatch.Keys[1], ShouldEqual, entity2.CacheID())
				So(committedBatch.Items[entity1.CacheID()], ShouldResemble, &appx.CachedEntity{Entity: entity1})
				So(committedBatch.Items[entity2.CacheID()], ShouldResemble, &appx.CachedEntity{Entity: entity2})

				Convey("And the batch is now empty", func() {
					So(batch.Empty(), ShouldBeTrue)
				})
			})
		})
	})
}
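
The test above exercises a fixed-size batch: Add accumulates entities, Full reports when the size limit is reached, and Commit sends a copy of the batch downstream and resets it. Below is a minimal sketch of that idea in plain Go; the names are illustrative and not the appx API.

package main

import "fmt"

// batch is a minimal sketch of the fixed-size batching behavior asserted by
// the test above: Add accumulates items, Full reports when the size limit is
// hit, and Commit hands a copy of the items downstream and empties the batch.
type batch struct {
	size  int
	items []string
}

func (b *batch) Empty() bool     { return len(b.items) == 0 }
func (b *batch) Full() bool      { return len(b.items) >= b.size }
func (b *batch) Add(item string) { b.items = append(b.items, item) }

// Commit sends a copy of the current items to out and resets the batch.
func (b *batch) Commit(out chan<- []string) {
	out <- append([]string(nil), b.items...)
	b.items = nil
}

func main() {
	out := make(chan []string, 1)
	b := &batch{size: 2}
	b.Add("entity1")
	b.Add("entity2")
	fmt.Println(b.Full()) // true
	b.Commit(out)
	fmt.Println(b.Empty(), <-out) // true [entity1 entity2]
}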
Code Example #2
File: observable.go  Project: drborges/rivers
func (observable *Observable) Produce() stream.Readable {
	// Fall back to a default buffer size when no capacity was configured.
	if observable.Capacity <= 0 {
		observable.Capacity = 10
	}
	readable, writable := stream.New(observable.Capacity)

	go func() {
		defer observable.context.Recover()
		// Always close the writable side so downstream readers can finish.
		defer close(writable)

		if observable.Emit != nil {
			observable.Emit(stream.NewEmitter(observable.context, writable))
		}
	}()

	return readable
}
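
Produce backs the Observable with a buffered stream: a goroutine emits values through the writable side and always closes it so readers can finish. A minimal sketch of the same producer pattern with a plain Go channel (the names are illustrative, not the rivers API):

package main

import "fmt"

// produceInts mirrors the producer shape of Observable.Produce with a plain
// buffered channel: spawn a goroutine, emit values, and always close the
// channel so consumers can detect completion.
func produceInts(capacity int) <-chan int {
	if capacity <= 0 {
		capacity = 10 // same default buffer size as in Produce
	}
	out := make(chan int, capacity)
	go func() {
		defer close(out)
		for i := 1; i <= 3; i++ {
			out <- i
		}
	}()
	return out
}

func main() {
	for v := range produceInts(0) {
		fmt.Println(v) // 1, 2, 3
	}
}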
Code Example #3
File: observer.go  Project: drborges/rivers
func (observer *Observer) Transform(in stream.Readable) stream.Readable {
	readable, writable := stream.New(in.Capacity())
	emitter := stream.NewEmitter(observer.context, writable)

	go func() {
		defer observer.context.Recover()
		defer close(writable)

		for {
			select {
			case <-observer.context.Failure():
				// Stop consuming if an upstream stage has failed.
				return
			case <-observer.context.Done():
				// Stop consuming if the pipeline was cancelled.
				return
			case <-time.After(observer.context.Deadline()):
				panic(stream.Timeout)
			default:
				data, more := <-in
				if !more {
					// Upstream closed the stream; signal completion if a callback is set.
					if observer.OnCompleted != nil {
						observer.OnCompleted(emitter)
					}
					return
				}

				if observer.OnNext == nil {
					continue
				}

				if err := observer.OnNext(data, emitter); err != nil {
					if err == stream.Done {
						// Tell the producer to shut down without errors.
						observer.context.Close(nil)
						return
					}
					panic(err)
				}
			}
		}
	}()

	return readable
}
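
Transform runs the mirror-image consumer loop: it reads from the input stream until it is closed or the context is cancelled, forwards each item through OnNext, and stops early when the callback returns stream.Done. A minimal sketch of that loop with plain Go channels (the done channel and callback stand in for the rivers context and OnNext; all names are illustrative):

package main

import (
	"fmt"
	"time"
)

// transform mirrors the Observer.Transform loop in plain Go: read from in
// until it is closed or done fires, apply next to each item, and forward the
// result; a false second return from next stops the loop, like stream.Done.
func transform(in <-chan int, done <-chan struct{}, deadline time.Duration, next func(int) (int, bool)) <-chan int {
	out := make(chan int, cap(in))
	go func() {
		defer close(out)
		for {
			select {
			case <-done:
				return // pipeline cancelled
			case <-time.After(deadline):
				return // no progress before the deadline
			case v, more := <-in:
				if !more {
					return // upstream finished
				}
				if r, ok := next(v); ok {
					out <- r
				} else {
					return // the callback asked to stop
				}
			}
		}
	}()
	return out
}

func main() {
	in := make(chan int, 3)
	in <- 1
	in <- 2
	in <- 3
	close(in)

	double := func(v int) (int, bool) { return v * 2, true }
	for v := range transform(in, nil, time.Second, double) {
		fmt.Println(v) // 2, 4, 6
	}
}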
Code Example #4
func TestLoadBatchFromCache(t *testing.T) {
	gaeCtx, err := aetest.NewContext(nil)
	if err != nil {
		t.Fatal(err)
	}
	defer gaeCtx.Close()

	Convey("Given I have a load batch from cache transformer", t, func() {
		riversCtx := rivers.NewContext()
		loadBatchProcessor := appx.NewStep(riversCtx).LoadBatchFromCache(gaeCtx)

		Convey("And I have a few entities in the cache", func() {
			user1 := NewUser(User{
				Name:  "Borges",
				Email: "*****@*****.**",
				SSN:   "123123123",
			})

			user2 := NewUser(User{
				Name:  "Diego",
				Email: "*****@*****.**",
				SSN:   "321321",
			})

			appx.NewKeyResolver(gaeCtx).Resolve(user1)
			appx.NewKeyResolver(gaeCtx).Resolve(user2)

			memcache.JSON.Set(gaeCtx, &memcache.Item{
				Key: user1.CacheID(),
				Object: appx.CachedEntity{
					Entity: user1,
					Key:    user1.Key(),
				},
			})

			memcache.JSON.Set(gaeCtx, &memcache.Item{
				Key: user2.CacheID(),
				Object: appx.CachedEntity{
					Entity: user2,
					Key:    user2.Key(),
				},
			})

			Convey("When I transform the incoming batch", func() {
				notCachedUser := NewUser(User{
					Name: "not cached",
					SSN:  "notcached",
				})

				userFromCache1 := NewUser(User{Name: user1.Name})
				userFromCache2 := NewUser(User{Name: user2.Name})

				batchItems := make(map[string]*appx.CachedEntity)
				batchItems[user1.CacheID()] = &appx.CachedEntity{
					Entity: userFromCache1,
				}
				batchItems[user2.CacheID()] = &appx.CachedEntity{
					Entity: userFromCache2,
				}
				batchItems[notCachedUser.CacheID()] = &appx.CachedEntity{
					Entity: notCachedUser,
				}

				batch := &appx.MemcacheLoadBatch{
					Keys:  []string{user1.CacheID(), user2.CacheID()},
					Items: batchItems,
				}

				in, out := stream.New(1)
				loadBatchProcessor(batch, stream.NewEmitter(rivers.NewContext(), out))
				close(out)

				Convey("Then cache misses are sent downstream", func() {
					So(in.ReadAll(), ShouldResemble, []stream.T{notCachedUser})

					Convey("And entities are loaded from cache", func() {
						So(userFromCache1, ShouldResemble, user1)
						So(userFromCache2, ShouldResemble, user2)
					})
				})
			})
		})
	})
}
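
The test above asserts the cache-loading contract: entities found in memcache are loaded into the placeholders held by the batch, while cache misses are emitted downstream for a later datastore load. A minimal sketch of that hit/miss split in plain Go, with a map standing in for memcache (names are illustrative, not the appx API):

package main

import "fmt"

// loadFromCache mirrors the behavior asserted above: for each requested key,
// hits are copied into the caller's placeholder and misses are forwarded
// downstream so a later stage can load them from the datastore.
func loadFromCache(cache map[string]string, requested map[string]*string, misses chan<- string) {
	for key, placeholder := range requested {
		if value, ok := cache[key]; ok {
			*placeholder = value // cache hit: fill the placeholder in place
			continue
		}
		misses <- key // cache miss: send the key downstream
	}
}

func main() {
	cache := map[string]string{"user:1": "Borges", "user:2": "Diego"}

	var u1, u2, u3 string
	requested := map[string]*string{"user:1": &u1, "user:2": &u2, "user:3": &u3}

	misses := make(chan string, len(requested))
	loadFromCache(cache, requested, misses)
	close(misses)

	fmt.Println(u1, u2) // Borges Diego
	for key := range misses {
		fmt.Println("miss:", key) // miss: user:3
	}
}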