Exemplo n.º 1
0
// TestResendChunks verifies that a core.ResendRequest survives a round trip
// through chunk serialization: one chunk when the budget is large, several
// chunks when it is small, and an error for malformed input.
func TestResendChunks(t *testing.T) {
	req := core.ResendRequest{
		10:   []core.SequenceId{5, 6, 7, 8, 9},
		100:  []core.SequenceId{1},
		2500: []core.SequenceId{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18},
	}
	Convey("The data that comes out of a resend chunk is the same as the data that went into it.", t, func() {
		// A generous size keeps the whole request in a single chunk.
		var cfg core.Config
		cfg.MaxChunkDataSize = 10000
		chunks := core.MakeResendChunkDatas(&cfg, req)
		So(len(chunks), ShouldEqual, 1)
		parsed, err := core.ParseResendChunkData(chunks[0])
		So(err, ShouldBeNil)
		So(parsed, ShouldResemble, req)
	})
	Convey("Resend data can be split across multiple chunks.", t, func() {
		// A tiny size forces the request to span several chunks.
		var cfg core.Config
		cfg.MaxChunkDataSize = 20
		chunks := core.MakeResendChunkDatas(&cfg, req)
		So(len(chunks), ShouldBeGreaterThan, 1)
		// Parsing each chunk and merging the pieces must reconstruct the
		// original request exactly.
		merged := make(core.ResendRequest)
		for _, chunk := range chunks {
			parsed, err := core.ParseResendChunkData(chunk)
			So(err, ShouldBeNil)
			mergeResendData(merged, parsed)
		}
		So(merged, ShouldResemble, req)
	})
	Convey("Malformed resend chunks return errors.", t, func() {
		_, err := core.ParseResendChunkData([]byte{1})
		So(err, ShouldNotBeNil)
	})
}
Exemplo n.º 2
0
// TestPositionChunks verifies that a core.PositionUpdate survives a round
// trip through chunk serialization, whether it fits in one chunk or must be
// split, and that malformed chunk data is rejected.
func TestPositionChunks(t *testing.T) {
	// Streams 1..9, each mapped to sequence id stream+1.
	req := core.PositionUpdate{}
	for s := 1; s < 10; s++ {
		req[core.StreamId(s)] = core.SequenceId(s + 1)
	}
	Convey("The data that comes out of a position chunk is the same as the data that went into it.", t, func() {
		// A generous size keeps everything in a single chunk.
		var cfg core.Config
		cfg.MaxChunkDataSize = 10000
		chunks := core.MakePositionChunkDatas(&cfg, req)
		So(len(chunks), ShouldEqual, 1)
		parsed, err := core.ParsePositionChunkData(chunks[0])
		So(err, ShouldBeNil)
		So(parsed, ShouldResemble, req)
	})
	Convey("Position data can be split across multiple chunks.", t, func() {
		// A tiny size forces the update to span several chunks.
		var cfg core.Config
		cfg.MaxChunkDataSize = 20
		chunks := core.MakePositionChunkDatas(&cfg, req)
		So(len(chunks), ShouldBeGreaterThan, 1)
		// Merge all parsed chunks; no stream may appear twice, and the
		// merged result must equal the original update.
		merged := make(core.PositionUpdate)
		for _, chunk := range chunks {
			parsed, err := core.ParsePositionChunkData(chunk)
			So(err, ShouldBeNil)
			for stream, seq := range parsed {
				_, dup := merged[stream]
				So(dup, ShouldBeFalse)
				merged[stream] = seq
			}
		}
		So(merged, ShouldResemble, req)
	})
	Convey("Malformed position chunks return errors.", t, func() {
		_, err := core.ParsePositionChunkData([]byte{1})
		So(err, ShouldNotBeNil)
	})
}
Exemplo n.º 3
0
// TestSequenceTracker exercises core.SequenceTracker: id accessors,
// containment below the contiguous watermark, tracking of scattered
// sequence ids, compaction, and chunk serialization round trips.
//
// BUG FIX: this function was named SequenceTrackerTest, which `go test`
// does not recognize as a test (test functions must be named TestXxx), so
// it silently never ran.  Renamed to TestSequenceTracker.
//
// NOTE: `st` is created once and mutated by each top-level Convey block in
// order, so later blocks see the sequence ids added by earlier ones.
func TestSequenceTracker(t *testing.T) {
	st := core.MakeSequenceTracker(2345, 77, 10)
	Convey("It remembers its own stream and node ids.", t, func() {
		So(st.StreamId(), ShouldEqual, 2345)
		So(st.NodeId(), ShouldEqual, 77)
	})
	Convey("Everything under maxContiguous is contained by the tracker.", t, func() {
		So(st.StreamId(), ShouldEqual, core.StreamId(2345))
		So(st.Contains(1), ShouldBeTrue)
		So(st.Contains(2), ShouldBeTrue)
		So(st.Contains(4), ShouldBeTrue)
		So(st.Contains(8), ShouldBeTrue)
		So(st.Contains(10), ShouldBeTrue)
		So(st.Contains(11), ShouldBeTrue)
		So(st.Contains(14), ShouldBeTrue)
		So(st.Contains(17), ShouldBeTrue)
		So(st.Contains(100), ShouldBeTrue)
		So(st.Contains(1000), ShouldBeTrue)
	})

	Convey("Scattered sequence ids are identified.", t, func() {
		st.AddSequenceId(12)
		st.AddSequenceId(13)
		st.AddSequenceId(14)
		st.AddSequenceId(20)
		st.AddSequenceId(22)
		// Expected containment for each sequence id above the watermark.
		sids := map[core.SequenceId]bool{
			10: false,
			11: false,
			12: true,
			13: true,
			14: true,
			15: false,
			16: false,
			17: false,
			18: false,
			19: false,
			20: true,
			21: false,
			22: true,
			23: false,
			24: false,
		}
		So(st.StreamId(), ShouldEqual, core.StreamId(2345))
		verifySequenceTracker(st, 9, sids)
		Convey("and chunkification/dechunkification works", func() {
			// A tiny chunk size forces the tracker to serialize into
			// several chunks; each chunk parses into its own tracker.
			var config core.Config
			config.MaxChunkDataSize = 10
			datas := core.MakeSequenceTrackerChunkDatas(&config, st)
			So(len(datas), ShouldBeGreaterThan, 1)
			var sts []*core.SequenceTracker
			for _, data := range datas {
				st, err := core.ParseSequenceTrackerChunkData(data)
				So(err, ShouldBeNil)
				sts = append(sts, st)
			}

			// All trackers should agree on MaxContiguousSequence.
			So(st.MaxContiguousSequence(), ShouldEqual, 10)
			for i := range sts {
				So(sts[i].MaxContiguousSequence(), ShouldEqual, 10)
			}

			// For each scattered sequence id, at least one tracker should have it.  For sequences
			// not contained in the set, none should have it.
			for sequence, has := range sids {
				found := false
				for _, st := range sts {
					found = found || st.Contains(sequence)
				}
				So(found, ShouldEqual, has)
			}
		})
	})

	Convey("Serialization is correct after compaction.", t, func() {
		st.AddSequenceId(12)
		st.AddSequenceId(14)
		st.AddSequenceId(15)
		st.AddSequenceId(10)
		st.AddSequenceId(11) // Should compact up to 11
		st.AddSequenceId(13) // Should compact up to 15
		sids := map[core.SequenceId]bool{
			10: true,
			11: true,
			12: true,
			13: true,
			14: true,
			15: true,
			16: false,
			17: false,
			18: false,
		}
		verifySequenceTracker(st, 15, sids)

		Convey("and chunkification/dechunkification works", func() {
			var config core.Config
			config.MaxChunkDataSize = 10
			datas := core.MakeSequenceTrackerChunkDatas(&config, st)
			So(len(datas), ShouldBeGreaterThan, 1)
			var sts []*core.SequenceTracker
			for _, data := range datas {
				st, err := core.ParseSequenceTrackerChunkData(data)
				So(err, ShouldBeNil)
				sts = append(sts, st)
			}

			// All trackers should agree on MaxContiguousSequence.
			// NOTE(review): 10 looks like a copy-paste from the previous
			// Convey block — after compaction up to 15 (see
			// verifySequenceTracker(st, 15, ...) above) the expected value
			// is presumably 15 or 16, depending on the off-by-one
			// convention (the uncompacted block pairs verify arg 9 with
			// MaxContiguousSequence 10).  Confirm once this test runs.
			So(st.MaxContiguousSequence(), ShouldEqual, 10)
			for i := range sts {
				So(sts[i].MaxContiguousSequence(), ShouldEqual, 10)
			}

			// For each scattered sequence id, at least one tracker should have it.  For sequences
			// not contained in the set, none should have it.
			for sequence, has := range sids {
				found := false
				for _, st := range sts {
					found = found || st.Contains(sequence)
				}
				So(found, ShouldEqual, has)
			}
		})
	})
}