func main() { chunks.RegisterLevelDBFlags(flag.CommandLine) dynFlags := chunks.DynamoFlags("") flag.Usage = usage flag.Parse(true) if *portFlag == 0 || *authKeyFlag == "" { usage() return } var factory chunks.Factory if factory = dynFlags.CreateFactory(); factory != nil { fmt.Printf("Using dynamo ...\n") } else if *ldbDir != "" { factory = chunks.NewLevelDBStoreFactoryUseFlags(*ldbDir) fmt.Printf("Using leveldb ...\n") } else { factory = chunks.NewMemoryStoreFactory() fmt.Printf("Using mem ...\n") } factory = &cachingReadThroughStoreFactory{chunks.NewMemoryStore(), factory} defer factory.Shutter() startWebServer(factory, *authKeyFlag) }
// TestTwoClientsWithEmptyDataset exercises optimistic-concurrency commits:
// two Dataset handles over the same empty store race to commit; the loser
// gets an error but its handle is refreshed, so an immediate retry succeeds.
func TestTwoClientsWithEmptyDataset(t *testing.T) {
	assert := assert.New(t)
	id1 := "testdataset"
	cs := chunks.NewMemoryStore()
	dsx := newDS(id1, cs)
	dsy := newDS(id1, cs)

	// dsx: || -> |a|
	a := types.String("a")
	dsx, err := dsx.CommitValue(a)
	assert.NoError(err)
	assert.True(dsx.Head().Get(datas.ValueField).Equals(a))

	// dsy: || -> |b|  (conflicts: dsx already moved the head to |a|)
	_, ok := dsy.MaybeHead()
	assert.False(ok)
	b := types.String("b")
	dsy, err = dsy.CommitValue(b)
	assert.Error(err)
	// Commit failed, but dsy now has latest head, so we should be able to just try again.
	// dsy: |a| -> |b|
	dsy, err = dsy.CommitValue(b)
	assert.NoError(err)
	assert.True(dsy.Head().Get(datas.ValueField).Equals(b))
}
func TestReadToMap(t *testing.T) { assert := assert.New(t) ds := datas.NewDatabase(chunks.NewMemoryStore()) dataString := `a,1,true b,2,false ` r := NewCSVReader(bytes.NewBufferString(dataString), ',') headers := []string{"A", "B", "C"} kinds := KindSlice{types.StringKind, types.NumberKind, types.BoolKind} m := ReadToMap(r, headers, 0, kinds, ds) assert.Equal(uint64(2), m.Len()) assert.True(m.Type().Equals( types.MakeMapType(types.StringType, types.MakeStructType("", map[string]*types.Type{ "B": types.NumberType, "C": types.BoolType, })))) assert.True(m.Get(types.String("a")).Equals(types.NewStruct("", map[string]types.Value{ "B": types.Number(1), "C": types.Bool(true), }))) assert.True(m.Get(types.String("b")).Equals(types.NewStruct("", map[string]types.Value{ "B": types.Number(2), "C": types.Bool(false), }))) }
func TestHeadValueFunctions(t *testing.T) { assert := assert.New(t) id1 := "testdataset" id2 := "otherdataset" cs := chunks.NewMemoryStore() ds1 := newDS(id1, cs) // ds1: |a| a := types.String("a") ds1, err := ds1.CommitValue(a) assert.NoError(err) hv := ds1.Head().Get(datas.ValueField) assert.Equal(a, hv) assert.Equal(a, ds1.HeadValue()) hv, ok := ds1.MaybeHeadValue() assert.True(ok) assert.Equal(a, hv) ds2 := newDS(id2, cs) assert.Panics(func() { ds2.HeadValue() }) _, ok = ds2.MaybeHeadValue() assert.False(ok) }
func TestReadToList(t *testing.T) { assert := assert.New(t) ds := datas.NewDatabase(chunks.NewMemoryStore()) dataString := `a,1,true b,2,false ` r := NewCSVReader(bytes.NewBufferString(dataString), ',') headers := []string{"A", "B", "C"} kinds := KindSlice{types.StringKind, types.NumberKind, types.BoolKind} l, typ := ReadToList(r, "test", headers, kinds, ds) assert.Equal(uint64(2), l.Len()) assert.Equal(types.StructKind, typ.Kind()) desc, ok := typ.Desc.(types.StructDesc) assert.True(ok) assert.Equal(desc.Len(), 3) assert.Equal(types.StringKind, desc.Field("A").Kind()) assert.Equal(types.NumberKind, desc.Field("B").Kind()) assert.Equal(types.BoolKind, desc.Field("C").Kind()) assert.True(l.Get(0).(types.Struct).Get("A").Equals(types.String("a"))) assert.True(l.Get(1).(types.Struct).Get("A").Equals(types.String("b"))) assert.True(l.Get(0).(types.Struct).Get("B").Equals(types.Number(1))) assert.True(l.Get(1).(types.Struct).Get("B").Equals(types.Number(2))) assert.True(l.Get(0).(types.Struct).Get("C").Equals(types.Bool(true))) assert.True(l.Get(1).(types.Struct).Get("C").Equals(types.Bool(false))) }
func TestHandleWriteValueBackpressure(t *testing.T) { assert := assert.New(t) cs := &backpressureCS{ChunkStore: chunks.NewMemoryStore()} ds := NewDatabase(cs) l := types.NewList( ds.WriteValue(types.Bool(true)), ds.WriteValue(types.Bool(false)), ) ds.WriteValue(l) hint := l.Hash() newItem := types.NewEmptyBlob() itemChunk := types.EncodeValue(newItem, nil) l2 := l.Insert(1, types.NewRef(newItem)) listChunk := types.EncodeValue(l2, nil) body := &bytes.Buffer{} serializeHints(body, map[hash.Hash]struct{}{hint: struct{}{}}) sz := chunks.NewSerializer(body) sz.Put(itemChunk) sz.Put(listChunk) sz.Close() w := httptest.NewRecorder() HandleWriteValue(w, &http.Request{Body: ioutil.NopCloser(body), Method: "POST"}, params{}, cs) if assert.Equal(httpStatusTooManyRequests, w.Code, "Handler error:\n%s", string(w.Body.Bytes())) { hashes := deserializeHashes(w.Body) assert.Len(hashes, 1) assert.Equal(l2.Hash(), hashes[0]) } }
func TestWriteValue(t *testing.T) { assert := assert.New(t) factory := chunks.NewMemoryStoreFactory() defer factory.Shutter() router = setupWebServer(factory) defer func() { router = nil }() testString := "Now, what?" authKey = "anauthkeyvalue" w := httptest.NewRecorder() r, err := newRequest("GET", dbName+constants.RootPath, nil) assert.NoError(err) router.ServeHTTP(w, r) lastRoot := w.Body assert.Equal(http.StatusOK, w.Code) tval := types.Bool(true) wval := types.String(testString) chunk1 := types.EncodeValue(tval, nil) chunk2 := types.EncodeValue(wval, nil) refList := types.NewList(types.NewRef(tval), types.NewRef(wval)) chunk3 := types.EncodeValue(refList, nil) body := &bytes.Buffer{} // we would use this func, but it's private so use next line instead: serializeHints(body, map[ref.Ref]struct{}{hint: struct{}{}}) err = binary.Write(body, binary.BigEndian, uint32(0)) assert.NoError(err) chunks.Serialize(chunk1, body) chunks.Serialize(chunk2, body) chunks.Serialize(chunk3, body) w = httptest.NewRecorder() r, err = newRequest("POST", dbName+constants.WriteValuePath+"?access_token="+authKey, ioutil.NopCloser(body)) assert.NoError(err) router.ServeHTTP(w, r) assert.Equal(http.StatusCreated, w.Code) w = httptest.NewRecorder() args := fmt.Sprintf("&last=%s¤t=%s", lastRoot, types.NewRef(refList).TargetHash()) r, _ = newRequest("POST", dbName+constants.RootPath+"?access_token="+authKey+args, ioutil.NopCloser(body)) router.ServeHTTP(w, r) assert.Equal(http.StatusOK, w.Code) whash := wval.Hash() hints := map[hash.Hash]struct{}{whash: struct{}{}} rdr := buildGetRefsRequestBody(hints) r, _ = newRequest("POST", dbName+constants.GetRefsPath, rdr) r.Header.Add("Content-Type", "application/x-www-form-urlencoded") router.ServeHTTP(w, r) assert.Equal(http.StatusOK, w.Code) ms := chunks.NewMemoryStore() chunks.Deserialize(w.Body, ms, nil) v := types.DecodeValue(ms.Get(whash), datas.NewDatabase(ms)) assert.Equal(testString, string(v.(types.String))) }
func TestDuplicateHeaderName(t *testing.T) { assert := assert.New(t) ds := datas.NewDatabase(chunks.NewMemoryStore()) dataString := "1,2\n3,4\n" r := NewCSVReader(bytes.NewBufferString(dataString), ',') headers := []string{"A", "A"} kinds := KindSlice{types.StringKind, types.StringKind} assert.Panics(func() { ReadToList(r, "test", headers, kinds, ds) }) }
func TestIdValidation(t *testing.T) { assert := assert.New(t) store := datas.NewDatabase(chunks.NewMemoryStore()) invalidDatasetNames := []string{" ", "", "a ", " a", "$", "#", ":", "\n", "💩"} for _, id := range invalidDatasetNames { assert.Panics(func() { NewDataset(store, id) }) } }
func TestEscapeFieldNames(t *testing.T) { assert := assert.New(t) ds := datas.NewDatabase(chunks.NewMemoryStore()) dataString := "1\n" r := NewCSVReader(bytes.NewBufferString(dataString), ',') headers := []string{"A A"} kinds := KindSlice{types.NumberKind} l, _ := ReadToList(r, "test", headers, kinds, ds) assert.Equal(uint64(1), l.Len()) assert.Equal(types.Number(1), l.Get(0).(types.Struct).Get(types.EscapeStructField("A A"))) }
func (spec DatabaseSpec) ChunkStore() (cs chunks.ChunkStore, err error) { switch spec.Protocol { case "ldb": cs = chunks.NewLevelDBStoreUseFlags(spec.Path, "") case "mem": cs = chunks.NewMemoryStore() default: return nil, fmt.Errorf("Unable to create chunkstore for protocol: %s", spec) } return }
func (spec DatabaseSpec) Database() (ds datas.Database, err error) { switch spec.Protocol { case "http", "https": ds = datas.NewRemoteDatabase(spec.String(), "Bearer "+spec.accessToken) case "ldb": ds = datas.NewDatabase(chunks.NewLevelDBStoreUseFlags(spec.Path, "")) case "mem": ds = datas.NewDatabase(chunks.NewMemoryStore()) default: err = fmt.Errorf("Invalid path prototocol: %s", spec.Protocol) } return }
func GetChunkStore(str string) (chunks.ChunkStore, error) { sp, err := parseDatabaseSpec(str) if err != nil { return nil, err } switch sp.Protocol { case "ldb": return getLDBStore(sp.Path), nil case "mem": return chunks.NewMemoryStore(), nil default: return nil, fmt.Errorf("Unable to create chunkstore for protocol: %s", str) } }
// TestAbsolutePaths covers AbsolutePath resolution against both a named
// dataset head ("ds...") and raw hashes ("#<hash>..."), including paths
// that must resolve to nothing.
func TestAbsolutePaths(t *testing.T) {
	assert := assert.New(t)

	s0, s1 := types.String("foo"), types.String("bar")
	list := types.NewList(s0, s1)
	emptySet := types.NewSet()

	db := datas.NewDatabase(chunks.NewMemoryStore())
	db.WriteValue(s0)
	db.WriteValue(s1)
	db.WriteValue(list)
	db.WriteValue(emptySet)

	var err error
	db, err = db.Commit("ds", datas.NewCommit(list, types.NewSet(), types.EmptyStruct))
	assert.NoError(err)
	head := db.Head("ds")

	// resolvesTo asserts that str resolves to exp (or to nothing when exp
	// is nil), printing the encoded values on mismatch.
	resolvesTo := func(exp types.Value, str string) {
		p, err := NewAbsolutePath(str)
		assert.NoError(err)
		act := p.Resolve(db)
		if exp == nil {
			assert.Nil(act)
		} else {
			assert.True(exp.Equals(act), "%s Expected %s Actual %s", str, types.EncodedValue(exp), types.EncodedValue(act))
		}
	}

	// Dataset-relative paths.
	resolvesTo(head, "ds")
	resolvesTo(emptySet, "ds.parents")
	resolvesTo(list, "ds.value")
	resolvesTo(s0, "ds.value[0]")
	resolvesTo(s1, "ds.value[1]")
	// Hash-anchored paths.
	resolvesTo(head, "#"+head.Hash().String())
	resolvesTo(list, "#"+list.Hash().String())
	resolvesTo(s0, "#"+s0.Hash().String())
	resolvesTo(s1, "#"+s1.Hash().String())
	resolvesTo(s0, "#"+list.Hash().String()+"[0]")
	resolvesTo(s1, "#"+list.Hash().String()+"[1]")
	// Unknown dataset names and unknown hashes resolve to nil.
	resolvesTo(nil, "foo")
	resolvesTo(nil, "foo.parents")
	resolvesTo(nil, "foo.value")
	resolvesTo(nil, "foo.value[0]")
	resolvesTo(nil, "#"+types.String("baz").Hash().String())
	resolvesTo(nil, "#"+types.String("baz").Hash().String()+"[0]")
}
func TestDefaults(t *testing.T) { assert := assert.New(t) ds := datas.NewDatabase(chunks.NewMemoryStore()) dataString := "42,,,\n" r := NewCSVReader(bytes.NewBufferString(dataString), ',') headers := []string{"A", "B", "C", "D"} kinds := KindSlice{types.NumberKind, types.NumberKind, types.BoolKind, types.StringKind} l, _ := ReadToList(r, "test", headers, kinds, ds) assert.Equal(uint64(1), l.Len()) row := l.Get(0).(types.Struct) assert.Equal(types.Number(42), row.Get("A")) assert.Equal(types.Number(0), row.Get("B")) assert.Equal(types.Bool(false), row.Get("C")) assert.Equal(types.String(""), row.Get("D")) }
func TestBooleanStrings(t *testing.T) { assert := assert.New(t) ds := datas.NewDatabase(chunks.NewMemoryStore()) dataString := "true,false\n1,0\ny,n\nY,N\nY,\n" r := NewCSVReader(bytes.NewBufferString(dataString), ',') headers := []string{"T", "F"} kinds := KindSlice{types.BoolKind, types.BoolKind} l, _ := ReadToList(r, "test", headers, kinds, ds) assert.Equal(uint64(5), l.Len()) for i := uint64(0); i < l.Len(); i++ { row := l.Get(i).(types.Struct) assert.True(types.Bool(true).Equals(row.Get("T"))) assert.True(types.Bool(false).Equals(row.Get("F"))) } }
func Run(t *testing.T, s IntegrationSuiteInterface) { s.SetT(t) s.npmInstall() cs := chunks.NewMemoryStore() s.setCs(cs) if s, ok := s.(SetupSuite); ok { s.Setup() } runServer(s, cs) if s, ok := s.(TeardownSuite); ok { s.Teardown() } }
func (spec databaseSpec) Database() (ds datas.Database, err error) { switch spec.Protocol { case "http", "https": err = d.Unwrap(d.Try(func() { ds = datas.NewRemoteDatabase(spec.String(), "Bearer "+spec.accessToken) })) case "ldb": err = d.Unwrap(d.Try(func() { ds = datas.NewDatabase(getLDBStore(spec.Path)) })) case "mem": ds = datas.NewDatabase(chunks.NewMemoryStore()) default: err = fmt.Errorf("Invalid path prototocol: %s", spec.Protocol) } return }
// TestExplicitBranchUsingDatasets builds two datasets that diverge and then
// converge on a merge commit carrying both heads as parents. The ASCII
// diagrams show the commit graph after each step.
func TestExplicitBranchUsingDatasets(t *testing.T) {
	assert := assert.New(t)
	id1 := "testdataset"
	id2 := "othertestdataset"
	cs := chunks.NewMemoryStore()
	ds1 := newDS(id1, cs)

	// ds1: |a|
	a := types.String("a")
	ds1, err := ds1.CommitValue(a)
	assert.NoError(err)
	assert.True(ds1.Head().Get(datas.ValueField).Equals(a))

	// ds1: |a|
	//        \ds2
	ds2 := newDS(id2, cs)
	ds2, err = ds2.CommitValue(ds1.Head().Get(datas.ValueField))
	assert.NoError(err)
	assert.True(ds2.Head().Get(datas.ValueField).Equals(a))

	// ds1: |a| <- |b|
	b := types.String("b")
	ds1, err = ds1.CommitValue(b)
	assert.NoError(err)
	assert.True(ds1.Head().Get(datas.ValueField).Equals(b))

	// ds1: |a| <- |b|
	//        \ds2 <- |c|
	c := types.String("c")
	ds2, err = ds2.CommitValue(c)
	assert.NoError(err)
	assert.True(ds2.Head().Get(datas.ValueField).Equals(c))

	// ds1: |a| <- |b| <--|d|
	//        \ds2 <- |c| <--/
	// Merge commit |d| lists both current heads as parents.
	mergeParents := types.NewSet(types.NewRef(ds1.Head()), types.NewRef(ds2.Head()))
	d := types.String("d")
	ds2, err = ds2.Commit(d, CommitOptions{Parents: mergeParents})
	assert.NoError(err)
	assert.True(ds2.Head().Get(datas.ValueField).Equals(d))

	ds1, err = ds1.Commit(d, CommitOptions{Parents: mergeParents})
	assert.NoError(err)
	assert.True(ds1.Head().Get(datas.ValueField).Equals(d))
}
func TestReadParseError(t *testing.T) { assert := assert.New(t) ds := datas.NewDatabase(chunks.NewMemoryStore()) dataString := `a,"b` r := NewCSVReader(bytes.NewBufferString(dataString), ',') headers := []string{"A", "B"} kinds := KindSlice{types.StringKind, types.StringKind} func() { defer func() { r := recover() assert.NotNil(r) _, ok := r.(*csv.ParseError) assert.True(ok, "Should be a ParseError") }() ReadToList(r, "test", headers, kinds, ds) }() }
func testTrailingHelper(t *testing.T, dataString string) { assert := assert.New(t) ds := datas.NewDatabase(chunks.NewMemoryStore()) r := NewCSVReader(bytes.NewBufferString(dataString), ',') headers := []string{"A", "B"} kinds := KindSlice{types.StringKind, types.StringKind} l, typ := ReadToList(r, "test", headers, kinds, ds) assert.Equal(uint64(3), l.Len()) assert.Equal(types.StructKind, typ.Kind()) desc, ok := typ.Desc.(types.StructDesc) assert.True(ok) assert.Equal(desc.Len(), 2) assert.Equal(types.StringKind, desc.Field("A").Kind()) assert.Equal(types.StringKind, desc.Field("B").Kind()) }
// TestWalkNestedComposites walks values whose children are themselves
// composites. The integer passed to walkWorker is the expected number of
// values visited; duplicate chunks are visited only once — NOTE(review):
// exact counts depend on noms' chunking internals, confirm against walkWorker.
func (suite *WalkAllTestSuite) TestWalkNestedComposites() {
	cs := chunks.NewMemoryStore()
	suite.walkWorker(suite.storeAndRef(types.NewList(suite.NewSet(cs), types.Number(8))), 5)
	suite.walkWorker(suite.storeAndRef(types.NewSet(suite.NewList(cs), suite.NewSet(cs))), 6)
	// {"string": "string",
	// "list": [false true],
	// "map": {"nested": "string"}
	// "mtlist": []
	// "set": [5 7 8]
	// []: "wow"
	// }
	nested := types.NewMap(
		types.String("string"), types.String("string"),
		types.String("list"), suite.NewList(cs, types.Bool(false), types.Bool(true)),
		types.String("map"), suite.NewMap(cs, types.String("nested"), types.String("string")),
		types.String("mtlist"), suite.NewList(cs),
		types.String("set"), suite.NewSet(cs, types.Number(5), types.Number(7), types.Number(8)),
		suite.NewList(cs), types.String("wow"), // note that the dupe list chunk is skipped
	)
	suite.walkWorker(suite.storeAndRef(nested), 25)
}
// TestDatasetCommitTracker commits to two datasets backed by the same
// store and checks that their heads stay independent. The final assertion
// pins the store's root hash as a golden value, so any change to commit
// serialization intentionally breaks this test.
func TestDatasetCommitTracker(t *testing.T) {
	assert := assert.New(t)
	id1 := "testdataset"
	id2 := "othertestdataset"

	cs := chunks.NewMemoryStore()
	ds1 := NewDataset(datas.NewDatabase(cs), id1)
	ds1Commit := types.String("Commit value for " + id1)
	ds1, err := ds1.Commit(ds1Commit)
	assert.NoError(err)

	ds2 := NewDataset(datas.NewDatabase(cs), id2)
	ds2Commit := types.String("Commit value for " + id2)
	ds2, err = ds2.Commit(ds2Commit)
	assert.NoError(err)

	// Each dataset sees only its own commit value.
	assert.EqualValues(ds1Commit, ds1.Head().Get(datas.ValueField))
	assert.EqualValues(ds2Commit, ds2.Head().Get(datas.ValueField))
	assert.False(ds2.Head().Get(datas.ValueField).Equals(ds1Commit))
	assert.False(ds1.Head().Get(datas.ValueField).Equals(ds2Commit))

	// Golden root hash of the whole store after both commits.
	assert.Equal("sha1-898dfd332626292e92cd4a5d85e5c486dce1d57f", cs.Root().String())
}
// TestDatasetCommitTracker (CommitValue variant) commits to two datasets
// backed by the same store and checks their heads stay independent. The
// final assertion pins the store's root hash as a golden value, so any
// change to commit serialization intentionally breaks this test.
func TestDatasetCommitTracker(t *testing.T) {
	assert := assert.New(t)
	id1 := "testdataset"
	id2 := "othertestdataset"

	cs := chunks.NewMemoryStore()
	ds1 := NewDataset(datas.NewDatabase(cs), id1)
	ds1Commit := types.String("Commit value for " + id1)
	ds1, err := ds1.CommitValue(ds1Commit)
	assert.NoError(err)

	ds2 := NewDataset(datas.NewDatabase(cs), id2)
	ds2Commit := types.String("Commit value for " + id2)
	ds2, err = ds2.CommitValue(ds2Commit)
	assert.NoError(err)

	// Each dataset sees only its own commit value.
	assert.EqualValues(ds1Commit, ds1.Head().Get(datas.ValueField))
	assert.EqualValues(ds2Commit, ds2.Head().Get(datas.ValueField))
	assert.False(ds2.Head().Get(datas.ValueField).Equals(ds1Commit))
	assert.False(ds1.Head().Get(datas.ValueField).Equals(ds2Commit))

	// Golden root hash of the whole store after both commits.
	assert.Equal("tcu8fn066i70qi99pkd5u3gq0lqncek7", cs.Root().String())
}
// TestTwoClientsWithNonEmptyDataset: two handles race to commit on top of
// an existing head; the loser's Commit errors but refreshes its head, so
// an immediate retry succeeds.
func TestTwoClientsWithNonEmptyDataset(t *testing.T) {
	assert := assert.New(t)
	id1 := "testdataset"
	cs := chunks.NewMemoryStore()

	a := types.String("a")
	{
		// ds1: || -> |a|
		ds1 := newDS(id1, cs)
		ds1, err := ds1.Commit(a)
		assert.NoError(err)
		assert.True(ds1.Head().Get(datas.ValueField).Equals(a))
	}

	dsx := newDS(id1, cs)
	dsy := newDS(id1, cs)

	// dsx: |a| -> |b|
	assert.True(dsx.Head().Get(datas.ValueField).Equals(a))
	b := types.String("b")
	dsx, err := dsx.Commit(b)
	assert.NoError(err)
	assert.True(dsx.Head().Get(datas.ValueField).Equals(b))

	// dsy: |a| -> |c|  (conflicts: dsx already moved the head to |b|)
	assert.True(dsy.Head().Get(datas.ValueField).Equals(a))
	c := types.String("c")
	dsy, err = dsy.Commit(c)
	assert.Error(err)
	assert.True(dsy.Head().Get(datas.ValueField).Equals(b))
	// Commit failed, but dsy now has latest head, so we should be able to just try again.
	// dsy: |b| -> |c|
	dsy, err = dsy.Commit(c)
	assert.NoError(err)
	assert.True(dsy.Head().Get(datas.ValueField).Equals(c))
}
func main() { profile.RegisterProfileFlags(flag.CommandLine) flag.Parse(true) buildCount := *count insertCount := buildCount / 50 defer profile.MaybeStartProfile().Stop() collectionTypes := []string{"List", "Set", "Map"} buildFns := []buildCollectionFn{buildList, buildSet, buildMap} buildIncrFns := []buildCollectionFn{buildListIncrementally, buildSetIncrementally, buildMapIncrementally} readFns := []readCollectionFn{readList, readSet, readMap} elementTypes := []string{"numbers (8 B)", "strings (32 B)", "structs (64 B)"} elementSizes := []uint64{numberSize, stringSize, structSize} valueFns := []createValueFn{createNumber, createString, createStruct} for i, colType := range collectionTypes { fmt.Printf("Testing %s: \t\tbuild %d\t\t\tscan %d\t\t\tinsert %d\n", colType, buildCount, buildCount, insertCount) for j, elementType := range elementTypes { valueFn := valueFns[j] // Build One-Time ms := chunks.NewMemoryStore() ds := dataset.NewDataset(datas.NewDatabase(ms), "test") t1 := time.Now() col := buildFns[i](buildCount, valueFn) ds, err := ds.CommitValue(col) d.Chk.NoError(err) buildDuration := time.Since(t1) // Read t1 = time.Now() col = ds.HeadValue().(types.Collection) readFns[i](col) readDuration := time.Since(t1) // Build Incrementally ms = chunks.NewMemoryStore() ds = dataset.NewDataset(datas.NewDatabase(ms), "test") t1 = time.Now() col = buildIncrFns[i](insertCount, valueFn) ds, err = ds.CommitValue(col) d.Chk.NoError(err) incrDuration := time.Since(t1) elementSize := elementSizes[j] buildSize := elementSize * buildCount incrSize := elementSize * insertCount fmt.Printf("%s\t\t%s\t\t%s\t\t%s\n", elementType, rate(buildDuration, buildSize), rate(readDuration, buildSize), rate(incrDuration, incrSize)) } fmt.Println() } fmt.Printf("Testing Blob: \t\tbuild %d MB\t\t\tscan %d MB\n", *blobSize/1000000, *blobSize/1000000) ms := chunks.NewMemoryStore() ds := dataset.NewDataset(datas.NewDatabase(ms), "test") blobBytes := makeBlobBytes(*blobSize) t1 := time.Now() blob := 
types.NewBlob(bytes.NewReader(blobBytes)) ds.CommitValue(blob) buildDuration := time.Since(t1) ds = dataset.NewDataset(datas.NewDatabase(ms), "test") t1 = time.Now() blob = ds.HeadValue().(types.Blob) outBytes, _ := ioutil.ReadAll(blob.Reader()) readDuration := time.Since(t1) d.Chk.True(bytes.Compare(blobBytes, outBytes) == 0) fmt.Printf("\t\t\t%s\t\t%s\n\n", rate(buildDuration, *blobSize), rate(readDuration, *blobSize)) }