func (s *testSuite) TestSync() { source1 := dataset.NewDataset(datas.NewDatabase(chunks.NewLevelDBStore(s.LdbDir, "", 1, false)), "foo") source1, err := source1.Commit(types.Number(42)) s.NoError(err) source2, err := source1.Commit(types.Number(43)) s.NoError(err) source1HeadRef := source1.Head().Hash() source2.Database().Close() // Close Database backing both Datasets sourceSpec := test_util.CreateValueSpecString("ldb", s.LdbDir, source1HeadRef.String()) ldb2dir := path.Join(s.TempDir, "ldb2") sinkDatasetSpec := test_util.CreateValueSpecString("ldb", ldb2dir, "bar") out := s.Run(main, []string{sourceSpec, sinkDatasetSpec}) s.Equal("", out) dest := dataset.NewDataset(datas.NewDatabase(chunks.NewLevelDBStore(ldb2dir, "", 1, false)), "bar") s.True(types.Number(42).Equals(dest.HeadValue())) dest.Database().Close() sourceDataset := test_util.CreateValueSpecString("ldb", s.LdbDir, "foo") out = s.Run(main, []string{sourceDataset, sinkDatasetSpec}) s.Equal("", out) dest = dataset.NewDataset(datas.NewDatabase(chunks.NewLevelDBStore(ldb2dir, "", 1, false)), "bar") s.True(types.Number(43).Equals(dest.HeadValue())) dest.Database().Close() }
func (spec DatabaseSpec) Database() (ds datas.Database, err error) { switch spec.Protocol { case "http", "https": ds = datas.NewRemoteDatabase(spec.String(), "Bearer "+spec.accessToken) case "ldb": ds = datas.NewDatabase(chunks.NewLevelDBStoreUseFlags(spec.Path, "")) case "mem": ds = datas.NewDatabase(chunks.NewMemoryStore()) default: err = fmt.Errorf("Invalid path prototocol: %s", spec.Protocol) } return }
func (s *testSuite) TestCSVImporterFromBlob() { test := func(pathFlag string) { defer os.RemoveAll(s.LdbDir) newDB := func() datas.Database { cs := chunks.NewLevelDBStore(s.LdbDir, "", 1, false) return datas.NewDatabase(cs) } db := newDB() rawDS := dataset.NewDataset(db, "raw") csv := &bytes.Buffer{} writeCSV(csv) rawDS.CommitValue(types.NewBlob(csv)) db.Close() stdout, stderr := s.Run(main, []string{ "--no-progress", "--column-types", "String,Number", pathFlag, spec.CreateValueSpecString("ldb", s.LdbDir, "raw.value"), spec.CreateValueSpecString("ldb", s.LdbDir, "csv"), }) s.Equal("", stdout) s.Equal("", stderr) db = newDB() defer db.Close() csvDS := dataset.NewDataset(db, "csv") validateCSV(s, csvDS.HeadValue().(types.List)) } test("--path") test("-p") }
func TestReadRef(t *testing.T) { assert := assert.New(t) dir, err := ioutil.TempDir(os.TempDir(), "") assert.NoError(err) datasetId := "dsName" ldbPath := path.Join(dir, "/name") cs1 := chunks.NewLevelDBStoreUseFlags(ldbPath, "") database1 := datas.NewDatabase(cs1) dataset1 := dataset.NewDataset(database1, datasetId) commit := types.String("Commit Value") dataset1, err = dataset1.Commit(commit) assert.NoError(err) r1 := dataset1.Head().Hash() dataset1.Database().Close() spec2 := fmt.Sprintf("ldb:%s::%s", ldbPath, r1.String()) sp2, err := ParsePathSpec(spec2) assert.NoError(err) database, v2, err := sp2.Value() assert.NoError(err) assert.EqualValues(r1.String(), v2.Hash().String()) database.Close() }
func (s *testSuite) TestCSVImporterWithPipe() { input, err := ioutil.TempFile(s.TempDir, "") d.Chk.NoError(err) defer input.Close() defer os.Remove(input.Name()) _, err = input.WriteString("a|b\n1|2\n") d.Chk.NoError(err) setName := "csv" dataspec := spec.CreateValueSpecString("ldb", s.LdbDir, setName) stdout, stderr := s.Run(main, []string{"--no-progress", "--column-types", "String,Number", "--delimiter", "|", input.Name(), dataspec}) s.Equal("", stdout) s.Equal("", stderr) cs := chunks.NewLevelDBStore(s.LdbDir, "", 1, false) ds := dataset.NewDataset(datas.NewDatabase(cs), setName) defer ds.Database().Close() defer os.RemoveAll(s.LdbDir) l := ds.HeadValue().(types.List) s.Equal(uint64(1), l.Len()) v := l.Get(0) st := v.(types.Struct) s.Equal(types.String("1"), st.Get("a")) s.Equal(types.Number(2), st.Get("b")) }
func (s *testSuite) TestCSVImporterWithExternalHeader() { input, err := ioutil.TempFile(s.TempDir, "") d.Chk.NoError(err) defer input.Close() defer os.Remove(input.Name()) _, err = input.WriteString("7,8\n") d.Chk.NoError(err) setName := "csv" dataspec := test_util.CreateValueSpecString("ldb", s.LdbDir, setName) out := s.Run(main, []string{"-no-progress", "-column-types", "String,Number", "-header", "x,y", dataspec, input.Name()}) s.Equal("", out) cs := chunks.NewLevelDBStore(s.LdbDir, "", 1, false) ds := dataset.NewDataset(datas.NewDatabase(cs), setName) defer ds.Database().Close() defer os.RemoveAll(s.LdbDir) l := ds.HeadValue().(types.List) s.Equal(uint64(1), l.Len()) v := l.Get(0) st := v.(types.Struct) s.Equal(types.String("7"), st.Get("x")) s.Equal(types.Number(8), st.Get("y")) }
func (s *testSuite) TestCSVImporterToMap() { input, err := ioutil.TempFile(s.TempDir, "") d.Chk.NoError(err) defer input.Close() defer os.Remove(input.Name()) _, err = input.WriteString("a,b,c\n") d.Chk.NoError(err) for i := 0; i < 20; i++ { _, err = input.WriteString(fmt.Sprintf("a%d,%d,%d\n", i, i, i*2)) d.Chk.NoError(err) } _, err = input.Seek(0, 0) d.Chk.NoError(err) setName := "csv" dataspec := test_util.CreateValueSpecString("ldb", s.LdbDir, setName) out := s.Run(main, []string{"-no-progress", "-column-types", "String,Number,Number", "-dest-type", "map:1", dataspec, input.Name()}) s.Equal("", out) cs := chunks.NewLevelDBStore(s.LdbDir, "", 1, false) ds := dataset.NewDataset(datas.NewDatabase(cs), setName) defer ds.Database().Close() defer os.RemoveAll(s.LdbDir) m := ds.HeadValue().(types.Map) s.Equal(uint64(20), m.Len()) for i := 0; i < 20; i++ { m.Get(types.Number(i)).(types.Struct).Equals(types.NewStruct("", map[string]types.Value{ "a": types.String(fmt.Sprintf("a%d", i)), "c": types.Number(i * 2), })) } }
func TestLDBDatabase(t *testing.T) { assert := assert.New(t) d1 := os.TempDir() dir, err := ioutil.TempDir(d1, "flags") assert.NoError(err) ldbDir := path.Join(dir, "store") spec := fmt.Sprintf("ldb:%s", path.Join(dir, "store")) cs := chunks.NewLevelDBStoreUseFlags(ldbDir, "") ds := datas.NewDatabase(cs) s1 := types.String("A String") s1Ref := ds.WriteValue(s1) ds.Commit("testDs", datas.NewCommit().Set(datas.ValueField, s1Ref)) ds.Close() sp, errRead := ParseDatabaseSpec(spec) assert.NoError(errRead) store, err := sp.Database() assert.NoError(err) assert.Equal(s1, store.ReadValue(s1.Hash())) store.Close() os.Remove(dir) }
func (s *testSuite) TestCSVImportSkipRecordsCustomHeader() { input, err := ioutil.TempFile(s.TempDir, "") d.Chk.NoError(err) defer input.Close() defer os.Remove(input.Name()) _, err = input.WriteString("a,b\n") d.Chk.NoError(err) _, err = input.WriteString("7,8\n") d.Chk.NoError(err) setName := "csv" dataspec := spec.CreateValueSpecString("ldb", s.LdbDir, setName) stdout, stderr := s.Run(main, []string{"--no-progress", "--skip-records", "1", "--header", "x,y", input.Name(), dataspec}) s.Equal("", stdout) s.Equal("", stderr) cs := chunks.NewLevelDBStore(s.LdbDir, "", 1, false) ds := dataset.NewDataset(datas.NewDatabase(cs), setName) defer ds.Database().Close() defer os.RemoveAll(s.LdbDir) l := ds.HeadValue().(types.List) s.Equal(uint64(1), l.Len()) v := l.Get(0) st := v.(types.Struct) s.Equal(types.String("7"), st.Get("x")) s.Equal(types.String("8"), st.Get("y")) }
func TestReadToList(t *testing.T) { assert := assert.New(t) ds := datas.NewDatabase(chunks.NewMemoryStore()) dataString := `a,1,true b,2,false ` r := NewCSVReader(bytes.NewBufferString(dataString), ',') headers := []string{"A", "B", "C"} kinds := KindSlice{types.StringKind, types.NumberKind, types.BoolKind} l, typ := ReadToList(r, "test", headers, kinds, ds) assert.Equal(uint64(2), l.Len()) assert.Equal(types.StructKind, typ.Kind()) desc, ok := typ.Desc.(types.StructDesc) assert.True(ok) assert.Equal(desc.Len(), 3) assert.Equal(types.StringKind, desc.Field("A").Kind()) assert.Equal(types.NumberKind, desc.Field("B").Kind()) assert.Equal(types.BoolKind, desc.Field("C").Kind()) assert.True(l.Get(0).(types.Struct).Get("A").Equals(types.String("a"))) assert.True(l.Get(1).(types.Struct).Get("A").Equals(types.String("b"))) assert.True(l.Get(0).(types.Struct).Get("B").Equals(types.Number(1))) assert.True(l.Get(1).(types.Struct).Get("B").Equals(types.Number(2))) assert.True(l.Get(0).(types.Struct).Get("C").Equals(types.Bool(true))) assert.True(l.Get(1).(types.Struct).Get("C").Equals(types.Bool(false))) }
func TestReadToMap(t *testing.T) { assert := assert.New(t) ds := datas.NewDatabase(chunks.NewMemoryStore()) dataString := `a,1,true b,2,false ` r := NewCSVReader(bytes.NewBufferString(dataString), ',') headers := []string{"A", "B", "C"} kinds := KindSlice{types.StringKind, types.NumberKind, types.BoolKind} m := ReadToMap(r, headers, 0, kinds, ds) assert.Equal(uint64(2), m.Len()) assert.True(m.Type().Equals( types.MakeMapType(types.StringType, types.MakeStructType("", map[string]*types.Type{ "B": types.NumberType, "C": types.BoolType, })))) assert.True(m.Get(types.String("a")).Equals(types.NewStruct("", map[string]types.Value{ "B": types.Number(1), "C": types.Bool(true), }))) assert.True(m.Get(types.String("b")).Equals(types.NewStruct("", map[string]types.Value{ "B": types.Number(2), "C": types.Bool(false), }))) }
func TestLDBObject(t *testing.T) { assert := assert.New(t) dir, err := ioutil.TempDir(os.TempDir(), "") assert.NoError(err) ldbpath := path.Join(dir, "xx-yy") dsId := "dsId" cs1 := chunks.NewLevelDBStoreUseFlags(ldbpath, "") store1 := datas.NewDatabase(cs1) dataset1 := dataset.NewDataset(store1, dsId) s1 := types.String("Commit Value") r1 := store1.WriteValue(s1) _, err = dataset1.Commit(r1) assert.NoError(err) store1.Close() spec2 := fmt.Sprintf("ldb:%s::%s", ldbpath, dsId) assert.NoError(err) sp1, err := ParseDatasetSpec(spec2) assert.NoError(err) dataset2, err := sp1.Dataset() assert.NoError(err) r2 := dataset2.HeadValue() s2 := r2.(types.Ref).TargetValue(dataset2.Database()) assert.Equal(s1, s2) dataset2.Database().Close() spec3 := fmt.Sprintf("ldb:%s::%s", ldbpath, s1.Hash().String()) sp3, err := ParsePathSpec(spec3) database, v3, err := sp3.Value() assert.Equal(s1, v3) database.Close() }
func TestLDBDataset(t *testing.T) { assert := assert.New(t) dir, err := ioutil.TempDir(os.TempDir(), "") assert.NoError(err) ldbPath := path.Join(dir, "name") cs := chunks.NewLevelDBStoreUseFlags(ldbPath, "") ds := datas.NewDatabase(cs) id := "dsName" set := dataset.NewDataset(ds, id) commit := types.String("Commit Value") set, err = set.Commit(commit) assert.NoError(err) ds.Close() spec := fmt.Sprintf("ldb:%s::%s", ldbPath, id) sp, err := ParseDatasetSpec(spec) assert.NoError(err) dataset, err := sp.Dataset() assert.NoError(err) assert.EqualValues(commit, dataset.HeadValue()) os.Remove(dir) }
func TestWriteValue(t *testing.T) { assert := assert.New(t) factory := chunks.NewMemoryStoreFactory() defer factory.Shutter() router = setupWebServer(factory) defer func() { router = nil }() testString := "Now, what?" authKey = "anauthkeyvalue" w := httptest.NewRecorder() r, err := newRequest("GET", dbName+constants.RootPath, nil) assert.NoError(err) router.ServeHTTP(w, r) lastRoot := w.Body assert.Equal(http.StatusOK, w.Code) tval := types.Bool(true) wval := types.String(testString) chunk1 := types.EncodeValue(tval, nil) chunk2 := types.EncodeValue(wval, nil) refList := types.NewList(types.NewRef(tval), types.NewRef(wval)) chunk3 := types.EncodeValue(refList, nil) body := &bytes.Buffer{} // we would use this func, but it's private so use next line instead: serializeHints(body, map[ref.Ref]struct{}{hint: struct{}{}}) err = binary.Write(body, binary.BigEndian, uint32(0)) assert.NoError(err) chunks.Serialize(chunk1, body) chunks.Serialize(chunk2, body) chunks.Serialize(chunk3, body) w = httptest.NewRecorder() r, err = newRequest("POST", dbName+constants.WriteValuePath+"?access_token="+authKey, ioutil.NopCloser(body)) assert.NoError(err) router.ServeHTTP(w, r) assert.Equal(http.StatusCreated, w.Code) w = httptest.NewRecorder() args := fmt.Sprintf("&last=%s¤t=%s", lastRoot, types.NewRef(refList).TargetHash()) r, _ = newRequest("POST", dbName+constants.RootPath+"?access_token="+authKey+args, ioutil.NopCloser(body)) router.ServeHTTP(w, r) assert.Equal(http.StatusOK, w.Code) whash := wval.Hash() hints := map[hash.Hash]struct{}{whash: struct{}{}} rdr := buildGetRefsRequestBody(hints) r, _ = newRequest("POST", dbName+constants.GetRefsPath, rdr) r.Header.Add("Content-Type", "application/x-www-form-urlencoded") router.ServeHTTP(w, r) assert.Equal(http.StatusOK, w.Code) ms := chunks.NewMemoryStore() chunks.Deserialize(w.Body, ms, nil) v := types.DecodeValue(ms.Get(whash), datas.NewDatabase(ms)) assert.Equal(testString, string(v.(types.String))) }
func (spec databaseSpec) Database() (ds datas.Database, err error) { switch spec.Protocol { case "http", "https": err = d.Unwrap(d.Try(func() { ds = datas.NewRemoteDatabase(spec.String(), "Bearer "+spec.accessToken) })) case "ldb": err = d.Unwrap(d.Try(func() { ds = datas.NewDatabase(getLDBStore(spec.Path)) })) case "mem": ds = datas.NewDatabase(chunks.NewMemoryStore()) default: err = fmt.Errorf("Invalid path prototocol: %s", spec.Protocol) } return }
func TestDuplicateHeaderName(t *testing.T) { assert := assert.New(t) ds := datas.NewDatabase(chunks.NewMemoryStore()) dataString := "1,2\n3,4\n" r := NewCSVReader(bytes.NewBufferString(dataString), ',') headers := []string{"A", "A"} kinds := KindSlice{types.StringKind, types.StringKind} assert.Panics(func() { ReadToList(r, "test", headers, kinds, ds) }) }
// FIXME: run with pipe func (s *testSuite) TestCSVExporter() { setName := "csv" header := []string{"a", "b", "c"} payload := [][]string{ []string{"5", "7", "100"}, []string{"4", "10", "255"}, []string{"512", "12", "55"}, } structName := "SomeStruct" // Setup data store cs := chunks.NewLevelDBStore(s.LdbDir, "", 1, false) ds := dataset.NewDataset(datas.NewDatabase(cs), setName) // Build Struct fields based on header f := make(types.TypeMap, len(header)) for _, key := range header { f[key] = types.StringType } typ := types.MakeStructType(structName, f) // Build data rows structs := make([]types.Value, len(payload)) for i, row := range payload { fields := make(map[string]types.Value) for j, v := range row { name := header[j] fields[name] = types.String(v) } structs[i] = types.NewStructWithType(typ, fields) } ds.Commit(types.NewList(structs...)) ds.Database().Close() // Run exporter dataspec := test_util.CreateValueSpecString("ldb", s.LdbDir, setName) out := s.Run(main, []string{dataspec}) // Verify output csvReader := csv.NewReader(strings.NewReader(out)) row, err := csvReader.Read() d.Chk.NoError(err) s.Equal(header, row) for i := 0; i < len(payload); i++ { row, err := csvReader.Read() d.Chk.NoError(err) s.Equal(payload[i], row) } _, err = csvReader.Read() s.Equal(io.EOF, err) }
func TestEscapeFieldNames(t *testing.T) { assert := assert.New(t) ds := datas.NewDatabase(chunks.NewMemoryStore()) dataString := "1\n" r := NewCSVReader(bytes.NewBufferString(dataString), ',') headers := []string{"A A"} kinds := KindSlice{types.NumberKind} l, _ := ReadToList(r, "test", headers, kinds, ds) assert.Equal(uint64(1), l.Len()) assert.Equal(types.Number(1), l.Get(0).(types.Struct).Get(types.EscapeStructField("A A"))) }
func TestIdValidation(t *testing.T) { assert := assert.New(t) store := datas.NewDatabase(chunks.NewMemoryStore()) invalidDatasetNames := []string{" ", "", "a ", " a", "$", "#", ":", "\n", "💩"} for _, id := range invalidDatasetNames { assert.Panics(func() { NewDataset(store, id) }) } }
func (s *testSuite) TestEmptyNomsDs() { dir := s.LdbDir cs := chunks.NewLevelDBStore(dir+"/name", "", 24, false) ds := datas.NewDatabase(cs) ds.Close() dbSpec := test_util.CreateDatabaseSpecString("ldb", dir+"/name") rtnVal := s.Run(main, []string{dbSpec}) s.Equal("", rtnVal) }
func TestDatasetCommitTracker(t *testing.T) { assert := assert.New(t) id1 := "testdataset" id2 := "othertestdataset" cs := chunks.NewMemoryStore() ds1 := NewDataset(datas.NewDatabase(cs), id1) ds1Commit := types.String("Commit value for " + id1) ds1, err := ds1.Commit(ds1Commit) assert.NoError(err) ds2 := NewDataset(datas.NewDatabase(cs), id2) ds2Commit := types.String("Commit value for " + id2) ds2, err = ds2.Commit(ds2Commit) assert.NoError(err) assert.EqualValues(ds1Commit, ds1.Head().Get(datas.ValueField)) assert.EqualValues(ds2Commit, ds2.Head().Get(datas.ValueField)) assert.False(ds2.Head().Get(datas.ValueField).Equals(ds1Commit)) assert.False(ds1.Head().Get(datas.ValueField).Equals(ds2Commit)) assert.Equal("sha1-898dfd332626292e92cd4a5d85e5c486dce1d57f", cs.Root().String()) }
func TestDatasetCommitTracker(t *testing.T) { assert := assert.New(t) id1 := "testdataset" id2 := "othertestdataset" cs := chunks.NewMemoryStore() ds1 := NewDataset(datas.NewDatabase(cs), id1) ds1Commit := types.String("Commit value for " + id1) ds1, err := ds1.CommitValue(ds1Commit) assert.NoError(err) ds2 := NewDataset(datas.NewDatabase(cs), id2) ds2Commit := types.String("Commit value for " + id2) ds2, err = ds2.CommitValue(ds2Commit) assert.NoError(err) assert.EqualValues(ds1Commit, ds1.Head().Get(datas.ValueField)) assert.EqualValues(ds2Commit, ds2.Head().Get(datas.ValueField)) assert.False(ds2.Head().Get(datas.ValueField).Equals(ds1Commit)) assert.False(ds1.Head().Get(datas.ValueField).Equals(ds2Commit)) assert.Equal("tcu8fn066i70qi99pkd5u3gq0lqncek7", cs.Root().String()) }
// TestAbsolutePaths exercises AbsolutePath resolution against a committed
// database: dataset-rooted paths ("ds", "ds.value[0]", ...), hash-rooted
// paths ("#<hash>", "#<hash>[1]", ...), and misses that must resolve nil.
func TestAbsolutePaths(t *testing.T) {
	assert := assert.New(t)

	s0, s1 := types.String("foo"), types.String("bar")
	list := types.NewList(s0, s1)
	emptySet := types.NewSet()

	// Write all values so hash-rooted lookups can find them.
	db := datas.NewDatabase(chunks.NewMemoryStore())
	db.WriteValue(s0)
	db.WriteValue(s1)
	db.WriteValue(list)
	db.WriteValue(emptySet)

	var err error
	db, err = db.Commit("ds", datas.NewCommit(list, types.NewSet(), types.EmptyStruct))
	assert.NoError(err)
	head := db.Head("ds")

	// resolvesTo asserts that str resolves to exp (nil means "no value").
	resolvesTo := func(exp types.Value, str string) {
		p, err := NewAbsolutePath(str)
		assert.NoError(err)
		act := p.Resolve(db)
		if exp == nil {
			assert.Nil(act)
		} else {
			assert.True(exp.Equals(act), "%s Expected %s Actual %s", str, types.EncodedValue(exp), types.EncodedValue(act))
		}
	}

	// Dataset-rooted paths.
	resolvesTo(head, "ds")
	resolvesTo(emptySet, "ds.parents")
	resolvesTo(list, "ds.value")
	resolvesTo(s0, "ds.value[0]")
	resolvesTo(s1, "ds.value[1]")
	// Hash-rooted paths, with and without index suffixes.
	resolvesTo(head, "#"+head.Hash().String())
	resolvesTo(list, "#"+list.Hash().String())
	resolvesTo(s0, "#"+s0.Hash().String())
	resolvesTo(s1, "#"+s1.Hash().String())
	resolvesTo(s0, "#"+list.Hash().String()+"[0]")
	resolvesTo(s1, "#"+list.Hash().String()+"[1]")
	// Unknown dataset names and unwritten hashes resolve to nil.
	resolvesTo(nil, "foo")
	resolvesTo(nil, "foo.parents")
	resolvesTo(nil, "foo.value")
	resolvesTo(nil, "foo.value[0]")
	resolvesTo(nil, "#"+types.String("baz").Hash().String())
	resolvesTo(nil, "#"+types.String("baz").Hash().String()+"[0]")
}
func TestDefaults(t *testing.T) { assert := assert.New(t) ds := datas.NewDatabase(chunks.NewMemoryStore()) dataString := "42,,,\n" r := NewCSVReader(bytes.NewBufferString(dataString), ',') headers := []string{"A", "B", "C", "D"} kinds := KindSlice{types.NumberKind, types.NumberKind, types.BoolKind, types.StringKind} l, _ := ReadToList(r, "test", headers, kinds, ds) assert.Equal(uint64(1), l.Len()) row := l.Get(0).(types.Struct) assert.Equal(types.Number(42), row.Get("A")) assert.Equal(types.Number(0), row.Get("B")) assert.Equal(types.Bool(false), row.Get("C")) assert.Equal(types.String(""), row.Get("D")) }
func TestBooleanStrings(t *testing.T) { assert := assert.New(t) ds := datas.NewDatabase(chunks.NewMemoryStore()) dataString := "true,false\n1,0\ny,n\nY,N\nY,\n" r := NewCSVReader(bytes.NewBufferString(dataString), ',') headers := []string{"T", "F"} kinds := KindSlice{types.BoolKind, types.BoolKind} l, _ := ReadToList(r, "test", headers, kinds, ds) assert.Equal(uint64(5), l.Len()) for i := uint64(0); i < l.Len(); i++ { row := l.Get(i).(types.Struct) assert.True(types.Bool(true).Equals(row.Get("T"))) assert.True(types.Bool(false).Equals(row.Get("F"))) } }
func (s *testSuite) TestNomsDs() { dir := s.LdbDir cs := chunks.NewLevelDBStore(dir+"/name", "", 24, false) ds := datas.NewDatabase(cs) id := "testdataset" set := dataset.NewDataset(ds, id) set, err := set.Commit(types.String("Commit Value")) s.NoError(err) id2 := "testdataset2" set2 := dataset.NewDataset(ds, id2) set2, err = set2.Commit(types.String("Commit Value2")) s.NoError(err) err = ds.Close() s.NoError(err) dbSpec := test_util.CreateDatabaseSpecString("ldb", dir+"/name") datasetName := test_util.CreateValueSpecString("ldb", dir+"/name", id) dataset2Name := test_util.CreateValueSpecString("ldb", dir+"/name", id2) // both datasets show up rtnVal := s.Run(main, []string{dbSpec}) s.Equal(id+"\n"+id2+"\n", rtnVal) // both datasets again, to make sure printing doesn't change them rtnVal = s.Run(main, []string{dbSpec}) s.Equal(id+"\n"+id2+"\n", rtnVal) // delete one dataset, print message at delete rtnVal = s.Run(main, []string{"-d", datasetName}) s.Equal("Deleted dataset "+id+" (was sha1-d54b79552cda9ebe8e446eeb19aab0e69b6ceee3)\n\n", rtnVal) // print datasets, just one left rtnVal = s.Run(main, []string{dbSpec}) s.Equal(id2+"\n", rtnVal) // delete the second dataset rtnVal = s.Run(main, []string{"-d", dataset2Name}) s.Equal("Deleted dataset "+id2+" (was sha1-7b75b0ebfc2a0815ba6fb2b31d03c8f9976ae530)\n\n", rtnVal) // print datasets, none left rtnVal = s.Run(main, []string{dbSpec}) s.Equal("", rtnVal) }
func (s *nomsDsTestSuite) TestNomsDs() { dir := s.LdbDir cs := chunks.NewLevelDBStore(dir+"/name", "", 24, false) ds := datas.NewDatabase(cs) id := "testdataset" set := dataset.NewDataset(ds, id) set, err := set.CommitValue(types.String("Commit Value")) s.NoError(err) id2 := "testdataset2" set2 := dataset.NewDataset(ds, id2) set2, err = set2.CommitValue(types.String("Commit Value2")) s.NoError(err) err = ds.Close() s.NoError(err) dbSpec := spec.CreateDatabaseSpecString("ldb", dir+"/name") datasetName := spec.CreateValueSpecString("ldb", dir+"/name", id) dataset2Name := spec.CreateValueSpecString("ldb", dir+"/name", id2) // both datasets show up rtnVal, _ := s.Run(main, []string{"ds", dbSpec}) s.Equal(id+"\n"+id2+"\n", rtnVal) // both datasets again, to make sure printing doesn't change them rtnVal, _ = s.Run(main, []string{"ds", dbSpec}) s.Equal(id+"\n"+id2+"\n", rtnVal) // delete one dataset, print message at delete rtnVal, _ = s.Run(main, []string{"ds", "-d", datasetName}) s.Equal("Deleted "+datasetName+" (was #6ebc05f71q4sk2psi534fom9se228161)\n", rtnVal) // print datasets, just one left rtnVal, _ = s.Run(main, []string{"ds", dbSpec}) s.Equal(id2+"\n", rtnVal) // delete the second dataset rtnVal, _ = s.Run(main, []string{"ds", "-d", dataset2Name}) s.Equal("Deleted "+dataset2Name+" (was #f5qtovr9mv7mjj2uoq7flcfpksgf0s2j)\n", rtnVal) // print datasets, none left rtnVal, _ = s.Run(main, []string{"ds", dbSpec}) s.Equal("", rtnVal) }
func (s *nomsSyncTestSuite) TestSyncValidation() { source1 := dataset.NewDataset(datas.NewDatabase(chunks.NewLevelDBStore(s.LdbDir, "", 1, false)), "src") source1, err := source1.CommitValue(types.Number(42)) s.NoError(err) source1HeadRef := source1.Head().Hash() source1.Database().Close() sourceSpecMissingHashSymbol := spec.CreateValueSpecString("ldb", s.LdbDir, source1HeadRef.String()) ldb2dir := path.Join(s.TempDir, "ldb2") sinkDatasetSpec := spec.CreateValueSpecString("ldb", ldb2dir, "dest") defer func() { err := recover() s.Equal(exitError{-1}, err) }() s.Run(main, []string{"sync", sourceSpecMissingHashSymbol, sinkDatasetSpec}) }
func TestReadParseError(t *testing.T) { assert := assert.New(t) ds := datas.NewDatabase(chunks.NewMemoryStore()) dataString := `a,"b` r := NewCSVReader(bytes.NewBufferString(dataString), ',') headers := []string{"A", "B"} kinds := KindSlice{types.StringKind, types.StringKind} func() { defer func() { r := recover() assert.NotNil(r) _, ok := r.(*csv.ParseError) assert.True(ok, "Should be a ParseError") }() ReadToList(r, "test", headers, kinds, ds) }() }
func testTrailingHelper(t *testing.T, dataString string) { assert := assert.New(t) ds := datas.NewDatabase(chunks.NewMemoryStore()) r := NewCSVReader(bytes.NewBufferString(dataString), ',') headers := []string{"A", "B"} kinds := KindSlice{types.StringKind, types.StringKind} l, typ := ReadToList(r, "test", headers, kinds, ds) assert.Equal(uint64(3), l.Len()) assert.Equal(types.StructKind, typ.Kind()) desc, ok := typ.Desc.(types.StructDesc) assert.True(ok) assert.Equal(desc.Len(), 2) assert.Equal(types.StringKind, desc.Field("A").Kind()) assert.Equal(types.StringKind, desc.Field("B").Kind()) }