func (s *testSuite) TestSync() { source1 := dataset.NewDataset(datas.NewDatabase(chunks.NewLevelDBStore(s.LdbDir, "", 1, false)), "foo") source1, err := source1.Commit(types.Number(42)) s.NoError(err) source2, err := source1.Commit(types.Number(43)) s.NoError(err) source1HeadRef := source1.Head().Hash() source2.Database().Close() // Close Database backing both Datasets sourceSpec := test_util.CreateValueSpecString("ldb", s.LdbDir, source1HeadRef.String()) ldb2dir := path.Join(s.TempDir, "ldb2") sinkDatasetSpec := test_util.CreateValueSpecString("ldb", ldb2dir, "bar") out := s.Run(main, []string{sourceSpec, sinkDatasetSpec}) s.Equal("", out) dest := dataset.NewDataset(datas.NewDatabase(chunks.NewLevelDBStore(ldb2dir, "", 1, false)), "bar") s.True(types.Number(42).Equals(dest.HeadValue())) dest.Database().Close() sourceDataset := test_util.CreateValueSpecString("ldb", s.LdbDir, "foo") out = s.Run(main, []string{sourceDataset, sinkDatasetSpec}) s.Equal("", out) dest = dataset.NewDataset(datas.NewDatabase(chunks.NewLevelDBStore(ldb2dir, "", 1, false)), "bar") s.True(types.Number(43).Equals(dest.HeadValue())) dest.Database().Close() }
func (s *testSuite) TestCSVImporterFromBlob() { test := func(pathFlag string) { defer os.RemoveAll(s.LdbDir) newDB := func() datas.Database { cs := chunks.NewLevelDBStore(s.LdbDir, "", 1, false) return datas.NewDatabase(cs) } db := newDB() rawDS := dataset.NewDataset(db, "raw") csv := &bytes.Buffer{} writeCSV(csv) rawDS.CommitValue(types.NewBlob(csv)) db.Close() stdout, stderr := s.Run(main, []string{ "--no-progress", "--column-types", "String,Number", pathFlag, spec.CreateValueSpecString("ldb", s.LdbDir, "raw.value"), spec.CreateValueSpecString("ldb", s.LdbDir, "csv"), }) s.Equal("", stdout) s.Equal("", stderr) db = newDB() defer db.Close() csvDS := dataset.NewDataset(db, "csv") validateCSV(s, csvDS.HeadValue().(types.List)) } test("--path") test("-p") }
func (s *nomsShowTestSuite) TestNomsGraph2() { str := test_util.CreateDatabaseSpecString("ldb", s.LdbDir) dbSpec, err := spec.ParseDatabaseSpec(str) s.NoError(err) db, err := dbSpec.Database() s.NoError(err) ba := dataset.NewDataset(db, "ba") ba, err = addCommit(ba, "1") s.NoError(err) bb := dataset.NewDataset(db, "bb") bb, err = addCommit(bb, "10") s.NoError(err) bc := dataset.NewDataset(db, "bc") bc, err = addCommit(bc, "100") s.NoError(err) ba, err = mergeDatasets(ba, bb, "11") s.NoError(err) _, err = mergeDatasets(ba, bc, "101") s.NoError(err) db.Close() s.Equal(graphRes2, s.Run(main, []string{"-graph", "-show-value=true", test_util.CreateValueSpecString("ldb", s.LdbDir, "ba")})) s.Equal(diffRes2, s.Run(main, []string{"-graph", "-show-value=false", test_util.CreateValueSpecString("ldb", s.LdbDir, "ba")})) }
func (s *nomsLogTestSuite) TestNomsGraph2() { str := spec.CreateDatabaseSpecString("ldb", s.LdbDir) db, err := spec.GetDatabase(str) s.NoError(err) ba := dataset.NewDataset(db, "ba") ba, err = addCommit(ba, "1") s.NoError(err) bb := dataset.NewDataset(db, "bb") bb, err = addCommit(bb, "10") s.NoError(err) bc := dataset.NewDataset(db, "bc") bc, err = addCommit(bc, "100") s.NoError(err) ba, err = mergeDatasets(ba, bb, "11") s.NoError(err) _, err = mergeDatasets(ba, bc, "101") s.NoError(err) db.Close() res, _ := s.Run(main, []string{"log", "--graph", "--show-value=true", spec.CreateValueSpecString("ldb", s.LdbDir, "ba")}) s.Equal(graphRes2, res) res, _ = s.Run(main, []string{"log", "--graph", "--show-value=false", spec.CreateValueSpecString("ldb", s.LdbDir, "ba")}) s.Equal(diffRes2, res) }
// TestNomsGraph1 builds a three-branch commit history (mainline b1 with
// side branches b2 and b3, each merged back in) and verifies the rendered
// -graph output both with and without values. The expected outputs are
// pinned to this exact commit/merge order.
func (s *nomsShowTestSuite) TestNomsGraph1() {
	str := test_util.CreateDatabaseSpecString("ldb", s.LdbDir)
	dbSpec, err := spec.ParseDatabaseSpec(str)
	s.NoError(err)
	db, err := dbSpec.Database()
	s.NoError(err)

	// Mainline b1: commits "1", "2", "3".
	b1 := dataset.NewDataset(db, "b1")
	b1, err = addCommit(b1, "1")
	s.NoError(err)
	b1, err = addCommit(b1, "2")
	s.NoError(err)
	b1, err = addCommit(b1, "3")
	s.NoError(err)

	// b2 branches off b1; b1 then advances on its own.
	b2 := dataset.NewDataset(db, "b2")
	b2, err = addBranchedDataset(b2, b1, "3.1")
	s.NoError(err)
	b1, err = addCommit(b1, "3.2")
	s.NoError(err)
	b1, err = addCommit(b1, "3.6")
	s.NoError(err)

	// b3 branches off b2, advances, and is merged back into b2.
	b3 := dataset.NewDataset(db, "b3")
	b3, err = addBranchedDataset(b3, b2, "3.1.3")
	s.NoError(err)
	b3, err = addCommit(b3, "3.1.5")
	s.NoError(err)
	b3, err = addCommit(b3, "3.1.7")
	s.NoError(err)
	b2, err = mergeDatasets(b2, b3, "3.5")
	s.NoError(err)
	b2, err = addCommit(b2, "3.7")
	s.NoError(err)

	// b2 merges back into the mainline, which then advances to "7".
	b1, err = mergeDatasets(b1, b2, "4")
	s.NoError(err)
	b1, err = addCommit(b1, "5")
	s.NoError(err)
	b1, err = addCommit(b1, "6")
	s.NoError(err)
	b1, err = addCommit(b1, "7")
	s.NoError(err)
	b1.Database().Close()

	s.Equal(graphRes1, s.Run(main, []string{"-graph", "-show-value=true", test_util.CreateValueSpecString("ldb", s.LdbDir, "b1")}))
	s.Equal(diffRes1, s.Run(main, []string{"-graph", "-show-value=false", test_util.CreateValueSpecString("ldb", s.LdbDir, "b1")}))
}
func (s *testSuite) Setup() { db := s.Database() defer db.Close() ds := dataset.NewDataset(db, dsName) _, err := ds.CommitValue(types.Number(42)) s.NoError(err) }
func (s *nomsLogTestSuite) TestNArg() { str := spec.CreateDatabaseSpecString("ldb", s.LdbDir) dsName := "nArgTest" db, err := spec.GetDatabase(str) s.NoError(err) ds := dataset.NewDataset(db, dsName) ds, err = addCommit(ds, "1") h1 := ds.Head().Hash() s.NoError(err) ds, err = addCommit(ds, "2") s.NoError(err) h2 := ds.Head().Hash() ds, err = addCommit(ds, "3") s.NoError(err) h3 := ds.Head().Hash() db.Close() dsSpec := spec.CreateValueSpecString("ldb", s.LdbDir, dsName) res, _ := s.Run(main, []string{"log", "-n1", dsSpec}) s.NotContains(res, h1.String()) res, _ = s.Run(main, []string{"log", "-n0", dsSpec}) s.Contains(res, h3.String()) s.Contains(res, h2.String()) s.Contains(res, h1.String()) vSpec := spec.CreateValueSpecString("ldb", s.LdbDir, "#"+h3.String()) res, _ = s.Run(main, []string{"log", "-n1", vSpec}) s.NotContains(res, h1.String()) res, _ = s.Run(main, []string{"log", "-n0", vSpec}) s.Contains(res, h3.String()) s.Contains(res, h2.String()) s.Contains(res, h1.String()) }
func (s *nomsShowTestSuite) TestTruncation() { toNomsList := func(l []string) types.List { nv := []types.Value{} for _, v := range l { nv = append(nv, types.String(v)) } return types.NewList(nv...) } str := test_util.CreateDatabaseSpecString("ldb", s.LdbDir) dbSpec, err := spec.ParseDatabaseSpec(str) s.NoError(err) db, err := dbSpec.Database() s.NoError(err) t := dataset.NewDataset(db, "truncate") t, err = addCommit(t, "the first line") s.NoError(err) l := []string{"one", "two", "three", "four", "five", "six", "seven", "eight", "nine", "ten", "eleven"} _, err = addCommitWithValue(t, toNomsList(l)) s.NoError(err) db.Close() dsSpec := test_util.CreateValueSpecString("ldb", s.LdbDir, "truncate") s.Equal(truncRes1, s.Run(main, []string{"-graph", "-show-value=true", dsSpec})) s.Equal(diffTrunc1, s.Run(main, []string{"-graph", "-show-value=false", dsSpec})) s.Equal(truncRes2, s.Run(main, []string{"-graph", "-show-value=true", "-max-lines=-1", dsSpec})) s.Equal(diffTrunc2, s.Run(main, []string{"-graph", "-show-value=false", "-max-lines=-1", dsSpec})) s.Equal(truncRes3, s.Run(main, []string{"-graph", "-show-value=true", "-max-lines=0", dsSpec})) s.Equal(diffTrunc3, s.Run(main, []string{"-graph", "-show-value=false", "-max-lines=0", dsSpec})) }
func TestLDBObject(t *testing.T) { assert := assert.New(t) dir, err := ioutil.TempDir(os.TempDir(), "") assert.NoError(err) ldbpath := path.Join(dir, "xx-yy") dsId := "dsId" cs1 := chunks.NewLevelDBStoreUseFlags(ldbpath, "") store1 := datas.NewDatabase(cs1) dataset1 := dataset.NewDataset(store1, dsId) s1 := types.String("Commit Value") r1 := store1.WriteValue(s1) _, err = dataset1.Commit(r1) assert.NoError(err) store1.Close() spec2 := fmt.Sprintf("ldb:%s::%s", ldbpath, dsId) assert.NoError(err) sp1, err := ParseDatasetSpec(spec2) assert.NoError(err) dataset2, err := sp1.Dataset() assert.NoError(err) r2 := dataset2.HeadValue() s2 := r2.(types.Ref).TargetValue(dataset2.Database()) assert.Equal(s1, s2) dataset2.Database().Close() spec3 := fmt.Sprintf("ldb:%s::%s", ldbpath, s1.Hash().String()) sp3, err := ParsePathSpec(spec3) database, v3, err := sp3.Value() assert.Equal(s1, v3) database.Close() }
func (s *testSuite) TestCSVImporterWithPipe() { input, err := ioutil.TempFile(s.TempDir, "") d.Chk.NoError(err) defer input.Close() defer os.Remove(input.Name()) _, err = input.WriteString("a|b\n1|2\n") d.Chk.NoError(err) setName := "csv" dataspec := spec.CreateValueSpecString("ldb", s.LdbDir, setName) stdout, stderr := s.Run(main, []string{"--no-progress", "--column-types", "String,Number", "--delimiter", "|", input.Name(), dataspec}) s.Equal("", stdout) s.Equal("", stderr) cs := chunks.NewLevelDBStore(s.LdbDir, "", 1, false) ds := dataset.NewDataset(datas.NewDatabase(cs), setName) defer ds.Database().Close() defer os.RemoveAll(s.LdbDir) l := ds.HeadValue().(types.List) s.Equal(uint64(1), l.Len()) v := l.Get(0) st := v.(types.Struct) s.Equal(types.String("1"), st.Get("a")) s.Equal(types.Number(2), st.Get("b")) }
func (s *testSuite) TestCSVImportSkipRecordsCustomHeader() { input, err := ioutil.TempFile(s.TempDir, "") d.Chk.NoError(err) defer input.Close() defer os.Remove(input.Name()) _, err = input.WriteString("a,b\n") d.Chk.NoError(err) _, err = input.WriteString("7,8\n") d.Chk.NoError(err) setName := "csv" dataspec := spec.CreateValueSpecString("ldb", s.LdbDir, setName) stdout, stderr := s.Run(main, []string{"--no-progress", "--skip-records", "1", "--header", "x,y", input.Name(), dataspec}) s.Equal("", stdout) s.Equal("", stderr) cs := chunks.NewLevelDBStore(s.LdbDir, "", 1, false) ds := dataset.NewDataset(datas.NewDatabase(cs), setName) defer ds.Database().Close() defer os.RemoveAll(s.LdbDir) l := ds.HeadValue().(types.List) s.Equal(uint64(1), l.Len()) v := l.Get(0) st := v.(types.Struct) s.Equal(types.String("7"), st.Get("x")) s.Equal(types.String("8"), st.Get("y")) }
func (s *testSuite) TestCSVImporterToMap() { input, err := ioutil.TempFile(s.TempDir, "") d.Chk.NoError(err) defer input.Close() defer os.Remove(input.Name()) _, err = input.WriteString("a,b,c\n") d.Chk.NoError(err) for i := 0; i < 20; i++ { _, err = input.WriteString(fmt.Sprintf("a%d,%d,%d\n", i, i, i*2)) d.Chk.NoError(err) } _, err = input.Seek(0, 0) d.Chk.NoError(err) setName := "csv" dataspec := test_util.CreateValueSpecString("ldb", s.LdbDir, setName) out := s.Run(main, []string{"-no-progress", "-column-types", "String,Number,Number", "-dest-type", "map:1", dataspec, input.Name()}) s.Equal("", out) cs := chunks.NewLevelDBStore(s.LdbDir, "", 1, false) ds := dataset.NewDataset(datas.NewDatabase(cs), setName) defer ds.Database().Close() defer os.RemoveAll(s.LdbDir) m := ds.HeadValue().(types.Map) s.Equal(uint64(20), m.Len()) for i := 0; i < 20; i++ { m.Get(types.Number(i)).(types.Struct).Equals(types.NewStruct("", map[string]types.Value{ "a": types.String(fmt.Sprintf("a%d", i)), "c": types.Number(i * 2), })) } }
func (s *testSuite) TestCSVImporterWithExternalHeader() { input, err := ioutil.TempFile(s.TempDir, "") d.Chk.NoError(err) defer input.Close() defer os.Remove(input.Name()) _, err = input.WriteString("7,8\n") d.Chk.NoError(err) setName := "csv" dataspec := test_util.CreateValueSpecString("ldb", s.LdbDir, setName) out := s.Run(main, []string{"-no-progress", "-column-types", "String,Number", "-header", "x,y", dataspec, input.Name()}) s.Equal("", out) cs := chunks.NewLevelDBStore(s.LdbDir, "", 1, false) ds := dataset.NewDataset(datas.NewDatabase(cs), setName) defer ds.Database().Close() defer os.RemoveAll(s.LdbDir) l := ds.HeadValue().(types.List) s.Equal(uint64(1), l.Len()) v := l.Get(0) st := v.(types.Struct) s.Equal(types.String("7"), st.Get("x")) s.Equal(types.Number(8), st.Get("y")) }
func (s *nomsShowTestSuite) TestNArg() { str := test_util.CreateDatabaseSpecString("ldb", s.LdbDir) dsName := "nArgTest" dbSpec, err := spec.ParseDatabaseSpec(str) s.NoError(err) db, err := dbSpec.Database() s.NoError(err) ds := dataset.NewDataset(db, dsName) ds, err = addCommit(ds, "1") h1 := ds.Head().Hash() s.NoError(err) ds, err = addCommit(ds, "2") s.NoError(err) h2 := ds.Head().Hash() ds, err = addCommit(ds, "3") s.NoError(err) h3 := ds.Head().Hash() db.Close() dsSpec := test_util.CreateValueSpecString("ldb", s.LdbDir, dsName) s.NotContains(s.Run(main, []string{"-n=1", dsSpec}), h1.String()) res := s.Run(main, []string{"-n=0", dsSpec}) s.Contains(res, h3.String()) s.Contains(res, h2.String()) s.Contains(res, h1.String()) vSpec := test_util.CreateValueSpecString("ldb", s.LdbDir, h3.String()) s.NotContains(s.Run(main, []string{"-n=1", vSpec}), h1.String()) res = s.Run(main, []string{"-n=0", vSpec}) s.Contains(res, h3.String()) s.Contains(res, h2.String()) s.Contains(res, h1.String()) }
func TestLDBDataset(t *testing.T) { assert := assert.New(t) dir, err := ioutil.TempDir(os.TempDir(), "") assert.NoError(err) ldbPath := path.Join(dir, "name") cs := chunks.NewLevelDBStoreUseFlags(ldbPath, "") ds := datas.NewDatabase(cs) id := "dsName" set := dataset.NewDataset(ds, id) commit := types.String("Commit Value") set, err = set.Commit(commit) assert.NoError(err) ds.Close() spec := fmt.Sprintf("ldb:%s::%s", ldbPath, id) sp, err := ParseDatasetSpec(spec) assert.NoError(err) dataset, err := sp.Dataset() assert.NoError(err) assert.EqualValues(commit, dataset.HeadValue()) os.Remove(dir) }
func TestReadRef(t *testing.T) { assert := assert.New(t) dir, err := ioutil.TempDir(os.TempDir(), "") assert.NoError(err) datasetId := "dsName" ldbPath := path.Join(dir, "/name") cs1 := chunks.NewLevelDBStoreUseFlags(ldbPath, "") database1 := datas.NewDatabase(cs1) dataset1 := dataset.NewDataset(database1, datasetId) commit := types.String("Commit Value") dataset1, err = dataset1.Commit(commit) assert.NoError(err) r1 := dataset1.Head().Hash() dataset1.Database().Close() spec2 := fmt.Sprintf("ldb:%s::%s", ldbPath, r1.String()) sp2, err := ParsePathSpec(spec2) assert.NoError(err) database, v2, err := sp2.Value() assert.NoError(err) assert.EqualValues(r1.String(), v2.Hash().String()) database.Close() }
func (s *nomsDsTestSuite) TestNomsDs() { dir := s.LdbDir cs := chunks.NewLevelDBStore(dir+"/name", "", 24, false) ds := datas.NewDatabase(cs) id := "testdataset" set := dataset.NewDataset(ds, id) set, err := set.CommitValue(types.String("Commit Value")) s.NoError(err) id2 := "testdataset2" set2 := dataset.NewDataset(ds, id2) set2, err = set2.CommitValue(types.String("Commit Value2")) s.NoError(err) err = ds.Close() s.NoError(err) dbSpec := spec.CreateDatabaseSpecString("ldb", dir+"/name") datasetName := spec.CreateValueSpecString("ldb", dir+"/name", id) dataset2Name := spec.CreateValueSpecString("ldb", dir+"/name", id2) // both datasets show up rtnVal, _ := s.Run(main, []string{"ds", dbSpec}) s.Equal(id+"\n"+id2+"\n", rtnVal) // both datasets again, to make sure printing doesn't change them rtnVal, _ = s.Run(main, []string{"ds", dbSpec}) s.Equal(id+"\n"+id2+"\n", rtnVal) // delete one dataset, print message at delete rtnVal, _ = s.Run(main, []string{"ds", "-d", datasetName}) s.Equal("Deleted "+datasetName+" (was #6ebc05f71q4sk2psi534fom9se228161)\n", rtnVal) // print datasets, just one left rtnVal, _ = s.Run(main, []string{"ds", dbSpec}) s.Equal(id2+"\n", rtnVal) // delete the second dataset rtnVal, _ = s.Run(main, []string{"ds", "-d", dataset2Name}) s.Equal("Deleted "+dataset2Name+" (was #f5qtovr9mv7mjj2uoq7flcfpksgf0s2j)\n", rtnVal) // print datasets, none left rtnVal, _ = s.Run(main, []string{"ds", dbSpec}) s.Equal("", rtnVal) }
func (s *testSuite) TestNomsDs() { dir := s.LdbDir cs := chunks.NewLevelDBStore(dir+"/name", "", 24, false) ds := datas.NewDatabase(cs) id := "testdataset" set := dataset.NewDataset(ds, id) set, err := set.Commit(types.String("Commit Value")) s.NoError(err) id2 := "testdataset2" set2 := dataset.NewDataset(ds, id2) set2, err = set2.Commit(types.String("Commit Value2")) s.NoError(err) err = ds.Close() s.NoError(err) dbSpec := test_util.CreateDatabaseSpecString("ldb", dir+"/name") datasetName := test_util.CreateValueSpecString("ldb", dir+"/name", id) dataset2Name := test_util.CreateValueSpecString("ldb", dir+"/name", id2) // both datasets show up rtnVal := s.Run(main, []string{dbSpec}) s.Equal(id+"\n"+id2+"\n", rtnVal) // both datasets again, to make sure printing doesn't change them rtnVal = s.Run(main, []string{dbSpec}) s.Equal(id+"\n"+id2+"\n", rtnVal) // delete one dataset, print message at delete rtnVal = s.Run(main, []string{"-d", datasetName}) s.Equal("Deleted dataset "+id+" (was sha1-d54b79552cda9ebe8e446eeb19aab0e69b6ceee3)\n\n", rtnVal) // print datasets, just one left rtnVal = s.Run(main, []string{dbSpec}) s.Equal(id2+"\n", rtnVal) // delete the second dataset rtnVal = s.Run(main, []string{"-d", dataset2Name}) s.Equal("Deleted dataset "+id2+" (was sha1-7b75b0ebfc2a0815ba6fb2b31d03c8f9976ae530)\n\n", rtnVal) // print datasets, none left rtnVal = s.Run(main, []string{dbSpec}) s.Equal("", rtnVal) }
func (s *testSuite) Teardown() { s.Equal("43\n", s.NodeOutput()) db := s.Database() defer db.Close() ds := dataset.NewDataset(db, dsName) s.True(ds.HeadValue().Equals(types.Number(43))) }
func (spec datasetSpec) Dataset() (dataset.Dataset, error) { store, err := spec.DbSpec.Database() if err != nil { return dataset.Dataset{}, err } return dataset.NewDataset(store, spec.DatasetName), nil }
// FIXME: run with pipe func (s *testSuite) TestCSVExporter() { setName := "csv" header := []string{"a", "b", "c"} payload := [][]string{ []string{"5", "7", "100"}, []string{"4", "10", "255"}, []string{"512", "12", "55"}, } structName := "SomeStruct" // Setup data store cs := chunks.NewLevelDBStore(s.LdbDir, "", 1, false) ds := dataset.NewDataset(datas.NewDatabase(cs), setName) // Build Struct fields based on header f := make(types.TypeMap, len(header)) for _, key := range header { f[key] = types.StringType } typ := types.MakeStructType(structName, f) // Build data rows structs := make([]types.Value, len(payload)) for i, row := range payload { fields := make(map[string]types.Value) for j, v := range row { name := header[j] fields[name] = types.String(v) } structs[i] = types.NewStructWithType(typ, fields) } ds.Commit(types.NewList(structs...)) ds.Database().Close() // Run exporter dataspec := test_util.CreateValueSpecString("ldb", s.LdbDir, setName) out := s.Run(main, []string{dataspec}) // Verify output csvReader := csv.NewReader(strings.NewReader(out)) row, err := csvReader.Read() d.Chk.NoError(err) s.Equal(header, row) for i := 0; i < len(payload); i++ { row, err := csvReader.Read() d.Chk.NoError(err) s.Equal(payload[i], row) } _, err = csvReader.Read() s.Equal(io.EOF, err) }
// TestNomsGraph3 builds a history where three branches (x, y, z) fork
// from dataset w and are merged back in sequence, then checks the -graph
// rendering with and without values. The expected outputs are pinned to
// this exact fork/merge order.
func (s *nomsShowTestSuite) TestNomsGraph3() {
	str := test_util.CreateDatabaseSpecString("ldb", s.LdbDir)
	dbSpec, err := spec.ParseDatabaseSpec(str)
	s.NoError(err)
	db, err := dbSpec.Database()
	s.NoError(err)

	w := dataset.NewDataset(db, "w")
	w, err = addCommit(w, "1")
	s.NoError(err)
	w, err = addCommit(w, "2")
	s.NoError(err)

	// Three branches forked from w.
	x := dataset.NewDataset(db, "x")
	x, err = addBranchedDataset(x, w, "20-x")
	s.NoError(err)
	y := dataset.NewDataset(db, "y")
	y, err = addBranchedDataset(y, w, "200-y")
	s.NoError(err)
	z := dataset.NewDataset(db, "z")
	z, err = addBranchedDataset(z, w, "2000-z")
	s.NoError(err)

	// Merge each branch back into w, in order.
	w, err = mergeDatasets(w, x, "22-wx")
	s.NoError(err)
	w, err = mergeDatasets(w, y, "222-wy")
	s.NoError(err)
	_, err = mergeDatasets(w, z, "2222-wz")
	s.NoError(err)
	db.Close()

	s.Equal(graphRes3, s.Run(main, []string{"-graph", "-show-value=true", test_util.CreateValueSpecString("ldb", s.LdbDir, "w")}))
	s.Equal(diffRes3, s.Run(main, []string{"-graph", "-show-value=false", test_util.CreateValueSpecString("ldb", s.LdbDir, "w")}))
}
// TestNomsGraph3 builds a history where three branches (x, y, z) fork
// from dataset w and are merged back in sequence, then checks the log
// --graph rendering with and without values.
func (s *nomsLogTestSuite) TestNomsGraph3() {
	str := spec.CreateDatabaseSpecString("ldb", s.LdbDir)
	db, err := spec.GetDatabase(str)
	s.NoError(err)

	w := dataset.NewDataset(db, "w")
	w, err = addCommit(w, "1")
	s.NoError(err)
	w, err = addCommit(w, "2")
	s.NoError(err)

	// Three branches forked from w.
	x := dataset.NewDataset(db, "x")
	x, err = addBranchedDataset(x, w, "20-x")
	s.NoError(err)
	y := dataset.NewDataset(db, "y")
	y, err = addBranchedDataset(y, w, "200-y")
	s.NoError(err)
	z := dataset.NewDataset(db, "z")
	z, err = addBranchedDataset(z, w, "2000-z")
	s.NoError(err)

	// Merge each branch back into w, in order.
	w, err = mergeDatasets(w, x, "22-wx")
	s.NoError(err)
	w, err = mergeDatasets(w, y, "222-wy")
	s.NoError(err)
	_, err = mergeDatasets(w, z, "2222-wz")
	s.NoError(err)
	db.Close()

	res, _ := s.Run(main, []string{"log", "--graph", "--show-value=true", spec.CreateValueSpecString("ldb", s.LdbDir, "w")})
	test.EqualsIgnoreHashes(s.T(), graphRes3, res)
	res, _ = s.Run(main, []string{"log", "--graph", "--show-value=false", spec.CreateValueSpecString("ldb", s.LdbDir, "w")})
	test.EqualsIgnoreHashes(s.T(), diffRes3, res)
}
func (s *nomsSyncTestSuite) TestSyncValidation() { source1 := dataset.NewDataset(datas.NewDatabase(chunks.NewLevelDBStore(s.LdbDir, "", 1, false)), "src") source1, err := source1.CommitValue(types.Number(42)) s.NoError(err) source1HeadRef := source1.Head().Hash() source1.Database().Close() sourceSpecMissingHashSymbol := spec.CreateValueSpecString("ldb", s.LdbDir, source1HeadRef.String()) ldb2dir := path.Join(s.TempDir, "ldb2") sinkDatasetSpec := spec.CreateValueSpecString("ldb", ldb2dir, "dest") defer func() { err := recover() s.Equal(exitError{-1}, err) }() s.Run(main, []string{"sync", sourceSpecMissingHashSymbol, sinkDatasetSpec}) }
func (s *testSuite) TestCSVImporter() { input, err := ioutil.TempFile(s.TempDir, "") d.Chk.NoError(err) writeCSV(input) defer input.Close() defer os.Remove(input.Name()) setName := "csv" dataspec := spec.CreateValueSpecString("ldb", s.LdbDir, setName) stdout, stderr := s.Run(main, []string{"--no-progress", "--column-types", "String,Number", input.Name(), dataspec}) s.Equal("", stdout) s.Equal("", stderr) cs := chunks.NewLevelDBStore(s.LdbDir, "", 1, false) ds := dataset.NewDataset(datas.NewDatabase(cs), setName) defer ds.Database().Close() defer os.RemoveAll(s.LdbDir) validateCSV(s, ds.HeadValue().(types.List)) }
func (s *testSuite) Teardown() { out := s.NodeOutput() s.Contains(out, "1 of 1 entries") s.Contains(out, "done") db := s.Database() defer db.Close() ds := dataset.NewDataset(db, dsName) v := ds.HeadValue() s.True(v.Type().Equals(types.MakeStructType("File", []string{"content"}, []*types.Type{ types.MakeRefType(types.BlobType), }, ))) s.Equal("File", v.(types.Struct).Type().Desc.(types.StructDesc).Name) b := v.(types.Struct).Get("content").(types.Ref).TargetValue(db).(types.Blob) bs, err := ioutil.ReadAll(b.Reader()) s.NoError(err) s.Equal([]byte("Hello World!\n"), bs) }
// TestTruncation commits a short string and then an eleven-element list
// to the "truncate" dataset, and verifies `noms log` output under the
// default, -1, and 0 --max-lines settings (hash differences ignored by
// the comparison helper).
func (s *nomsLogTestSuite) TestTruncation() {
	// Helper: wrap a slice of strings as a types.List.
	toNomsList := func(l []string) types.List {
		nv := []types.Value{}
		for _, v := range l {
			nv = append(nv, types.String(v))
		}
		return types.NewList(nv...)
	}

	str := spec.CreateDatabaseSpecString("ldb", s.LdbDir)
	db, err := spec.GetDatabase(str)
	s.NoError(err)
	t := dataset.NewDataset(db, "truncate")
	t, err = addCommit(t, "the first line")
	s.NoError(err)
	// Eleven-element list used as the second commit's value.
	l := []string{"one", "two", "three", "four", "five", "six", "seven", "eight", "nine", "ten", "eleven"}
	_, err = addCommitWithValue(t, toNomsList(l))
	s.NoError(err)
	db.Close()

	dsSpec := spec.CreateValueSpecString("ldb", s.LdbDir, "truncate")
	// Default --max-lines.
	res, _ := s.Run(main, []string{"log", "--graph", "--show-value=true", dsSpec})
	test.EqualsIgnoreHashes(s.T(), truncRes1, res)
	res, _ = s.Run(main, []string{"log", "--graph", "--show-value=false", dsSpec})
	test.EqualsIgnoreHashes(s.T(), diffTrunc1, res)
	// --max-lines=-1.
	res, _ = s.Run(main, []string{"log", "--graph", "--show-value=true", "--max-lines=-1", dsSpec})
	test.EqualsIgnoreHashes(s.T(), truncRes2, res)
	res, _ = s.Run(main, []string{"log", "--graph", "--show-value=false", "--max-lines=-1", dsSpec})
	test.EqualsIgnoreHashes(s.T(), diffTrunc2, res)
	// --max-lines=0.
	res, _ = s.Run(main, []string{"log", "--graph", "--show-value=true", "--max-lines=0", dsSpec})
	test.EqualsIgnoreHashes(s.T(), truncRes3, res)
	res, _ = s.Run(main, []string{"log", "--graph", "--show-value=false", "--max-lines=0", dsSpec})
	test.EqualsIgnoreHashes(s.T(), diffTrunc3, res)
}
func (s *testSuite) TestCSVImporter() { input, err := ioutil.TempFile(s.TempDir, "") d.Chk.NoError(err) defer input.Close() defer os.Remove(input.Name()) _, err = input.WriteString("a,b\n") d.Chk.NoError(err) for i := 0; i < 100; i++ { _, err = input.WriteString(fmt.Sprintf("a%d,%d\n", i, i)) d.Chk.NoError(err) } _, err = input.Seek(0, 0) d.Chk.NoError(err) setName := "csv" dataspec := test_util.CreateValueSpecString("ldb", s.LdbDir, setName) out := s.Run(main, []string{"-no-progress", "-column-types", "String,Number", dataspec, input.Name()}) s.Equal("", out) cs := chunks.NewLevelDBStore(s.LdbDir, "", 1, false) ds := dataset.NewDataset(datas.NewDatabase(cs), setName) defer ds.Database().Close() defer os.RemoveAll(s.LdbDir) l := ds.HeadValue().(types.List) s.Equal(uint64(100), l.Len()) i := uint64(0) l.IterAll(func(v types.Value, j uint64) { s.Equal(i, j) st := v.(types.Struct) s.Equal(types.String(fmt.Sprintf("a%d", i)), st.Get("a")) s.Equal(types.Number(i), st.Get("b")) i++ }) }
func (s *nomsLogTestSuite) TestEmptyCommit() { str := spec.CreateDatabaseSpecString("ldb", s.LdbDir) db, err := spec.GetDatabase(str) s.NoError(err) ds := dataset.NewDataset(db, "ds1") meta := types.NewStruct("Meta", map[string]types.Value{ "longNameForTest": types.String("Yoo"), "test2": types.String("Hoo"), }) ds, err = ds.Commit(types.String("1"), dataset.CommitOptions{Meta: meta}) s.NoError(err) ds.Commit(types.String("2"), dataset.CommitOptions{}) db.Close() dsSpec := spec.CreateValueSpecString("ldb", s.LdbDir, "ds1") res, _ := s.Run(main, []string{"log", "--show-value=false", dsSpec}) test.EqualsIgnoreHashes(s.T(), metaRes1, res) res, _ = s.Run(main, []string{"log", "--show-value=false", "--oneline", dsSpec}) test.EqualsIgnoreHashes(s.T(), metaRes2, res) }
func main() { profile.RegisterProfileFlags(flag.CommandLine) flag.Parse(true) buildCount := *count insertCount := buildCount / 50 defer profile.MaybeStartProfile().Stop() collectionTypes := []string{"List", "Set", "Map"} buildFns := []buildCollectionFn{buildList, buildSet, buildMap} buildIncrFns := []buildCollectionFn{buildListIncrementally, buildSetIncrementally, buildMapIncrementally} readFns := []readCollectionFn{readList, readSet, readMap} elementTypes := []string{"numbers (8 B)", "strings (32 B)", "structs (64 B)"} elementSizes := []uint64{numberSize, stringSize, structSize} valueFns := []createValueFn{createNumber, createString, createStruct} for i, colType := range collectionTypes { fmt.Printf("Testing %s: \t\tbuild %d\t\t\tscan %d\t\t\tinsert %d\n", colType, buildCount, buildCount, insertCount) for j, elementType := range elementTypes { valueFn := valueFns[j] // Build One-Time ms := chunks.NewMemoryStore() ds := dataset.NewDataset(datas.NewDatabase(ms), "test") t1 := time.Now() col := buildFns[i](buildCount, valueFn) ds, err := ds.CommitValue(col) d.Chk.NoError(err) buildDuration := time.Since(t1) // Read t1 = time.Now() col = ds.HeadValue().(types.Collection) readFns[i](col) readDuration := time.Since(t1) // Build Incrementally ms = chunks.NewMemoryStore() ds = dataset.NewDataset(datas.NewDatabase(ms), "test") t1 = time.Now() col = buildIncrFns[i](insertCount, valueFn) ds, err = ds.CommitValue(col) d.Chk.NoError(err) incrDuration := time.Since(t1) elementSize := elementSizes[j] buildSize := elementSize * buildCount incrSize := elementSize * insertCount fmt.Printf("%s\t\t%s\t\t%s\t\t%s\n", elementType, rate(buildDuration, buildSize), rate(readDuration, buildSize), rate(incrDuration, incrSize)) } fmt.Println() } fmt.Printf("Testing Blob: \t\tbuild %d MB\t\t\tscan %d MB\n", *blobSize/1000000, *blobSize/1000000) ms := chunks.NewMemoryStore() ds := dataset.NewDataset(datas.NewDatabase(ms), "test") blobBytes := makeBlobBytes(*blobSize) t1 := time.Now() blob := 
types.NewBlob(bytes.NewReader(blobBytes)) ds.CommitValue(blob) buildDuration := time.Since(t1) ds = dataset.NewDataset(datas.NewDatabase(ms), "test") t1 = time.Now() blob = ds.HeadValue().(types.Blob) outBytes, _ := ioutil.ReadAll(blob.Reader()) readDuration := time.Since(t1) d.Chk.True(bytes.Compare(blobBytes, outBytes) == 0) fmt.Printf("\t\t\t%s\t\t%s\n\n", rate(buildDuration, *blobSize), rate(readDuration, *blobSize)) }