func (s *nomsShowTestSuite) TestNomsShow() {
	datasetName := "dsTest"
	str := test_util.CreateValueSpecString("ldb", s.LdbDir, datasetName)

	sp, err := spec.ParseDatasetSpec(str)
	d.Chk.NoError(err)
	ds, err := sp.Dataset()
	d.Chk.NoError(err)

	s1 := types.String("test string")
	r := writeTestData(ds, s1)
	s.Equal(res1, s.Run(main, []string{str}))

	spec1 := test_util.CreateValueSpecString("ldb", s.LdbDir, r.TargetHash().String())
	s.Equal(res2, s.Run(main, []string{spec1}))

	ds, err = sp.Dataset()
	d.Chk.NoError(err)
	list := types.NewList(types.String("elem1"), types.Number(2), types.String("elem3"))
	r = writeTestData(ds, list)
	s.Equal(res3, s.Run(main, []string{str}))

	spec1 = test_util.CreateValueSpecString("ldb", s.LdbDir, r.TargetHash().String())
	s.Equal(res4, s.Run(main, []string{spec1}))

	ds, err = sp.Dataset()
	d.Chk.NoError(err)
	_ = writeTestData(ds, s1)
	s.Equal(res5, s.Run(main, []string{str}))
}
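// The test above leans on a writeTestData helper that is not shown here. A
// minimal sketch of what it might look like (a hypothetical stand-in, not the
// canonical implementation) commits the value, returns a Ref to it so the
// caller can resolve its TargetHash, and closes the database so the dataset
// spec can be re-parsed afterwards; types.NewRef is an assumed API here.
func writeTestData(ds dataset.Dataset, v types.Value) types.Ref {
	ds, err := ds.Commit(v)
	d.Chk.NoError(err)
	r := types.NewRef(v) // assumption: a Ref whose TargetHash is v's hash
	d.Chk.NoError(ds.Database().Close())
	return r
}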
func (s *nomsShowTestSuite) TestNArg() {
	str := test_util.CreateDatabaseSpecString("ldb", s.LdbDir)
	dsName := "nArgTest"

	dbSpec, err := spec.ParseDatabaseSpec(str)
	s.NoError(err)
	db, err := dbSpec.Database()
	s.NoError(err)

	ds := dataset.NewDataset(db, dsName)

	ds, err = addCommit(ds, "1")
	s.NoError(err)
	h1 := ds.Head().Hash()
	ds, err = addCommit(ds, "2")
	s.NoError(err)
	h2 := ds.Head().Hash()
	ds, err = addCommit(ds, "3")
	s.NoError(err)
	h3 := ds.Head().Hash()
	db.Close()

	dsSpec := test_util.CreateValueSpecString("ldb", s.LdbDir, dsName)
	s.NotContains(s.Run(main, []string{"-n=1", dsSpec}), h1.String())
	res := s.Run(main, []string{"-n=0", dsSpec})
	s.Contains(res, h3.String())
	s.Contains(res, h2.String())
	s.Contains(res, h1.String())

	vSpec := test_util.CreateValueSpecString("ldb", s.LdbDir, h3.String())
	s.NotContains(s.Run(main, []string{"-n=1", vSpec}), h1.String())
	res = s.Run(main, []string{"-n=0", vSpec})
	s.Contains(res, h3.String())
	s.Contains(res, h2.String())
	s.Contains(res, h1.String())
}
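// addCommit is another helper the suite assumes. Since these tests only ever
// commit plain strings, a sketch under that assumption can simply delegate to
// Dataset.Commit, the call already used elsewhere in this file:
func addCommit(ds dataset.Dataset, value string) (dataset.Dataset, error) {
	return ds.Commit(types.String(value))
}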
func (s *nomsShowTestSuite) TestNomsGraph2() {
	str := test_util.CreateDatabaseSpecString("ldb", s.LdbDir)

	dbSpec, err := spec.ParseDatabaseSpec(str)
	s.NoError(err)
	db, err := dbSpec.Database()
	s.NoError(err)

	ba := dataset.NewDataset(db, "ba")
	ba, err = addCommit(ba, "1")
	s.NoError(err)

	bb := dataset.NewDataset(db, "bb")
	bb, err = addCommit(bb, "10")
	s.NoError(err)

	bc := dataset.NewDataset(db, "bc")
	bc, err = addCommit(bc, "100")
	s.NoError(err)

	ba, err = mergeDatasets(ba, bb, "11")
	s.NoError(err)

	_, err = mergeDatasets(ba, bc, "101")
	s.NoError(err)

	db.Close()

	s.Equal(graphRes2, s.Run(main, []string{"-graph", "-show-value=true", test_util.CreateValueSpecString("ldb", s.LdbDir, "ba")}))
	s.Equal(diffRes2, s.Run(main, []string{"-graph", "-show-value=false", test_util.CreateValueSpecString("ldb", s.LdbDir, "ba")}))
}
func (s *testSuite) TestSync() {
	source1 := dataset.NewDataset(datas.NewDatabase(chunks.NewLevelDBStore(s.LdbDir, "", 1, false)), "foo")
	source1, err := source1.Commit(types.Number(42))
	s.NoError(err)
	source2, err := source1.Commit(types.Number(43))
	s.NoError(err)
	source1HeadRef := source1.Head().Hash()
	source2.Database().Close() // Close Database backing both Datasets

	sourceSpec := test_util.CreateValueSpecString("ldb", s.LdbDir, source1HeadRef.String())
	ldb2dir := path.Join(s.TempDir, "ldb2")
	sinkDatasetSpec := test_util.CreateValueSpecString("ldb", ldb2dir, "bar")
	out := s.Run(main, []string{sourceSpec, sinkDatasetSpec})
	s.Equal("", out)

	dest := dataset.NewDataset(datas.NewDatabase(chunks.NewLevelDBStore(ldb2dir, "", 1, false)), "bar")
	s.True(types.Number(42).Equals(dest.HeadValue()))
	dest.Database().Close()

	sourceDataset := test_util.CreateValueSpecString("ldb", s.LdbDir, "foo")
	out = s.Run(main, []string{sourceDataset, sinkDatasetSpec})
	s.Equal("", out)

	dest = dataset.NewDataset(datas.NewDatabase(chunks.NewLevelDBStore(ldb2dir, "", 1, false)), "bar")
	s.True(types.Number(43).Equals(dest.HeadValue()))
	dest.Database().Close()
}
func (s *nomsShowTestSuite) TestNomsGraph1() {
	str := test_util.CreateDatabaseSpecString("ldb", s.LdbDir)

	dbSpec, err := spec.ParseDatabaseSpec(str)
	s.NoError(err)
	db, err := dbSpec.Database()
	s.NoError(err)

	b1 := dataset.NewDataset(db, "b1")
	b1, err = addCommit(b1, "1")
	s.NoError(err)
	b1, err = addCommit(b1, "2")
	s.NoError(err)
	b1, err = addCommit(b1, "3")
	s.NoError(err)

	b2 := dataset.NewDataset(db, "b2")
	b2, err = addBranchedDataset(b2, b1, "3.1")
	s.NoError(err)

	b1, err = addCommit(b1, "3.2")
	s.NoError(err)
	b1, err = addCommit(b1, "3.6")
	s.NoError(err)

	b3 := dataset.NewDataset(db, "b3")
	b3, err = addBranchedDataset(b3, b2, "3.1.3")
	s.NoError(err)
	b3, err = addCommit(b3, "3.1.5")
	s.NoError(err)
	b3, err = addCommit(b3, "3.1.7")
	s.NoError(err)

	b2, err = mergeDatasets(b2, b3, "3.5")
	s.NoError(err)
	b2, err = addCommit(b2, "3.7")
	s.NoError(err)

	b1, err = mergeDatasets(b1, b2, "4")
	s.NoError(err)
	b1, err = addCommit(b1, "5")
	s.NoError(err)
	b1, err = addCommit(b1, "6")
	s.NoError(err)
	b1, err = addCommit(b1, "7")
	s.NoError(err)

	b1.Database().Close()

	s.Equal(graphRes1, s.Run(main, []string{"-graph", "-show-value=true", test_util.CreateValueSpecString("ldb", s.LdbDir, "b1")}))
	s.Equal(diffRes1, s.Run(main, []string{"-graph", "-show-value=false", test_util.CreateValueSpecString("ldb", s.LdbDir, "b1")}))
}
func (s *nomsShowTestSuite) TestTruncation() {
	toNomsList := func(l []string) types.List {
		nv := []types.Value{}
		for _, v := range l {
			nv = append(nv, types.String(v))
		}
		return types.NewList(nv...)
	}

	str := test_util.CreateDatabaseSpecString("ldb", s.LdbDir)
	dbSpec, err := spec.ParseDatabaseSpec(str)
	s.NoError(err)
	db, err := dbSpec.Database()
	s.NoError(err)

	t := dataset.NewDataset(db, "truncate")
	t, err = addCommit(t, "the first line")
	s.NoError(err)

	l := []string{"one", "two", "three", "four", "five", "six", "seven", "eight", "nine", "ten", "eleven"}
	_, err = addCommitWithValue(t, toNomsList(l))
	s.NoError(err)
	db.Close()

	dsSpec := test_util.CreateValueSpecString("ldb", s.LdbDir, "truncate")
	s.Equal(truncRes1, s.Run(main, []string{"-graph", "-show-value=true", dsSpec}))
	s.Equal(diffTrunc1, s.Run(main, []string{"-graph", "-show-value=false", dsSpec}))

	s.Equal(truncRes2, s.Run(main, []string{"-graph", "-show-value=true", "-max-lines=-1", dsSpec}))
	s.Equal(diffTrunc2, s.Run(main, []string{"-graph", "-show-value=false", "-max-lines=-1", dsSpec}))

	s.Equal(truncRes3, s.Run(main, []string{"-graph", "-show-value=true", "-max-lines=0", dsSpec}))
	s.Equal(diffTrunc3, s.Run(main, []string{"-graph", "-show-value=false", "-max-lines=0", dsSpec}))
}
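// addCommitWithValue is assumed to be the value-typed twin of addCommit used
// in TestTruncation above; a minimal sketch under that assumption is a direct
// pass-through to Commit:
func addCommitWithValue(ds dataset.Dataset, v types.Value) (dataset.Dataset, error) {
	return ds.Commit(v)
}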
func (s *testSuite) TestCSVImporterToMap() {
	input, err := ioutil.TempFile(s.TempDir, "")
	d.Chk.NoError(err)
	defer input.Close()
	defer os.Remove(input.Name())

	_, err = input.WriteString("a,b,c\n")
	d.Chk.NoError(err)
	for i := 0; i < 20; i++ {
		_, err = input.WriteString(fmt.Sprintf("a%d,%d,%d\n", i, i, i*2))
		d.Chk.NoError(err)
	}
	_, err = input.Seek(0, 0)
	d.Chk.NoError(err)

	setName := "csv"
	dataspec := test_util.CreateValueSpecString("ldb", s.LdbDir, setName)
	out := s.Run(main, []string{"-no-progress", "-column-types", "String,Number,Number", "-dest-type", "map:1", dataspec, input.Name()})
	s.Equal("", out)

	cs := chunks.NewLevelDBStore(s.LdbDir, "", 1, false)
	ds := dataset.NewDataset(datas.NewDatabase(cs), setName)
	defer ds.Database().Close()
	defer os.RemoveAll(s.LdbDir)

	m := ds.HeadValue().(types.Map)
	s.Equal(uint64(20), m.Len())
	for i := 0; i < 20; i++ {
		// Column b (index 1) is the map key; the remaining columns form the struct value.
		s.True(m.Get(types.Number(i)).(types.Struct).Equals(types.NewStruct("", map[string]types.Value{
			"a": types.String(fmt.Sprintf("a%d", i)),
			"c": types.Number(i * 2),
		})))
	}
}
func (s *testSuite) TestCSVImporterWithExternalHeader() {
	input, err := ioutil.TempFile(s.TempDir, "")
	d.Chk.NoError(err)
	defer input.Close()
	defer os.Remove(input.Name())

	_, err = input.WriteString("7,8\n")
	d.Chk.NoError(err)

	setName := "csv"
	dataspec := test_util.CreateValueSpecString("ldb", s.LdbDir, setName)
	out := s.Run(main, []string{"-no-progress", "-column-types", "String,Number", "-header", "x,y", dataspec, input.Name()})
	s.Equal("", out)

	cs := chunks.NewLevelDBStore(s.LdbDir, "", 1, false)
	ds := dataset.NewDataset(datas.NewDatabase(cs), setName)
	defer ds.Database().Close()
	defer os.RemoveAll(s.LdbDir)

	l := ds.HeadValue().(types.List)
	s.Equal(uint64(1), l.Len())
	v := l.Get(0)
	st := v.(types.Struct)
	s.Equal(types.String("7"), st.Get("x"))
	s.Equal(types.Number(8), st.Get("y"))
}
func (s *counterTestSuite) TestCounter() {
	spec := test_util.CreateValueSpecString("ldb", s.LdbDir, "counter")
	args := []string{spec}
	s.Equal("1\n", s.Run(main, args))
	s.Equal("2\n", s.Run(main, args))
	s.Equal("3\n", s.Run(main, args))
}
func (s *testSuite) TestNomsDs() {
	dir := s.LdbDir

	cs := chunks.NewLevelDBStore(dir+"/name", "", 24, false)
	ds := datas.NewDatabase(cs)

	id := "testdataset"
	set := dataset.NewDataset(ds, id)
	set, err := set.Commit(types.String("Commit Value"))
	s.NoError(err)

	id2 := "testdataset2"
	set2 := dataset.NewDataset(ds, id2)
	set2, err = set2.Commit(types.String("Commit Value2"))
	s.NoError(err)

	err = ds.Close()
	s.NoError(err)

	dbSpec := test_util.CreateDatabaseSpecString("ldb", dir+"/name")
	datasetName := test_util.CreateValueSpecString("ldb", dir+"/name", id)
	dataset2Name := test_util.CreateValueSpecString("ldb", dir+"/name", id2)

	// both datasets show up
	rtnVal := s.Run(main, []string{dbSpec})
	s.Equal(id+"\n"+id2+"\n", rtnVal)

	// both datasets again, to make sure printing doesn't change them
	rtnVal = s.Run(main, []string{dbSpec})
	s.Equal(id+"\n"+id2+"\n", rtnVal)

	// delete one dataset, print message at delete
	rtnVal = s.Run(main, []string{"-d", datasetName})
	s.Equal("Deleted dataset "+id+" (was sha1-d54b79552cda9ebe8e446eeb19aab0e69b6ceee3)\n\n", rtnVal)

	// print datasets, just one left
	rtnVal = s.Run(main, []string{dbSpec})
	s.Equal(id2+"\n", rtnVal)

	// delete the second dataset
	rtnVal = s.Run(main, []string{"-d", dataset2Name})
	s.Equal("Deleted dataset "+id2+" (was sha1-7b75b0ebfc2a0815ba6fb2b31d03c8f9976ae530)\n\n", rtnVal)

	// print datasets, none left
	rtnVal = s.Run(main, []string{dbSpec})
	s.Equal("", rtnVal)
}
// FIXME: run with pipe
func (s *testSuite) TestCSVExporter() {
	setName := "csv"
	header := []string{"a", "b", "c"}
	payload := [][]string{
		[]string{"5", "7", "100"},
		[]string{"4", "10", "255"},
		[]string{"512", "12", "55"},
	}
	structName := "SomeStruct"

	// Setup data store
	cs := chunks.NewLevelDBStore(s.LdbDir, "", 1, false)
	ds := dataset.NewDataset(datas.NewDatabase(cs), setName)

	// Build Struct fields based on header
	f := make(types.TypeMap, len(header))
	for _, key := range header {
		f[key] = types.StringType
	}
	typ := types.MakeStructType(structName, f)

	// Build data rows
	structs := make([]types.Value, len(payload))
	for i, row := range payload {
		fields := make(map[string]types.Value)
		for j, v := range row {
			name := header[j]
			fields[name] = types.String(v)
		}
		structs[i] = types.NewStructWithType(typ, fields)
	}

	ds, err := ds.Commit(types.NewList(structs...))
	d.Chk.NoError(err)
	ds.Database().Close()

	// Run exporter
	dataspec := test_util.CreateValueSpecString("ldb", s.LdbDir, setName)
	out := s.Run(main, []string{dataspec})

	// Verify output
	csvReader := csv.NewReader(strings.NewReader(out))

	row, err := csvReader.Read()
	d.Chk.NoError(err)
	s.Equal(header, row)

	for i := 0; i < len(payload); i++ {
		row, err := csvReader.Read()
		d.Chk.NoError(err)
		s.Equal(payload[i], row)
	}

	_, err = csvReader.Read()
	s.Equal(io.EOF, err)
}
func (s *nomsShowTestSuite) TestNomsGraph3() {
	str := test_util.CreateDatabaseSpecString("ldb", s.LdbDir)

	dbSpec, err := spec.ParseDatabaseSpec(str)
	s.NoError(err)
	db, err := dbSpec.Database()
	s.NoError(err)

	w := dataset.NewDataset(db, "w")
	w, err = addCommit(w, "1")
	s.NoError(err)
	w, err = addCommit(w, "2")
	s.NoError(err)

	x := dataset.NewDataset(db, "x")
	x, err = addBranchedDataset(x, w, "20-x")
	s.NoError(err)

	y := dataset.NewDataset(db, "y")
	y, err = addBranchedDataset(y, w, "200-y")
	s.NoError(err)

	z := dataset.NewDataset(db, "z")
	z, err = addBranchedDataset(z, w, "2000-z")
	s.NoError(err)

	w, err = mergeDatasets(w, x, "22-wx")
	s.NoError(err)

	w, err = mergeDatasets(w, y, "222-wy")
	s.NoError(err)

	_, err = mergeDatasets(w, z, "2222-wz")
	s.NoError(err)

	db.Close()

	s.Equal(graphRes3, s.Run(main, []string{"-graph", "-show-value=true", test_util.CreateValueSpecString("ldb", s.LdbDir, "w")}))
	s.Equal(diffRes3, s.Run(main, []string{"-graph", "-show-value=false", test_util.CreateValueSpecString("ldb", s.LdbDir, "w")}))
}
func (s *nomsShowTestSuite) TestNomsLog() {
	datasetName := "dsTest"
	str := test_util.CreateValueSpecString("ldb", s.LdbDir, datasetName)

	sp, err := spec.ParseDatasetSpec(str)
	s.NoError(err)
	ds, err := sp.Dataset()
	s.NoError(err)
	ds.Database().Close()

	// Running the command against a dataset with no commits is expected to panic.
	s.Panics(func() { s.Run(main, []string{str}) })

	testCommitInResults(s, str, 1)
	testCommitInResults(s, str, 2)
}
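// testCommitInResults is the remaining helper assumed by TestNomsLog. A
// plausible sketch (an assumption, not the original implementation) commits
// the given number to the dataset and then checks that the new head's hash
// shows up in the command output; it uses only calls that already appear in
// this file:
func testCommitInResults(s *nomsShowTestSuite, str string, i int) {
	sp, err := spec.ParseDatasetSpec(str)
	s.NoError(err)
	ds, err := sp.Dataset()
	s.NoError(err)
	ds, err = ds.Commit(types.Number(i))
	s.NoError(err)
	commit := ds.Head()
	s.NoError(ds.Database().Close())
	s.Contains(s.Run(main, []string{str}), commit.Hash().String())
}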
func (s *testSuite) TestCSVImporter() {
	input, err := ioutil.TempFile(s.TempDir, "")
	d.Chk.NoError(err)
	defer input.Close()
	defer os.Remove(input.Name())

	_, err = input.WriteString("a,b\n")
	d.Chk.NoError(err)
	for i := 0; i < 100; i++ {
		_, err = input.WriteString(fmt.Sprintf("a%d,%d\n", i, i))
		d.Chk.NoError(err)
	}
	_, err = input.Seek(0, 0)
	d.Chk.NoError(err)

	setName := "csv"
	dataspec := test_util.CreateValueSpecString("ldb", s.LdbDir, setName)
	out := s.Run(main, []string{"-no-progress", "-column-types", "String,Number", dataspec, input.Name()})
	s.Equal("", out)

	cs := chunks.NewLevelDBStore(s.LdbDir, "", 1, false)
	ds := dataset.NewDataset(datas.NewDatabase(cs), setName)
	defer ds.Database().Close()
	defer os.RemoveAll(s.LdbDir)

	l := ds.HeadValue().(types.List)
	s.Equal(uint64(100), l.Len())

	i := uint64(0)
	l.IterAll(func(v types.Value, j uint64) {
		s.Equal(i, j)
		st := v.(types.Struct)
		s.Equal(types.String(fmt.Sprintf("a%d", i)), st.Get("a"))
		s.Equal(types.Number(i), st.Get("b"))
		i++
	})
}