// TestNomsDiffSummarize exercises `noms diff --summarize` end to end:
// commit-to-commit diffs print a "Comparing commit values" preamble,
// .value-path diffs skip it, and list diffs report per-element
// insertion/deletion/change percentages.
func (s *nomsDiffTestSuite) TestNomsDiffSummarize() {
	datasetName := "diffSummarizeTest"
	str := spec.CreateValueSpecString("ldb", s.LdbDir, datasetName)
	ds, err := spec.GetDataset(str)
	s.NoError(err)
	defer ds.Database().Close()

	// Create two commits so there is something to diff.
	ds, err = addCommit(ds, "first commit")
	s.NoError(err)
	r1 := spec.CreateHashSpecString("ldb", s.LdbDir, ds.HeadRef().TargetHash())
	ds, err = addCommit(ds, "second commit")
	s.NoError(err)
	r2 := spec.CreateHashSpecString("ldb", s.LdbDir, ds.HeadRef().TargetHash())

	// Diffing commit specs prints the commit-value preamble.
	out, _ := s.Run(main, []string{"diff", "--summarize", r1, r2})
	s.Contains(out, "Comparing commit values")
	s.Contains(out, "1 insertion (100.00%), 1 deletion (100.00%), 0 changes (0.00%), (1 value vs 1 value)")

	// Diffing the .value paths directly skips the preamble.
	out, _ = s.Run(main, []string{"diff", "--summarize", r1 + ".value", r2 + ".value"})
	s.NotContains(out, "Comparing commit values")

	// List-valued commits: summarize reports element-level counts.
	ds, err = ds.CommitValue(types.NewList(types.Number(1), types.Number(2), types.Number(3), types.Number(4)))
	s.NoError(err)
	r3 := spec.CreateHashSpecString("ldb", s.LdbDir, ds.HeadRef().TargetHash()) + ".value"
	ds, err = ds.CommitValue(types.NewList(types.Number(1), types.Number(222), types.Number(4)))
	s.NoError(err)
	r4 := spec.CreateHashSpecString("ldb", s.LdbDir, ds.HeadRef().TargetHash()) + ".value"
	out, _ = s.Run(main, []string{"diff", "--summarize", r3, r4})
	s.Contains(out, "1 insertion (25.00%), 2 deletions (50.00%), 0 changes (0.00%), (4 values vs 3 values)")
}
// TestCompositeTypes checks that NomsValueFromDecodedJSON maps decoded
// JSON composites (arrays, nested arrays, objects) onto the equivalent
// noms List/Map values when useStruct is false.
func (suite *LibTestSuite) TestCompositeTypes() {
	// [false true]
	suite.EqualValues(
		types.NewList().Append(types.Bool(false)).Append(types.Bool(true)),
		NomsValueFromDecodedJSON([]interface{}{false, true}, false))

	// [[false true]]
	suite.EqualValues(
		types.NewList().Append(
			types.NewList().Append(types.Bool(false)).Append(types.Bool(true))),
		NomsValueFromDecodedJSON([]interface{}{[]interface{}{false, true}}, false))

	// {"string": "string",
	//  "list": [false true],
	//  "map": {"nested": "string"}
	// }
	m := types.NewMap(
		types.String("string"),
		types.String("string"),
		types.String("list"),
		types.NewList().Append(types.Bool(false)).Append(types.Bool(true)),
		types.String("map"),
		types.NewMap(
			types.String("nested"),
			types.String("string")))
	o := NomsValueFromDecodedJSON(map[string]interface{}{
		"string": "string",
		"list":   []interface{}{false, true},
		"map":    map[string]interface{}{"nested": "string"},
	}, false)
	suite.True(m.Equals(o))
}
// SetupTest initializes per-test fixtures: a value store, a list the
// walk is expected to reach (shouldSee / shouldSeeItem), and a list
// whose subtree should be skipped (mustSkip / deadValue).
func (suite *WalkTestSuite) SetupTest() {
	suite.vs = types.NewTestValueStore()
	suite.shouldSeeItem = types.String("zzz")
	suite.shouldSee = types.NewList(suite.shouldSeeItem)
	suite.deadValue = types.Number(0xDEADBEEF)
	suite.mustSkip = types.NewList(suite.deadValue)
}
func (suite *WalkTestSuite) TestStopWalkImmediately() { actual := 0 SomeP(types.NewList(types.NewSet(), types.NewList()), suite.vs, func(v types.Value, r *types.Ref) bool { actual++ return true }, 1) suite.Equal(1, actual) }
// TestWalkComposites walks empty and non-empty lists, sets, and maps,
// asserting the expected number of values visited for each.
func (suite *WalkAllTestSuite) TestWalkComposites() {
	suite.walkWorker(suite.storeAndRef(types.NewList()), 2)
	suite.walkWorker(suite.storeAndRef(types.NewList(types.Bool(false), types.Number(8))), 4)
	suite.walkWorker(suite.storeAndRef(types.NewSet()), 2)
	suite.walkWorker(suite.storeAndRef(types.NewSet(types.Bool(false), types.Number(8))), 4)
	suite.walkWorker(suite.storeAndRef(types.NewMap()), 2)
	suite.walkWorker(suite.storeAndRef(types.NewMap(types.Number(8), types.Bool(true), types.Number(0), types.Bool(false))), 6)
}
func buildListOfHeight(height int, vw types.ValueWriter) types.List { unique := 0 l := types.NewList(types.Number(unique), types.Number(unique+1)) unique += 2 for i := 0; i < height; i++ { r1, r2 := vw.WriteValue(types.Number(unique)), vw.WriteValue(l) unique++ l = types.NewList(r1, r2) } return l }
// TestNomsShow runs `noms show` against a dataset spec and against
// explicit #hash specs, comparing output to golden strings res1..res5
// (hash-insensitive comparison where output embeds hashes).
func (s *nomsShowTestSuite) TestNomsShow() {
	datasetName := "dsTest"
	str := spec.CreateValueSpecString("ldb", s.LdbDir, datasetName)

	s1 := types.String("test string")
	r := writeTestData(str, s1)
	res, _ := s.Run(main, []string{"show", str})
	s.Equal(res1, res)

	// Show by explicit hash.
	str1 := spec.CreateValueSpecString("ldb", s.LdbDir, "#"+r.TargetHash().String())
	res, _ = s.Run(main, []string{"show", str1})
	s.Equal(res2, res)

	// Overwrite the dataset with a list and show again.
	list := types.NewList(types.String("elem1"), types.Number(2), types.String("elem3"))
	r = writeTestData(str, list)
	res, _ = s.Run(main, []string{"show", str})
	test.EqualsIgnoreHashes(s.T(), res3, res)

	str1 = spec.CreateValueSpecString("ldb", s.LdbDir, "#"+r.TargetHash().String())
	res, _ = s.Run(main, []string{"show", str1})
	s.Equal(res4, res)

	// Write the original string again; the head now has a parent chain.
	_ = writeTestData(str, s1)
	res, _ = s.Run(main, []string{"show", str})
	test.EqualsIgnoreHashes(s.T(), res5, res)
}
func (s *nomsShowTestSuite) TestNomsShow() { datasetName := "dsTest" str := test_util.CreateValueSpecString("ldb", s.LdbDir, datasetName) sp, err := spec.ParseDatasetSpec(str) d.Chk.NoError(err) ds, err := sp.Dataset() d.Chk.NoError(err) s1 := types.String("test string") r := writeTestData(ds, s1) s.Equal(res1, s.Run(main, []string{str})) spec1 := test_util.CreateValueSpecString("ldb", s.LdbDir, r.TargetHash().String()) s.Equal(res2, s.Run(main, []string{spec1})) ds, err = sp.Dataset() list := types.NewList(types.String("elem1"), types.Number(2), types.String("elem3")) r = writeTestData(ds, list) s.Equal(res3, s.Run(main, []string{str})) spec1 = test_util.CreateValueSpecString("ldb", s.LdbDir, r.TargetHash().String()) s.Equal(res4, s.Run(main, []string{spec1})) ds, err = sp.Dataset() _ = writeTestData(ds, s1) s.Equal(res5, s.Run(main, []string{str})) }
// TestHandleWriteValueBackpressure verifies that when the underlying
// ChunkStore applies backpressure, HandleWriteValue responds with
// httpStatusTooManyRequests and returns the hash of the chunk the
// client should retry.
func TestHandleWriteValueBackpressure(t *testing.T) {
	assert := assert.New(t)
	cs := &backpressureCS{ChunkStore: chunks.NewMemoryStore()}
	ds := NewDatabase(cs)

	// Seed the database with a list; its hash serves as the hint for the
	// subsequent write of a modified list.
	l := types.NewList(
		ds.WriteValue(types.Bool(true)),
		ds.WriteValue(types.Bool(false)),
	)
	ds.WriteValue(l)
	hint := l.Hash()
	newItem := types.NewEmptyBlob()
	itemChunk := types.EncodeValue(newItem, nil)
	l2 := l.Insert(1, types.NewRef(newItem))
	listChunk := types.EncodeValue(l2, nil)

	// Serialize hint + both chunks as the POST body.
	body := &bytes.Buffer{}
	serializeHints(body, map[hash.Hash]struct{}{hint: struct{}{}})
	sz := chunks.NewSerializer(body)
	sz.Put(itemChunk)
	sz.Put(listChunk)
	sz.Close()

	w := httptest.NewRecorder()
	HandleWriteValue(w, &http.Request{Body: ioutil.NopCloser(body), Method: "POST"}, params{}, cs)
	if assert.Equal(httpStatusTooManyRequests, w.Code, "Handler error:\n%s", string(w.Body.Bytes())) {
		// The response body lists the hashes that were not persisted.
		hashes := deserializeHashes(w.Body)
		assert.Len(hashes, 1)
		assert.Equal(l2.Hash(), hashes[0])
	}
}
// TestHandleWriteValue posts two chunks (a new blob plus a list that
// references it) with a hint for the pre-existing list, then verifies a
// fresh Database on the same store can read the new list back.
func TestHandleWriteValue(t *testing.T) {
	assert := assert.New(t)
	cs := chunks.NewTestStore()
	ds := NewDatabase(cs)

	// Seed the database with a list; its hash serves as the hint.
	l := types.NewList(
		ds.WriteValue(types.Bool(true)),
		ds.WriteValue(types.Bool(false)),
	)
	ds.WriteValue(l)
	hint := l.Hash()
	newItem := types.NewEmptyBlob()
	itemChunk := types.EncodeValue(newItem, nil)
	l2 := l.Insert(1, types.NewRef(newItem))
	listChunk := types.EncodeValue(l2, nil)

	// Serialize hint + both chunks as the POST body.
	body := &bytes.Buffer{}
	serializeHints(body, map[hash.Hash]struct{}{hint: struct{}{}})
	sz := chunks.NewSerializer(body)
	sz.Put(itemChunk)
	sz.Put(listChunk)
	sz.Close()

	w := httptest.NewRecorder()
	HandleWriteValue(w, &http.Request{Body: ioutil.NopCloser(body), Method: "POST"}, params{}, cs)
	if assert.Equal(http.StatusCreated, w.Code, "Handler error:\n%s", string(w.Body.Bytes())) {
		// A second Database over the same store must see the written list.
		ds2 := NewDatabase(cs)
		v := ds2.ReadValue(l2.Hash())
		if assert.NotNil(v) {
			assert.True(v.Equals(l2), "%+v != %+v", v, l2)
		}
	}
}
// TestTruncation checks graph output for the default max-lines setting,
// for -1, and for 0, with and without values shown, against golden
// strings truncRes1..3 / diffTrunc1..3.
func (s *nomsShowTestSuite) TestTruncation() {
	// toNomsList converts a []string into a types.List of types.String.
	toNomsList := func(l []string) types.List {
		nv := []types.Value{}
		for _, v := range l {
			nv = append(nv, types.String(v))
		}
		return types.NewList(nv...)
	}

	str := test_util.CreateDatabaseSpecString("ldb", s.LdbDir)
	dbSpec, err := spec.ParseDatabaseSpec(str)
	s.NoError(err)
	db, err := dbSpec.Database()
	s.NoError(err)
	t := dataset.NewDataset(db, "truncate")

	t, err = addCommit(t, "the first line")
	s.NoError(err)

	// An 11-element list gives output long enough to trigger truncation.
	l := []string{"one", "two", "three", "four", "five", "six", "seven", "eight", "nine", "ten", "eleven"}
	_, err = addCommitWithValue(t, toNomsList(l))
	s.NoError(err)
	db.Close()

	dsSpec := test_util.CreateValueSpecString("ldb", s.LdbDir, "truncate")
	// Default max-lines.
	s.Equal(truncRes1, s.Run(main, []string{"-graph", "-show-value=true", dsSpec}))
	s.Equal(diffTrunc1, s.Run(main, []string{"-graph", "-show-value=false", dsSpec}))
	// max-lines=-1.
	s.Equal(truncRes2, s.Run(main, []string{"-graph", "-show-value=true", "-max-lines=-1", dsSpec}))
	s.Equal(diffTrunc2, s.Run(main, []string{"-graph", "-show-value=false", "-max-lines=-1", dsSpec}))
	// max-lines=0.
	s.Equal(truncRes3, s.Run(main, []string{"-graph", "-show-value=true", "-max-lines=0", dsSpec}))
	s.Equal(diffTrunc3, s.Run(main, []string{"-graph", "-show-value=false", "-max-lines=0", dsSpec}))
}
func TestWriteValue(t *testing.T) { assert := assert.New(t) factory := chunks.NewMemoryStoreFactory() defer factory.Shutter() router = setupWebServer(factory) defer func() { router = nil }() testString := "Now, what?" authKey = "anauthkeyvalue" w := httptest.NewRecorder() r, err := newRequest("GET", dbName+constants.RootPath, nil) assert.NoError(err) router.ServeHTTP(w, r) lastRoot := w.Body assert.Equal(http.StatusOK, w.Code) tval := types.Bool(true) wval := types.String(testString) chunk1 := types.EncodeValue(tval, nil) chunk2 := types.EncodeValue(wval, nil) refList := types.NewList(types.NewRef(tval), types.NewRef(wval)) chunk3 := types.EncodeValue(refList, nil) body := &bytes.Buffer{} // we would use this func, but it's private so use next line instead: serializeHints(body, map[ref.Ref]struct{}{hint: struct{}{}}) err = binary.Write(body, binary.BigEndian, uint32(0)) assert.NoError(err) chunks.Serialize(chunk1, body) chunks.Serialize(chunk2, body) chunks.Serialize(chunk3, body) w = httptest.NewRecorder() r, err = newRequest("POST", dbName+constants.WriteValuePath+"?access_token="+authKey, ioutil.NopCloser(body)) assert.NoError(err) router.ServeHTTP(w, r) assert.Equal(http.StatusCreated, w.Code) w = httptest.NewRecorder() args := fmt.Sprintf("&last=%s¤t=%s", lastRoot, types.NewRef(refList).TargetHash()) r, _ = newRequest("POST", dbName+constants.RootPath+"?access_token="+authKey+args, ioutil.NopCloser(body)) router.ServeHTTP(w, r) assert.Equal(http.StatusOK, w.Code) whash := wval.Hash() hints := map[hash.Hash]struct{}{whash: struct{}{}} rdr := buildGetRefsRequestBody(hints) r, _ = newRequest("POST", dbName+constants.GetRefsPath, rdr) r.Header.Add("Content-Type", "application/x-www-form-urlencoded") router.ServeHTTP(w, r) assert.Equal(http.StatusOK, w.Code) ms := chunks.NewMemoryStore() chunks.Deserialize(w.Body, ms, nil) v := types.DecodeValue(ms.Get(whash), datas.NewDatabase(ms)) assert.Equal(testString, string(v.(types.String))) }
// Skipping a sub-tree must allow other items in the list to be processed.
// (The Skip prefix keeps the suite from running it; kept for reference.)
func (suite *WalkTestSuite) SkipTestSkipListElement() {
	wholeList := types.NewList(suite.mustSkip, suite.shouldSee, suite.shouldSee)
	reached := suite.skipWorker(wholeList)
	for _, v := range []types.Value{wholeList, suite.mustSkip, suite.shouldSee, suite.shouldSeeItem} {
		suite.Contains(reached, v, "Doesn't contain %+v", v)
	}
	suite.Len(reached, 6)
}
func buildListIncrementally(count uint64, createFn createValueFn) types.Collection { l := types.NewList() for i := uint64(0); i < count; i++ { l = l.Insert(i, createFn(i)) } return l }
func buildList(count uint64, createFn createValueFn) types.Collection { values := make([]types.Value, count) for i := uint64(0); i < count; i++ { values[i] = createFn(i) } return types.NewList(values...) }
// TestPullDeepRefTopDown commits a value whose list/set/map members
// themselves contain refs to further collections, pulls it into a sink
// dataset, and verifies the two heads are equal.
func TestPullDeepRefTopDown(t *testing.T) {
	assert := assert.New(t)

	sink := createTestDataset("sink")
	source := createTestDataset("source")

	// NewList/NewSet/NewMap here are local helpers that write a collection
	// to the dataset's database and return a Ref to it.
	sourceInitialValue := types.NewList(
		types.NewList(NewList(source)),
		types.NewSet(NewSet(source)),
		types.NewMap(NewMap(source), NewMap(source)))

	source, err := source.Commit(sourceInitialValue)
	assert.NoError(err)

	sink, err = sink.pull(source.Database(), types.NewRef(source.Head()), 1)
	assert.NoError(err)
	assert.True(source.Head().Equals(sink.Head()))
}
// FIXME: run with pipe func (s *testSuite) TestCSVExporter() { setName := "csv" header := []string{"a", "b", "c"} payload := [][]string{ []string{"5", "7", "100"}, []string{"4", "10", "255"}, []string{"512", "12", "55"}, } structName := "SomeStruct" // Setup data store cs := chunks.NewLevelDBStore(s.LdbDir, "", 1, false) ds := dataset.NewDataset(datas.NewDatabase(cs), setName) // Build Struct fields based on header f := make(types.TypeMap, len(header)) for _, key := range header { f[key] = types.StringType } typ := types.MakeStructType(structName, f) // Build data rows structs := make([]types.Value, len(payload)) for i, row := range payload { fields := make(map[string]types.Value) for j, v := range row { name := header[j] fields[name] = types.String(v) } structs[i] = types.NewStructWithType(typ, fields) } ds.Commit(types.NewList(structs...)) ds.Database().Close() // Run exporter dataspec := test_util.CreateValueSpecString("ldb", s.LdbDir, setName) out := s.Run(main, []string{dataspec}) // Verify output csvReader := csv.NewReader(strings.NewReader(out)) row, err := csvReader.Read() d.Chk.NoError(err) s.Equal(header, row) for i := 0; i < len(payload); i++ { row, err := csvReader.Read() d.Chk.NoError(err) s.Equal(payload[i], row) } _, err = csvReader.Read() s.Equal(io.EOF, err) }
// TestPutChunksInOrder schedules two leaf chunks at height 1 followed by
// a list referencing them at height 2, and verifies all three chunks are
// written after Flush.
func (suite *HTTPBatchStoreSuite) TestPutChunksInOrder() {
	vals := []types.Value{
		types.String("abc"),
		types.String("def"),
	}
	l := types.NewList()
	for _, val := range vals {
		suite.store.SchedulePut(types.EncodeValue(val, nil), 1, types.Hints{})
		l = l.Append(types.NewRef(val))
	}
	suite.store.SchedulePut(types.EncodeValue(l, nil), 2, types.Hints{})
	suite.store.Flush()

	suite.Equal(3, suite.cs.Writes)
}
// NomsValueFromDecodedJSON takes a generic Go interface{} and recursively
// tries to resolve the types within so that it can build up and return
// a Noms Value with the same structure.
//
// Currently, the only types supported are the Go versions of legal JSON types:
// Primitives:
// - float64
// - bool
// - string
// - nil
//
// Composites:
// - []interface{}
// - map[string]interface{}
//
// When useStruct is true, JSON objects become anonymous noms structs
// (with escaped field names); otherwise they become noms maps with
// string keys. JSON null yields nil, and nil sub-values are dropped
// from the containing list/map/struct.
func NomsValueFromDecodedJSON(o interface{}, useStruct bool) types.Value {
	switch o := o.(type) {
	case string:
		return types.String(o)
	case bool:
		return types.Bool(o)
	case float64:
		return types.Number(o)
	case nil:
		// JSON null has no noms representation; callers skip nil results.
		return nil
	case []interface{}:
		items := make([]types.Value, 0, len(o))
		for _, v := range o {
			nv := NomsValueFromDecodedJSON(v, useStruct)
			if nv != nil {
				items = append(items, nv)
			}
		}
		return types.NewList(items...)
	case map[string]interface{}:
		var v types.Value
		if useStruct {
			// Struct mode: keys must be escaped to be legal field names.
			fields := make(map[string]types.Value, len(o))
			for k, v := range o {
				nv := NomsValueFromDecodedJSON(v, useStruct)
				if nv != nil {
					k := types.EscapeStructField(k)
					fields[k] = nv
				}
			}
			v = types.NewStruct("", fields)
		} else {
			// Map mode: alternate key, value in a flat slice for NewMap.
			kv := make([]types.Value, 0, len(o)*2)
			for k, v := range o {
				nv := NomsValueFromDecodedJSON(v, useStruct)
				if nv != nil {
					kv = append(kv, types.String(k), nv)
				}
			}
			v = types.NewMap(kv...)
		}
		return v
	default:
		// Any other dynamic type is a programming error in the caller.
		d.Chk.Fail("Nomsification failed.", "I don't understand %+v, which is of type %s!\n", o, reflect.TypeOf(o).String())
	}
	return nil
}
// TestAbsolutePaths verifies NewAbsolutePath resolution of dataset names,
// field paths (.parents/.value), list indexes, and #hash specs against a
// committed database — including misses that resolve to nil.
func TestAbsolutePaths(t *testing.T) {
	assert := assert.New(t)

	s0, s1 := types.String("foo"), types.String("bar")
	list := types.NewList(s0, s1)
	emptySet := types.NewSet()

	db := datas.NewDatabase(chunks.NewMemoryStore())
	db.WriteValue(s0)
	db.WriteValue(s1)
	db.WriteValue(list)
	db.WriteValue(emptySet)

	var err error
	db, err = db.Commit("ds", datas.NewCommit(list, types.NewSet(), types.EmptyStruct))
	assert.NoError(err)
	head := db.Head("ds")

	// resolvesTo asserts that str resolves to exp, or to nothing when exp
	// is nil.
	resolvesTo := func(exp types.Value, str string) {
		p, err := NewAbsolutePath(str)
		assert.NoError(err)
		act := p.Resolve(db)
		if exp == nil {
			assert.Nil(act)
		} else {
			assert.True(exp.Equals(act), "%s Expected %s Actual %s", str, types.EncodedValue(exp), types.EncodedValue(act))
		}
	}

	resolvesTo(head, "ds")
	resolvesTo(emptySet, "ds.parents")
	resolvesTo(list, "ds.value")
	resolvesTo(s0, "ds.value[0]")
	resolvesTo(s1, "ds.value[1]")
	resolvesTo(head, "#"+head.Hash().String())
	resolvesTo(list, "#"+list.Hash().String())
	resolvesTo(s0, "#"+s0.Hash().String())
	resolvesTo(s1, "#"+s1.Hash().String())
	resolvesTo(s0, "#"+list.Hash().String()+"[0]")
	resolvesTo(s1, "#"+list.Hash().String()+"[1]")

	// Unknown datasets and unknown hashes must resolve to nil.
	resolvesTo(nil, "foo")
	resolvesTo(nil, "foo.parents")
	resolvesTo(nil, "foo.value")
	resolvesTo(nil, "foo.value[0]")
	resolvesTo(nil, "#"+types.String("baz").Hash().String())
	resolvesTo(nil, "#"+types.String("baz").Hash().String()+"[0]")
}
// TestPutChunkWithHints pre-writes two leaf chunks via PutMany, then
// schedules a list referencing them with hints for both; after Flush the
// store has seen 3 writes total (2 from PutMany plus the list).
func (suite *HTTPBatchStoreSuite) TestPutChunkWithHints() {
	vals := []types.Value{
		types.String("abc"),
		types.String("def"),
	}
	chnx := []chunks.Chunk{
		types.EncodeValue(vals[0], nil),
		types.EncodeValue(vals[1], nil),
	}
	suite.NoError(suite.cs.PutMany(chnx))
	l := types.NewList(types.NewRef(vals[0]), types.NewRef(vals[1]))

	suite.store.SchedulePut(types.EncodeValue(l, nil), 2, types.Hints{
		chnx[0].Hash(): struct{}{},
		chnx[1].Hash(): struct{}{},
	})
	suite.store.Flush()

	suite.Equal(3, suite.cs.Writes)
}
// TestCompositeTypeWithStruct checks that with useStruct=true,
// NomsValueFromDecodedJSON maps JSON objects onto anonymous noms structs
// rather than maps.
func (suite *LibTestSuite) TestCompositeTypeWithStruct() {
	// {"string": "string",
	//  "list": [false true],
	//  "struct": {"nested": "string"}
	// }
	tstruct := types.NewStruct("", types.StructData{
		"string": types.String("string"),
		"list":   types.NewList().Append(types.Bool(false)).Append(types.Bool(true)),
		"struct": types.NewStruct("", types.StructData{
			"nested": types.String("string"),
		}),
	})
	o := NomsValueFromDecodedJSON(map[string]interface{}{
		"string": "string",
		"list":   []interface{}{false, true},
		"struct": map[string]interface{}{"nested": "string"},
	}, true)
	suite.True(tstruct.Equals(o))
}
// TestPutChunksBackpressure schedules two leaf chunks and a referencing
// list through a batch store backed by backpressureCS, then expects 6
// writes after Flush — presumably each of the 3 chunks hits the store
// twice because of the backpressure retry; confirm against
// backpressureCS's behavior.
func (suite *HTTPBatchStoreSuite) TestPutChunksBackpressure() {
	bpcs := &backpressureCS{ChunkStore: suite.cs}
	bs := newHTTPBatchStoreForTest(bpcs)
	defer bs.Close()
	defer bpcs.Close()

	vals := []types.Value{
		types.String("abc"),
		types.String("def"),
	}
	l := types.NewList()
	for _, v := range vals {
		bs.SchedulePut(types.EncodeValue(v, nil), 1, types.Hints{})
		l = l.Append(types.NewRef(v))
	}
	bs.SchedulePut(types.EncodeValue(l, nil), 2, types.Hints{})
	bs.Flush()

	suite.Equal(6, suite.cs.Writes)
}
// TestWalkNestedComposites walks collections whose members are refs to
// other collections (built via the suite's NewList/NewSet/NewMap ref
// helpers) and asserts the expected visit counts.
func (suite *WalkAllTestSuite) TestWalkNestedComposites() {
	cs := chunks.NewMemoryStore()
	suite.walkWorker(suite.storeAndRef(types.NewList(suite.NewSet(cs), types.Number(8))), 5)
	suite.walkWorker(suite.storeAndRef(types.NewSet(suite.NewList(cs), suite.NewSet(cs))), 6)
	// {"string": "string",
	//  "list": [false true],
	//  "map": {"nested": "string"}
	//  "mtlist": []
	//  "set": [5 7 8]
	//  []: "wow"
	// }
	nested := types.NewMap(
		types.String("string"), types.String("string"),
		types.String("list"), suite.NewList(cs, types.Bool(false), types.Bool(true)),
		types.String("map"), suite.NewMap(cs, types.String("nested"), types.String("string")),
		types.String("mtlist"), suite.NewList(cs),
		types.String("set"), suite.NewSet(cs, types.Number(5), types.Number(7), types.Number(8)),
		suite.NewList(cs), types.String("wow"), // note that the dupe list chunk is skipped
	)
	suite.walkWorker(suite.storeAndRef(nested), 25)
}
// TestTruncation checks `noms log --graph` output for the default
// max-lines setting, for -1, and for 0, with and without values shown,
// comparing against golden strings with embedded hashes ignored.
func (s *nomsLogTestSuite) TestTruncation() {
	// toNomsList converts a []string into a types.List of types.String.
	toNomsList := func(l []string) types.List {
		nv := []types.Value{}
		for _, v := range l {
			nv = append(nv, types.String(v))
		}
		return types.NewList(nv...)
	}

	str := spec.CreateDatabaseSpecString("ldb", s.LdbDir)
	db, err := spec.GetDatabase(str)
	s.NoError(err)
	t := dataset.NewDataset(db, "truncate")

	t, err = addCommit(t, "the first line")
	s.NoError(err)

	// An 11-element list gives output long enough to trigger truncation.
	l := []string{"one", "two", "three", "four", "five", "six", "seven", "eight", "nine", "ten", "eleven"}
	_, err = addCommitWithValue(t, toNomsList(l))
	s.NoError(err)
	db.Close()

	dsSpec := spec.CreateValueSpecString("ldb", s.LdbDir, "truncate")
	// Default max-lines.
	res, _ := s.Run(main, []string{"log", "--graph", "--show-value=true", dsSpec})
	test.EqualsIgnoreHashes(s.T(), truncRes1, res)
	res, _ = s.Run(main, []string{"log", "--graph", "--show-value=false", dsSpec})
	test.EqualsIgnoreHashes(s.T(), diffTrunc1, res)

	// max-lines=-1.
	res, _ = s.Run(main, []string{"log", "--graph", "--show-value=true", "--max-lines=-1", dsSpec})
	test.EqualsIgnoreHashes(s.T(), truncRes2, res)
	res, _ = s.Run(main, []string{"log", "--graph", "--show-value=false", "--max-lines=-1", dsSpec})
	test.EqualsIgnoreHashes(s.T(), diffTrunc2, res)

	// max-lines=0.
	res, _ = s.Run(main, []string{"log", "--graph", "--show-value=true", "--max-lines=0", dsSpec})
	test.EqualsIgnoreHashes(s.T(), truncRes3, res)
	res, _ = s.Run(main, []string{"log", "--graph", "--show-value=false", "--max-lines=0", dsSpec})
	test.EqualsIgnoreHashes(s.T(), diffTrunc3, res)
}
// TestDatabaseHeightOfCollections checks the Height of refs returned by
// WriteValue: collections of inline values have height 1, and each level
// of Ref indirection inside a collection adds one (mixed inline/ref
// collections take the max).
func (suite *DatabaseSuite) TestDatabaseHeightOfCollections() {
	setOfStringType := types.MakeSetType(types.StringType)
	setOfRefOfStringType := types.MakeSetType(types.MakeRefType(types.StringType))

	// Set<String>
	v1 := types.String("hello")
	v2 := types.String("world")
	s1 := types.NewSet(v1, v2)
	suite.Equal(uint64(1), suite.ds.WriteValue(s1).Height())

	// Set<Ref<String>>
	s2 := types.NewSet(suite.ds.WriteValue(v1), suite.ds.WriteValue(v2))
	suite.Equal(uint64(2), suite.ds.WriteValue(s2).Height())

	// List<Set<String>>
	v3 := types.String("foo")
	v4 := types.String("bar")
	s3 := types.NewSet(v3, v4)
	l1 := types.NewList(s1, s3)
	suite.Equal(uint64(1), suite.ds.WriteValue(l1).Height())

	// List<Ref<Set<String>>
	l2 := types.NewList(suite.ds.WriteValue(s1), suite.ds.WriteValue(s3))
	suite.Equal(uint64(2), suite.ds.WriteValue(l2).Height())

	// List<Ref<Set<Ref<String>>>
	s4 := types.NewSet(suite.ds.WriteValue(v3), suite.ds.WriteValue(v4))
	l3 := types.NewList(suite.ds.WriteValue(s4))
	suite.Equal(uint64(3), suite.ds.WriteValue(l3).Height())

	// List<Set<String> | RefValue<Set<String>>>
	l4 := types.NewList(s1, suite.ds.WriteValue(s3))
	suite.Equal(uint64(2), suite.ds.WriteValue(l4).Height())
	l5 := types.NewList(suite.ds.WriteValue(s1), s3)
	suite.Equal(uint64(2), suite.ds.WriteValue(l5).Height())

	// Familiar with the "New Jersey Turnpike" drink? Here's the noms version of that...
	// Throw everything (values, their types, and refs to them) into one
	// list and write it — a smoke test that mixed contents still write.
	everything := []types.Value{v1, v2, s1, s2, v3, v4, s3, l1, l2, s4, l3, l4, l5}
	andMore := make([]types.Value, 0, len(everything)*3+2)
	for _, v := range everything {
		andMore = append(andMore, v, v.Type(), suite.ds.WriteValue(v))
	}
	andMore = append(andMore, setOfStringType, setOfRefOfStringType)

	suite.ds.WriteValue(types.NewList(andMore...))
}
func createList(kv ...interface{}) types.List { keyValues := valsToTypesValues(kv...) return types.NewList(keyValues...) }
func NewList(ds Dataset, vs ...types.Value) types.Ref { v := types.NewList(vs...) return ds.Database().WriteValue(v) }
func (suite *WalkAllTestSuite) NewList(cs chunks.ChunkStore, vs ...types.Value) types.Ref { v := types.NewList(vs...) return suite.vs.WriteValue(v) }
// main walks a directory tree for .xml files, converts each to a noms
// value (XML -> generic map via mxj -> noms via NomsValueFromDecodedJSON)
// across parallel workers, and commits a list of the resulting refs —
// ordered by file-discovery index — to the target dataset.
func main() {
	err := d.Try(func() {
		spec.RegisterDatabaseFlags(flag.CommandLine)
		profile.RegisterProfileFlags(flag.CommandLine)
		flag.Usage = customUsage
		flag.Parse(true)

		if flag.NArg() != 2 {
			d.CheckError(errors.New("Expected directory path followed by dataset"))
		}
		dir := flag.Arg(0)
		ds, err := spec.GetDataset(flag.Arg(1))
		d.CheckError(err)

		defer profile.MaybeStartProfile().Stop()

		cpuCount := runtime.NumCPU()

		filesChan := make(chan fileIndex, 1024)
		refsChan := make(chan refIndex, 1024)

		// Producer: enumerate .xml files, tagging each with its discovery
		// index so the final list order is deterministic.
		getFilePaths := func() {
			index := 0
			err := filepath.Walk(dir, func(path string, info os.FileInfo, err error) error {
				d.PanicIfTrue(err != nil, "Cannot traverse directories")
				if !info.IsDir() && filepath.Ext(path) == ".xml" {
					filesChan <- fileIndex{path, index}
					index++
				}

				return nil
			})
			d.PanicIfError(err)
			close(filesChan)
		}

		// Worker: decode each XML file to a noms value; write it unless
		// -noIO was given. The decoded top level must be a map.
		wg := sync.WaitGroup{}
		importXML := func() {
			expectedType := types.NewMap()
			for f := range filesChan {
				file, err := os.Open(f.path)
				d.PanicIfTrue(err != nil, "Error getting XML")

				xmlObject, err := mxj.NewMapXmlReader(file)
				d.PanicIfTrue(err != nil, "Error decoding XML")
				object := xmlObject.Old()
				file.Close()

				nomsObj := jsontonoms.NomsValueFromDecodedJSON(object, false)
				d.Chk.IsType(expectedType, nomsObj)

				var r types.Ref
				if !*noIO {
					r = ds.Database().WriteValue(nomsObj)
				}

				refsChan <- refIndex{r, f.index}
			}

			wg.Done()
		}

		go getFilePaths()
		for i := 0; i < cpuCount*8; i++ {
			wg.Add(1)
			go importXML()
		}

		// Close refsChan once all workers have drained filesChan.
		go func() {
			wg.Wait()
			close(refsChan) // done converting xml to noms
		}()

		// Collect refs and restore the original discovery order.
		refList := refIndexList{}
		for r := range refsChan {
			refList = append(refList, r)
		}
		sort.Sort(refList)
		refs := make([]types.Value, len(refList))
		for idx, r := range refList {
			refs[idx] = r.ref
		}

		rl := types.NewList(refs...)

		if !*noIO {
			_, err := ds.CommitValue(rl)
			d.PanicIfError(err)
		}
	})
	if err != nil {
		log.Fatal(err)
	}
}