func assertOutput(inPath, lang, goldenPath string, t *testing.T) {
	assert := assert.New(t)
	emptyDS := datas.NewDataStore(chunks.NewMemoryStore()) // Will be DataStore containing imports

	depsDir, err := ioutil.TempDir("", "")
	assert.NoError(err)
	defer os.RemoveAll(depsDir)

	inFile, err := os.Open(inPath)
	assert.NoError(err)
	defer inFile.Close()

	goldenFile, err := os.Open(goldenPath)
	assert.NoError(err)
	defer goldenFile.Close()
	goldenBytes, err := ioutil.ReadAll(goldenFile)
	d.Chk.NoError(err)

	var buf bytes.Buffer
	pkg := pkg.ParseNomDL("gen", inFile, filepath.Dir(inPath), emptyDS)

	written := map[string]bool{}
	gen := newCodeGen(&buf, getBareFileName(inPath), lang, written, depsMap{}, pkg)
	gen.WritePackage()

	bs := buf.Bytes()
	if lang == "go" {
		bs, err = imports.Process("", bs, nil)
		d.Chk.NoError(err)
	}

	assert.Equal(string(goldenBytes), string(bs), "%s did not generate the same string", inPath)
}
func TestRead(t *testing.T) {
	assert := assert.New(t)
	ds := datas.NewDataStore(chunks.NewMemoryStore())

	dataString := `a,1,true
b,2,false
`
	r := NewCSVReader(bytes.NewBufferString(dataString), ',')

	headers := []string{"A", "B", "C"}
	kinds := KindSlice{types.StringKind, types.Int8Kind, types.BoolKind}
	l, typeRef, typeDef := Read(r, "test", headers, kinds, ds)
	assert.Equal(uint64(2), l.Len())

	assert.True(typeRef.IsUnresolved())
	desc, ok := typeDef.Desc.(types.StructDesc)
	assert.True(ok)
	assert.Len(desc.Fields, 3)
	assert.Equal("A", desc.Fields[0].Name)
	assert.Equal("B", desc.Fields[1].Name)
	assert.Equal("C", desc.Fields[2].Name)

	assert.True(l.Get(0).(types.Struct).Get("A").Equals(types.NewString("a")))
	assert.True(l.Get(1).(types.Struct).Get("A").Equals(types.NewString("b")))
	assert.True(l.Get(0).(types.Struct).Get("B").Equals(types.Int8(1)))
	assert.True(l.Get(1).(types.Struct).Get("B").Equals(types.Int8(2)))
	assert.True(l.Get(0).(types.Struct).Get("C").Equals(types.Bool(true)))
	assert.True(l.Get(1).(types.Struct).Get("C").Equals(types.Bool(false)))
}
func TestDataStoreDelete(t *testing.T) {
	assert := assert.New(t)
	cs := chunks.NewMemoryStore()
	ds := NewDataStore(cs)

	datasetID1, datasetID2 := "ds1", "ds2"
	datasets := ds.Datasets()
	assert.Zero(datasets.Len())

	// ds1: |a|
	a := types.NewString("a")
	ds, err := ds.Commit(datasetID1, NewCommit().SetValue(a))
	assert.NoError(err)
	assert.True(ds.Head(datasetID1).Value().Equals(a))

	// ds1: |a|, ds2: |b|
	b := types.NewString("b")
	ds, err = ds.Commit(datasetID2, NewCommit().SetValue(b))
	assert.NoError(err)
	assert.True(ds.Head(datasetID2).Value().Equals(b))

	ds, err = ds.Delete(datasetID1)
	assert.NoError(err)
	assert.True(ds.Head(datasetID2).Value().Equals(b))
	h, present := ds.MaybeHead(datasetID1)
	assert.False(present, "Dataset %s should not be present, but head is %v", datasetID1, h.Value())

	// Get a fresh datastore, and verify that only ds2 is present.
	newDs := NewDataStore(cs)
	datasets = newDs.Datasets()
	assert.Equal(uint64(1), datasets.Len())
	_, present = ds.MaybeHead(datasetID2)
	assert.True(present, "Dataset %s should be present", datasetID2)
}
func TestDataStoreDeleteConcurrent(t *testing.T) {
	assert := assert.New(t)
	cs := chunks.NewMemoryStore()
	ds := NewDataStore(cs)
	datasetID := "ds1"

	datasets := ds.Datasets()
	assert.Zero(datasets.Len())

	// |a|
	a := types.NewString("a")
	aCommit := NewCommit().SetValue(a)
	ds, err := ds.Commit(datasetID, aCommit)
	assert.NoError(err)

	// |a| <- |b|
	b := types.NewString("b")
	bCommit := NewCommit().SetValue(b).SetParents(NewSetOfRefOfCommit().Insert(NewRefOfCommit(aCommit.Ref())))
	ds2, err := ds.Commit(datasetID, bCommit)
	assert.NoError(err)
	assert.True(ds.Head(datasetID).Value().Equals(a))
	assert.True(ds2.Head(datasetID).Value().Equals(b))

	ds, err = ds.Delete(datasetID)
	assert.NoError(err)
	h, present := ds.MaybeHead(datasetID)
	assert.False(present, "Dataset %s should not be present, but head is %v", datasetID, h.Value())
	h, present = ds2.MaybeHead(datasetID)
	assert.True(present, "Dataset %s should be present", datasetID)

	// Get a fresh datastore, and verify that no datasets are present.
	newDs := NewDataStore(cs)
	datasets = newDs.Datasets()
	assert.Equal(uint64(0), datasets.Len())
}
func TestEnumIsValue(t *testing.T) {
	ds := datas.NewDataStore(chunks.NewMemoryStore())
	var v types.Value = gen.NewEnumStruct()
	ref := ds.WriteValue(v).TargetRef()
	v2 := ds.ReadValue(ref)
	assert.True(t, v.Equals(v2))
}
func TestCanUseDefFromImport(t *testing.T) {
	assert := assert.New(t)
	ds := datas.NewDataStore(chunks.NewMemoryStore())

	dir, err := ioutil.TempDir("", "")
	assert.NoError(err)
	defer os.RemoveAll(dir)

	byPathNomDL := filepath.Join(dir, "filedep.noms")
	err = ioutil.WriteFile(byPathNomDL, []byte("struct FromFile{i:Int8}"), 0600)
	assert.NoError(err)

	r1 := strings.NewReader(`
		struct A {
			B: B
		}
		struct B {
			X: Int64
		}`)
	pkg1 := pkg.ParseNomDL("test1", r1, dir, ds)
	pkgRef1 := ds.WriteValue(pkg1.Package).TargetRef()

	r2 := strings.NewReader(fmt.Sprintf(`
		alias Other = import "%s"
		struct C {
			C: Map<Int64, Other.A>
		}
		`, pkgRef1))
	pkg2 := pkg.ParseNomDL("test2", r2, dir, ds)

	gen2 := newCodeGen(nil, "test2", "go", map[string]bool{}, depsMap{pkg1.Ref(): pkg1.Package}, pkg2)
	assert.True(gen2.canUseDef(pkg2.Types()[0], gen2.pkg.Package))
}
func TestGenerateDeps(t *testing.T) {
	assert := assert.New(t)
	ds := datas.NewDataStore(chunks.NewMemoryStore())

	dir, err := ioutil.TempDir("", "codegen_test_")
	assert.NoError(err)
	defer os.RemoveAll(dir)

	leaf1 := types.NewPackage([]types.Type{types.MakeEnumType("e1", "a", "b")}, []ref.Ref{})
	leaf1Ref := ds.WriteValue(leaf1).TargetRef()
	leaf2 := types.NewPackage([]types.Type{types.MakePrimitiveType(types.BoolKind)}, []ref.Ref{})
	leaf2Ref := ds.WriteValue(leaf2).TargetRef()

	depender := types.NewPackage([]types.Type{}, []ref.Ref{leaf1Ref})
	dependerRef := ds.WriteValue(depender).TargetRef()

	top := types.NewPackage([]types.Type{}, []ref.Ref{leaf2Ref, dependerRef})
	types.RegisterPackage(&top)

	localPkgs := refSet{top.Ref(): true}
	generateDepCode(filepath.Base(dir), dir, map[string]bool{}, top, localPkgs, ds)

	leaf1Path := filepath.Join(dir, code.ToTag(leaf1.Ref())+".go")
	leaf2Path := filepath.Join(dir, code.ToTag(leaf2.Ref())+".go")
	leaf3Path := filepath.Join(dir, code.ToTag(depender.Ref())+".go")
	_, err = os.Stat(leaf1Path)
	assert.NoError(err)
	_, err = os.Stat(leaf2Path)
	assert.NoError(err)
	_, err = os.Stat(leaf3Path)
	assert.NoError(err)
}
func TestTwoClientsWithEmptyDataset(t *testing.T) {
	assert := assert.New(t)
	id1 := "testdataset"
	cs := chunks.NewMemoryStore()
	dsx := newDS(id1, cs)
	dsy := newDS(id1, cs)

	// dsx: || -> |a|
	a := types.NewString("a")
	dsx, err := dsx.Commit(a)
	assert.NoError(err)
	assert.True(dsx.Head().Value().Equals(a))

	// dsy: || -> |b|
	_, ok := dsy.MaybeHead()
	assert.False(ok)
	b := types.NewString("b")
	dsy, err = dsy.Commit(b)
	assert.Error(err)
	// Commit failed, but dsy now has the latest head, so we should be able to just try again.
	// dsy: |a| -> |b|
	dsy, err = dsy.Commit(b)
	assert.NoError(err)
	assert.True(dsy.Head().Value().Equals(b))
}
func TestExplicitBranchUsingDatasets(t *testing.T) {
	assert := assert.New(t)
	id1 := "testdataset"
	id2 := "othertestdataset"
	cs := chunks.NewMemoryStore()
	ds1 := newDS(id1, cs)

	// ds1: |a|
	a := types.NewString("a")
	ds1, err := ds1.Commit(a)
	assert.NoError(err)
	assert.True(ds1.Head().Value().Equals(a))

	// ds1: |a|
	//        \ds2
	ds2 := newDS(id2, cs)
	ds2, err = ds2.Commit(ds1.Head().Value())
	assert.NoError(err)
	assert.True(ds2.Head().Value().Equals(a))

	// ds1: |a| <- |b|
	b := types.NewString("b")
	ds1, err = ds1.Commit(b)
	assert.NoError(err)
	assert.True(ds1.Head().Value().Equals(b))

	// ds1: |a| <- |b|
	//        \ds2 <- |c|
	c := types.NewString("c")
	ds2, err = ds2.Commit(c)
	assert.NoError(err)
	assert.True(ds2.Head().Value().Equals(c))

	// ds1: |a| <- |b| <--|d|
	//        \ds2 <- |c| <--/
	mergeParents := datas.NewSetOfRefOfCommit().Insert(datas.NewRefOfCommit(ds1.Head().Ref())).Insert(datas.NewRefOfCommit(ds2.Head().Ref()))
	d := types.NewString("d")
	ds2, err = ds2.CommitWithParents(d, mergeParents)
	assert.NoError(err)
	assert.True(ds2.Head().Value().Equals(d))

	ds1, err = ds1.CommitWithParents(d, mergeParents)
	assert.NoError(err)
	assert.True(ds1.Head().Value().Equals(d))
}
func TestReadParseError(t *testing.T) {
	assert := assert.New(t)
	ds := datas.NewDataStore(chunks.NewMemoryStore())

	dataString := `a,"b`
	r := NewCSVReader(bytes.NewBufferString(dataString), ',')

	headers := []string{"A", "B"}
	kinds := KindSlice{types.StringKind, types.StringKind}
	func() {
		defer func() {
			r := recover()
			assert.NotNil(r)
			_, ok := r.(*csv.ParseError)
			assert.True(ok, "Should be a ParseError")
		}()
		Read(r, "test", headers, kinds, ds)
	}()
}
func (suite *ImportTestSuite) SetupTest() {
	suite.vrw = datas.NewDataStore(chunks.NewMemoryStore())

	ns := types.MakeStructType("NestedDepStruct", []types.Field{}, types.Choices{
		types.Field{"b", types.MakePrimitiveType(types.BoolKind), false},
		types.Field{"i", types.MakePrimitiveType(types.Int8Kind), false},
	})
	suite.nested = types.NewPackage([]types.Type{ns}, []ref.Ref{})
	suite.nestedRef = suite.vrw.WriteValue(suite.nested).TargetRef()

	fs := types.MakeStructType("ForeignStruct", []types.Field{
		types.Field{"b", types.MakeType(ref.Ref{}, 1), false},
		types.Field{"n", types.MakeType(suite.nestedRef, 0), false},
	}, types.Choices{})
	fe := types.MakeEnumType("ForeignEnum", "uno", "dos")
	suite.imported = types.NewPackage([]types.Type{fs, fe}, []ref.Ref{suite.nestedRef})
	suite.importRef = suite.vrw.WriteValue(suite.imported).TargetRef()
}
func testTrailingHelper(t *testing.T, dataString string) {
	assert := assert.New(t)
	ds := datas.NewDataStore(chunks.NewMemoryStore())

	r := NewCSVReader(bytes.NewBufferString(dataString), ',')

	headers := []string{"A", "B"}
	kinds := KindSlice{types.StringKind, types.StringKind}
	l, typeRef, typeDef := Read(r, "test", headers, kinds, ds)
	assert.Equal(uint64(3), l.Len())

	assert.True(typeRef.IsUnresolved())
	desc, ok := typeDef.Desc.(types.StructDesc)
	assert.True(ok)
	assert.Len(desc.Fields, 2)
	assert.Equal("A", desc.Fields[0].Name)
	assert.Equal("B", desc.Fields[1].Name)
}
func TestCommitNewPackages(t *testing.T) {
	assert := assert.New(t)
	ds := datas.NewDataStore(chunks.NewMemoryStore())
	pkgDS := dataset.NewDataset(ds, "packages")

	dir, err := ioutil.TempDir("", "")
	assert.NoError(err)
	defer os.RemoveAll(dir)

	inFile := filepath.Join(dir, "in.noms")
	err = ioutil.WriteFile(inFile, []byte("struct Simple{a:Bool}"), 0600)
	assert.NoError(err)

	p := parsePackageFile("name", inFile, pkgDS)
	localPkgs := refSet{p.Ref(): true}
	pkgDS = generate("name", inFile, filepath.Join(dir, "out.go"), dir, map[string]bool{}, p, localPkgs, pkgDS)
	s := pkgDS.Head().Value().(types.SetOfRefOfPackage)
	assert.EqualValues(1, s.Len())
	tr := s.First().TargetValue(ds).Types()[0]
	assert.EqualValues(types.StructKind, tr.Kind())
}
func TestStructIsValue(t *testing.T) {
	assert := assert.New(t)
	ds := datas.NewDataStore(chunks.NewMemoryStore())
	var v types.Value = gen.StructWithListDef{
		L: gen.ListOfUint8Def{0, 1, 2},
		B: true,
		S: "world",
		I: 42,
	}.New()
	ref := ds.WriteValue(v).TargetRef()
	v2 := ds.ReadValue(ref)
	assert.True(v.Equals(v2))

	s2 := v2.(gen.StructWithList)
	assert.True(s2.L().Equals(gen.NewListOfUint8().Append(0, 1, 2)))
	assert.True(s2.B())
	assert.Equal("world", s2.S())
	assert.Equal(int64(42), s2.I())
}
func (suite *WalkAllTestSuite) TestWalkNestedComposites() {
	cs := chunks.NewMemoryStore()
	suite.walkWorker(suite.storeAndRef(types.NewList(suite.NewSet(cs), types.Int32(8))), 5)
	suite.walkWorker(suite.storeAndRef(types.NewSet(suite.NewList(cs), suite.NewSet(cs))), 6)
	// {"string": "string",
	//  "list": [false true],
	//  "map": {"nested": "string"}
	//  "mtlist": []
	//  "set": [5 7 8]
	//  []: "wow"
	// }
	nested := types.NewMap(
		types.NewString("string"), types.NewString("string"),
		types.NewString("list"), suite.NewList(cs, types.Bool(false), types.Bool(true)),
		types.NewString("map"), suite.NewMap(cs, types.NewString("nested"), types.NewString("string")),
		types.NewString("mtlist"), suite.NewList(cs),
		types.NewString("set"), suite.NewSet(cs, types.Int32(5), types.Int32(7), types.Int32(8)),
		suite.NewList(cs), types.NewString("wow"), // note that the dupe list chunk is skipped
	)
	suite.walkWorker(suite.storeAndRef(nested), 25)
}
func TestDataStoreConcurrency(t *testing.T) {
	assert := assert.New(t)

	cs := chunks.NewMemoryStore()
	ds := NewDataStore(cs)
	datasetID := "ds1"

	// Setup:
	// |a| <- |b|
	a := types.NewString("a")
	aCommit := NewCommit().SetValue(a)
	ds, err := ds.Commit(datasetID, aCommit)
	b := types.NewString("b")
	bCommit := NewCommit().SetValue(b).SetParents(NewSetOfRefOfCommit().Insert(NewRefOfCommit(aCommit.Ref())))
	ds, err = ds.Commit(datasetID, bCommit)
	assert.NoError(err)
	assert.True(ds.Head(datasetID).Value().Equals(b))

	// Important to create this here.
	ds2 := NewDataStore(cs)

	// Change 1:
	// |a| <- |b| <- |c|
	c := types.NewString("c")
	cCommit := NewCommit().SetValue(c).SetParents(NewSetOfRefOfCommit().Insert(NewRefOfCommit(bCommit.Ref())))
	ds, err = ds.Commit(datasetID, cCommit)
	assert.NoError(err)
	assert.True(ds.Head(datasetID).Value().Equals(c))

	// Change 2:
	// |a| <- |b| <- |e|
	// Should be disallowed; the DataStore returned by Commit() should still have |c| as Head.
	e := types.NewString("e")
	eCommit := NewCommit().SetValue(e).SetParents(NewSetOfRefOfCommit().Insert(NewRefOfCommit(bCommit.Ref())))
	ds2, err = ds2.Commit(datasetID, eCommit)
	assert.Error(err)
	assert.True(ds.Head(datasetID).Value().Equals(c))
}
func TestDatasetCommitTracker(t *testing.T) {
	assert := assert.New(t)
	id1 := "testdataset"
	id2 := "othertestdataset"
	cs := chunks.NewMemoryStore()

	ds1 := NewDataset(datas.NewDataStore(cs), id1)
	ds1Commit := types.NewString("Commit value for " + id1)
	ds1, err := ds1.Commit(ds1Commit)
	assert.NoError(err)

	ds2 := NewDataset(datas.NewDataStore(cs), id2)
	ds2Commit := types.NewString("Commit value for " + id2)
	ds2, err = ds2.Commit(ds2Commit)
	assert.NoError(err)

	assert.EqualValues(ds1Commit, ds1.Head().Value())
	assert.EqualValues(ds2Commit, ds2.Head().Value())
	assert.False(ds2.Head().Value().Equals(ds1Commit))
	assert.False(ds1.Head().Value().Equals(ds2Commit))

	assert.Equal("sha1-145d840b4bf5d48f64d0c6ca91a5bc5d7d4eef3e", cs.Root().String())
}
func TestStructWithRef(t *testing.T) {
	assert := assert.New(t)
	ds := datas.NewDataStore(chunks.NewMemoryStore())

	set := gen.SetOfFloat32Def{0: true, 1: true, 2: true}.New()
	ds.WriteValue(set)

	str := gen.StructWithRefDef{
		R: set.Ref(),
	}.New()
	ds.WriteValue(str)

	r := str.R()
	r2 := gen.NewRefOfSetOfFloat32(set.Ref())
	assert.True(r.Equals(r2))
	assert.True(r2.TargetValue(ds).Equals(set))

	set2 := r2.TargetValue(ds)
	assert.True(set.Equals(set2))

	def := str.Def()
	assert.EqualValues(set.Ref(), def.R)
}
func main() {
	flags := datas.NewFlags()
	flag.Parse()

	ds, ok := flags.CreateDataStore()
	if !ok {
		ds = datas.NewDataStore(chunks.NewMemoryStore())
	}
	defer ds.Close()

	if *pkgDSFlag != "" {
		if !ok {
			log.Print("Package dataset provided, but DataStore could not be opened.")
			flag.Usage()
			return
		}
	} else {
		log.Print("No package dataset provided; will be unable to process imports.")
		*pkgDSFlag = "default"
	}

	if *outputLang != goExt && *outputLang != jsExt {
		log.Print("Invalid out-lang provided.")
		flag.Usage()
		return
	}

	pkgDS := dataset.NewDataset(ds, *pkgDSFlag)
	// Ensure that, if pkgDS has stuff in it, its head is a SetOfRefOfPackage.
	if h, ok := pkgDS.MaybeHead(); ok {
		d.Chk.IsType(types.SetOfRefOfPackage{}, h.Value())
	}

	localPkgs := refSet{}
	outDir, err := filepath.Abs(*outDirFlag)
	d.Chk.NoError(err, "Could not canonicalize -out-dir: %v", err)
	packageName := ""
	if *outputLang == goExt {
		packageName = getGoPackageName(outDir)
	}

	if *inFlag != "" {
		out := getOutFileName(filepath.Base(*inFlag))
		p := parsePackageFile(packageName, *inFlag, pkgDS)
		localPkgs[p.Ref()] = true
		generate(packageName, *inFlag, filepath.Join(outDir, out), outDir, map[string]bool{}, p, localPkgs, pkgDS)
		return
	}

	// Generate code from all .noms files in the current directory.
	nomsFiles, err := filepath.Glob("*" + ext)
	d.Chk.NoError(err)

	written := map[string]bool{}
	packages := map[string]pkg.Parsed{}
	for _, inFile := range nomsFiles {
		p := parsePackageFile(packageName, inFile, pkgDS)
		localPkgs[p.Ref()] = true
		packages[inFile] = p
	}

	// Sort to have deterministic output.
	keys := make([]string, 0, len(packages))
	for inFile := range packages {
		keys = append(keys, inFile)
	}
	sort.Strings(keys)

	for _, inFile := range keys {
		p := packages[inFile]
		pkgDS = generate(packageName, inFile, filepath.Join(outDir, getOutFileName(inFile)), outDir, written, p, localPkgs, pkgDS)
	}
}
func TestCanUseDef(t *testing.T) {
	assert := assert.New(t)
	emptyDS := datas.NewDataStore(chunks.NewMemoryStore())

	depsDir, err := ioutil.TempDir("", "")
	assert.NoError(err)
	defer os.RemoveAll(depsDir)

	assertCanUseDef := func(s string, using, named bool) {
		pkg := pkg.ParseNomDL("fakefile", bytes.NewBufferString(s), "", emptyDS)
		gen := newCodeGen(nil, "fakefile", "go", map[string]bool{}, depsMap{}, pkg)
		for _, t := range pkg.UsingDeclarations {
			assert.Equal(using, gen.canUseDef(t, gen.pkg.Package))
		}
		for _, t := range pkg.Types() {
			assert.Equal(named, gen.canUseDef(t, gen.pkg.Package))
		}
	}

	good := `
		using List<Int8>
		using Set<Int8>
		using Map<Int8, Int8>
		using Map<Int8, Set<Int8>>
		using Map<Int8, Map<Int8, Int8>>

		struct Simple {
			x: Int8
		}
		using Set<Simple>
		using Map<Simple, Int8>
		using Map<Simple, Simple>
		`
	assertCanUseDef(good, true, true)

	good = `
		struct Tree {
			children: List<Tree>
		}
		`
	assertCanUseDef(good, true, true)

	bad := `
		struct WithList {
			x: List<Int8>
		}
		using Set<WithList>
		using Map<WithList, Int8>

		struct WithSet {
			x: Set<Int8>
		}
		using Set<WithSet>
		using Map<WithSet, Int8>

		struct WithMap {
			x: Map<Int8, Int8>
		}
		using Set<WithMap>
		using Map<WithMap, Int8>
		`
	assertCanUseDef(bad, false, true)

	bad = `
		struct Commit {
			value: Value
			parents: Set<Commit>
		}
		`
	assertCanUseDef(bad, false, false)

	bad = `
		Set<Set<Int8>>
		Set<Map<Int8, Int8>>
		Set<List<Int8>>
		Map<Set<Int8>, Int8>
		Map<Map<Int8, Int8>, Int8>
		Map<List<Int8>, Int8>
		`
	for _, line := range strings.Split(bad, "\n") {
		if strings.TrimSpace(line) == "" {
			continue
		}
		assertCanUseDef(fmt.Sprintf("using %s", line), false, false)
		assertCanUseDef(fmt.Sprintf("struct S { x: %s }", line), false, false)
	}
}
func TestGetAlbums(t *testing.T) {
	assert := assert.New(t)

	store := datas.NewDataStore(chunks.NewMemoryStore())
	testDs := dataset.NewDataset(store, "test")
	ds = &testDs
	progress := progressTracker{}

	methods := map[string]string{
		"flickr.photosets.getList":   `{ "photosets": { "photoset": [ { "id": "42", "photos": 2, "title": { "_content": "My Photoset" }, "description": { "_content": "" } } ] } }`,
		"flickr.photosets.getInfo":   `{ "photoset": { "id": "42", "username": "******", "photos": 2, "title": { "_content": "My Photoset" }, "description": { "_content": "" } } }`,
		"flickr.photosets.getPhotos": `{ "photoset": { "id": "42", "photo": [ { "id": "0", "title": "_0", "datetaken": "2011-08-13 04:54:40", "url_s": "https:\/\/staticflickr.com\/0\/0.jpg", "height_s": "159", "width_s": "240", "url_m": "https:\/\/staticflickr.com\/0\/1.jpg", "height_m": "332", "width_m": "500", "url_l": "https:\/\/staticflickr.com\/0\/2.jpg", "height_l": "679", "width_l": "1024", "url_o": "https:\/\/staticflickr.com\/0\/3.jpg", "height_o": "679", "width_o": "1024", "longitude": 0, "latitude": 0 }, { "id": "1", "title": "_1", "datetaken": "2011-12-13 04:51:08", "url_s": "https:\/\/staticflickr.com\/1\/0.jpg", "height_s": "159", "width_s": "240", "url_m": "https:\/\/staticflickr.com\/1\/1.jpg", "height_m": "332", "width_m": "500", "url_l": "https:\/\/staticflickr.com\/1\/2.jpg", "height_l": "679", "width_l": "1024", "url_o": "https:\/\/staticflickr.com\/1\/3.jpg", "height_o": "6790", "width_o": "10240", "latitude": 48.8582641, "longitude": 2.2923184 } ], "title": "My Photoset" } }`,
	}

	albums := getAlbums(fakeFlickrAPI{methods}, &progress)
	assert.Equal(uint64(1), albums.Len())

	album := albums.Get("42").TargetValue(store)
	assert.Equal("42", album.Id())
	assert.Equal("My Photoset", album.Title())

	photos := album.Photos()
	assert.Equal(uint64(2), photos.Len())

	var photo0, photo1 RemotePhoto
	photos.IterAll(func(photo RefOfRemotePhoto) {
		p := photo.TargetValue(store)
		switch id := p.Id(); id {
		case "0":
			photo0 = p
		case "1":
			photo1 = p
		default:
			panic("unexpected photo " + id)
		}
	})

	assert.Equal("0", photo0.Id())
	assert.Equal("_0", photo0.Title())
	assert.Equal(int64(1313236480000), photo0.Date().MsSinceEpoch())
	assert.Equal(float32(0), photo0.Geoposition().Latitude())
	assert.Equal(float32(0), photo0.Geoposition().Longitude())
	assert.Equal(uint64(3), photo0.Sizes().Len()) // two of the images are the same
	assert.Equal(uint64(0), photo0.Tags().Len())

	assert.Equal("1", photo1.Id())
	assert.Equal("_1", photo1.Title())
	// This photo was taken in Paris (its lat/long is the Eiffel Tower), so its date should be interpreted
	// in that timezone, which is 9 hours ahead of PST (as of this moment).
	assert.Equal(int64(1323780668000-(9000*3600)), photo1.Date().MsSinceEpoch())
	assert.Equal(float32(48.8582641), photo1.Geoposition().Latitude())
	assert.Equal(float32(2.2923184), photo1.Geoposition().Longitude())
	assert.Equal(uint64(4), photo1.Sizes().Len()) // all images are different sizes
	assert.Equal(uint64(0), photo1.Tags().Len())
}
func TestDataStoreCommit(t *testing.T) {
	assert := assert.New(t)
	cs := chunks.NewMemoryStore()
	ds := NewDataStore(cs)
	datasetID := "ds1"

	datasets := ds.Datasets()
	assert.Zero(datasets.Len())

	// |a|
	a := types.NewString("a")
	aCommit := NewCommit().SetValue(a)
	ds2, err := ds.Commit(datasetID, aCommit)
	assert.NoError(err)

	// The old datastore still has no head.
	_, ok := ds.MaybeHead(datasetID)
	assert.False(ok)

	// The new datastore has |a|.
	aCommit1 := ds2.Head(datasetID)
	assert.True(aCommit1.Value().Equals(a))
	ds = ds2

	// |a| <- |b|
	b := types.NewString("b")
	bCommit := NewCommit().SetValue(b).SetParents(NewSetOfRefOfCommit().Insert(NewRefOfCommit(aCommit.Ref())))
	ds, err = ds.Commit(datasetID, bCommit)
	assert.NoError(err)
	assert.True(ds.Head(datasetID).Value().Equals(b))

	// |a| <- |b|
	//   \----|c|
	// Should be disallowed.
	c := types.NewString("c")
	cCommit := NewCommit().SetValue(c)
	ds, err = ds.Commit(datasetID, cCommit)
	assert.Error(err)
	assert.True(ds.Head(datasetID).Value().Equals(b))

	// |a| <- |b| <- |d|
	d := types.NewString("d")
	dCommit := NewCommit().SetValue(d).SetParents(NewSetOfRefOfCommit().Insert(NewRefOfCommit(bCommit.Ref())))
	ds, err = ds.Commit(datasetID, dCommit)
	assert.NoError(err)
	assert.True(ds.Head(datasetID).Value().Equals(d))

	// Attempt to recommit |b| with |a| as parent.
	// Should be disallowed.
	ds, err = ds.Commit(datasetID, bCommit)
	assert.Error(err)
	assert.True(ds.Head(datasetID).Value().Equals(d))

	// Add a commit to a different datasetId.
	_, err = ds.Commit("otherDs", aCommit)
	assert.NoError(err)

	// Get a fresh datastore, and verify that both datasets are present.
	newDs := NewDataStore(cs)
	datasets2 := newDs.Datasets()
	assert.Equal(uint64(2), datasets2.Len())
}
func (suite *ImportTestSuite) TestImports() {
	find := func(n string, typ types.Type) types.Field {
		suite.Equal(types.StructKind, typ.Kind())
		for _, f := range typ.Desc.(types.StructDesc).Fields {
			if f.Name == n {
				return f
			}
		}
		suite.Fail("Could not find field", "%s not present", n)
		return types.Field{}
	}
	findChoice := func(n string, typ types.Type) types.Field {
		suite.Equal(types.StructKind, typ.Kind())
		for _, f := range typ.Desc.(types.StructDesc).Union {
			if f.Name == n {
				return f
			}
		}
		suite.Fail("Could not find choice", "%s not present", n)
		return types.Field{}
	}
	refFromNomsFile := func(path string) ref.Ref {
		ds := datas.NewDataStore(chunks.NewMemoryStore())
		inFile, err := os.Open(path)
		suite.NoError(err)
		defer inFile.Close()
		parsedDep := ParseNomDL("", inFile, filepath.Dir(path), ds)
		return ds.WriteValue(parsedDep.Package).TargetRef()
	}

	dir, err := ioutil.TempDir("", "")
	suite.NoError(err)
	defer os.RemoveAll(dir)

	byPathNomDL := filepath.Join(dir, "filedep.noms")
	err = ioutil.WriteFile(byPathNomDL, []byte("struct FromFile{i:Int8}"), 0600)
	suite.NoError(err)

	r := strings.NewReader(fmt.Sprintf(`
		alias Other = import "%s"
		alias ByPath = import "%s"

		using List<Other.ForeignEnum>
		using List<Local1>
		struct Local1 {
			a: Other.ForeignStruct
			b: Int16
			c: Local2
		}
		struct Local2 {
			a: ByPath.FromFile
			b: Other.ForeignEnum
		}
		struct Union {
			union {
				a: Other.ForeignStruct
				b: Local2
			}
		}
		struct WithUnion {
			a: Other.ForeignStruct
			b: union {
				s: Local1
				t: Other.ForeignEnum
			}
		}`, suite.importRef, filepath.Base(byPathNomDL)))
	p := ParseNomDL("testing", r, dir, suite.vrw)

	named := p.Types()[0]
	suite.Equal("Local1", named.Name())
	field := find("a", named)
	suite.EqualValues(suite.importRef, field.T.PackageRef())
	field = find("c", named)
	suite.EqualValues(ref.Ref{}, field.T.PackageRef())

	named = p.Types()[1]
	suite.Equal("Local2", named.Name())
	field = find("a", named)
	suite.EqualValues(refFromNomsFile(byPathNomDL), field.T.PackageRef())
	field = find("b", named)
	suite.EqualValues(suite.importRef, field.T.PackageRef())

	named = p.Types()[2]
	suite.Equal("Union", named.Name())
	field = findChoice("a", named)
	suite.EqualValues(suite.importRef, field.T.PackageRef())
	field = findChoice("b", named)
	suite.EqualValues(ref.Ref{}, field.T.PackageRef())

	named = p.Types()[3]
	suite.Equal("WithUnion", named.Name())
	field = find("a", named)
	suite.EqualValues(suite.importRef, field.T.PackageRef())
	namedUnion := find("b", named).T
	suite.True(namedUnion.IsUnresolved())
	namedUnion = p.Types()[namedUnion.Ordinal()]
	field = findChoice("s", namedUnion)
	suite.EqualValues(ref.Ref{}, field.T.PackageRef())
	field = findChoice("t", namedUnion)
	suite.EqualValues(suite.importRef, field.T.PackageRef())

	usings := p.UsingDeclarations
	suite.Len(usings, 2)
	suite.EqualValues(types.ListKind, usings[0].Kind())
	suite.EqualValues(suite.importRef, usings[0].Desc.(types.CompoundDesc).ElemTypes[0].PackageRef())
	suite.EqualValues(types.ListKind, usings[1].Kind())
	suite.EqualValues(0, usings[1].Desc.(types.CompoundDesc).ElemTypes[0].Ordinal())
}