func (s *testSuite) TestShove() {
	s.LdbFlagName = "-source-ldb"
	sn := "storeName"
	source1 := dataset.NewDataset(datas.NewDataStore(chunks.NewLevelDBStore(s.LdbDir, sn, 1, false)), "foo")
	source1, err := source1.Commit(types.Int32(42))
	s.NoError(err)
	source2, err := source1.Commit(types.Int32(43))
	s.NoError(err)
	source1HeadRef := source1.Head().Ref()
	source2.Store().Close() // Close DataStore backing both Datasets

	ldb2dir := path.Join(s.TempDir, "ldb2")
	out := s.Run(main, []string{"-source-store", sn, "-source", source1HeadRef.String(), "-sink-ldb", ldb2dir, "-sink-store", sn, "-sink-ds", "bar"})
	s.Equal("", out)

	dest := dataset.NewDataset(datas.NewDataStore(chunks.NewLevelDBStore(ldb2dir, sn, 1, false)), "bar")
	s.True(types.Int32(42).Equals(dest.Head().Value()))
	dest.Store().Close()

	out = s.Run(main, []string{"-source-store", sn, "-source", "foo", "-sink-ldb", ldb2dir, "-sink-ds", "bar"})
	s.Equal("", out)

	dest = dataset.NewDataset(datas.NewDataStore(chunks.NewLevelDBStore(ldb2dir, sn, 1, false)), "bar")
	s.True(types.Int32(43).Equals(dest.Head().Value()))
	dest.Store().Close()
}
func assertOutput(inPath, lang, goldenPath string, t *testing.T) {
	assert := assert.New(t)
	emptyDS := datas.NewDataStore(chunks.NewMemoryStore()) // Will be DataStore containing imports

	depsDir, err := ioutil.TempDir("", "")
	assert.NoError(err)
	defer os.RemoveAll(depsDir)

	inFile, err := os.Open(inPath)
	assert.NoError(err)
	defer inFile.Close()

	goldenFile, err := os.Open(goldenPath)
	assert.NoError(err)
	defer goldenFile.Close()
	goldenBytes, err := ioutil.ReadAll(goldenFile)
	d.Chk.NoError(err)

	var buf bytes.Buffer
	pkg := pkg.ParseNomDL("gen", inFile, filepath.Dir(inPath), emptyDS)
	written := map[string]bool{}
	gen := newCodeGen(&buf, getBareFileName(inPath), lang, written, depsMap{}, pkg)
	gen.WritePackage()

	bs := buf.Bytes()
	if lang == "go" {
		bs, err = imports.Process("", bs, nil)
		d.Chk.NoError(err)
	}

	assert.Equal(string(goldenBytes), string(bs), "%s did not generate the same string", inPath)
}
func TestGenerateDeps(t *testing.T) {
	assert := assert.New(t)
	ds := datas.NewDataStore(chunks.NewMemoryStore())
	dir, err := ioutil.TempDir("", "codegen_test_")
	assert.NoError(err)
	defer os.RemoveAll(dir)

	leaf1 := types.NewPackage([]types.Type{types.MakeEnumType("e1", "a", "b")}, []ref.Ref{})
	leaf1Ref := ds.WriteValue(leaf1).TargetRef()
	leaf2 := types.NewPackage([]types.Type{types.MakePrimitiveType(types.BoolKind)}, []ref.Ref{})
	leaf2Ref := ds.WriteValue(leaf2).TargetRef()

	depender := types.NewPackage([]types.Type{}, []ref.Ref{leaf1Ref})
	dependerRef := ds.WriteValue(depender).TargetRef()

	top := types.NewPackage([]types.Type{}, []ref.Ref{leaf2Ref, dependerRef})
	types.RegisterPackage(&top)

	localPkgs := refSet{top.Ref(): true}
	generateDepCode(filepath.Base(dir), dir, map[string]bool{}, top, localPkgs, ds)

	leaf1Path := filepath.Join(dir, code.ToTag(leaf1.Ref())+".go")
	leaf2Path := filepath.Join(dir, code.ToTag(leaf2.Ref())+".go")
	leaf3Path := filepath.Join(dir, code.ToTag(depender.Ref())+".go")
	_, err = os.Stat(leaf1Path)
	assert.NoError(err)
	_, err = os.Stat(leaf2Path)
	assert.NoError(err)
	_, err = os.Stat(leaf3Path)
	assert.NoError(err)
}
func TestEnumIsValue(t *testing.T) {
	ds := datas.NewDataStore(chunks.NewMemoryStore())
	var v types.Value = gen.NewEnumStruct()
	ref := ds.WriteValue(v).TargetRef()
	v2 := ds.ReadValue(ref)
	assert.True(t, v.Equals(v2))
}
func TestRead(t *testing.T) {
	assert := assert.New(t)
	ds := datas.NewDataStore(chunks.NewMemoryStore())

	dataString := `a,1,true
b,2,false
`
	r := NewCSVReader(bytes.NewBufferString(dataString), ',')

	headers := []string{"A", "B", "C"}
	kinds := KindSlice{types.StringKind, types.Int8Kind, types.BoolKind}
	l, typeRef, typeDef := Read(r, "test", headers, kinds, ds)
	assert.Equal(uint64(2), l.Len())

	assert.True(typeRef.IsUnresolved())
	desc, ok := typeDef.Desc.(types.StructDesc)
	assert.True(ok)
	assert.Len(desc.Fields, 3)
	assert.Equal("A", desc.Fields[0].Name)
	assert.Equal("B", desc.Fields[1].Name)
	assert.Equal("C", desc.Fields[2].Name)

	assert.True(l.Get(0).(types.Struct).Get("A").Equals(types.NewString("a")))
	assert.True(l.Get(1).(types.Struct).Get("A").Equals(types.NewString("b")))
	assert.True(l.Get(0).(types.Struct).Get("B").Equals(types.Int8(1)))
	assert.True(l.Get(1).(types.Struct).Get("B").Equals(types.Int8(2)))
	assert.True(l.Get(0).(types.Struct).Get("C").Equals(types.Bool(true)))
	assert.True(l.Get(1).(types.Struct).Get("C").Equals(types.Bool(false)))
}
func (s *testSuite) TestCSVImporterWithPipe() {
	oldDelimiter := delimiter
	newDelimiter := "|"
	delimiter = &newDelimiter
	defer func() { delimiter = oldDelimiter }()

	input, err := ioutil.TempFile(s.TempDir, "")
	d.Chk.NoError(err)
	defer input.Close()
	defer os.Remove(input.Name())

	_, err = input.WriteString("a|b\n1|2\n")
	d.Chk.NoError(err)

	storeName := "store"
	setName := "csv"
	out := s.Run(main, []string{"-store", storeName, "-column-types", "String,Uint8", "-ds", setName, input.Name()})
	s.Equal("", out)

	cs := chunks.NewLevelDBStore(s.LdbDir, storeName, 1, false)
	ds := dataset.NewDataset(datas.NewDataStore(cs), setName)
	defer ds.Store().Close()
	defer os.RemoveAll(s.LdbDir)

	l := ds.Head().Value().(types.List)
	s.Equal(uint64(1), l.Len())
	v := l.Get(0)
	st := v.(types.Struct)
	s.Equal(types.NewString("1"), st.Get("a"))
	s.Equal(types.Uint8(2), st.Get("b"))
}
func TestCanUseDefFromImport(t *testing.T) {
	assert := assert.New(t)
	ds := datas.NewDataStore(chunks.NewMemoryStore())

	dir, err := ioutil.TempDir("", "")
	assert.NoError(err)
	defer os.RemoveAll(dir)

	byPathNomDL := filepath.Join(dir, "filedep.noms")
	err = ioutil.WriteFile(byPathNomDL, []byte("struct FromFile{i:Int8}"), 0600)
	assert.NoError(err)

	r1 := strings.NewReader(`
		struct A {
			B: B
		}
		struct B {
			X: Int64
		}`)
	pkg1 := pkg.ParseNomDL("test1", r1, dir, ds)
	pkgRef1 := ds.WriteValue(pkg1.Package).TargetRef()

	r2 := strings.NewReader(fmt.Sprintf(`
		alias Other = import "%s"
		struct C {
			C: Map<Int64, Other.A>
		}
		`, pkgRef1))
	pkg2 := pkg.ParseNomDL("test2", r2, dir, ds)

	gen2 := newCodeGen(nil, "test2", "go", map[string]bool{}, depsMap{pkg1.Ref(): pkg1.Package}, pkg2)
	assert.True(gen2.canUseDef(pkg2.Types()[0], gen2.pkg.Package))
}
func TestDatasetCommitTracker(t *testing.T) {
	assert := assert.New(t)
	id1 := "testdataset"
	id2 := "othertestdataset"

	cs := chunks.NewMemoryStore()
	ds1 := NewDataset(datas.NewDataStore(cs), id1)
	ds1Commit := types.NewString("Commit value for " + id1)
	ds1, err := ds1.Commit(ds1Commit)
	assert.NoError(err)

	ds2 := NewDataset(datas.NewDataStore(cs), id2)
	ds2Commit := types.NewString("Commit value for " + id2)
	ds2, err = ds2.Commit(ds2Commit)
	assert.NoError(err)

	assert.EqualValues(ds1Commit, ds1.Head().Value())
	assert.EqualValues(ds2Commit, ds2.Head().Value())
	assert.False(ds2.Head().Value().Equals(ds1Commit))
	assert.False(ds1.Head().Value().Equals(ds2Commit))

	assert.Equal("sha1-145d840b4bf5d48f64d0c6ca91a5bc5d7d4eef3e", cs.Root().String())
}
func TestReadParseError(t *testing.T) {
	assert := assert.New(t)
	ds := datas.NewDataStore(chunks.NewMemoryStore())

	dataString := `a,"b`
	r := NewCSVReader(bytes.NewBufferString(dataString), ',')

	headers := []string{"A", "B"}
	kinds := KindSlice{types.StringKind, types.StringKind}
	func() {
		defer func() {
			r := recover()
			assert.NotNil(r)
			_, ok := r.(*csv.ParseError)
			assert.True(ok, "Should be a ParseError")
		}()
		Read(r, "test", headers, kinds, ds)
	}()
}
func (suite *ImportTestSuite) SetupTest() {
	suite.vrw = datas.NewDataStore(chunks.NewMemoryStore())

	ns := types.MakeStructType("NestedDepStruct", []types.Field{}, types.Choices{
		types.Field{"b", types.MakePrimitiveType(types.BoolKind), false},
		types.Field{"i", types.MakePrimitiveType(types.Int8Kind), false},
	})
	suite.nested = types.NewPackage([]types.Type{ns}, []ref.Ref{})
	suite.nestedRef = suite.vrw.WriteValue(suite.nested).TargetRef()

	fs := types.MakeStructType("ForeignStruct", []types.Field{
		types.Field{"b", types.MakeType(ref.Ref{}, 1), false},
		types.Field{"n", types.MakeType(suite.nestedRef, 0), false},
	}, types.Choices{})
	fe := types.MakeEnumType("ForeignEnum", "uno", "dos")
	suite.imported = types.NewPackage([]types.Type{fs, fe}, []ref.Ref{suite.nestedRef})
	suite.importRef = suite.vrw.WriteValue(suite.imported).TargetRef()
}
func testTrailingHelper(t *testing.T, dataString string) {
	assert := assert.New(t)
	ds := datas.NewDataStore(chunks.NewMemoryStore())

	r := NewCSVReader(bytes.NewBufferString(dataString), ',')

	headers := []string{"A", "B"}
	kinds := KindSlice{types.StringKind, types.StringKind}
	l, typeRef, typeDef := Read(r, "test", headers, kinds, ds)
	assert.Equal(uint64(3), l.Len())

	assert.True(typeRef.IsUnresolved())
	desc, ok := typeDef.Desc.(types.StructDesc)
	assert.True(ok)
	assert.Len(desc.Fields, 2)
	assert.Equal("A", desc.Fields[0].Name)
	assert.Equal("B", desc.Fields[1].Name)
}
func TestStructIsValue(t *testing.T) {
	assert := assert.New(t)
	ds := datas.NewDataStore(chunks.NewMemoryStore())
	var v types.Value = gen.StructWithListDef{
		L: gen.ListOfUint8Def{0, 1, 2},
		B: true,
		S: "world",
		I: 42,
	}.New()
	ref := ds.WriteValue(v).TargetRef()
	v2 := ds.ReadValue(ref)
	assert.True(v.Equals(v2))

	s2 := v2.(gen.StructWithList)
	assert.True(s2.L().Equals(gen.NewListOfUint8().Append(0, 1, 2)))
	assert.True(s2.B())
	assert.Equal("world", s2.S())
	assert.Equal(int64(42), s2.I())
}
func TestCommitNewPackages(t *testing.T) {
	assert := assert.New(t)
	ds := datas.NewDataStore(chunks.NewMemoryStore())
	pkgDS := dataset.NewDataset(ds, "packages")

	dir, err := ioutil.TempDir("", "")
	assert.NoError(err)
	defer os.RemoveAll(dir)

	inFile := filepath.Join(dir, "in.noms")
	err = ioutil.WriteFile(inFile, []byte("struct Simple{a:Bool}"), 0600)
	assert.NoError(err)

	p := parsePackageFile("name", inFile, pkgDS)
	localPkgs := refSet{p.Ref(): true}
	pkgDS = generate("name", inFile, filepath.Join(dir, "out.go"), dir, map[string]bool{}, p, localPkgs, pkgDS)

	s := pkgDS.Head().Value().(types.SetOfRefOfPackage)
	assert.EqualValues(1, s.Len())
	tr := s.First().TargetValue(ds).Types()[0]
	assert.EqualValues(types.StructKind, tr.Kind())
}
func TestStructWithRef(t *testing.T) {
	assert := assert.New(t)
	ds := datas.NewDataStore(chunks.NewMemoryStore())

	set := gen.SetOfFloat32Def{0: true, 1: true, 2: true}.New()
	ds.WriteValue(set)

	str := gen.StructWithRefDef{
		R: set.Ref(),
	}.New()
	ds.WriteValue(str)

	r := str.R()
	r2 := gen.NewRefOfSetOfFloat32(set.Ref())
	assert.True(r.Equals(r2))
	assert.True(r2.TargetValue(ds).Equals(set))

	set2 := r2.TargetValue(ds)
	assert.True(set.Equals(set2))

	def := str.Def()
	assert.EqualValues(set.Ref(), def.R)
}
func (s *testSuite) TestCSVImporter() {
	input, err := ioutil.TempFile(s.TempDir, "")
	d.Chk.NoError(err)
	defer input.Close()
	defer os.Remove(input.Name())

	_, err = input.WriteString("a,b\n")
	d.Chk.NoError(err)
	for i := 0; i < 100; i++ {
		_, err = input.WriteString(fmt.Sprintf("a%d,%d\n", i, i))
		d.Chk.NoError(err)
	}
	_, err = input.Seek(0, 0)
	d.Chk.NoError(err)

	storeName := "store"
	setName := "csv"
	out := s.Run(main, []string{"-store", storeName, "-column-types", "String,Uint8", "-ds", setName, input.Name()})
	s.Equal("", out)

	cs := chunks.NewLevelDBStore(s.LdbDir, storeName, 1, false)
	ds := dataset.NewDataset(datas.NewDataStore(cs), setName)
	defer ds.Store().Close()
	defer os.RemoveAll(s.LdbDir)

	l := ds.Head().Value().(types.List)
	s.Equal(uint64(100), l.Len())

	i := uint64(0)
	l.IterAll(func(v types.Value, j uint64) {
		s.Equal(i, j)
		st := v.(types.Struct)
		s.Equal(types.NewString(fmt.Sprintf("a%d", i)), st.Get("a"))
		s.Equal(types.Uint8(i), st.Get("b"))
		i++
	})
}
func TestGetAlbums(t *testing.T) {
	assert := assert.New(t)

	store := datas.NewDataStore(chunks.NewMemoryStore())
	testDs := dataset.NewDataset(store, "test")
	ds = &testDs

	progress := progressTracker{}

	methods := map[string]string{
		"flickr.photosets.getList": `{
			"photosets": {
				"photoset": [
					{
						"id": "42",
						"photos": 2,
						"title": { "_content": "My Photoset" },
						"description": { "_content": "" }
					}
				]
			}
		}`,
		"flickr.photosets.getInfo": `{
			"photoset": {
				"id": "42",
				"username": "******",
				"photos": 2,
				"title": { "_content": "My Photoset" },
				"description": { "_content": "" }
			}
		}`,
		"flickr.photosets.getPhotos": `{
			"photoset": {
				"id": "42",
				"photo": [
					{
						"id": "0", "title": "_0", "datetaken": "2011-08-13 04:54:40",
						"url_s": "https:\/\/staticflickr.com\/0\/0.jpg", "height_s": "159", "width_s": "240",
						"url_m": "https:\/\/staticflickr.com\/0\/1.jpg", "height_m": "332", "width_m": "500",
						"url_l": "https:\/\/staticflickr.com\/0\/2.jpg", "height_l": "679", "width_l": "1024",
						"url_o": "https:\/\/staticflickr.com\/0\/3.jpg", "height_o": "679", "width_o": "1024",
						"longitude": 0, "latitude": 0
					},
					{
						"id": "1", "title": "_1", "datetaken": "2011-12-13 04:51:08",
						"url_s": "https:\/\/staticflickr.com\/1\/0.jpg", "height_s": "159", "width_s": "240",
						"url_m": "https:\/\/staticflickr.com\/1\/1.jpg", "height_m": "332", "width_m": "500",
						"url_l": "https:\/\/staticflickr.com\/1\/2.jpg", "height_l": "679", "width_l": "1024",
						"url_o": "https:\/\/staticflickr.com\/1\/3.jpg", "height_o": "6790", "width_o": "10240",
						"latitude": 48.8582641, "longitude": 2.2923184
					}
				],
				"title": "My Photoset"
			}
		}`,
	}

	albums := getAlbums(fakeFlickrAPI{methods}, &progress)
	assert.Equal(uint64(1), albums.Len())

	album := albums.Get("42").TargetValue(store)
	assert.Equal("42", album.Id())
	assert.Equal("My Photoset", album.Title())

	photos := album.Photos()
	assert.Equal(uint64(2), photos.Len())

	var photo0, photo1 RemotePhoto
	photos.IterAll(func(photo RefOfRemotePhoto) {
		p := photo.TargetValue(store)
		switch id := p.Id(); id {
		case "0":
			photo0 = p
		case "1":
			photo1 = p
		default:
			panic("unexpected photo " + id)
		}
	})

	assert.Equal("0", photo0.Id())
	assert.Equal("_0", photo0.Title())
	assert.Equal(int64(1313236480000), photo0.Date().MsSinceEpoch())
	assert.Equal(float32(0), photo0.Geoposition().Latitude())
	assert.Equal(float32(0), photo0.Geoposition().Longitude())
	assert.Equal(uint64(3), photo0.Sizes().Len()) // two of the images are the same
	assert.Equal(uint64(0), photo0.Tags().Len())

	assert.Equal("1", photo1.Id())
	assert.Equal("_1", photo1.Title())
	// This photo was taken in Paris (by finding the lat/long of the Eiffel Tower), so its date
	// should be interpreted according to that timezone, which is 9 hours ahead of PST (as of this moment).
	assert.Equal(int64(1323780668000-(9000*3600)), photo1.Date().MsSinceEpoch())
	assert.Equal(float32(48.8582641), photo1.Geoposition().Latitude())
	assert.Equal(float32(2.2923184), photo1.Geoposition().Longitude())
	assert.Equal(uint64(4), photo1.Sizes().Len()) // all images are different sizes
	assert.Equal(uint64(0), photo1.Tags().Len())
}
func (suite *ImportTestSuite) TestImports() {
	find := func(n string, typ types.Type) types.Field {
		suite.Equal(types.StructKind, typ.Kind())
		for _, f := range typ.Desc.(types.StructDesc).Fields {
			if f.Name == n {
				return f
			}
		}
		suite.Fail("Could not find field", "%s not present", n)
		return types.Field{}
	}
	findChoice := func(n string, typ types.Type) types.Field {
		suite.Equal(types.StructKind, typ.Kind())
		for _, f := range typ.Desc.(types.StructDesc).Union {
			if f.Name == n {
				return f
			}
		}
		suite.Fail("Could not find choice", "%s not present", n)
		return types.Field{}
	}
	refFromNomsFile := func(path string) ref.Ref {
		ds := datas.NewDataStore(chunks.NewMemoryStore())
		inFile, err := os.Open(path)
		suite.NoError(err)
		defer inFile.Close()
		parsedDep := ParseNomDL("", inFile, filepath.Dir(path), ds)
		return ds.WriteValue(parsedDep.Package).TargetRef()
	}

	dir, err := ioutil.TempDir("", "")
	suite.NoError(err)
	defer os.RemoveAll(dir)

	byPathNomDL := filepath.Join(dir, "filedep.noms")
	err = ioutil.WriteFile(byPathNomDL, []byte("struct FromFile{i:Int8}"), 0600)
	suite.NoError(err)

	r := strings.NewReader(fmt.Sprintf(`
		alias Other = import "%s"
		alias ByPath = import "%s"

		using List<Other.ForeignEnum>
		using List<Local1>
		struct Local1 {
			a: Other.ForeignStruct
			b: Int16
			c: Local2
		}
		struct Local2 {
			a: ByPath.FromFile
			b: Other.ForeignEnum
		}
		struct Union {
			union {
				a: Other.ForeignStruct
				b: Local2
			}
		}
		struct WithUnion {
			a: Other.ForeignStruct
			b: union {
				s: Local1
				t: Other.ForeignEnum
			}
		}`, suite.importRef, filepath.Base(byPathNomDL)))
	p := ParseNomDL("testing", r, dir, suite.vrw)

	named := p.Types()[0]
	suite.Equal("Local1", named.Name())
	field := find("a", named)
	suite.EqualValues(suite.importRef, field.T.PackageRef())
	field = find("c", named)
	suite.EqualValues(ref.Ref{}, field.T.PackageRef())

	named = p.Types()[1]
	suite.Equal("Local2", named.Name())
	field = find("a", named)
	suite.EqualValues(refFromNomsFile(byPathNomDL), field.T.PackageRef())
	field = find("b", named)
	suite.EqualValues(suite.importRef, field.T.PackageRef())

	named = p.Types()[2]
	suite.Equal("Union", named.Name())
	field = findChoice("a", named)
	suite.EqualValues(suite.importRef, field.T.PackageRef())
	field = findChoice("b", named)
	suite.EqualValues(ref.Ref{}, field.T.PackageRef())

	named = p.Types()[3]
	suite.Equal("WithUnion", named.Name())
	field = find("a", named)
	suite.EqualValues(suite.importRef, field.T.PackageRef())
	namedUnion := find("b", named).T
	suite.True(namedUnion.IsUnresolved())
	namedUnion = p.Types()[namedUnion.Ordinal()]
	field = findChoice("s", namedUnion)
	suite.EqualValues(ref.Ref{}, field.T.PackageRef())
	field = findChoice("t", namedUnion)
	suite.EqualValues(suite.importRef, field.T.PackageRef())

	usings := p.UsingDeclarations
	suite.Len(usings, 2)
	suite.EqualValues(types.ListKind, usings[0].Kind())
	suite.EqualValues(suite.importRef, usings[0].Desc.(types.CompoundDesc).ElemTypes[0].PackageRef())
	suite.EqualValues(types.ListKind, usings[1].Kind())
	suite.EqualValues(0, usings[1].Desc.(types.CompoundDesc).ElemTypes[0].Ordinal())
}
// FIXME: run with pipe
func (s *testSuite) TestCSVExporter() {
	storeName := "store"
	setName := "csv"
	header := []string{"a", "b", "c"}
	payload := [][]string{
		[]string{"5", "7", "100"},
		[]string{"4", "10", "255"},
		[]string{"512", "12", "55"},
	}
	structName := "SomeStruct"

	// Setup data store
	cs := chunks.NewLevelDBStore(s.LdbDir, storeName, 1, false)
	ds := dataset.NewDataset(datas.NewDataStore(cs), setName)

	// Build Struct fields based on header
	f := make([]types.Field, 0, len(header))
	for _, key := range header {
		f = append(f, types.Field{
			Name: key,
			T:    types.MakePrimitiveType(types.StringKind),
		})
	}

	typeDef := types.MakeStructType(structName, f, types.Choices{})
	pkg := types.NewPackage([]types.Type{typeDef}, []ref.Ref{})
	pkgRef := types.RegisterPackage(&pkg)
	typeRef := types.MakeType(pkgRef, 0)
	structFields := typeDef.Desc.(types.StructDesc).Fields

	// Build data rows
	structs := make([]types.Value, len(payload))
	for i, row := range payload {
		fields := make(map[string]types.Value)
		for j, v := range row {
			fields[structFields[j].Name] = types.NewString(v)
		}
		structs[i] = types.NewStruct(typeRef, typeDef, fields)
	}

	listType := types.MakeCompoundType(types.ListKind, typeRef)
	ds.Commit(types.NewTypedList(listType, structs...))
	ds.Store().Close()

	// Run exporter
	out := s.Run(main, []string{"-store", storeName, "-ds", setName})

	// Verify output
	csvReader := csv.NewReader(strings.NewReader(out))

	row, err := csvReader.Read()
	d.Chk.NoError(err)
	s.Equal(header, row)

	for i := 0; i < len(payload); i++ {
		row, err := csvReader.Read()
		d.Chk.NoError(err)
		s.Equal(payload[i], row)
	}

	row, err = csvReader.Read()
	s.Equal(io.EOF, err)
}
func main() {
	flags := datas.NewFlags()
	flag.Parse()

	ds, ok := flags.CreateDataStore()
	if !ok {
		ds = datas.NewDataStore(chunks.NewMemoryStore())
	}
	defer ds.Close()

	if *pkgDSFlag != "" {
		if !ok {
			log.Print("Package dataset provided, but DataStore could not be opened.")
			flag.Usage()
			return
		}
	} else {
		log.Print("No package dataset provided; will be unable to process imports.")
		*pkgDSFlag = "default"
	}

	if *outputLang != goExt && *outputLang != jsExt {
		log.Print("Invalid out-lang provided.")
		flag.Usage()
		return
	}

	pkgDS := dataset.NewDataset(ds, *pkgDSFlag)
	// Ensure that, if pkgDS has stuff in it, its head is a SetOfRefOfPackage.
	if h, ok := pkgDS.MaybeHead(); ok {
		d.Chk.IsType(types.SetOfRefOfPackage{}, h.Value())
	}

	localPkgs := refSet{}
	outDir, err := filepath.Abs(*outDirFlag)
	d.Chk.NoError(err, "Could not canonicalize -out-dir: %v", err)
	packageName := ""
	if *outputLang == goExt {
		packageName = getGoPackageName(outDir)
	}

	if *inFlag != "" {
		out := getOutFileName(filepath.Base(*inFlag))
		p := parsePackageFile(packageName, *inFlag, pkgDS)
		localPkgs[p.Ref()] = true
		generate(packageName, *inFlag, filepath.Join(outDir, out), outDir, map[string]bool{}, p, localPkgs, pkgDS)
		return
	}

	// Generate code from all .noms files in the current directory.
	nomsFiles, err := filepath.Glob("*" + ext)
	d.Chk.NoError(err)

	written := map[string]bool{}
	packages := map[string]pkg.Parsed{}
	for _, inFile := range nomsFiles {
		p := parsePackageFile(packageName, inFile, pkgDS)
		localPkgs[p.Ref()] = true
		packages[inFile] = p
	}

	// Sort to have deterministic output. (Sort after collecting the keys;
	// sorting the empty slice before appending would leave them unordered.)
	keys := make([]string, 0, len(packages))
	for inFile := range packages {
		keys = append(keys, inFile)
	}
	sort.Strings(keys)

	for _, inFile := range keys {
		p := packages[inFile]
		pkgDS = generate(packageName, inFile, filepath.Join(outDir, getOutFileName(inFile)), outDir, written, p, localPkgs, pkgDS)
	}
}
func TestCanUseDef(t *testing.T) {
	assert := assert.New(t)
	emptyDS := datas.NewDataStore(chunks.NewMemoryStore())

	depsDir, err := ioutil.TempDir("", "")
	assert.NoError(err)
	defer os.RemoveAll(depsDir)

	assertCanUseDef := func(s string, using, named bool) {
		pkg := pkg.ParseNomDL("fakefile", bytes.NewBufferString(s), "", emptyDS)
		gen := newCodeGen(nil, "fakefile", "go", map[string]bool{}, depsMap{}, pkg)
		for _, t := range pkg.UsingDeclarations {
			assert.Equal(using, gen.canUseDef(t, gen.pkg.Package))
		}
		for _, t := range pkg.Types() {
			assert.Equal(named, gen.canUseDef(t, gen.pkg.Package))
		}
	}

	good := `
		using List<Int8>
		using Set<Int8>
		using Map<Int8, Int8>
		using Map<Int8, Set<Int8>>
		using Map<Int8, Map<Int8, Int8>>

		struct Simple {
			x: Int8
		}
		using Set<Simple>
		using Map<Simple, Int8>
		using Map<Simple, Simple>
		`
	assertCanUseDef(good, true, true)

	good = `
		struct Tree {
			children: List<Tree>
		}
		`
	assertCanUseDef(good, true, true)

	bad := `
		struct WithList {
			x: List<Int8>
		}
		using Set<WithList>
		using Map<WithList, Int8>

		struct WithSet {
			x: Set<Int8>
		}
		using Set<WithSet>
		using Map<WithSet, Int8>

		struct WithMap {
			x: Map<Int8, Int8>
		}
		using Set<WithMap>
		using Map<WithMap, Int8>
		`
	assertCanUseDef(bad, false, true)

	bad = `
		struct Commit {
			value: Value
			parents: Set<Commit>
		}
		`
	assertCanUseDef(bad, false, false)

	bad = `
		Set<Set<Int8>>
		Set<Map<Int8, Int8>>
		Set<List<Int8>>
		Map<Set<Int8>, Int8>
		Map<Map<Int8, Int8>, Int8>
		Map<List<Int8>, Int8>
		`
	for _, line := range strings.Split(bad, "\n") {
		if strings.TrimSpace(line) == "" {
			continue
		}
		assertCanUseDef(fmt.Sprintf("using %s", line), false, false)
		assertCanUseDef(fmt.Sprintf("struct S { x: %s }", line), false, false)
	}
}
func newDS(id string, cs *chunks.MemoryStore) Dataset {
	store := datas.NewDataStore(cs)
	return NewDataset(store, id)
}
func createTestDataset(name string) Dataset {
	return NewDataset(datas.NewDataStore(chunks.NewTestStore()), name)
}