// TestLineComments, using a simple test case, checks that consequtive line // comments are properly terminated with a newline even if the AST position // information is incorrect. // func TestLineComments(t *testing.T) { const src = `// comment 1 // comment 2 // comment 3 package main ` fset := token.NewFileSet() f, err := parser.ParseFile(fset, "", src, parser.ParseComments) if err != nil { panic(err) // error in test } var buf bytes.Buffer fset = token.NewFileSet() // use the wrong file set Fprint(&buf, fset, f) nlines := 0 for _, ch := range buf.Bytes() { if ch == '\n' { nlines++ } } const expected = 3 if nlines < expected { t.Errorf("got %d, expected %d\n", nlines, expected) t.Errorf("result:\n%s", buf.Bytes()) } }
// GetPackageList records, in the global packages map, the package name of
// every file in the global files slice.
//
// For files declaring package main, the whole file is parsed and walked to
// see whether it defines func main; if it does, the base of the file name
// (everything before the first dot) is used as the map key instead of
// "main". Parse errors on the full parse are silently skipped (only the
// err == nil branch does any work).
//
// NOTE(review): the three-argument strings.Split indicates pre-Go1 code;
// the first parse error terminates the whole program via os.Exit(1).
func GetPackageList() {
	for _, fname := range files {
		fset := token.NewFileSet()
		file, err := parser.ParseFile(fset, fname, nil, parser.PackageClauseOnly)
		if err != nil {
			fmt.Fprint(os.Stderr, err)
			os.Exit(1)
		}
		pname := file.Name.Name
		if pname == "main" {
			fset := token.NewFileSet()
			fullfile, err := parser.ParseFile(fset, fname, nil, 0)
			if err == nil {
				v := &MainCheckVisitor{fname: fname}
				ast.Walk(v, fullfile)
				if v.hasMain {
					// get the name from the filename
					fparts := strings.Split(fname, ".", -1)
					basename := path.Base(fparts[0])
					packages[basename] = nil
				} else {
					packages[pname] = nil
				}
			}
		} else {
			packages[file.Name.Name] = nil
		}
	}
}
func generateRunner(filename string, testMains []*TestMain) os.Error { src := bytes.NewBufferString("") fmt.Fprint(src, "package main\n\n") fmt.Fprint(src, "import \"sync\"\n") fmt.Fprint(src, "import \"testing\"\n") fmt.Fprint(src, "import (\n") for _, testMain := range testMains { name := testMain.underscorePkgName() fmt.Fprintf(src, "%s \"%s\"\n", name, testMain.pkgName) } fmt.Fprint(src, ")\n") fmt.Fprint(src, "func main() {\n") fmt.Fprint(src, "wg := new(sync.WaitGroup)\n") for _, testMain := range testMains { pkgName := testMain.underscorePkgName() fmt.Fprint(src, "wg.Add(1)\n") fmt.Fprint(src, "go func() {\n") fmt.Fprint(src, "tests := []testing.InternalTest{\n") for _, test := range testMain.tests { testFunc := pkgName + "." + test fmt.Fprintf(src, "{\"%s\", %s},\n", testMain.pkgName+"."+test, testFunc) } fmt.Fprint(src, "}\n") fmt.Fprint(src, "benchmarks := []testing.InternalBenchmark{\n") for _, bench := range testMain.benchmarks { benchFunc := pkgName + "." + bench fmt.Fprintf(src, "{\"%s\", %s},\n", testMain.pkgName+"."+bench, benchFunc) } fmt.Fprint(src, "}\n") fmt.Fprintf(src, "for i := 0; i < %d; i++ {\n", iters) fmt.Fprint(src, "testing.Main(regexp.MatchString, tests)\n") fmt.Fprint(src, "testing.RunBenchmarks(regexp.MatchString, benchmarks)\n") fmt.Fprint(src, "}\n") fmt.Fprint(src, "wg.Done()\n") fmt.Fprint(src, "}()\n\n") } fmt.Fprint(src, "wg.Wait()\n") fmt.Fprint(src, "}\n") file, err := os.Open(filename, os.O_CREAT|os.O_TRUNC|os.O_WRONLY, 0666) if err != nil { return err } defer file.Close() //fmt.Printf("%s\n", string(src.Bytes())) fileNode, err := parser.ParseFile(token.NewFileSet(), filename, src.Bytes(), 0) if err != nil { panic(err) } config := printer.Config{printer.TabIndent, 8} _, err = config.Fprint(file, token.NewFileSet(), fileNode) if err != nil { return err } return nil }
func fprint(w io.Writer, a interface{}) error { switch x := a.(type) { case *ast.FieldList: // printer.Fprint does not support this type. Hack around it. return fprintFieldList(w, x) case ast.Node, []ast.Decl, []ast.Stmt: return printer.Fprint(w, token.NewFileSet(), x) case []ast.Expr: i := 0 for ; i < len(x)-1; i++ { if err := printer.Fprint(w, token.NewFileSet(), x[i]); err != nil { return err } if _, err := w.Write([]byte(", ")); err != nil { return err } } if len(x) != 0 { return printer.Fprint(w, token.NewFileSet(), x[i]) } return nil case string: _, err := io.WriteString(w, x) return err default: panic(fmt.Sprintf("unsupported value: %v", x)) } }
// main parses minimal.go, prints its AST, and then — in code that is
// currently disabled by the bare return below — experiments with prepending
// a synthesized function declaration by manually shifting the position
// information of every following node and rebuilding the line table.
func main() {
	fileSet := token.NewFileSet()
	astFile, err := parser.ParseFile(fileSet, "minimal.go", nil, parser.ParseComments)
	if err != nil {
		panic(err)
	}
	ast.Print(fileSet, astFile)

	// NOTE(review): everything below this return is unreachable; it looks
	// like an experiment that was deliberately switched off — confirm the
	// intent before deleting or re-enabling it.
	return

	// Prepend a synthesized MyNewFunc declaration; the positions (15, 29,
	// 30) are hardcoded for the known contents of minimal.go.
	astFile.Decls = append(astFile.Decls[:0], append([]ast.Decl{&ast.FuncDecl{
		Name: ast.NewIdent("MyNewFunc"),
		Type: &ast.FuncType{
			Func: 15,
			Params: &ast.FieldList{
				Opening: 29,
				Closing: 30,
			},
		},
	}}, astFile.Decls[0:]...)...)

	// Shift every original node by the byte length of the new declaration.
	offset := astFile.Decls[0].End() - astFile.Decls[0].Pos()
	intoffset := int(offset)
	fmt.Println("offset", offset)
	astFile.Comments[0].List[0].Slash += offset
	// astFile.Comments[0].List[0].Slash = 18
	astFile.Decls[1].(*ast.GenDecl).TokPos += offset
	astFile.Decls[1].(*ast.GenDecl).Lparen += offset
	astFile.Decls[1].(*ast.GenDecl).Rparen += offset

	// Build a new file set sized for the grown file and install a
	// hand-computed line table so the printer has positions to work with.
	fileSetFile := fileSet.File(1)
	newFileSet := token.NewFileSet()
	newFileSetFile := newFileSet.AddFile("whatever", 1, fileSetFile.Size()+int(offset))
	newFileSetFile.SetLines([]int{
		0,
		13,
		14,
		15,
		15 + intoffset,
		20 + intoffset,
		21 + intoffset,
		32 + intoffset,
		33 + intoffset,
	}) // hardcoded for now

	fmt.Println("astFile:")
	spew.Dump(astFile)
	fmt.Println()
	fmt.Println()
	fmt.Println("fileSet:")
	spew.Dump(fileSet)

	buf := new(bytes.Buffer)
	err = printer.Fprint(buf, newFileSet, astFile)
	if err != nil {
		panic(err)
	}
	fmt.Println(buf)
}
func (m Mocks) Output(pkg, dir string, chanSize int, dest io.Writer) error { if _, err := dest.Write([]byte(commentHeader)); err != nil { return err } fset := token.NewFileSet() f := &ast.File{ Name: &ast.Ident{Name: pkg}, Decls: m.decls(chanSize), } var b bytes.Buffer format.Node(&b, fset, f) // TODO: Determine why adding imports without creating a new ast file // will only allow one import to be printed to the file. fset = token.NewFileSet() file, err := parser.ParseFile(fset, pkg, &b, 0) if err != nil { return err } file, fset, err = addImports(file, fset, dir) if err != nil { return err } return format.Node(dest, fset, file) }
func doParseFiles(filePathes []string, fset *token.FileSet) (*token.FileSet, []*ast.File, error) { if fset == nil { fset = token.NewFileSet() } util.Info("parsing files %v", filePathes) astFiles := make([]*ast.File, 0, len(filePathes)) for _, f := range filePathes { //XXX: Ignoring files with packages ends with _test. //XXX: Doing that because getting error in check() //XXX: cause source file is still going to current //XXX: packages. Need to analyze package before //XXX: and check both packages separately. tempFset := token.NewFileSet() astFile, err := parser.ParseFile(tempFset, f, nil, 0) if !strings.HasSuffix(astFile.Name.Name, "_test") { if err != nil { return nil, nil, err } astFile, _ := parser.ParseFile(fset, f, nil, 0) astFiles = append(astFiles, astFile) } } iterateFunc := func(f *token.File) bool { util.Debug("\t%s", f.Name()) return true } fset.Iterate(iterateFunc) return fset, astFiles, nil }
// TestVeryLongFile tests the position of an import object declared in // a very long input file. Line numbers greater than maxlines are // reported as line 1, not garbage or token.NoPos. func TestVeryLongFile(t *testing.T) { // parse and typecheck longFile := "package foo" + strings.Repeat("\n", 123456) + "var X int" fset1 := token.NewFileSet() f, err := parser.ParseFile(fset1, "foo.go", longFile, 0) if err != nil { t.Fatal(err) } var conf types.Config pkg, err := conf.Check("foo", fset1, []*ast.File{f}, nil) if err != nil { t.Fatal(err) } // export exportdata := gcimporter.BExportData(fset1, pkg) // import imports := make(map[string]*types.Package) fset2 := token.NewFileSet() _, pkg2, err := gcimporter.BImportData(fset2, imports, exportdata, pkg.Path()) if err != nil { t.Fatalf("BImportData(%s): %v", pkg.Path(), err) } // compare posn1 := fset1.Position(pkg.Scope().Lookup("X").Pos()) posn2 := fset2.Position(pkg2.Scope().Lookup("X").Pos()) if want := "foo.go:1:1"; posn2.String() != want { t.Errorf("X position = %s, want %s (orig was %s)", posn2, want, posn1) } }
// inspired by godeps rewrite, rewrites import paths with gx vendored names func rewriteImportsInFile(fi string, rw func(string) string) error { cfg := &printer.Config{Mode: printer.UseSpaces | printer.TabIndent, Tabwidth: 8} fset := token.NewFileSet() file, err := parser.ParseFile(fset, fi, nil, parser.ParseComments) if err != nil { return err } var changed bool for _, imp := range file.Imports { p, err := strconv.Unquote(imp.Path.Value) if err != nil { return err } np := rw(p) if np != p { changed = true imp.Path.Value = strconv.Quote(np) } } if !changed { return nil } buf := bufpool.Get().(*bytes.Buffer) if err = cfg.Fprint(buf, fset, file); err != nil { return err } fset = token.NewFileSet() file, err = parser.ParseFile(fset, fi, buf, parser.ParseComments) if err != nil { return err } buf.Reset() bufpool.Put(buf) ast.SortImports(fset, file) wpath := fi + ".temp" w, err := os.Create(wpath) if err != nil { return err } if err = cfg.Fprint(w, fset, file); err != nil { return err } if err = w.Close(); err != nil { return err } return os.Rename(wpath, fi) }
func extractTests(pkgs []string, except []string, pkgOut string, output io.Writer) error { here, err := os.Getwd() if err != nil { return err } fset := token.NewFileSet() result := &ast.File{} result.Name = &ast.Ident{ Name: pkgOut, } result.Imports = append(result.Imports, &ast.ImportSpec{ Path: &ast.BasicLit{ Kind: token.STRING, Value: "\"testing\"", }, }) for _, pkgName := range pkgs { pkg, err := build.Import(pkgName, here, 0) if err != nil { return err } result.Imports = append(result.Imports, &ast.ImportSpec{ Path: &ast.BasicLit{ Kind: token.STRING, Value: "\"" + pkg.ImportPath + "\"", }, }) pkgFiles := token.NewFileSet() for _, src := range pkg.GoFiles { qsrc := filepath.Join(pkg.Dir, src) sfile, err := parser.ParseFile(pkgFiles, qsrc, nil, 0) if err != nil { return err } err = copyTests(result, sfile, except, pkg) if err != nil { return err } } } importDecls := make([]ast.Decl, len(result.Imports)) for i, spec := range result.Imports { importDecls[i] = &ast.GenDecl{ Tok: token.IMPORT, Specs: []ast.Spec{spec}, } } result.Decls = append(importDecls, result.Decls...) return format.Node(output, fset, result) }
func compareTwoTrees(src string) { v1 := &channelPusher{} v1.fileSet = token.NewFileSet() v1.queue = make(chan *ast.Node) v2 := &channelPusher{} v2.fileSet = token.NewFileSet() v2.queue = make(chan *ast.Node) tree1, err := parser.ParseExpr(src) if err != nil { panic(err) } src2 := "x + 2*y" tree2, err := parser.ParseExpr(src2) if err != nil { panic(err) } done := make(chan struct{}) defer close(done) go func() { ast.Walk(v1, tree1) close(v1.queue) done <- struct{}{} }() go func() { ast.Walk(v2, tree2) close(v2.queue) done <- struct{}{} }() var n1, n2 *ast.Node quit := false for !quit { select { case n1 = <-v1.queue: case n2 = <-v2.queue: case <-done: quit = true } if n1 != nil && n2 != nil { if !equalNodes(n1, n2) { println("!equalNodes") break } println("equalNodes") n1 = nil n2 = nil } } }
//ParseDocs parses the Package's documentation with go/doc. // //If you do not need a particular doc.Mode call this with 0. // //If the package directory contains a file of package documentation //(and the package is not itself named documentation), it is parsed //and its doc.Package.Doc string replaces the string generated //by the package itself. // //Note that the go/doc package munges the AST so this method parses the AST //again, regardless of the value in p.AST. As a consequence, it is valid //to call this even if you have not called the Parse method or if you have //called the Parse method and told it not to parse comments. func (p *Package) ParseDocs(mode doc.Mode) error { if p.Doc != nil { return nil } pkg, _, err := p.parse(true) if err != nil { return err } p.Doc = doc.New(pkg, p.Build.ImportPath, mode) //we don't want the below running if we happen to be importing a package //whose name happens to be documentation. if p.Build.Name == "documentation" { return nil } //check ignored files for any package named documentation. //assume there is only one such file. //We ignore errors here as the ignored files may not be meant to parse. var docfile string for _, u := range p.Build.IgnoredGoFiles { path := filepath.Join(p.Build.Dir) fs := token.NewFileSet() f, err := parser.ParseFile(fs, path, nil, parser.PackageClauseOnly) if err != nil { continue } if f.Name.Name == "documentation" { docfile = u break } } //there's an ignored file of package documentation, //parse it and replace the package doc string with this doc string. if docfile != "" { fs := token.NewFileSet() f := func(fi os.FileInfo) bool { return !fi.IsDir() && fi.Name() == docfile } pkgs, err := parser.ParseDir(fs, p.Build.Dir, f, parser.ParseComments) if err != nil { return err } d := doc.New(pkgs["documentation"], p.Build.ImportPath, 0) p.Doc.Doc = d.Doc } return nil }
// main reads the Go source files named on the command line, extracts the
// comments that begin with the "com" marker from each, parses the collected
// text as Go declarations, and feeds them into a module that writes the
// generated output to stdout. Any error is fatal.
func main() {
	fileSet := token.NewFileSet()
	comFileSet := token.NewFileSet() // The FileSet used to parse the COM declarations.
	mod := newModule(comFileSet)
	// Iterate over all the files specified on the command line.
	for _, filename := range os.Args[1:] {
		f, err := parser.ParseFile(fileSet, filename, nil, parser.ParseComments)
		if err != nil {
			log.Fatalln("error parsing the source file", filename, ":", err)
		}
		// Iterate over all the comments in the current file, picking out the ones that
		// start with "com".
		// Join them into a long string to be parsed, starting with a package declaration.
		chunks := []string{"package " + f.Name.Name}
		for _, cg := range f.Comments {
			for _, c := range cg.List {
				// Strip the comment markers (any leading/trailing '/' and
				// '*' characters) plus surrounding whitespace.
				text := strings.TrimSpace(strings.Trim(c.Text, "/*"))
				if strings.HasPrefix(text, "com") {
					text = text[len("com"):]
					// Require whitespace right after "com" so words such
					// as "common" are not mistaken for the marker.
					if text == "" || !strings.Contains(" \t\n", text[:1]) {
						continue
					}
					text = strings.TrimSpace(text)
					chunks = append(chunks, text)
				}
			}
		}
		comDecls := strings.Join(chunks, "\n")
		// Now parse the concatenated result as a Go source file.
		comAST, err := parser.ParseFile(comFileSet, filename, comDecls, parser.ParseComments)
		if err != nil {
			log.Fatalln("error parsing the COM declarations from", filename, ":", err)
		}
		err = mod.loadFile(comAST)
		if err != nil {
			log.Fatalln("error loading declarations from", filename, "into module:", err)
		}
	}
	err := mod.write(os.Stdout)
	if err != nil {
		log.Fatalln("error generating output:", err)
	}
}
// rewriteGoFile rewrites import statments in the named file // according to the rules for func qualify. func rewriteGoFile(name, qual string, paths []string) error { debugln("rewriteGoFile", name, ",", qual, ",", paths) printerConfig := &printer.Config{Mode: printer.TabIndent | printer.UseSpaces, Tabwidth: 8} fset := token.NewFileSet() f, err := parser.ParseFile(fset, name, nil, parser.ParseComments) if err != nil { return err } var changed bool for _, s := range f.Imports { name, err := strconv.Unquote(s.Path.Value) if err != nil { return err // can't happen } q := qualify(unqualify(name), qual, paths) if q != name { s.Path.Value = strconv.Quote(q) changed = true } } if !changed { return nil } var buffer bytes.Buffer if err = printerConfig.Fprint(&buffer, fset, f); err != nil { return err } fset = token.NewFileSet() f, err = parser.ParseFile(fset, name, &buffer, parser.ParseComments) ast.SortImports(fset, f) tpath := name + ".temp" t, err := os.Create(tpath) if err != nil { return err } if err = printerConfig.Fprint(t, fset, f); err != nil { return err } if err = t.Close(); err != nil { return err } // This is required before the rename on windows. if err = os.Remove(name); err != nil { return err } return os.Rename(tpath, name) }
func TestMultiFileInitOrder(t *testing.T) { fset := token.NewFileSet() mustParse := func(src string) *ast.File { f, err := parser.ParseFile(fset, "main", src, 0) if err != nil { t.Fatal(err) } return f } fileA := mustParse(`package main; var a = 1`) fileB := mustParse(`package main; var b = 2`) // The initialization order must not depend on the parse // order of the files, only on the presentation order to // the type-checker. for _, test := range []struct { files []*ast.File want string }{ {[]*ast.File{fileA, fileB}, "[a = 1 b = 2]"}, {[]*ast.File{fileB, fileA}, "[b = 2 a = 1]"}, } { var info Info if _, err := new(Config).Check("main", fset, test.files, &info); err != nil { t.Fatal(err) } if got := fmt.Sprint(info.InitOrder); got != test.want { t.Fatalf("got %s; want %s", got, test.want) } } }
func parserPkg(pkgRealpath, pkgpath string) error { if !compareFile(pkgRealpath) { Info(pkgRealpath + " don't has updated") return nil } fileSet := token.NewFileSet() astPkgs, err := parser.ParseDir(fileSet, pkgRealpath, func(info os.FileInfo) bool { name := info.Name() return !info.IsDir() && !strings.HasPrefix(name, ".") && strings.HasSuffix(name, ".go") }, parser.ParseComments) if err != nil { return err } for _, pkg := range astPkgs { for _, fl := range pkg.Files { for _, d := range fl.Decls { switch specDecl := d.(type) { case *ast.FuncDecl: parserComments(specDecl.Doc, specDecl.Name.String(), fmt.Sprint(specDecl.Recv.List[0].Type.(*ast.StarExpr).X), pkgpath) } } } } genRouterCode() savetoFile(pkgRealpath) return nil }
func main() { fset := token.NewFileSet() // Parse the input string, []byte, or io.Reader, // recording position information in fset. // ParseFile returns an *ast.File, a syntax tree. f, err := parser.ParseFile(fset, "hello.go", hello, 0) if err != nil { log.Fatal(err) // parse error } // A Config controls various options of the type checker. // The defaults work fine except for one setting: // we must specify how to deal with imports. conf := types.Config{Importer: importer.Default()} // Type-check the package containing only file f. // Check returns a *types.Package. pkg, err := conf.Check("cmd/hello", fset, []*ast.File{f}, nil) if err != nil { log.Fatal(err) // type error } fmt.Printf("Package %q\n", pkg.Path()) fmt.Printf("Name: %s\n", pkg.Name()) fmt.Printf("Imports: %s\n", pkg.Imports()) fmt.Printf("Scope: %s\n", pkg.Scope()) }
// LintFiles lints a set of files of a single package. // The argument is a map of filename to source. func (l *Linter) LintFiles(files map[string][]byte) ([]Problem, error) { if len(files) == 0 { return nil, nil } pkg := &pkg{ fset: token.NewFileSet(), files: make(map[string]*file), } var pkgName string for filename, src := range files { f, err := parser.ParseFile(pkg.fset, filename, src, parser.ParseComments) if err != nil { return nil, err } if pkgName == "" { pkgName = f.Name.Name } else if f.Name.Name != pkgName { return nil, fmt.Errorf("%s is in package %s, not %s", filename, f.Name.Name, pkgName) } pkg.files[filename] = &file{ pkg: pkg, f: f, fset: pkg.fset, src: src, filename: filename, } } return pkg.lint(), nil }
// Parse the arguments string Template(A, B, C) func parseTemplateAndArgs(s string) (name string, args []string) { expr, err := parser.ParseExpr(s) if err != nil { fatalf("Failed to parse %q: %v", s, err) } debugf("expr = %#v\n", expr) callExpr, ok := expr.(*ast.CallExpr) if !ok { fatalf("Failed to parse %q: expecting Identifier(...)", s) } debugf("fun = %#v", callExpr.Fun) fn, ok := callExpr.Fun.(*ast.Ident) if !ok { fatalf("Failed to parse %q: expecting Identifier(...)", s) } name = fn.Name for i, arg := range callExpr.Args { var buf bytes.Buffer debugf("arg[%d] = %#v", i, arg) format.Node(&buf, token.NewFileSet(), arg) s := buf.String() debugf("parsed = %q", s) args = append(args, s) } return }
func GetDeps(source string) (pkg, target string, deps, funcs, cflags, ldflags []string, err error) { isTest := strings.HasSuffix(source, "_test.go") && Test var file *ast.File flag := parser.ParseComments if !isTest { flag = flag | parser.ImportsOnly } file, err = parser.ParseFile(token.NewFileSet(), source, nil, flag) if err != nil { return } w := &Walker{ Name: "", Target: "", pkgPos: 0, Deps: []string{}, Funcs: []string{}, CGoLDFlags: []string{}, CGoCFlags: []string{}, ScanFuncs: isTest, } ast.Walk(w, file) deps = w.Deps pkg = w.Name target = w.Target funcs = w.Funcs cflags = RemoveDups(w.CGoCFlags) ldflags = RemoveDups(w.CGoLDFlags) return }
// Imports returns list of packages imported by // all sources found in dir. func Imports(dir string) ([]string, error) { fset := token.NewFileSet() // Find all packages in current dir pkgs, err := parser.ParseDir(fset, dir, nil, 0) if err != nil { return nil, err } // Iterate over each package, each file // and add imported packages to map imports := make(map[string]struct{}) for _, pkg := range pkgs { for _, file := range pkg.Files { for _, impt := range file.Imports { path := strings.Trim(impt.Path.Value, `"`) imports[path] = struct{}{} } } } // Convert map to slice and sort var ret []string for name := range imports { ret = append(ret, name) } sort.Strings(ret) return ret, nil }
func TestIssue8518(t *testing.T) { fset := token.NewFileSet() conf := Config{ Packages: make(map[string]*Package), Error: func(err error) { t.Log(err) }, // don't exit after first error Import: func(imports map[string]*Package, path string) (*Package, error) { return imports[path], nil }, } makePkg := func(path, src string) { f, err := parser.ParseFile(fset, path, src, 0) if err != nil { t.Fatal(err) } pkg, _ := conf.Check(path, fset, []*ast.File{f}, nil) // errors logged via conf.Error conf.Packages[path] = pkg } const libSrc = ` package a import "missing" const C1 = foo const C2 = missing.C ` const mainSrc = ` package main import "a" var _ = a.C1 var _ = a.C2 ` makePkg("a", libSrc) makePkg("main", mainSrc) // don't crash when type-checking this package }
// main parses the source files named on the command line (using the
// pre-Go1 parser.ParseFiles API), reduces the single resulting package to
// its exported declarations, and writes its documentation via writePackage.
func main() {
	flag.Parse()
	if !*optNoShared {
		writeSharedDir()
	}
	fset := token.NewFileSet()
	pkgs, err := parser.ParseFiles(fset, flag.Args(), parser.ParseComments)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	// getOnlyPkg presumably enforces that all files belong to one package;
	// a nil result skips output entirely — TODO confirm against its definition.
	pkg := getOnlyPkg(pkgs)
	if pkg != nil {
		if *optNiceName == "" {
			// Fall back to the package's declared name.
			*optNiceName = pkg.Name
		}
		// Drop all unexported declarations before generating docs.
		ast.PackageExports(pkg)
		docs := doce.NewPackage(pkg, fset)
		writePackage(docs, *optNiceName)
	}
}
func injectImport(src string) string { const inj = ` import __yyfmt__ "fmt" ` fset := token.NewFileSet() file := fset.AddFile("", -1, len(src)) var s scanner.Scanner s.Init( file, []byte(src), nil, scanner.ScanComments, ) for { switch _, tok, _ := s.Scan(); tok { case token.EOF: return inj + src case token.PACKAGE: s.Scan() // ident pos, _, _ := s.Scan() ofs := file.Offset(pos) return src[:ofs] + inj + src[ofs:] } } }
// pkgForPath type-checks the package at the given import path and returns
// it. Function bodies are ignored since only the exported API is needed.
func pkgForPath(path string) (*types.Package, error) {
	// Locate the package's source files (including cgo files).
	ctxt := build.Default
	pkginfo, err := ctxt.Import(path, "", 0)
	if err != nil {
		return nil, err
	}
	filenames := append(pkginfo.GoFiles, pkginfo.CgoFiles...)

	// Parse every file into one shared file set.
	fset := token.NewFileSet()
	files := make([]*ast.File, len(filenames))
	for i, filename := range filenames {
		files[i], err = parser.ParseFile(fset, filepath.Join(pkginfo.Dir, filename), nil, 0)
		if err != nil {
			return nil, err
		}
	}

	// Typecheck; we only care about exports, so skip function bodies.
	conf := types.Config{IgnoreFuncBodies: true, FakeImportC: true}
	return conf.Check(path, fset, files, nil)
}
func (m Mocks) Output(pkg string, chanSize int, dest io.Writer) error { f := &ast.File{ Name: &ast.Ident{Name: pkg}, Decls: m.decls(chanSize), } return format.Node(dest, token.NewFileSet(), f) }
func runbench(t *testing.T, path string, ignoreFuncBodies bool) { fset := token.NewFileSet() files, err := pkgFiles(fset, path) if err != nil { t.Fatal(err) } b := testing.Benchmark(func(b *testing.B) { for i := 0; i < b.N; i++ { conf := Config{IgnoreFuncBodies: ignoreFuncBodies} conf.Check(path, fset, files, nil) } }) // determine line count lines := 0 fset.Iterate(func(f *token.File) bool { lines += f.LineCount() return true }) d := time.Duration(b.NsPerOp()) fmt.Printf( "%s: %s for %d lines (%d lines/s), ignoreFuncBodies = %v\n", filepath.Base(path), d, lines, int64(float64(lines)/d.Seconds()), ignoreFuncBodies, ) }
func TestEvalContext(t *testing.T) { skipSpecialPlatforms(t) src := ` package p import "fmt" import m "math" const c = 3.0 type T []int func f(a int, s string) float64 { fmt.Println("calling f") _ = m.Pi // use package math const d int = c + 1 var x int x = a + len(s) return float64(x) } ` fset := token.NewFileSet() file, err := parser.ParseFile(fset, "p", src, 0) if err != nil { t.Fatal(err) } pkg, err := Check("p", fset, []*ast.File{file}) if err != nil { t.Fatal(err) } pkgScope := pkg.Scope() if n := pkgScope.NumChildren(); n != 1 { t.Fatalf("got %d file scopes, want 1", n) } fileScope := pkgScope.Child(0) if n := fileScope.NumChildren(); n != 1 { t.Fatalf("got %d functions scopes, want 1", n) } funcScope := fileScope.Child(0) var tests = []string{ `true => true, untyped bool`, `fmt.Println => , func(a ...interface{}) (n int, err error)`, `c => 3, untyped float`, `T => , p.T`, `a => , int`, `s => , string`, `d => 4, int`, `x => , int`, `d/c => 1, int`, `c/2 => 3/2, untyped float`, `m.Pi < m.E => false, untyped bool`, } for _, test := range tests { str, typ := split(test, ", ") str, val := split(str, "=>") testEval(t, pkg, funcScope, str, nil, typ, val) } }
// parsePackage analyzes the single package constructed from the named files. // If text is non-nil, it is a string to be used instead of the content of the file, // to be used for testing. parsePackage exits if there is an error. func (g *Generator) parsePackage(directory string, names []string, text interface{}) { var files []*File var astFiles []*ast.File g.pkg = new(Package) fs := token.NewFileSet() for _, name := range names { if !strings.HasSuffix(name, ".go") { continue } parsedFile, err := parser.ParseFile(fs, name, text, 0) if err != nil { log.Fatalf("parsing package: %s: %s", name, err) } astFiles = append(astFiles, parsedFile) files = append(files, &File{ file: parsedFile, pkg: g.pkg, }) } if len(astFiles) == 0 { log.Fatalf("%s: no buildable Go files", directory) } g.pkg.name = astFiles[0].Name.Name g.pkg.files = files g.pkg.dir = directory // Type check the package. g.pkg.check(fs, astFiles) }
func loadExportsGoPath(dir string) map[string]bool { exports := make(map[string]bool) buildPkg, err := build.ImportDir(dir, 0) if err != nil { if strings.Contains(err.Error(), "no buildable Go source files in") { return nil } fmt.Fprintf(os.Stderr, "could not import %q: %v", dir, err) return nil } fset := token.NewFileSet() for _, file := range buildPkg.GoFiles { f, err := parser.ParseFile(fset, filepath.Join(dir, file), nil, 0) if err != nil { fmt.Fprintf(os.Stderr, "could not parse %q: %v", file, err) continue } for name := range f.Scope.Objects { if ast.IsExported(name) { exports[name] = true } } } return exports }