// format parses src, prints the corresponding AST, verifies the resulting
// src is syntactically correct, and returns the resulting src or an error
// if any.
func format(src []byte, mode checkMode) ([]byte, error) {
	// parse src
	f, err := parser.ParseFile(fset, "", src, parser.ParseComments)
	if err != nil {
		return nil, fmt.Errorf("parse: %s\n%s", err, src)
	}

	// filter exports if necessary
	if mode&export != 0 {
		ast.FileExports(f) // ignore result
		f.Comments = nil   // don't print comments that are not in AST
	}

	// determine printer configuration
	cfg := Config{Tabwidth: tabwidth}
	if mode&rawFormat != 0 {
		cfg.Mode |= RawFormat
	}

	// print AST
	var buf bytes.Buffer
	if err := cfg.Fprint(&buf, fset, f); err != nil {
		return nil, fmt.Errorf("print: %s", err)
	}

	// make sure formatted output is syntactically correct
	res := buf.Bytes()
	if _, err := parser.ParseFile(fset, "", res, 0); err != nil {
		return nil, fmt.Errorf("re-parse: %s\n%s", err, buf.Bytes())
	}

	return res, nil
}
func parse(fset *token.FileSet, src []byte) (interface{}, error) {
	// Try as a complete source file.
	file, err := parser.ParseFile(fset, "", src, parser.ParseComments)
	if err == nil {
		return file, nil
	}
	// If the source is missing a package clause, try as a source fragment; otherwise fail.
	if !strings.Contains(err.Error(), "expected 'package'") {
		return nil, err
	}

	// Try as a declaration list by prepending a package clause in front of src.
	// Use ';' not '\n' to keep line numbers intact.
	psrc := append([]byte("package p;"), src...)
	file, err = parser.ParseFile(fset, "", psrc, parser.ParseComments)
	if err == nil {
		return file.Decls, nil
	}
	// If the source is missing a declaration, try as a statement list; otherwise fail.
	if !strings.Contains(err.Error(), "expected declaration") {
		return nil, err
	}

	// Try as a statement list by wrapping a function around src.
	fsrc := append(append([]byte("package p; func _() {"), src...), '}')
	file, err = parser.ParseFile(fset, "", fsrc, parser.ParseComments)
	if err == nil {
		return file.Decls[0].(*ast.FuncDecl).Body.List, nil
	}

	// Failed, and out of options.
	return nil, err
}
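// exampleParseFragments is a hypothetical, package-internal sketch (not part
// of the original file). It illustrates which fallback branch of parse
// handles which kind of input; the literal sources below are made up.
func exampleParseFragments() {
	fset := token.NewFileSet()
	// A complete source file: accepted by the first ParseFile call,
	// so parse returns an *ast.File.
	whole, _ := parse(fset, []byte("package p\n\nfunc f() {}\n"))
	// A declaration list without a package clause: handled by the
	// "package p;" prefix branch, so parse returns []ast.Decl.
	decls, _ := parse(fset, []byte("func f() {}\nvar x int"))
	// A bare statement list: handled by the wrapping-function branch,
	// so parse returns []ast.Stmt.
	stmts, _ := parse(fset, []byte("x := 1; println(x)"))
	_, _, _ = whole, decls, stmts
}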
// TestLineComments, using a simple test case, checks that consecutive line
// comments are properly terminated with a newline even if the AST position
// information is incorrect.
//
func TestLineComments(t *testing.T) {
	const src = `// comment 1
// comment 2
// comment 3
package main
`

	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "", src, parser.ParseComments)
	if err != nil {
		panic(err) // error in test
	}

	var buf bytes.Buffer
	fset = token.NewFileSet() // use the wrong file set
	Fprint(&buf, fset, f)

	nlines := 0
	for _, ch := range buf.Bytes() {
		if ch == '\n' {
			nlines++
		}
	}

	const expected = 3
	if nlines < expected {
		t.Errorf("got %d, expected %d\n", nlines, expected)
		t.Errorf("result:\n%s", buf.Bytes())
	}
}
func TestFilterDuplicates(t *testing.T) {
	// parse input
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "", input, 0)
	if err != nil {
		t.Fatal(err)
	}

	// create package
	files := map[string]*ast.File{"": file}
	pkg, err := ast.NewPackage(fset, files, nil, nil)
	if err != nil {
		t.Fatal(err)
	}

	// filter
	merged := ast.MergePackageFiles(pkg, ast.FilterFuncDuplicates)

	// pretty-print
	var buf bytes.Buffer
	if err := format.Node(&buf, fset, merged); err != nil {
		t.Fatal(err)
	}
	output := buf.String()

	if output != golden {
		t.Errorf("incorrect output:\n%s", output)
	}
}
func TestExamples(t *testing.T) {
	fs := token.NewFileSet()
	file, err := parser.ParseFile(fs, "test.go", strings.NewReader(exampleTestFile), parser.ParseComments)
	if err != nil {
		t.Fatal(err)
	}
	for i, e := range doc.Examples(file) {
		c := exampleTestCases[i]
		if e.Name != c.Name {
			t.Errorf("got Name == %q, want %q", e.Name, c.Name)
		}
		if w := c.Play; w != "" {
			var g string // hah
			if e.Play == nil {
				g = "<nil>"
			} else {
				b := new(bytes.Buffer)
				if err := format.Node(b, fs, e.Play); err != nil {
					t.Fatal(err)
				}
				g = b.String()
			}
			if g != w {
				t.Errorf("%s: got Play == %q, want %q", c.Name, g, w)
			}
		}
		if g, w := e.Output, c.Output; g != w {
			t.Errorf("%s: got Output == %q, want %q", c.Name, g, w)
		}
	}
}
// Verify that the printer produces a correct program
// even if the position information of comments introducing newlines
// is incorrect.
func TestBadComments(t *testing.T) {
	const src = `
// first comment - text and position changed by test
package p
import "fmt"

const pi = 3.14 // rough circle
var (
	x, y, z int = 1, 2, 3
	u, v float64
)
func fibo(n int) {
	if n < 2 {
		return n /* seed values */
	}
	return fibo(n-1) + fibo(n-2)
}
`

	f, err := parser.ParseFile(fset, "", src, parser.ParseComments)
	if err != nil {
		t.Error(err) // error in test
	}

	comment := f.Comments[0].List[0]
	pos := comment.Pos()
	if fset.Position(pos).Offset != 1 {
		t.Error("expected offset 1") // error in test
	}

	testComment(t, f, len(src), &ast.Comment{Slash: pos, Text: "//-style comment"})
	testComment(t, f, len(src), &ast.Comment{Slash: pos, Text: "/*-style comment */"})
	testComment(t, f, len(src), &ast.Comment{Slash: pos, Text: "/*-style \n comment */"})
	testComment(t, f, len(src), &ast.Comment{Slash: pos, Text: "/*-style comment \n\n\n */"})
}
func parseFunc(filename, functionname string) (fun *ast.FuncDecl, fset *token.FileSet) {
	fset = token.NewFileSet()
	if file, err := parser.ParseFile(fset, filename, nil, 0); err == nil {
		for _, d := range file.Decls {
			if f, ok := d.(*ast.FuncDecl); ok && f.Name.Name == functionname {
				fun = f
				return
			}
		}
	}
	panic("function not found")
}
// Verify that the printer doesn't crash if the AST contains BadXXX nodes.
func TestBadNodes(t *testing.T) {
	const src = "package p\n("
	const res = "package p\nBadDecl\n"
	f, err := parser.ParseFile(fset, "", src, parser.ParseComments)
	if err == nil {
		t.Error("expected illegal program") // error in test
	}
	var buf bytes.Buffer
	Fprint(&buf, fset, f)
	if buf.String() != res {
		t.Errorf("got %q, expected %q", buf.String(), res)
	}
}
// testComment verifies that f can be parsed again after printing it
// with its first comment set to comment at any possible source offset.
func testComment(t *testing.T, f *ast.File, srclen int, comment *ast.Comment) {
	f.Comments[0].List[0] = comment
	var buf bytes.Buffer
	for offs := 0; offs <= srclen; offs++ {
		buf.Reset()
		// Printing f should result in a correct program no
		// matter what the (incorrect) comment position is.
		if err := Fprint(&buf, fset, f); err != nil {
			t.Error(err)
		}
		if _, err := parser.ParseFile(fset, "", buf.Bytes(), 0); err != nil {
			t.Fatalf("incorrect program for pos = %d:\n%s", comment.Slash, buf.String())
		}
		// Position information is just an offset.
		// Move comment one byte down in the source.
		comment.Slash++
	}
}
func TestStmtLists(t *testing.T) {
	for _, src := range stmts {
		file, err := parser.ParseFile(fset, "", "package p; func _() {"+src+"}", parser.ParseComments)
		if err != nil {
			panic(err) // error in test
		}

		var buf bytes.Buffer
		err = Fprint(&buf, fset, file.Decls[0].(*ast.FuncDecl).Body.List) // only print statements
		if err != nil {
			panic(err) // error in test
		}

		out := buf.String()
		if out != src {
			t.Errorf("\ngot : %q\nwant: %q\n", out, src)
		}
	}
}
func TestDeclLists(t *testing.T) {
	for _, src := range decls {
		file, err := parser.ParseFile(fset, "", "package p;"+src, parser.ParseComments)
		if err != nil {
			panic(err) // error in test
		}

		var buf bytes.Buffer
		err = Fprint(&buf, fset, file.Decls) // only print declarations
		if err != nil {
			panic(err) // error in test
		}

		out := buf.String()
		if out != src {
			t.Errorf("\ngot : %q\nwant: %q\n", out, src)
		}
	}
}
func TestNode(t *testing.T) {
	src, err := ioutil.ReadFile(testfile)
	if err != nil {
		t.Fatal(err)
	}

	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, testfile, src, parser.ParseComments)
	if err != nil {
		t.Fatal(err)
	}

	var buf bytes.Buffer

	if err = Node(&buf, fset, file); err != nil {
		t.Fatal("Node failed:", err)
	}

	diff(t, buf.Bytes(), src)
}
// cannot initialize in init because (printer) Fprint launches goroutines.
func initialize() {
	const filename = "testdata/parser.go"

	src, err := ioutil.ReadFile(filename)
	if err != nil {
		log.Fatalf("%s", err)
	}

	file, err := parser.ParseFile(fset, filename, src, parser.ParseComments)
	if err != nil {
		log.Fatalf("%s", err)
	}

	var buf bytes.Buffer
	testprint(&buf, file)
	if !bytes.Equal(buf.Bytes(), src) {
		log.Fatalf("print error: %s not idempotent", filename)
	}

	testfile = file
}
// This example demonstrates how to inspect the AST of a Go program.
func ExampleInspect() {
	// src is the input for which we want to inspect the AST.
	src := `
package p
const c = 1.0
var X = f(3.14)*2 + c
`

	// Create the AST by parsing src.
	fset := token.NewFileSet() // positions are relative to fset
	f, err := parser.ParseFile(fset, "src.go", src, 0)
	if err != nil {
		panic(err)
	}

	// Inspect the AST and print all identifiers and literals.
	ast.Inspect(f, func(n ast.Node) bool {
		var s string
		switch x := n.(type) {
		case *ast.BasicLit:
			s = x.Value
		case *ast.Ident:
			s = x.Name
		}
		if s != "" {
			fmt.Printf("%s:\t%s\n", fset.Position(n.Pos()), s)
		}
		return true
	})

	// output:
	// src.go:2:9: p
	// src.go:3:7: c
	// src.go:3:11: 1.0
	// src.go:4:5: X
	// src.go:4:9: f
	// src.go:4:11: 3.14
	// src.go:4:17: 2
	// src.go:4:21: c
}
func ExampleParseFile() {
	fset := token.NewFileSet() // positions are relative to fset

	// Parse the file containing this very example
	// but stop after processing the imports.
	f, err := parser.ParseFile(fset, "example_test.go", nil, parser.ImportsOnly)
	if err != nil {
		fmt.Println(err)
		return
	}

	// Print the imports from the file's AST.
	for _, s := range f.Imports {
		fmt.Println(s.Path.Value)
	}

	// output:
	//
	// "fmt"
	// "github.com/hokkaido/gopp/parser"
	// "github.com/hokkaido/gopp/token"
}
func TestBaseIndent(t *testing.T) {
	// The testfile must not contain multi-line raw strings since those
	// are not indented (because their values must not change) and make
	// this test fail.
	const filename = "printer.go"
	src, err := ioutil.ReadFile(filename)
	if err != nil {
		panic(err) // error in test
	}

	file, err := parser.ParseFile(fset, filename, src, 0)
	if err != nil {
		panic(err) // error in test
	}

	var buf bytes.Buffer
	for indent := 0; indent < 4; indent++ {
		buf.Reset()
		(&Config{Tabwidth: tabwidth, Indent: indent}).Fprint(&buf, fset, file)
		// all code must be indented by at least 'indent' tabs
		lines := bytes.Split(buf.Bytes(), []byte{'\n'})
		for i, line := range lines {
			if len(line) == 0 {
				continue // empty lines don't have indentation
			}
			n := 0
			for j, b := range line {
				if b != '\t' {
					// end of indentation
					n = j
					break
				}
			}
			if n < indent {
				t.Errorf("line %d: got only %d tabs; want at least %d: %q", i, n, indent, line)
			}
		}
	}
}
// Node formats node in canonical gofmt style and writes the result to dst.
//
// The node type must be *ast.File, *printer.CommentedNode, []ast.Decl,
// []ast.Stmt, or assignment-compatible to ast.Expr, ast.Decl, ast.Spec,
// or ast.Stmt. Node does not modify node. Imports are not sorted for
// nodes representing partial source files (i.e., if the node is not an
// *ast.File or a *printer.CommentedNode not wrapping an *ast.File).
//
// The function may return early (before the entire result is written)
// and return a formatting error, for instance due to an incorrect AST.
//
func Node(dst io.Writer, fset *token.FileSet, node interface{}) error {
	// Determine if we have a complete source file (file != nil).
	var file *ast.File
	var cnode *printer.CommentedNode
	switch n := node.(type) {
	case *ast.File:
		file = n
	case *printer.CommentedNode:
		if f, ok := n.Node.(*ast.File); ok {
			file = f
			cnode = n
		}
	}

	// Sort imports if necessary.
	if file != nil && hasUnsortedImports(file) {
		// Make a copy of the AST because ast.SortImports is destructive.
		// TODO(gri) Do this more efficiently.
		var buf bytes.Buffer
		err := config.Fprint(&buf, fset, file)
		if err != nil {
			return err
		}
		file, err = parser.ParseFile(fset, "", buf.Bytes(), parser.ParseComments)
		if err != nil {
			// We should never get here. If we do, provide good diagnostic.
			return fmt.Errorf("format.Node internal error (%s)", err)
		}
		ast.SortImports(fset, file)

		// Use new file with sorted imports.
		node = file
		if cnode != nil {
			node = &printer.CommentedNode{Node: file, Comments: cnode.Comments}
		}
	}

	return config.Fprint(dst, fset, node)
}
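// exampleNodeUsage is a hypothetical, package-internal sketch (not part of
// the original file) of the typical call pattern for Node: parse some source
// into an AST node, then pretty-print it into a buffer. The expression
// literal below is made up.
func exampleNodeUsage() {
	fset := token.NewFileSet()
	expr, err := parser.ParseExpr("(6+2*3)/4")
	if err != nil {
		panic(err)
	}
	var buf bytes.Buffer
	if err := Node(&buf, fset, expr); err != nil {
		panic(err)
	}
	fmt.Println(buf.String()) // prints: (6 + 2*3) / 4
}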
func TestCommentMap(t *testing.T) {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "", src, parser.ParseComments)
	if err != nil {
		t.Fatal(err)
	}
	cmap := NewCommentMap(fset, f, f.Comments)

	// verify correct association of comments
	for n, list := range cmap {
		key := fmt.Sprintf("%2d: %T", fset.Position(n.Pos()).Line, n)
		got := ctext(list)
		want := res[key]
		if got != want {
			t.Errorf("%s: got %q; want %q", key, got, want)
		}
	}

	// verify that no comments got lost
	if n := len(cmap.Comments()); n != len(f.Comments) {
		t.Errorf("got %d comment groups in map; want %d", n, len(f.Comments))
	}

	// support code to update test:
	// set genMap to true to generate res map
	const genMap = false
	if genMap {
		out := make([]string, 0, len(cmap))
		for n, list := range cmap {
			out = append(out, fmt.Sprintf("\t\"%2d: %T\":\t%q,", fset.Position(n.Pos()).Line, n, ctext(list)))
		}
		sort.Strings(out)
		for _, s := range out {
			fmt.Println(s)
		}
	}
}
// This example shows what an AST looks like when printed for debugging.
func ExamplePrint() {
	// src is the input for which we want to print the AST.
	src := `
package main
func main() {
	println("Hello, World!")
}
`

	// Create the AST by parsing src.
	fset := token.NewFileSet() // positions are relative to fset
	f, err := parser.ParseFile(fset, "", src, 0)
	if err != nil {
		panic(err)
	}

	// Print the AST.
	ast.Print(fset, f)

	// output:
	// 0 *ast.File {
	// 1 . Package: 2:1
	// 2 . Name: *ast.Ident {
	// 3 . . NamePos: 2:9
	// 4 . . Name: "main"
	// 5 . }
	// 6 . Decls: []ast.Decl (len = 1) {
	// 7 . . 0: *ast.FuncDecl {
	// 8 . . . Name: *ast.Ident {
	// 9 . . . . NamePos: 3:6
	// 10 . . . . Name: "main"
	// 11 . . . . Obj: *ast.Object {
	// 12 . . . . . Kind: func
	// 13 . . . . . Name: "main"
	// 14 . . . . . Decl: *(obj @ 7)
	// 15 . . . . }
	// 16 . . . }
	// 17 . . . Type: *ast.FuncType {
	// 18 . . . . Func: 3:1
	// 19 . . . . Params: *ast.FieldList {
	// 20 . . . . . Opening: 3:10
	// 21 . . . . . Closing: 3:11
	// 22 . . . . }
	// 23 . . . }
	// 24 . . . Body: *ast.BlockStmt {
	// 25 . . . . Lbrace: 3:13
	// 26 . . . . List: []ast.Stmt (len = 1) {
	// 27 . . . . . 0: *ast.ExprStmt {
	// 28 . . . . . . X: *ast.CallExpr {
	// 29 . . . . . . . Fun: *ast.Ident {
	// 30 . . . . . . . . NamePos: 4:2
	// 31 . . . . . . . . Name: "println"
	// 32 . . . . . . . }
	// 33 . . . . . . . Lparen: 4:9
	// 34 . . . . . . . Args: []ast.Expr (len = 1) {
	// 35 . . . . . . . . 0: *ast.BasicLit {
	// 36 . . . . . . . . . ValuePos: 4:10
	// 37 . . . . . . . . . Kind: STRING
	// 38 . . . . . . . . . Value: "\"Hello, World!\""
	// 39 . . . . . . . . }
	// 40 . . . . . . . }
	// 41 . . . . . . . Ellipsis: -
	// 42 . . . . . . . Rparen: 4:25
	// 43 . . . . . . }
	// 44 . . . . . }
	// 45 . . . . }
	// 46 . . . . Rbrace: 5:1
	// 47 . . . }
	// 48 . . }
	// 49 . }
	// 50 . Scope: *ast.Scope {
	// 51 . . Objects: map[string]*ast.Object (len = 1) {
	// 52 . . . "main": *(obj @ 11)
	// 53 . . }
	// 54 . }
	// 55 . Unresolved: []*ast.Ident (len = 1) {
	// 56 . . 0: *(obj @ 29)
	// 57 . }
	// 58 }
}
// Verify that the SourcePos mode emits correct //line comments
// by testing that position information for matching identifiers
// is maintained.
func TestSourcePos(t *testing.T) {
	const src = `
package p
import ( "github.com/hokkaido/gopp/printer"; "math" )
const pi = 3.14; var x = 0
type t struct{ x, y, z int; u, v, w float32 }
func (t *t) foo(a, b, c int) int {
	return a*t.x + b*t.y +
		// two extra lines here
		// ...
		c*t.z
}
`

	// parse original
	f1, err := parser.ParseFile(fset, "src", src, parser.ParseComments)
	if err != nil {
		t.Fatal(err)
	}

	// pretty-print original
	var buf bytes.Buffer
	err = (&Config{Mode: UseSpaces | SourcePos, Tabwidth: 8}).Fprint(&buf, fset, f1)
	if err != nil {
		t.Fatal(err)
	}

	// parse pretty printed original
	// (//line comments must be interpreted even w/o parser.ParseComments set)
	f2, err := parser.ParseFile(fset, "", buf.Bytes(), 0)
	if err != nil {
		t.Fatalf("%s\n%s", err, buf.Bytes())
	}

	// At this point the position information of identifiers in f2 should
	// match the position information of corresponding identifiers in f1.

	// number of identifiers must be > 0 (test should run) and must match
	n1 := identCount(f1)
	n2 := identCount(f2)
	if n1 == 0 {
		t.Fatal("got no idents")
	}
	if n2 != n1 {
		t.Errorf("got %d idents; want %d", n2, n1)
	}

	// verify that all identifiers have correct line information
	i2range := idents(f2)
	for i1 := range idents(f1) {
		i2 := <-i2range

		if i2.Name != i1.Name {
			t.Errorf("got ident %s; want %s", i2.Name, i1.Name)
		}

		l1 := fset.Position(i1.Pos()).Line
		l2 := fset.Position(i2.Pos()).Line
		if l2 != l1 {
			t.Errorf("got line %d; want %d for %s", l2, l1, i1.Name)
		}
	}

	if t.Failed() {
		t.Logf("\n%s", buf.Bytes())
	}
}
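// exampleSourcePosUsage is a hypothetical sketch (not part of the original
// tests). It shows the printer configuration a client would use to make the
// printed output carry //line comments that map positions back to the
// original source; the parameters are assumed to come from the caller, and
// package-local names (Config, SourcePos) are used because this sketch is
// written as if it lived in the printer package itself.
func exampleSourcePosUsage(fset *token.FileSet, f *ast.File) (string, error) {
	var buf bytes.Buffer
	cfg := Config{Mode: SourcePos, Tabwidth: 8}
	if err := cfg.Fprint(&buf, fset, f); err != nil {
		return "", err
	}
	return buf.String(), nil
}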
// Import returns details about the Go package named by the import path,
// interpreting local import paths relative to the srcDir directory.
// If the path is a local import path naming a package that can be imported
// using a standard import path, the returned package will set p.ImportPath
// to that path.
//
// In the directory containing the package, .go, .c, .h, and .s files are
// considered part of the package except for:
//
//	- .go files in package documentation
//	- files starting with _ or . (likely editor temporary files)
//	- files with build constraints not satisfied by the context
//
// If an error occurs, Import returns a non-nil error and a non-nil
// *Package containing partial information.
//
func (ctxt *Context) Import(path string, srcDir string, mode ImportMode) (*Package, error) {
	p := &Package{
		ImportPath: path,
	}
	if path == "" {
		return p, fmt.Errorf("import %q: invalid import path", path)
	}

	var pkga string
	var pkgerr error
	switch ctxt.Compiler {
	case "gccgo":
		dir, elem := pathpkg.Split(p.ImportPath)
		pkga = "pkg/gccgo/" + dir + "lib" + elem + ".a"
	case "gc":
		suffix := ""
		if ctxt.InstallSuffix != "" {
			suffix = "_" + ctxt.InstallSuffix
		}
		pkga = "pkg/" + ctxt.GOOS + "_" + ctxt.GOARCH + suffix + "/" + p.ImportPath + ".a"
	default:
		// Save error for end of function.
		pkgerr = fmt.Errorf("import %q: unknown compiler %q", path, ctxt.Compiler)
	}

	binaryOnly := false
	if IsLocalImport(path) {
		pkga = "" // local imports have no installed path
		if srcDir == "" {
			return p, fmt.Errorf("import %q: import relative to unknown directory", path)
		}
		if !ctxt.isAbsPath(path) {
			p.Dir = ctxt.joinPath(srcDir, path)
		}
		// Determine canonical import path, if any.
		if ctxt.GOROOT != "" {
			root := ctxt.joinPath(ctxt.GOROOT, "src", "pkg")
			if sub, ok := ctxt.hasSubdir(root, p.Dir); ok {
				p.Goroot = true
				p.ImportPath = sub
				p.Root = ctxt.GOROOT
				goto Found
			}
		}
		all := ctxt.gopath()
		for i, root := range all {
			rootsrc := ctxt.joinPath(root, "src")
			if sub, ok := ctxt.hasSubdir(rootsrc, p.Dir); ok {
				// We found a potential import path for dir,
				// but check that using it wouldn't find something
				// else first.
				if ctxt.GOROOT != "" {
					if dir := ctxt.joinPath(ctxt.GOROOT, "src", "pkg", sub); ctxt.isDir(dir) {
						goto Found
					}
				}
				for _, earlyRoot := range all[:i] {
					if dir := ctxt.joinPath(earlyRoot, "src", sub); ctxt.isDir(dir) {
						goto Found
					}
				}

				// sub would not name some other directory instead of this one.
				// Record it.
				p.ImportPath = sub
				p.Root = root
				goto Found
			}
		}
		// It's okay that we didn't find a root containing dir.
		// Keep going with the information we have.
	} else {
		if strings.HasPrefix(path, "/") {
			return p, fmt.Errorf("import %q: cannot import absolute path", path)
		}

		// tried records the location of unsuccessful package lookups
		var tried struct {
			goroot string
			gopath []string
		}

		// Determine directory from import path.
if ctxt.GOROOT != "" { dir := ctxt.joinPath(ctxt.GOROOT, "src", "pkg", path) isDir := ctxt.isDir(dir) binaryOnly = !isDir && mode&AllowBinary != 0 && pkga != "" && ctxt.isFile(ctxt.joinPath(ctxt.GOROOT, pkga)) if isDir || binaryOnly { p.Dir = dir p.Goroot = true p.Root = ctxt.GOROOT goto Found } tried.goroot = dir } for _, root := range ctxt.gopath() { dir := ctxt.joinPath(root, "src", path) isDir := ctxt.isDir(dir) binaryOnly = !isDir && mode&AllowBinary != 0 && pkga != "" && ctxt.isFile(ctxt.joinPath(root, pkga)) if isDir || binaryOnly { p.Dir = dir p.Root = root goto Found } tried.gopath = append(tried.gopath, dir) } // package was not found var paths []string if tried.goroot != "" { paths = append(paths, fmt.Sprintf("\t%s (from $GOROOT)", tried.goroot)) } else { paths = append(paths, "\t($GOROOT not set)") } var i int var format = "\t%s (from $GOPATH)" for ; i < len(tried.gopath); i++ { if i > 0 { format = "\t%s" } paths = append(paths, fmt.Sprintf(format, tried.gopath[i])) } if i == 0 { paths = append(paths, "\t($GOPATH not set)") } return p, fmt.Errorf("cannot find package %q in any of:\n%s", path, strings.Join(paths, "\n")) } Found: if p.Root != "" { if p.Goroot { p.SrcRoot = ctxt.joinPath(p.Root, "src", "pkg") } else { p.SrcRoot = ctxt.joinPath(p.Root, "src") } p.PkgRoot = ctxt.joinPath(p.Root, "pkg") p.BinDir = ctxt.joinPath(p.Root, "bin") if pkga != "" { p.PkgObj = ctxt.joinPath(p.Root, pkga) } } if mode&FindOnly != 0 { return p, pkgerr } if binaryOnly && (mode&AllowBinary) != 0 { return p, pkgerr } dirs, err := ctxt.readDir(p.Dir) if err != nil { return p, err } var Sfiles []string // files with ".S" (capital S) var firstFile string imported := make(map[string][]token.Position) testImported := make(map[string][]token.Position) xTestImported := make(map[string][]token.Position) fset := token.NewFileSet() for _, d := range dirs { if d.IsDir() { continue } name := d.Name() if strings.HasPrefix(name, "_") || strings.HasPrefix(name, ".") { continue } i := strings.LastIndex(name, ".") if i < 0 { i = len(name) } ext := name[i:] if !ctxt.UseAllFiles && !ctxt.goodOSArchFile(name) { if ext == ".go" { p.IgnoredGoFiles = append(p.IgnoredGoFiles, name) } continue } switch ext { case ".go", ".c", ".s", ".h", ".S", ".swig", ".swigcxx": // tentatively okay - read to make sure case ".syso": // binary objects to add to package archive // Likely of the form foo_windows.syso, but // the name was vetted above with goodOSArchFile. p.SysoFiles = append(p.SysoFiles, name) continue default: // skip continue } filename := ctxt.joinPath(p.Dir, name) f, err := ctxt.openFile(filename) if err != nil { return p, err } var data []byte if strings.HasSuffix(filename, ".go") { data, err = readImports(f, false) } else { data, err = readComments(f) } f.Close() if err != nil { return p, fmt.Errorf("read %s: %v", filename, err) } // Look for +build comments to accept or reject the file. if !ctxt.UseAllFiles && !ctxt.shouldBuild(data) { if ext == ".go" { p.IgnoredGoFiles = append(p.IgnoredGoFiles, name) } continue } // Going to save the file. For non-Go files, can stop here. 
switch ext { case ".c": p.CFiles = append(p.CFiles, name) continue case ".h": p.HFiles = append(p.HFiles, name) continue case ".s": p.SFiles = append(p.SFiles, name) continue case ".S": Sfiles = append(Sfiles, name) continue case ".swig": p.SwigFiles = append(p.SwigFiles, name) continue case ".swigcxx": p.SwigCXXFiles = append(p.SwigCXXFiles, name) continue } pf, err := parser.ParseFile(fset, filename, data, parser.ImportsOnly|parser.ParseComments) if err != nil { return p, err } pkg := pf.Name.Name if pkg == "documentation" { p.IgnoredGoFiles = append(p.IgnoredGoFiles, name) continue } isTest := strings.HasSuffix(name, "_test.go") isXTest := false if isTest && strings.HasSuffix(pkg, "_test") { isXTest = true pkg = pkg[:len(pkg)-len("_test")] } if p.Name == "" { p.Name = pkg firstFile = name } else if pkg != p.Name { return p, fmt.Errorf("found packages %s (%s) and %s (%s) in %s", p.Name, firstFile, pkg, name, p.Dir) } if pf.Doc != nil && p.Doc == "" { p.Doc = doc.Synopsis(pf.Doc.Text()) } // Record imports and information about cgo. isCgo := false for _, decl := range pf.Decls { d, ok := decl.(*ast.GenDecl) if !ok { continue } for _, dspec := range d.Specs { spec, ok := dspec.(*ast.ImportSpec) if !ok { continue } quoted := spec.Path.Value path, err := strconv.Unquote(quoted) if err != nil { log.Panicf("%s: parser returned invalid quoted string: <%s>", filename, quoted) } if isXTest { xTestImported[path] = append(xTestImported[path], fset.Position(spec.Pos())) } else if isTest { testImported[path] = append(testImported[path], fset.Position(spec.Pos())) } else { imported[path] = append(imported[path], fset.Position(spec.Pos())) } if path == "C" { if isTest { return p, fmt.Errorf("use of cgo in test %s not supported", filename) } cg := spec.Doc if cg == nil && len(d.Specs) == 1 { cg = d.Doc } if cg != nil { if err := ctxt.saveCgo(filename, p, cg); err != nil { return p, err } } isCgo = true } } } if isCgo { if ctxt.CgoEnabled { p.CgoFiles = append(p.CgoFiles, name) } } else if isXTest { p.XTestGoFiles = append(p.XTestGoFiles, name) } else if isTest { p.TestGoFiles = append(p.TestGoFiles, name) } else { p.GoFiles = append(p.GoFiles, name) } } if p.Name == "" { return p, &NoGoError{p.Dir} } p.Imports, p.ImportPos = cleanImports(imported) p.TestImports, p.TestImportPos = cleanImports(testImported) p.XTestImports, p.XTestImportPos = cleanImports(xTestImported) // add the .S files only if we are using cgo // (which means gcc will compile them). // The standard assemblers expect .s files. if len(p.CgoFiles) > 0 { p.SFiles = append(p.SFiles, Sfiles...) sort.Strings(p.SFiles) } return p, pkgerr }