// TestLineComments, using a simple test case, checks that consecutive line
// comments are properly terminated with a newline even if the AST position
// information is incorrect.
//
func TestLineComments(t *testing.T) {
	const src = `// comment 1
// comment 2
// comment 3
package main
`

	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "", src, parser.ParseComments)
	if err != nil {
		panic(err) // error in test
	}

	var buf bytes.Buffer
	fset = token.NewFileSet() // use the wrong file set
	Fprint(&buf, fset, f)

	nlines := 0
	for _, ch := range buf.Bytes() {
		if ch == '\n' {
			nlines++
		}
	}

	const expected = 3
	if nlines < expected {
		t.Errorf("got %d, expected %d\n", nlines, expected)
		t.Errorf("result:\n%s", buf.Bytes())
	}
}
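// For contrast with the deliberately wrong file set above, here is a minimal
// sketch (not part of the original test file) of the normal pattern: print
// with the same file set that was used for parsing, so comment positions
// stay correct. The helper name is illustrative.
func printWithMatchingFileSet(src string) (string, error) {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "", src, parser.ParseComments)
	if err != nil {
		return "", err
	}
	var buf bytes.Buffer
	if err := Fprint(&buf, fset, f); err != nil {
		return "", err
	}
	return buf.String(), nil
}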
func TestFilterDuplicates(t *testing.T) {
	// parse input
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "", input, 0)
	if err != nil {
		t.Fatal(err)
	}

	// create package
	files := map[string]*ast.File{"": file}
	pkg, err := ast.NewPackage(fset, files, nil, nil)
	if err != nil {
		t.Fatal(err)
	}

	// filter
	merged := ast.MergePackageFiles(pkg, ast.FilterFuncDuplicates)

	// pretty-print
	var buf bytes.Buffer
	if err := format.Node(&buf, fset, merged); err != nil {
		t.Fatal(err)
	}
	output := buf.String()

	if output != golden {
		t.Errorf("incorrect output:\n%s", output)
	}
}
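// A hedged sketch (not in the original test file) of the same pipeline as a
// reusable helper: merge a package's files into one *ast.File, dropping
// duplicate function declarations. The helper name is illustrative.
func mergeFiles(fset *token.FileSet, files map[string]*ast.File) *ast.File {
	// ast.NewPackage reports unresolved identifiers; the partially
	// resolved package is still sufficient for merging, so the error is
	// deliberately ignored here (a real caller may want to inspect it).
	pkg, _ := ast.NewPackage(fset, files, nil, nil)
	return ast.MergePackageFiles(pkg, ast.FilterFuncDuplicates)
}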
func ExampleScanner_Scan() {
	// src is the input that we want to tokenize.
	src := []byte("cos(x) + 1i*sin(x) // Euler")

	// Initialize the scanner.
	var s scanner.Scanner
	fset := token.NewFileSet()                      // positions are relative to fset
	file := fset.AddFile("", fset.Base(), len(src)) // register input "file"
	s.Init(file, src, nil /* no error handler */, scanner.ScanComments)

	// Repeated calls to Scan yield the token sequence found in the input.
	for {
		pos, tok, lit := s.Scan()
		if tok == token.EOF {
			break
		}
		fmt.Printf("%s\t%s\t%q\n", fset.Position(pos), tok, lit)
	}

	// output:
	// 1:1	IDENT	"cos"
	// 1:4	(	""
	// 1:5	IDENT	"x"
	// 1:6	)	""
	// 1:8	+	""
	// 1:10	IMAG	"1i"
	// 1:12	*	""
	// 1:13	IDENT	"sin"
	// 1:16	(	""
	// 1:17	IDENT	"x"
	// 1:18	)	""
	// 1:20	;	"\n"
	// 1:20	COMMENT	"// Euler"
}
func TestExamples(t *testing.T) {
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "test.go", strings.NewReader(exampleTestFile), parser.ParseComments)
	if err != nil {
		t.Fatal(err)
	}
	for i, e := range doc.Examples(file) {
		c := exampleTestCases[i]
		if e.Name != c.Name {
			t.Errorf("got Name == %q, want %q", e.Name, c.Name)
		}
		if w := c.Play; w != "" {
			var g string
			if e.Play == nil {
				g = "<nil>"
			} else {
				var buf bytes.Buffer
				if err := format.Node(&buf, fset, e.Play); err != nil {
					t.Fatal(err)
				}
				g = buf.String()
			}
			if g != w {
				t.Errorf("%s: got Play == %q, want %q", c.Name, g, w)
			}
		}
		if g, w := e.Output, c.Output; g != w {
			t.Errorf("%s: got Output == %q, want %q", c.Name, g, w)
		}
	}
}
// Read reads the index from r into x; x must not be nil.
// If r does not also implement io.ByteReader, it will be wrapped in a bufio.Reader.
func (x *Index) Read(r io.Reader) error {
	// We use the ability to read bytes as a plausible surrogate for buffering.
	if _, ok := r.(io.ByteReader); !ok {
		r = bufio.NewReader(r)
	}
	var fx fileIndex
	if err := fx.Read(r); err != nil {
		return err
	}
	x.words = fx.Words
	x.alts = fx.Alts
	x.snippets = fx.Snippets
	if fx.Fulltext {
		x.fset = token.NewFileSet()
		decode := func(x interface{}) error {
			return gob.NewDecoder(r).Decode(x)
		}
		if err := x.fset.Read(decode); err != nil {
			return err
		}
		x.suffixes = new(suffixarray.Index)
		if err := x.suffixes.Read(r); err != nil {
			return err
		}
	}
	return nil
}
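// The buffering check above is a general idiom: decoders that consume input
// byte by byte are much faster with an io.ByteReader. A minimal standalone
// sketch of the same idiom (the function name is illustrative, not part of
// the original file):
func ensureByteReader(r io.Reader) io.Reader {
	// *bufio.Reader implements io.ByteReader, so a single wrap suffices.
	if _, ok := r.(io.ByteReader); !ok {
		return bufio.NewReader(r)
	}
	return r
}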
func BenchmarkParse(b *testing.B) {
	b.SetBytes(int64(len(src)))
	for i := 0; i < b.N; i++ {
		if _, err := ParseFile(token.NewFileSet(), "", src, ParseComments); err != nil {
			b.Fatalf("benchmark failed due to parse error: %s", err)
		}
	}
}
func pkgName(filename string) string {
	// use a new file set each time in order to not pollute the indexer's
	// file set (which must stay in sync with the concatenated source code)
	file, err := parser.ParseFile(token.NewFileSet(), filename, nil, parser.PackageClauseOnly)
	if err != nil || file == nil {
		return ""
	}
	return file.Name.Name
}
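// Hypothetical caller (not in the original file): because parsing stops at
// the package clause, pkgName is cheap even for large files.
func printPkgName(filename string) {
	if name := pkgName(filename); name != "" {
		fmt.Println(filename, "declares package", name)
	}
}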
func parseFunc(filename, functionname string) (fun *ast.FuncDecl, fset *token.FileSet) {
	fset = token.NewFileSet()
	if file, err := parser.ParseFile(fset, filename, nil, 0); err == nil {
		for _, d := range file.Decls {
			if f, ok := d.(*ast.FuncDecl); ok && f.Name.Name == functionname {
				fun = f
				return
			}
		}
	}
	panic("function not found")
}
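// Illustrative caller for parseFunc (not part of the original file): print
// the source of a single function, using the file set returned with it so
// positions resolve correctly. go/printer, os, and log are assumed to be
// imported.
func printFunc(filename, functionname string) {
	fun, fset := parseFunc(filename, functionname)
	if err := printer.Fprint(os.Stdout, fset, fun); err != nil {
		log.Fatal(err)
	}
}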
func TestErrors(t *testing.T) {
	fsetErrs = token.NewFileSet()
	list, err := ioutil.ReadDir(testdata)
	if err != nil {
		t.Fatal(err)
	}
	for _, fi := range list {
		name := fi.Name()
		if !fi.IsDir() && !strings.HasPrefix(name, ".") && strings.HasSuffix(name, ".src") {
			checkErrors(t, filepath.Join(testdata, name), nil)
		}
	}
}
func main() {
	flag.Parse()
	fset := token.NewFileSet()
	nheadings := 0
	err := filepath.Walk(*root, func(path string, fi os.FileInfo, err error) error {
		if !fi.IsDir() {
			return nil
		}
		pkgs, err := parser.ParseDir(fset, path, isGoFile, parser.ParseComments)
		if err != nil {
			if *verbose {
				fmt.Fprintln(os.Stderr, err)
			}
			return nil
		}
		for _, pkg := range pkgs {
			d := doc.New(pkg, path, doc.Mode(0))
			list := appendHeadings(nil, d.Doc)
			for _, d := range d.Consts {
				list = appendHeadings(list, d.Doc)
			}
			for _, d := range d.Types {
				list = appendHeadings(list, d.Doc)
			}
			for _, d := range d.Vars {
				list = appendHeadings(list, d.Doc)
			}
			for _, d := range d.Funcs {
				list = appendHeadings(list, d.Doc)
			}
			if len(list) > 0 {
				// directories may contain multiple packages;
				// print path and package name
				fmt.Printf("%s (package %s)\n", path, pkg.Name)
				for _, h := range list {
					fmt.Printf("\t%s\n", h)
				}
				nheadings += len(list)
			}
		}
		return nil
	})
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	fmt.Println(nheadings, "headings found")
}
func BenchmarkScan(b *testing.B) {
	b.StopTimer()
	fset := token.NewFileSet()
	file := fset.AddFile("", fset.Base(), len(source))
	var s Scanner
	b.StartTimer()
	for i := 0; i < b.N; i++ {
		s.Init(file, source, nil, ScanComments)
		for {
			_, tok, _ := s.Scan()
			if tok == token.EOF {
				break
			}
		}
	}
}
// tokenSelection returns, as a selection, the sequence of
// consecutive occurrences of token sel in the Go src text.
//
func tokenSelection(src []byte, sel token.Token) Selection {
	var s scanner.Scanner
	fset := token.NewFileSet()
	file := fset.AddFile("", fset.Base(), len(src))
	s.Init(file, src, nil, scanner.ScanComments)
	return func() (seg Segment) {
		for {
			pos, tok, lit := s.Scan()
			if tok == token.EOF {
				break
			}
			offs := file.Offset(pos)
			if tok == sel {
				seg = Segment{offs, offs + len(lit)}
				break
			}
		}
		return
	}
}
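// Sketch of how such a Selection might be drained (not part of the original
// file). As the implementation above implies, the zero Segment is returned
// once the scanner reaches EOF, so it serves as the end marker; the helper
// name is illustrative.
func collectSegments(src []byte, tok token.Token) []Segment {
	var segs []Segment
	next := tokenSelection(src, tok)
	var zero Segment
	for {
		seg := next()
		if seg == zero {
			break
		}
		segs = append(segs, seg)
	}
	return segs
}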
// newDirectory creates a new package directory tree with at most maxDepth
// levels, anchored at root. The result tree is pruned such that it only
// contains directories that contain package files or that contain
// subdirectories containing package files (transitively). If a value >= 0 is
// provided for maxDepth, nodes at larger depths are pruned as well; they
// are assumed to contain package files even if their contents are not known
// (i.e., in this case the tree may contain directories w/o any package files).
//
func newDirectory(root string, maxDepth int) *Directory {
	// The root could be a symbolic link so use Stat not Lstat.
	d, err := fs.Stat(root)
	// If we fail here, report detailed error messages; otherwise
	// it is hard to see why a directory tree was not built.
	switch {
	case err != nil:
		log.Printf("newDirectory(%s): %s", root, err)
		return nil
	case !isPkgDir(d):
		log.Printf("newDirectory(%s): not a package directory", root)
		return nil
	}
	if maxDepth < 0 {
		maxDepth = 1e6 // "infinity"
	}
	b := treeBuilder{maxDepth}
	// the file set provided is only for local parsing, no position
	// information escapes and thus we don't need to save the set
	return b.newDirTree(token.NewFileSet(), root, d.Name(), 0)
}
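// Hypothetical invocation (not part of the original file): build the full
// tree below the main source directory with unlimited depth. The path is an
// example value, not one the original code uses.
func buildSourceTree() *Directory {
	return newDirectory("/usr/local/go/src/pkg", -1)
}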
// ParseExpr is a convenience function for obtaining the AST of an expression x.
// The position information recorded in the AST is undefined. The filename used
// in error messages is the empty string.
//
func ParseExpr(x string) (ast.Expr, error) {
	var p parser
	p.init(token.NewFileSet(), "", []byte(x), 0)

	// Set up pkg-level scopes to avoid nil-pointer errors.
	// This is not needed for a correct expression x as the
	// parser will be ok with a nil topScope, but be cautious
	// in case of an erroneous x.
	p.openScope()
	p.pkgScope = p.topScope
	e := p.parseRhsOrType()
	p.closeScope()
	assert(p.topScope == nil, "unbalanced scopes")

	if p.errors.Len() > 0 {
		p.errors.Sort()
		return nil, p.errors.Err()
	}

	return e, nil
}
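// Minimal usage sketch (not part of the original source): from outside the
// package, the function above is reached as parser.ParseExpr; the function
// name and input here are illustrative.
func exampleParseExpr() {
	e, err := parser.ParseExpr(`map[string]int{"one": 1}`)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%T\n", e) // *ast.CompositeLit
}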
// This example demonstrates how to inspect the AST of a Go program.
func ExampleInspect() {
	// src is the input for which we want to inspect the AST.
	src := `
package p
const c = 1.0
var X = f(3.14)*2 + c
`

	// Create the AST by parsing src.
	fset := token.NewFileSet() // positions are relative to fset
	f, err := parser.ParseFile(fset, "src.go", src, 0)
	if err != nil {
		panic(err)
	}

	// Inspect the AST and print all identifiers and literals.
	ast.Inspect(f, func(n ast.Node) bool {
		var s string
		switch x := n.(type) {
		case *ast.BasicLit:
			s = x.Value
		case *ast.Ident:
			s = x.Name
		}
		if s != "" {
			fmt.Printf("%s:\t%s\n", fset.Position(n.Pos()), s)
		}
		return true
	})

	// output:
	// src.go:2:9:	p
	// src.go:3:7:	c
	// src.go:3:11:	1.0
	// src.go:4:5:	X
	// src.go:4:9:	f
	// src.go:4:11:	3.14
	// src.go:4:17:	2
	// src.go:4:21:	c
}
func ExampleParseFile() {
	fset := token.NewFileSet() // positions are relative to fset

	// Parse the file containing this very example
	// but stop after processing the imports.
	f, err := parser.ParseFile(fset, "example_test.go", nil, parser.ImportsOnly)
	if err != nil {
		fmt.Println(err)
		return
	}

	// Print the imports from the file's AST.
	for _, s := range f.Imports {
		fmt.Println(s.Path.Value)
	}

	// output:
	//
	// "fmt"
	// "go/parser"
	// "go/token"
}
func BenchmarkScanFile(b *testing.B) {
	b.StopTimer()
	const filename = "scanner.go"
	src, err := ioutil.ReadFile(filename)
	if err != nil {
		panic(err)
	}
	fset := token.NewFileSet()
	file := fset.AddFile(filename, fset.Base(), len(src))
	b.SetBytes(int64(len(src)))
	var s Scanner
	b.StartTimer()
	for i := 0; i < b.N; i++ {
		s.Init(file, src, nil, ScanComments)
		for {
			_, tok, _ := s.Scan()
			if tok == token.EOF {
				break
			}
		}
	}
}
func TestCommentMap(t *testing.T) {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "", src, parser.ParseComments)
	if err != nil {
		t.Fatal(err)
	}
	cmap := NewCommentMap(fset, f, f.Comments)

	// verify correct association of comments
	for n, list := range cmap {
		key := fmt.Sprintf("%2d: %T", fset.Position(n.Pos()).Line, n)
		got := ctext(list)
		want := res[key]
		if got != want {
			t.Errorf("%s: got %q; want %q", key, got, want)
		}
	}

	// verify that no comments got lost
	if n := len(cmap.Comments()); n != len(f.Comments) {
		t.Errorf("got %d comment groups in map; want %d", n, len(f.Comments))
	}

	// support code to update test:
	// set genMap to true to generate res map
	const genMap = false
	if genMap {
		out := make([]string, 0, len(cmap))
		for n, list := range cmap {
			out = append(out, fmt.Sprintf("\t\"%2d: %T\":\t%q,", fset.Position(n.Pos()).Line, n, ctext(list)))
		}
		sort.Strings(out)
		for _, s := range out {
			fmt.Println(s)
		}
	}
}
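// Hedged usage sketch (not part of the original test): a comment map can be
// indexed by any node, so looking up the comments attached to each top-level
// declaration is a simple loop. The helper name is illustrative.
func printDeclComments(fset *token.FileSet, f *File) {
	cmap := NewCommentMap(fset, f, f.Comments)
	for _, decl := range f.Decls {
		for _, group := range cmap[decl] {
			fmt.Printf("%s: %s", fset.Position(decl.Pos()), group.Text())
		}
	}
}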
func test(t *testing.T, mode Mode) {
	// determine file filter
	filter := isGoFile
	if *files != "" {
		rx, err := regexp.Compile(*files)
		if err != nil {
			t.Fatal(err)
		}
		filter = func(fi os.FileInfo) bool {
			return isGoFile(fi) && rx.MatchString(fi.Name())
		}
	}

	// get packages
	fset := token.NewFileSet()
	pkgs, err := parser.ParseDir(fset, dataDir, filter, parser.ParseComments)
	if err != nil {
		t.Fatal(err)
	}

	// test packages
	for _, pkg := range pkgs {
		importpath := dataDir + "/" + pkg.Name
		doc := New(pkg, importpath, mode)

		// golden files always use / in filenames - canonicalize them
		for i, filename := range doc.Filenames {
			doc.Filenames[i] = filepath.ToSlash(filename)
		}

		// print documentation
		var buf bytes.Buffer
		if err := templateTxt.Execute(&buf, bundle{doc, fset}); err != nil {
			t.Error(err)
			continue
		}
		got := buf.Bytes()

		// update golden file if necessary
		golden := filepath.Join(dataDir, fmt.Sprintf("%s.%d.golden", pkg.Name, mode))
		if *update {
			err := ioutil.WriteFile(golden, got, 0644)
			if err != nil {
				t.Error(err)
			}
			continue
		}

		// get golden file
		want, err := ioutil.ReadFile(golden)
		if err != nil {
			t.Error(err)
			continue
		}

		// compare
		if !bytes.Equal(got, want) {
			t.Errorf("package %s\n\tgot:\n%s\n\twant:\n%s", pkg.Name, got, want)
		}
	}
}
// NewIndex creates a new index for the .go files
// in the directories given by dirnames.
//
func NewIndex(dirnames <-chan string, fulltextIndex bool, throttle float64) *Index {
	var x Indexer
	th := NewThrottle(throttle, 100*time.Millisecond) // run at least 0.1s at a time

	// initialize Indexer
	// (use some reasonably sized maps to start)
	x.fset = token.NewFileSet()
	x.packages = make(map[string]*Pak, 256)
	x.words = make(map[string]*IndexResult, 8192)

	// index all files in the directories given by dirnames
	for dirname := range dirnames {
		list, err := fs.ReadDir(dirname)
		if err != nil {
			continue // ignore this directory
		}
		for _, f := range list {
			if !f.IsDir() {
				x.visitFile(dirname, f, fulltextIndex)
			}
			th.Throttle()
		}
	}

	if !fulltextIndex {
		// the file set, the current file, and the sources are
		// not needed after indexing if no text index is built -
		// help GC and clear them
		x.fset = nil
		x.sources.Reset()
		x.current = nil // contains reference to fset!
	}

	// for each word, reduce the RunLists into a LookupResult;
	// also collect the word with its canonical spelling in a
	// word list for later computation of alternative spellings
	words := make(map[string]*LookupResult)
	var wlist RunList
	for w, h := range x.words {
		decls := reduce(h.Decls)
		others := reduce(h.Others)
		words[w] = &LookupResult{
			Decls:  decls,
			Others: others,
		}
		wlist = append(wlist, &wordPair{canonical(w), w})
		th.Throttle()
	}
	x.stats.Words = len(words)

	// reduce the word list {canonical(w), w} into
	// a list of AltWords runs {canonical(w), {w}}
	alist := wlist.reduce(lessWordPair, newAltWords)

	// convert alist into a map of alternative spellings
	alts := make(map[string]*AltWords)
	for i := 0; i < len(alist); i++ {
		a := alist[i].(*AltWords)
		alts[a.Canon] = a
	}

	// create text index
	var suffixes *suffixarray.Index
	if fulltextIndex {
		suffixes = suffixarray.New(x.sources.Bytes())
	}

	return &Index{x.fset, suffixes, words, alts, x.snippets, x.stats}
}
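// Hypothetical driver (not part of the original file): NewIndex consumes
// directory names from a channel, so a caller typically feeds and closes the
// channel from another goroutine. The directory path is an example value.
func buildIndex() *Index {
	dirnames := make(chan string)
	go func() {
		dirnames <- "/usr/local/go/src/pkg"
		close(dirnames)
	}()
	return NewIndex(dirnames, true, 0.9)
}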
"newgo/ast" "newgo/parser" "newgo/token" "path/filepath" "testing" "time" ) const ( dataDir = "testdata" tabwidth = 8 ) var update = flag.Bool("update", false, "update golden files") var fset = token.NewFileSet() type checkMode uint const ( export checkMode = 1 << iota rawFormat idempotent ) // format parses src, prints the corresponding AST, verifies the resulting // src is syntactically correct, and returns the resulting src or an error // if any. func format(src []byte, mode checkMode) ([]byte, error) { // parse src f, err := parser.ParseFile(fset, "", src, parser.ParseComments)
// getPageInfo returns the PageInfo for a package directory abspath. If the
// showSource mode bit is set, an AST containing only the package exports is
// computed (PageInfo.PAst); otherwise package documentation (PageInfo.PDoc)
// is extracted from the AST. If there is no corresponding package in the
// directory, PageInfo.PAst and PageInfo.PDoc are nil. If there are no sub-
// directories, PageInfo.Dirs is nil. If an error occurred, PageInfo.Err is
// set to the respective error but the error is not logged.
//
func (h *docServer) getPageInfo(abspath, relpath string, mode PageInfoMode) *PageInfo {
	info := &PageInfo{Dirname: abspath}

	// Restrict to the package files that would be used when building
	// the package on this system. This makes sure that if there are
	// separate implementations for, say, Windows vs Unix, we don't
	// jumble them all together.
	// Note: Uses current binary's GOOS/GOARCH.
	// To use different pair, such as if we allowed the user to choose,
	// set ctxt.GOOS and ctxt.GOARCH before calling ctxt.ImportDir.
	ctxt := build.Default
	ctxt.IsAbsPath = pathpkg.IsAbs
	ctxt.ReadDir = fsReadDir
	ctxt.OpenFile = fsOpenFile
	pkginfo, err := ctxt.ImportDir(abspath, 0)
	// continue if there are no Go source files; we still want the directory info
	if _, nogo := err.(*build.NoGoError); err != nil && !nogo {
		info.Err = err
		return info
	}

	// collect package files
	pkgname := pkginfo.Name
	pkgfiles := append(pkginfo.GoFiles, pkginfo.CgoFiles...)
	if len(pkgfiles) == 0 {
		// Commands written in C have no .go files in the build.
		// Instead, documentation may be found in an ignored file.
		// The file may be ignored via an explicit +build ignore
		// constraint (recommended), or by defining the package
		// documentation (historic).
		pkgname = "main" // assume package main since pkginfo.Name == ""
		pkgfiles = pkginfo.IgnoredGoFiles
	}

	// get package information, if any
	if len(pkgfiles) > 0 {
		// build package AST
		fset := token.NewFileSet()
		files, err := parseFiles(fset, abspath, pkgfiles)
		if err != nil {
			info.Err = err
			return info
		}

		// ignore any errors - they are due to unresolved identifiers
		pkg, _ := ast.NewPackage(fset, files, poorMansImporter, nil)

		// extract package documentation
		info.FSet = fset
		if mode&showSource == 0 {
			// show extracted documentation
			var m doc.Mode
			if mode&noFiltering != 0 {
				m = doc.AllDecls
			}
			if mode&allMethods != 0 {
				m |= doc.AllMethods
			}
			info.PDoc = doc.New(pkg, pathpkg.Clean(relpath), m) // no trailing '/' in importpath

			// collect examples
			testfiles := append(pkginfo.TestGoFiles, pkginfo.XTestGoFiles...)
			files, err = parseFiles(fset, abspath, testfiles)
			if err != nil {
				log.Println("parsing examples:", err)
			}
			info.Examples = collectExamples(pkg, files)

			// collect any notes that we want to show
			if info.PDoc.Notes != nil {
				// could regexp.Compile only once per godoc, but probably not worth it
				if rx, err := regexp.Compile(*notes); err == nil {
					for m, n := range info.PDoc.Notes {
						if rx.MatchString(m) {
							if info.Notes == nil {
								info.Notes = make(map[string][]*doc.Note)
							}
							info.Notes[m] = n
						}
					}
				}
			}
		} else {
			// show source code
			// TODO(gri) Consider eliminating export filtering in this mode,
			// or perhaps eliminating the mode altogether.
			if mode&noFiltering == 0 {
				packageExports(fset, pkg)
			}
			info.PAst = ast.MergePackageFiles(pkg, 0)
		}
		info.IsMain = pkgname == "main"
	}

	// get directory information, if any
	var dir *Directory
	var timestamp time.Time
	if tree, ts := fsTree.get(); tree != nil && tree.(*Directory) != nil {
		// directory tree is present; lookup respective directory
		// (may still fail if the file system was updated and the
		// new directory tree has not yet been computed)
		dir = tree.(*Directory).lookup(abspath)
		timestamp = ts
	}
	if dir == nil {
		// no directory tree present (too early after startup or
		// command-line mode); compute one level for this page
		// note: cannot use path filter here because in general
		// it doesn't contain the fsTree path
		dir = newDirectory(abspath, 1)
		timestamp = time.Now()
	}
	info.Dirs = dir.listing(true)
	info.DirTime = timestamp
	info.DirFlat = mode&flatDir != 0

	return info
}
// Source formats src in canonical gofmt style and returns the result
// or an (I/O or syntax) error. src is expected to be a syntactically
// correct Go source file, or a list of Go declarations or statements.
//
// If src is a partial source file, the leading and trailing space of src
// is applied to the result (such that it has the same leading and trailing
// space as src), and the result is indented by the same amount as the first
// line of src containing code. Imports are not sorted for partial source files.
//
func Source(src []byte) ([]byte, error) {
	fset := token.NewFileSet()
	node, err := parse(fset, src)
	if err != nil {
		return nil, err
	}

	var buf bytes.Buffer
	if file, ok := node.(*ast.File); ok {
		// Complete source file.
		ast.SortImports(fset, file)
		err := config.Fprint(&buf, fset, file)
		if err != nil {
			return nil, err
		}
	} else {
		// Partial source file.
		// Determine and prepend leading space.
		i, j := 0, 0
		for j < len(src) && isSpace(src[j]) {
			if src[j] == '\n' {
				i = j + 1 // index of last line in leading space
			}
			j++
		}
		buf.Write(src[:i])

		// Determine indentation of first code line.
		// Spaces are ignored unless there are no tabs,
		// in which case spaces count as one tab.
		indent := 0
		hasSpace := false
		for _, b := range src[i:j] {
			switch b {
			case ' ':
				hasSpace = true
			case '\t':
				indent++
			}
		}
		if indent == 0 && hasSpace {
			indent = 1
		}

		// Format the source.
		cfg := config
		cfg.Indent = indent
		err := cfg.Fprint(&buf, fset, node)
		if err != nil {
			return nil, err
		}

		// Determine and append trailing space.
		i = len(src)
		for i > 0 && isSpace(src[i-1]) {
			i--
		}
		buf.Write(src[i:])
	}

	return buf.Bytes(), nil
}
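// Short usage sketch (not part of the original source): from outside the
// package this is go/format's format.Source; it accepts a whole file or a
// fragment. The function name and input are illustrative.
func exampleSource() {
	out, err := Source([]byte("package main\nfunc  main( ) {}\n"))
	if err != nil {
		log.Fatal(err)
	}
	os.Stdout.Write(out)
}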
// Import returns details about the Go package named by the import path,
// interpreting local import paths relative to the srcDir directory.
// If the path is a local import path naming a package that can be imported
// using a standard import path, the returned package will set p.ImportPath
// to that path.
//
// In the directory containing the package, .go, .c, .h, and .s files are
// considered part of the package except for:
//
//	- .go files in package documentation
//	- files starting with _ or . (likely editor temporary files)
//	- files with build constraints not satisfied by the context
//
// If an error occurs, Import returns a non-nil error and a non-nil
// *Package containing partial information.
//
func (ctxt *Context) Import(path string, srcDir string, mode ImportMode) (*Package, error) {
	p := &Package{
		ImportPath: path,
	}
	if path == "" {
		return p, fmt.Errorf("import %q: invalid import path", path)
	}

	var pkga string
	var pkgerr error
	switch ctxt.Compiler {
	case "gccgo":
		dir, elem := pathpkg.Split(p.ImportPath)
		pkga = "pkg/gccgo_" + ctxt.GOOS + "_" + ctxt.GOARCH + "/" + dir + "lib" + elem + ".a"
	case "gc":
		suffix := ""
		if ctxt.InstallSuffix != "" {
			suffix = "_" + ctxt.InstallSuffix
		}
		pkga = "pkg/" + ctxt.GOOS + "_" + ctxt.GOARCH + suffix + "/" + p.ImportPath + ".a"
	default:
		// Save error for end of function.
		pkgerr = fmt.Errorf("import %q: unknown compiler %q", path, ctxt.Compiler)
	}

	binaryOnly := false
	if IsLocalImport(path) {
		pkga = "" // local imports have no installed path
		if srcDir == "" {
			return p, fmt.Errorf("import %q: import relative to unknown directory", path)
		}
		if !ctxt.isAbsPath(path) {
			p.Dir = ctxt.joinPath(srcDir, path)
		}
		// Determine canonical import path, if any.
		if ctxt.GOROOT != "" {
			root := ctxt.joinPath(ctxt.GOROOT, "src", "pkg")
			if sub, ok := ctxt.hasSubdir(root, p.Dir); ok {
				p.Goroot = true
				p.ImportPath = sub
				p.Root = ctxt.GOROOT
				goto Found
			}
		}
		all := ctxt.gopath()
		for i, root := range all {
			rootsrc := ctxt.joinPath(root, "src")
			if sub, ok := ctxt.hasSubdir(rootsrc, p.Dir); ok {
				// We found a potential import path for dir,
				// but check that using it wouldn't find something
				// else first.
				if ctxt.GOROOT != "" {
					if dir := ctxt.joinPath(ctxt.GOROOT, "src", "pkg", sub); ctxt.isDir(dir) {
						p.ConflictDir = dir
						goto Found
					}
				}
				for _, earlyRoot := range all[:i] {
					if dir := ctxt.joinPath(earlyRoot, "src", sub); ctxt.isDir(dir) {
						p.ConflictDir = dir
						goto Found
					}
				}

				// sub would not name some other directory instead of this one.
				// Record it.
				p.ImportPath = sub
				p.Root = root
				goto Found
			}
		}
		// It's okay that we didn't find a root containing dir.
		// Keep going with the information we have.
	} else {
		if strings.HasPrefix(path, "/") {
			return p, fmt.Errorf("import %q: cannot import absolute path", path)
		}

		// tried records the location of unsuccessful package lookups
		var tried struct {
			goroot string
			gopath []string
		}

		// Determine directory from import path.
		if ctxt.GOROOT != "" {
			dir := ctxt.joinPath(ctxt.GOROOT, "src", "pkg", path)
			isDir := ctxt.isDir(dir)
			binaryOnly = !isDir && mode&AllowBinary != 0 && pkga != "" && ctxt.isFile(ctxt.joinPath(ctxt.GOROOT, pkga))
			if isDir || binaryOnly {
				p.Dir = dir
				p.Goroot = true
				p.Root = ctxt.GOROOT
				goto Found
			}
			tried.goroot = dir
		}
		for _, root := range ctxt.gopath() {
			dir := ctxt.joinPath(root, "src", path)
			isDir := ctxt.isDir(dir)
			binaryOnly = !isDir && mode&AllowBinary != 0 && pkga != "" && ctxt.isFile(ctxt.joinPath(root, pkga))
			if isDir || binaryOnly {
				p.Dir = dir
				p.Root = root
				goto Found
			}
			tried.gopath = append(tried.gopath, dir)
		}

		// package was not found
		var paths []string
		if tried.goroot != "" {
			paths = append(paths, fmt.Sprintf("\t%s (from $GOROOT)", tried.goroot))
		} else {
			paths = append(paths, "\t($GOROOT not set)")
		}
		var i int
		var format = "\t%s (from $GOPATH)"
		for ; i < len(tried.gopath); i++ {
			if i > 0 {
				format = "\t%s"
			}
			paths = append(paths, fmt.Sprintf(format, tried.gopath[i]))
		}
		if i == 0 {
			paths = append(paths, "\t($GOPATH not set)")
		}
		return p, fmt.Errorf("cannot find package %q in any of:\n%s", path, strings.Join(paths, "\n"))
	}

Found:
	if p.Root != "" {
		if p.Goroot {
			p.SrcRoot = ctxt.joinPath(p.Root, "src", "pkg")
		} else {
			p.SrcRoot = ctxt.joinPath(p.Root, "src")
		}
		p.PkgRoot = ctxt.joinPath(p.Root, "pkg")
		p.BinDir = ctxt.joinPath(p.Root, "bin")
		if pkga != "" {
			p.PkgObj = ctxt.joinPath(p.Root, pkga)
		}
	}

	if mode&FindOnly != 0 {
		return p, pkgerr
	}
	if binaryOnly && (mode&AllowBinary) != 0 {
		return p, pkgerr
	}

	dirs, err := ctxt.readDir(p.Dir)
	if err != nil {
		return p, err
	}

	var Sfiles []string // files with ".S" (capital S)
	var firstFile string
	imported := make(map[string][]token.Position)
	testImported := make(map[string][]token.Position)
	xTestImported := make(map[string][]token.Position)
	allTags := make(map[string]bool)
	fset := token.NewFileSet()
	for _, d := range dirs {
		if d.IsDir() {
			continue
		}
		name := d.Name()
		ext := nameExt(name)

		match, data, filename, err := ctxt.matchFile(p.Dir, name, true, allTags)
		if err != nil {
			return p, err
		}
		if !match {
			if ext == ".go" {
				p.IgnoredGoFiles = append(p.IgnoredGoFiles, name)
			}
			continue
		}

		// Going to save the file. For non-Go files, can stop here.
		switch ext {
		case ".c":
			p.CFiles = append(p.CFiles, name)
			continue
		case ".cc", ".cpp", ".cxx":
			p.CXXFiles = append(p.CXXFiles, name)
			continue
		case ".h", ".hh", ".hpp", ".hxx":
			p.HFiles = append(p.HFiles, name)
			continue
		case ".s":
			p.SFiles = append(p.SFiles, name)
			continue
		case ".S":
			Sfiles = append(Sfiles, name)
			continue
		case ".swig":
			p.SwigFiles = append(p.SwigFiles, name)
			continue
		case ".swigcxx":
			p.SwigCXXFiles = append(p.SwigCXXFiles, name)
			continue
		case ".syso":
			// binary objects to add to package archive
			// Likely of the form foo_windows.syso, but
			// the name was vetted above with goodOSArchFile.
			p.SysoFiles = append(p.SysoFiles, name)
			continue
		}

		pf, err := parser.ParseFile(fset, filename, data, parser.ImportsOnly|parser.ParseComments)
		if err != nil {
			return p, err
		}

		pkg := pf.Name.Name
		if pkg == "documentation" {
			p.IgnoredGoFiles = append(p.IgnoredGoFiles, name)
			continue
		}

		isTest := strings.HasSuffix(name, "_test.go")
		isXTest := false
		if isTest && strings.HasSuffix(pkg, "_test") {
			isXTest = true
			pkg = pkg[:len(pkg)-len("_test")]
		}

		if p.Name == "" {
			p.Name = pkg
			firstFile = name
		} else if pkg != p.Name {
			return p, fmt.Errorf("found packages %s (%s) and %s (%s) in %s", p.Name, firstFile, pkg, name, p.Dir)
		}
		if pf.Doc != nil && p.Doc == "" {
			p.Doc = doc.Synopsis(pf.Doc.Text())
		}

		// Record imports and information about cgo.
		isCgo := false
		for _, decl := range pf.Decls {
			d, ok := decl.(*ast.GenDecl)
			if !ok {
				continue
			}
			for _, dspec := range d.Specs {
				spec, ok := dspec.(*ast.ImportSpec)
				if !ok {
					continue
				}
				quoted := spec.Path.Value
				path, err := strconv.Unquote(quoted)
				if err != nil {
					log.Panicf("%s: parser returned invalid quoted string: <%s>", filename, quoted)
				}
				if isXTest {
					xTestImported[path] = append(xTestImported[path], fset.Position(spec.Pos()))
				} else if isTest {
					testImported[path] = append(testImported[path], fset.Position(spec.Pos()))
				} else {
					imported[path] = append(imported[path], fset.Position(spec.Pos()))
				}
				if path == "C" {
					if isTest {
						return p, fmt.Errorf("use of cgo in test %s not supported", filename)
					}
					cg := spec.Doc
					if cg == nil && len(d.Specs) == 1 {
						cg = d.Doc
					}
					if cg != nil {
						if err := ctxt.saveCgo(filename, p, cg); err != nil {
							return p, err
						}
					}
					isCgo = true
				}
			}
		}
		if isCgo {
			allTags["cgo"] = true
			if ctxt.CgoEnabled {
				p.CgoFiles = append(p.CgoFiles, name)
			} else {
				p.IgnoredGoFiles = append(p.IgnoredGoFiles, name)
			}
		} else if isXTest {
			p.XTestGoFiles = append(p.XTestGoFiles, name)
		} else if isTest {
			p.TestGoFiles = append(p.TestGoFiles, name)
		} else {
			p.GoFiles = append(p.GoFiles, name)
		}
	}
	if len(p.GoFiles)+len(p.CgoFiles)+len(p.TestGoFiles)+len(p.XTestGoFiles) == 0 {
		return p, &NoGoError{p.Dir}
	}

	for tag := range allTags {
		p.AllTags = append(p.AllTags, tag)
	}
	sort.Strings(p.AllTags)

	p.Imports, p.ImportPos = cleanImports(imported)
	p.TestImports, p.TestImportPos = cleanImports(testImported)
	p.XTestImports, p.XTestImportPos = cleanImports(xTestImported)

	// add the .S files only if we are using cgo
	// (which means gcc will compile them).
	// The standard assemblers expect .s files.
	if len(p.CgoFiles) > 0 {
		p.SFiles = append(p.SFiles, Sfiles...)
		sort.Strings(p.SFiles)
	}

	return p, pkgerr
}
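// Illustrative caller (not part of the original file): within this package
// the usual entry points are Default.Import or the Import shorthand; the
// FindOnly mode skips reading the directory contents. The import path here
// is an example value.
func exampleImport() {
	p, err := Default.Import("fmt", "", FindOnly)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(p.ImportPath, "->", p.Dir)
}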
// This example shows what an AST looks like when printed for debugging.
func ExamplePrint() {
	// src is the input for which we want to print the AST.
	src := `
package main
func main() {
	println("Hello, World!")
}
`

	// Create the AST by parsing src.
	fset := token.NewFileSet() // positions are relative to fset
	f, err := parser.ParseFile(fset, "", src, 0)
	if err != nil {
		panic(err)
	}

	// Print the AST.
	ast.Print(fset, f)

	// output:
	// 0 *ast.File {
	// 1 . Package: 2:1
	// 2 . Name: *ast.Ident {
	// 3 . . NamePos: 2:9
	// 4 . . Name: "main"
	// 5 . }
	// 6 . Decls: []ast.Decl (len = 1) {
	// 7 . . 0: *ast.FuncDecl {
	// 8 . . . Name: *ast.Ident {
	// 9 . . . . NamePos: 3:6
	// 10 . . . . Name: "main"
	// 11 . . . . Obj: *ast.Object {
	// 12 . . . . . Kind: func
	// 13 . . . . . Name: "main"
	// 14 . . . . . Decl: *(obj @ 7)
	// 15 . . . . }
	// 16 . . . }
	// 17 . . . Type: *ast.FuncType {
	// 18 . . . . Func: 3:1
	// 19 . . . . Params: *ast.FieldList {
	// 20 . . . . . Opening: 3:10
	// 21 . . . . . Closing: 3:11
	// 22 . . . . }
	// 23 . . . }
	// 24 . . . Body: *ast.BlockStmt {
	// 25 . . . . Lbrace: 3:13
	// 26 . . . . List: []ast.Stmt (len = 1) {
	// 27 . . . . . 0: *ast.ExprStmt {
	// 28 . . . . . . X: *ast.CallExpr {
	// 29 . . . . . . . Fun: *ast.Ident {
	// 30 . . . . . . . . NamePos: 4:2
	// 31 . . . . . . . . Name: "println"
	// 32 . . . . . . . }
	// 33 . . . . . . . Lparen: 4:9
	// 34 . . . . . . . Args: []ast.Expr (len = 1) {
	// 35 . . . . . . . . 0: *ast.BasicLit {
	// 36 . . . . . . . . . ValuePos: 4:10
	// 37 . . . . . . . . . Kind: STRING
	// 38 . . . . . . . . . Value: "\"Hello, World!\""
	// 39 . . . . . . . . }
	// 40 . . . . . . . }
	// 41 . . . . . . . Ellipsis: -
	// 42 . . . . . . . Rparen: 4:25
	// 43 . . . . . . }
	// 44 . . . . . }
	// 45 . . . . }
	// 46 . . . . Rbrace: 5:1
	// 47 . . . }
	// 48 . . }
	// 49 . }
	// 50 . Scope: *ast.Scope {
	// 51 . . Objects: map[string]*ast.Object (len = 1) {
	// 52 . . . "main": *(obj @ 11)
	// 53 . . }
	// 54 . }
	// 55 . Unresolved: []*ast.Ident (len = 1) {
	// 56 . . 0: *(obj @ 29)
	// 57 . }
	// 58 }
}