func main() {
	flag.Parse()
	fset := token.NewFileSet()
	nheadings := 0
	err := filepath.Walk(*root, func(path string, fi os.FileInfo, err error) error {
		if !fi.IsDir() {
			return nil
		}
		pkgs, err := parser.ParseDir(fset, path, isGoFile, parser.ParseComments)
		if err != nil {
			if *verbose {
				fmt.Fprintln(os.Stderr, err)
			}
			return nil
		}
		for _, pkg := range pkgs {
			d := doc.New(pkg, path, doc.Mode(0))
			list := appendHeadings(nil, d.Doc)
			for _, d := range d.Consts {
				list = appendHeadings(list, d.Doc)
			}
			for _, d := range d.Types {
				list = appendHeadings(list, d.Doc)
			}
			for _, d := range d.Vars {
				list = appendHeadings(list, d.Doc)
			}
			for _, d := range d.Funcs {
				list = appendHeadings(list, d.Doc)
			}
			if len(list) > 0 {
				// directories may contain multiple packages;
				// print path and package name
				fmt.Printf("%s (package %s)\n", path, pkg.Name)
				for _, h := range list {
					fmt.Printf("\t%s\n", h)
				}
				nheadings += len(list)
			}
		}
		return nil
	})
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	fmt.Println(nheadings, "headings found")
}
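// The walk above relies on two small helpers defined elsewhere in the tool:
// isGoFile, the file filter handed to parser.ParseDir, and appendHeadings,
// which collects headings from a doc comment. The sketch below shows one
// plausible way to write them; the filter rules and the heading regexp are
// assumptions for illustration, not the tool's actual definitions. It needs
// "bytes", "os", "regexp", "strings", and "go/doc" among the imports.

// isGoFile reports whether fi is a regular, non-test Go source file, so
// parser.ParseDir only parses files that contribute documentation.
func isGoFile(fi os.FileInfo) bool {
	name := fi.Name()
	return !fi.IsDir() &&
		!strings.HasPrefix(name, ".") &&
		strings.HasSuffix(name, ".go") &&
		!strings.HasSuffix(name, "_test.go")
}

// htmlHeading matches the heading tags that doc.ToHTML emits for comment
// headings; the exact tag (<h3 id=...>) may differ between go/doc versions.
var htmlHeading = regexp.MustCompile(`<h3 id="[^"]*">(.+?)</h3>`)

// appendHeadings renders a doc comment to HTML and appends the text of
// every heading it contains to list.
func appendHeadings(list []string, comment string) []string {
	var buf bytes.Buffer
	doc.ToHTML(&buf, comment, nil)
	for _, m := range htmlHeading.FindAllStringSubmatch(buf.String(), -1) {
		list = append(list, m[1])
	}
	return list
}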
// getPageInfo returns the PageInfo for a package directory abspath. If mode
// has the showSource flag set, an AST of the package (filtered down to the
// package exports unless noFiltering is also set) is computed (PageInfo.PAst);
// otherwise package documentation (PageInfo.PDoc) is extracted from the AST.
// If there is no corresponding package in the directory, PageInfo.PAst and
// PageInfo.PDoc are nil. If there are no subdirectories, PageInfo.Dirs is nil.
// If an error occurred, PageInfo.Err is set to the respective error, but the
// error is not logged.
//
func (h *docServer) getPageInfo(abspath, relpath string, mode PageInfoMode) *PageInfo {
	info := &PageInfo{Dirname: abspath}

	// Restrict to the package files that would be used when building
	// the package on this system. This makes sure that if there are
	// separate implementations for, say, Windows vs Unix, we don't
	// jumble them all together.
	// Note: This uses the current binary's GOOS/GOARCH.
	// To use a different pair, such as if we allowed the user to choose,
	// set ctxt.GOOS and ctxt.GOARCH before calling ctxt.ImportDir.
	ctxt := build.Default
	ctxt.IsAbsPath = pathpkg.IsAbs
	ctxt.ReadDir = fsReadDir
	ctxt.OpenFile = fsOpenFile
	pkginfo, err := ctxt.ImportDir(abspath, 0)
	// Continue if there are no Go source files; we still want the directory info.
	if _, nogo := err.(*build.NoGoError); err != nil && !nogo {
		info.Err = err
		return info
	}

	// collect package files
	pkgname := pkginfo.Name
	pkgfiles := append(pkginfo.GoFiles, pkginfo.CgoFiles...)
	if len(pkgfiles) == 0 {
		// Commands written in C have no .go files in the build.
		// Instead, documentation may be found in an ignored file.
		// The file may be ignored via an explicit +build ignore
		// constraint (recommended), or by defining the package
		// documentation (historic).
		pkgname = "main" // assume package main since pkginfo.Name == ""
		pkgfiles = pkginfo.IgnoredGoFiles
	}

	// get package information, if any
	if len(pkgfiles) > 0 {
		// build package AST
		fset := token.NewFileSet()
		files, err := parseFiles(fset, abspath, pkgfiles)
		if err != nil {
			info.Err = err
			return info
		}

		// ignore any errors - they are due to unresolved identifiers
		pkg, _ := ast.NewPackage(fset, files, poorMansImporter, nil)

		// extract package documentation
		info.FSet = fset
		if mode&showSource == 0 {
			// show extracted documentation
			var m doc.Mode
			if mode&noFiltering != 0 {
				m = doc.AllDecls
			}
			if mode&allMethods != 0 {
				m |= doc.AllMethods
			}
			info.PDoc = doc.New(pkg, pathpkg.Clean(relpath), m) // no trailing '/' in importpath

			// collect examples
			testfiles := append(pkginfo.TestGoFiles, pkginfo.XTestGoFiles...)
			files, err = parseFiles(fset, abspath, testfiles)
			if err != nil {
				log.Println("parsing examples:", err)
			}
			info.Examples = collectExamples(pkg, files)

			// collect any notes that we want to show
			if info.PDoc.Notes != nil {
				// could regexp.Compile only once per godoc, but probably not worth it
				if rx, err := regexp.Compile(*notes); err == nil {
					for m, n := range info.PDoc.Notes {
						if rx.MatchString(m) {
							if info.Notes == nil {
								info.Notes = make(map[string][]*doc.Note)
							}
							info.Notes[m] = n
						}
					}
				}
			}
		} else {
			// show source code
			// TODO(gri) Consider eliminating export filtering in this mode,
			//           or perhaps eliminating the mode altogether.
			if mode&noFiltering == 0 {
				packageExports(fset, pkg)
			}
			info.PAst = ast.MergePackageFiles(pkg, 0)
		}
		info.IsMain = pkgname == "main"
	}

	// get directory information, if any
	var dir *Directory
	var timestamp time.Time
	if tree, ts := fsTree.get(); tree != nil && tree.(*Directory) != nil {
		// The directory tree is present; look up the respective directory.
		// (This may still fail if the file system was updated and the
		// new directory tree has not yet been computed.)
		dir = tree.(*Directory).lookup(abspath)
		timestamp = ts
	}
	if dir == nil {
		// No directory tree is present (too early after startup, or in
		// command-line mode); compute one level for this page.
		// Note: we cannot use the path filter here because in general
		// it doesn't contain the fsTree path.
		dir = newDirectory(abspath, 1)
		timestamp = time.Now()
	}
	info.Dirs = dir.listing(true)
	info.DirTime = timestamp
	info.DirFlat = mode&flatDir != 0

	return info
}
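// getPageInfo depends on godoc's virtual file system and helpers (fsReadDir,
// parseFiles, poorMansImporter, and so on). The standalone program below is a
// minimal sketch, not godoc itself, of the same pipeline it uses: go/build to
// select the files that would actually be built, go/parser plus go/ast to
// assemble a package, and go/doc to extract its documentation. The hard-coded
// directory and the output format are assumptions for illustration only.
package main

import (
	"fmt"
	"go/ast"
	"go/build"
	"go/doc"
	"go/parser"
	"go/token"
	"log"
	"path/filepath"
)

func main() {
	dir := "." // assumed: document the package in the current directory

	// Let go/build decide which files belong to the package on this
	// GOOS/GOARCH, just as getPageInfo does via ctxt.ImportDir.
	bpkg, err := build.ImportDir(dir, 0)
	if err != nil {
		log.Fatal(err)
	}

	// Parse the selected files with comments retained.
	fset := token.NewFileSet()
	files := make(map[string]*ast.File)
	for _, name := range append(bpkg.GoFiles, bpkg.CgoFiles...) {
		path := filepath.Join(dir, name)
		f, err := parser.ParseFile(fset, path, nil, parser.ParseComments)
		if err != nil {
			log.Fatal(err)
		}
		files[path] = f
	}

	// Resolution errors (unresolved identifiers) are ignored, mirroring
	// the "ignore any errors" step in getPageInfo.
	apkg, _ := ast.NewPackage(fset, files, nil, nil)

	// Extract documentation and print the package comment plus the
	// exported functions, roughly what PageInfo.PDoc carries.
	dpkg := doc.New(apkg, bpkg.ImportPath, 0)
	fmt.Printf("package %s\n\n%s\n", dpkg.Name, dpkg.Doc)
	for _, f := range dpkg.Funcs {
		fmt.Printf("func %s\n\t%s\n", f.Name, f.Doc)
	}
}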