func main() { flag.Parse() if !*optNoShared { writeSharedDir() } fset := token.NewFileSet() pkgs, err := parser.ParseFiles(fset, flag.Args(), parser.ParseComments) if err != nil { fmt.Fprintln(os.Stderr, err) os.Exit(1) } pkg := getOnlyPkg(pkgs) if pkg != nil { if *optNiceName == "" { *optNiceName = pkg.Name } ast.PackageExports(pkg) docs := doce.NewPackage(pkg, fset) writePackage(docs, *optNiceName) } }
// getPageInfo returns the PageInfo for a package directory path. If the
// parameter try is true, no errors are logged if getPageInfo fails.
// If there is no corresponding package in the directory,
// PageInfo.PDoc is nil. If there are no subdirectories,
// PageInfo.Dirs is nil.
//
func (h *httpHandler) getPageInfo(path string, try bool) PageInfo {
	// the path is relative to h.fsroot
	dirname := pathutil.Join(h.fsRoot, path)

	// the package name is the directory name within its parent
	// (use dirname instead of path because dirname is clean; i.e. has no trailing '/')
	_, pkgname := pathutil.Split(dirname)

	// filter function to select the desired .go files
	filter := func(d *os.Dir) bool {
		if isPkgFile(d) {
			// Some directories contain main packages: Only accept
			// files that belong to the expected package so that
			// parser.ParsePackage doesn't return "multiple packages
			// found" errors.
			// Additionally, accept the special package name
			// fakePkgName if we are looking at cmd documentation.
			// (&& binds tighter than ||, so fakePkgName is only
			// accepted when serving from the command root.)
			name := pkgName(dirname + "/" + d.Name)
			return name == pkgname || h.fsRoot == *cmdroot && name == fakePkgName
		}
		return false
	}

	// get package AST
	pkgs, err := parser.ParseDir(dirname, filter, parser.ParseComments)
	if err != nil && !try {
		// TODO: errors should be shown instead of an empty directory
		log.Stderrf("parser.parseDir: %s", err)
	}
	if len(pkgs) != 1 && !try {
		// TODO: should handle multiple packages
		log.Stderrf("parser.parseDir: found %d packages", len(pkgs))
	}
	// Take an arbitrary package from the map (there is usually exactly one;
	// map iteration order is unspecified when there are several).
	var pkg *ast.Package
	for _, pkg = range pkgs {
		break // take the first package found
	}

	// compute package documentation
	var pdoc *doc.PackageDoc
	if pkg != nil {
		ast.PackageExports(pkg) // reduce AST to exported declarations
		pdoc = doc.NewPackageDoc(pkg, pathutil.Clean(path)) // no trailing '/' in importpath
	}

	// get directory information
	var dir *Directory
	if tree, _ := fsTree.get(); tree != nil {
		// directory tree is present; lookup respective directory
		// (may still fail if the file system was updated and the
		// new directory tree has not yet been computed)
		dir = tree.(*Directory).lookup(dirname)
	} else {
		// no directory tree present (either early after startup
		// or command-line mode); compute one level for this page
		dir = newDirectory(dirname, 1)
	}

	return PageInfo{pdoc, dir.listing(true), h.isPkg}
}
func parse(path, basePath string) ([]*Element, error) { elements := make([]*Element, 0) fset := token.NewFileSet() filter := func(f os.FileInfo) bool { return !f.IsDir() && strings.HasSuffix(f.Name(), ".go") && !strings.HasSuffix(f.Name(), "_test.go") } pkgs, err := parser.ParseDir(fset, path, filter, parser.ParseComments) if err != nil { return nil, err } for _, pkg := range pkgs { ast.PackageExports(pkg) for _, file := range pkg.Files { p := getFilePath(fset, basePath, file.Package) mE := func() *Element { return &Element{ Package: pkg.Name, FilePath: p, } } for _, decl := range file.Decls { switch t := decl.(type) { case *ast.FuncDecl: e := mE() indexFunc(e, fset, t) elements = append(elements, e) case *ast.GenDecl: es := indexGen(mE, fset, t) elements = append(elements, es...) } } } } return elements, nil }
// discoverTypesInPath searches for types of typeID in path and returns a list // of plugins it finds. func discoverTypesInPath(path, typeID string) ([]plugin, error) { postProcessors := []plugin{} dirs, err := listDirectories(path) if err != nil { return postProcessors, err } for _, dir := range dirs { fset := token.NewFileSet() goPackages, err := parser.ParseDir(fset, dir, nil, parser.AllErrors) if err != nil { return postProcessors, fmt.Errorf("Failed parsing directory %s: %s", dir, err) } for _, goPackage := range goPackages { ast.PackageExports(goPackage) ast.Inspect(goPackage, func(n ast.Node) bool { switch x := n.(type) { case *ast.TypeSpec: if x.Name.Name == typeID { derivedName := deriveName(path, dir) postProcessors = append(postProcessors, plugin{ Package: goPackage.Name, PluginName: derivedName, ImportName: deriveImport(x.Name.Name, derivedName), TypeName: x.Name.Name, Path: dir, }) // The AST stops parsing when we return false. Once we // find the symbol we want we can stop parsing. // DEBUG: // fmt.Printf("package %#v\n", goPackage) return false } } return true }) } } return postProcessors, nil }
// getPageInfo returns the PageInfo for a package directory abspath. If the
// parameter genAST is set, an AST containing only the package exports is
// computed (PageInfo.PAst), otherwise package documentation (PageInfo.Doc)
// is extracted from the AST. If there is no corresponding package in the
// directory, PageInfo.PAst and PageInfo.PDoc are nil. If there are no sub-
// directories, PageInfo.Dirs is nil. If a directory read error occurred,
// PageInfo.Err is set to the respective error but the error is not logged.
//
func (h *docServer) getPageInfo(abspath, relpath, pkgname string, mode PageInfoMode) PageInfo {
	var pkgFiles []string

	// If we're showing the default package, restrict to the ones
	// that would be used when building the package on this
	// system. This makes sure that if there are separate
	// implementations for, say, Windows vs Unix, we don't
	// jumble them all together.
	if pkgname == "" {
		// Note: Uses current binary's GOOS/GOARCH.
		// To use different pair, such as if we allowed the user
		// to choose, set ctxt.GOOS and ctxt.GOARCH before
		// calling ctxt.ScanDir.
		ctxt := build.Default
		ctxt.IsAbsPath = pathpkg.IsAbs
		ctxt.ReadDir = fsReadDir
		ctxt.OpenFile = fsOpenFile
		dir, err := ctxt.ImportDir(abspath, 0)
		if err == nil {
			pkgFiles = append(dir.GoFiles, dir.CgoFiles...)
		}
	}

	// filter function to select the desired .go files
	filter := func(d os.FileInfo) bool {
		// Only Go files.
		if !isPkgFile(d) {
			return false
		}
		// If we are looking at cmd documentation, only accept
		// the special fakePkgFile containing the documentation.
		if !h.isPkg {
			return d.Name() == fakePkgFile
		}
		// Also restrict file list to pkgFiles.
		// (pkgFiles == nil means build-context filtering was skipped.)
		return pkgFiles == nil || inList(d.Name(), pkgFiles)
	}

	// get package ASTs
	fset := token.NewFileSet()
	pkgs, err := parseDir(fset, abspath, filter)
	if err != nil && pkgs == nil {
		// only report directory read errors, ignore parse errors
		// (may be able to extract partial package information)
		return PageInfo{Dirname: abspath, Err: err}
	}

	// select package
	var pkg *ast.Package // selected package
	var plist []string   // list of other package (names), if any
	if len(pkgs) == 1 {
		// Exactly one package - select it.
		for _, p := range pkgs {
			pkg = p
		}
	} else if len(pkgs) > 1 {
		// Multiple packages - select the best matching package: The
		// 1st choice is the package with pkgname, the 2nd choice is
		// the package with dirname, and the 3rd choice is a package
		// that is not called "main" if there is exactly one such
		// package. Otherwise, don't select a package.
		dirpath, dirname := pathpkg.Split(abspath)

		// If the dirname is "go" we might be in a sub-directory for
		// .go files - use the outer directory name instead for better
		// results.
		if dirname == "go" {
			_, dirname = pathpkg.Split(pathpkg.Clean(dirpath))
		}

		var choice3 *ast.Package
	loop:
		for _, p := range pkgs {
			switch {
			case p.Name == pkgname:
				pkg = p
				break loop // 1st choice; we are done
			case p.Name == dirname:
				pkg = p // 2nd choice
			case p.Name != "main":
				choice3 = p
			}
		}
		// choice3 only counts when it is unambiguous: with exactly two
		// packages, one of them "main", choice3 is the other one.
		if pkg == nil && len(pkgs) == 2 {
			pkg = choice3
		}

		// Compute the list of other packages
		// (excluding the selected package, if any).
		plist = make([]string, len(pkgs))
		i := 0
		for name := range pkgs {
			if pkg == nil || name != pkg.Name {
				plist[i] = name
				i++
			}
		}
		plist = plist[0:i]
		sort.Strings(plist)
	}

	// get examples from *_test.go files
	var examples []*doc.Example
	filter = func(d os.FileInfo) bool {
		return isGoFile(d) && strings.HasSuffix(d.Name(), "_test.go")
	}
	if testpkgs, err := parseDir(fset, abspath, filter); err != nil {
		log.Println("parsing test files:", err)
	} else {
		for _, testpkg := range testpkgs {
			var files []*ast.File
			for _, f := range testpkg.Files {
				files = append(files, f)
			}
			examples = append(examples, doc.Examples(files...)...)
		}
	}

	// compute package documentation
	var past *ast.File
	var pdoc *doc.Package
	if pkg != nil {
		if mode&showSource == 0 {
			// show extracted documentation
			var m doc.Mode
			if mode&noFiltering != 0 {
				m = doc.AllDecls
			}
			if mode&allMethods != 0 {
				m |= doc.AllMethods
			}
			pdoc = doc.New(pkg, pathpkg.Clean(relpath), m) // no trailing '/' in importpath
		} else {
			// show source code
			// TODO(gri) Consider eliminating export filtering in this mode,
			// or perhaps eliminating the mode altogether.
			if mode&noFiltering == 0 {
				ast.PackageExports(pkg)
			}
			past = ast.MergePackageFiles(pkg, ast.FilterUnassociatedComments)
		}
	}

	// get directory information
	var dir *Directory
	var timestamp time.Time
	if tree, ts := fsTree.get(); tree != nil && tree.(*Directory) != nil {
		// directory tree is present; lookup respective directory
		// (may still fail if the file system was updated and the
		// new directory tree has not yet been computed)
		dir = tree.(*Directory).lookup(abspath)
		timestamp = ts
	}
	if dir == nil {
		// no directory tree present (too early after startup or
		// command-line mode); compute one level for this page
		// note: cannot use path filter here because in general
		// it doesn't contain the fsTree path
		dir = newDirectory(abspath, 1)
		timestamp = time.Now()
	}

	return PageInfo{
		Dirname:  abspath,
		PList:    plist,
		FSet:     fset,
		PAst:     past,
		PDoc:     pdoc,
		Examples: examples,
		Dirs:     dir.listing(true),
		DirTime:  timestamp,
		DirFlat:  mode&flatDir != 0,
		IsPkg:    h.isPkg,
		Err:      nil,
	}
}
// getPageInfo returns the PageInfo for a package directory abspath. If the
// parameter genAST is set, an AST containing only the package exports is
// computed (PageInfo.PAst), otherwise package documentation (PageInfo.Doc)
// is extracted from the AST. If there is no corresponding package in the
// directory, PageInfo.PAst and PageInfo.PDoc are nil. If there are no sub-
// directories, PageInfo.Dirs is nil. If a directory read error occurred,
// PageInfo.Err is set to the respective error but the error is not logged.
//
func (h *httpHandler) getPageInfo(abspath, relpath, pkgname string, mode PageInfoMode) PageInfo {
	// filter function to select the desired .go files
	filter := func(d FileInfo) bool {
		// If we are looking at cmd documentation, only accept
		// the special fakePkgFile containing the documentation.
		return isPkgFile(d) && (h.isPkg || d.Name() == fakePkgFile)
	}

	// get package ASTs
	fset := token.NewFileSet()
	pkgs, err := parseDir(fset, abspath, filter)
	if err != nil && pkgs == nil {
		// only report directory read errors, ignore parse errors
		// (may be able to extract partial package information)
		return PageInfo{Dirname: abspath, Err: err}
	}

	// select package
	var pkg *ast.Package // selected package
	var plist []string   // list of other package (names), if any
	if len(pkgs) == 1 {
		// Exactly one package - select it.
		for _, p := range pkgs {
			pkg = p
		}
	} else if len(pkgs) > 1 {
		// Multiple packages - select the best matching package: The
		// 1st choice is the package with pkgname, the 2nd choice is
		// the package with dirname, and the 3rd choice is a package
		// that is not called "main" if there is exactly one such
		// package. Otherwise, don't select a package.
		dirpath, dirname := filepath.Split(abspath)

		// If the dirname is "go" we might be in a sub-directory for
		// .go files - use the outer directory name instead for better
		// results.
		if dirname == "go" {
			_, dirname = filepath.Split(filepath.Clean(dirpath))
		}

		var choice3 *ast.Package
	loop:
		for _, p := range pkgs {
			switch {
			case p.Name == pkgname:
				pkg = p
				break loop // 1st choice; we are done
			case p.Name == dirname:
				pkg = p // 2nd choice
			case p.Name != "main":
				choice3 = p
			}
		}
		// choice3 is only trusted when it is unambiguous: exactly two
		// packages, one of which is "main".
		if pkg == nil && len(pkgs) == 2 {
			pkg = choice3
		}

		// Compute the list of other packages
		// (excluding the selected package, if any).
		plist = make([]string, len(pkgs))
		i := 0
		for name := range pkgs {
			if pkg == nil || name != pkg.Name {
				plist[i] = name
				i++
			}
		}
		plist = plist[0:i]
	}

	// compute package documentation
	var past *ast.File
	var pdoc *doc.PackageDoc
	if pkg != nil {
		if mode&exportsOnly != 0 {
			ast.PackageExports(pkg)
		}
		if mode&genDoc != 0 {
			pdoc = doc.NewPackageDoc(pkg, path.Clean(relpath)) // no trailing '/' in importpath
		} else {
			past = ast.MergePackageFiles(pkg, ast.FilterUnassociatedComments)
		}
	}

	// get directory information
	var dir *Directory
	var timestamp int64
	if tree, ts := fsTree.get(); tree != nil && tree.(*Directory) != nil {
		// directory tree is present; lookup respective directory
		// (may still fail if the file system was updated and the
		// new directory tree has not yet been computed)
		dir = tree.(*Directory).lookup(abspath)
		timestamp = ts
	}
	if dir == nil {
		// the path may refer to a user-specified file system mapped
		// via fsMap; lookup that mapping and corresponding RWValue
		// if any
		var v *RWValue
		fsMap.Iterate(func(path string, value *RWValue) bool {
			if isParentOf(path, abspath) {
				// mapping found
				v = value
				return false
			}
			return true
		})
		if v != nil {
			// found a RWValue associated with a user-specified file
			// system; a non-nil RWValue stores a (possibly out-of-date)
			// directory tree for that file system
			if tree, ts := v.get(); tree != nil && tree.(*Directory) != nil {
				dir = tree.(*Directory).lookup(abspath)
				timestamp = ts
			}
		}
	}
	if dir == nil {
		// no directory tree present (too early after startup or
		// command-line mode); compute one level for this page
		// note: cannot use path filter here because in general
		// it doesn't contain the fsTree path
		dir = newDirectory(abspath, nil, 1)
		timestamp = time.Seconds()
	}

	return PageInfo{abspath, plist, fset, past, pdoc, dir.listing(true), timestamp, h.isPkg, nil}
}
// discoverTypesInPath searches for types of typeID in path using go's ast and
// returns a list of plugins it finds.
func discoverTypesInPath(path, typeID, typeName string) ([]plugin, error) {
	pluginTypes := []plugin{}

	dirs, err := listDirectories(path)
	if err != nil {
		return pluginTypes, err
	}

	for _, dir := range dirs {
		fset := token.NewFileSet()
		goPackages, err := parser.ParseDir(fset, dir, nil, parser.AllErrors)
		if err != nil {
			return pluginTypes, fmt.Errorf("Failed parsing directory %s: %s", dir, err)
		}
		for _, goPackage := range goPackages {
			// Reduce each package to its exported declarations before walking.
			ast.PackageExports(goPackage)
			ast.Inspect(goPackage, func(n ast.Node) bool {
				switch x := n.(type) {
				case *ast.FuncDecl:
					// If we get a function then we will check the function name
					// against typeName and the function return type (Results)
					// against typeID.
					//
					// There may be more than one return type but in the target
					// case there should only be one. Also the return type is a
					// ast.SelectorExpr which means we have multiple nodes.
					// We'll read all of them as ast.Ident (identifier), join
					// them via . to get a string like terraform.ResourceProvider
					// and see if it matches our expected typeID
					//
					// This is somewhat verbose but prevents us from identifying
					// the wrong types if the function name is ambiguous or if
					// there are other subfolders added later.
					//
					// NOTE(review): x.Type.Results is nil for functions with no
					// return values — confirm such functions cannot reach this
					// point, otherwise len(...) below would panic.
					if x.Name.Name == typeName && len(x.Type.Results.List) == 1 {
						node := x.Type.Results.List[0].Type
						typeIdentifiers := []string{}
						ast.Inspect(node, func(m ast.Node) bool {
							switch y := m.(type) {
							case *ast.Ident:
								typeIdentifiers = append(typeIdentifiers, y.Name)
							}
							// We need all of the identifiers to join so we
							// can't break early here.
							return true
						})
						if strings.Join(typeIdentifiers, ".") == typeID {
							derivedName := deriveName(path, dir)
							pluginTypes = append(pluginTypes, plugin{
								Package:    goPackage.Name,
								PluginName: derivedName,
								ImportName: deriveImport(x.Name.Name, derivedName),
								TypeName:   x.Name.Name,
								Path:       dir,
							})
						}
					}
				case *ast.TypeSpec:
					// In the simpler case we will simply check whether the type
					// declaration has the name we were looking for.
					if x.Name.Name == typeID {
						derivedName := deriveName(path, dir)
						pluginTypes = append(pluginTypes, plugin{
							Package:    goPackage.Name,
							PluginName: derivedName,
							ImportName: deriveImport(x.Name.Name, derivedName),
							TypeName:   x.Name.Name,
							Path:       dir,
						})
						// The AST stops parsing when we return false. Once we
						// find the symbol we want we can stop parsing.
						return false
					}
				}
				return true
			})
		}
	}
	return pluginTypes, nil
}
// getPageInfo returns the PageInfo for a package directory abspath. If the // parameter genAST is set, an AST containing only the package exports is // computed (PageInfo.PAst), otherwise package documentation (PageInfo.Doc) // is extracted from the AST. If the parameter try is set, no errors are // logged if getPageInfo fails. If there is no corresponding package in the // directory, PageInfo.PDoc and PageInfo.PExp are nil. If there are no sub- // directories, PageInfo.Dirs is nil. // func (h *httpHandler) getPageInfo(abspath, relpath, pkgname string, genAST, try bool) PageInfo { // filter function to select the desired .go files filter := func(d *os.Dir) bool { // If we are looking at cmd documentation, only accept // the special fakePkgFile containing the documentation. return isPkgFile(d) && (h.isPkg || d.Name == fakePkgFile) } // get package ASTs pkgs, err := parser.ParseDir(abspath, filter, parser.ParseComments) if err != nil && !try { // TODO: errors should be shown instead of an empty directory log.Stderrf("parser.parseDir: %s", err) } // select package var pkg *ast.Package // selected package var plist []string // list of other package (names), if any if len(pkgs) == 1 { // Exactly one package - select it. for _, p := range pkgs { pkg = p } } else if len(pkgs) > 1 { // Multiple packages - select the best matching package: The // 1st choice is the package with pkgname, the 2nd choice is // the package with dirname, and the 3rd choice is a package // that is not called "main" if there is exactly one such // package. Otherwise, don't select a package. dirpath, dirname := pathutil.Split(abspath) // If the dirname is "go" we might be in a sub-directory for // .go files - use the outer directory name instead for better // results. 
if dirname == "go" { _, dirname = pathutil.Split(pathutil.Clean(dirpath)) } var choice3 *ast.Package loop: for _, p := range pkgs { switch { case p.Name == pkgname: pkg = p break loop // 1st choice; we are done case p.Name == dirname: pkg = p // 2nd choice case p.Name != "main": choice3 = p } } if pkg == nil && len(pkgs) == 2 { pkg = choice3 } // Compute the list of other packages // (excluding the selected package, if any). plist = make([]string, len(pkgs)) i := 0 for name, _ := range pkgs { if pkg == nil || name != pkg.Name { plist[i] = name i++ } } plist = plist[0:i] } // compute package documentation var past *ast.File var pdoc *doc.PackageDoc if pkg != nil { ast.PackageExports(pkg) if genAST { past = ast.MergePackageFiles(pkg, false) } else { pdoc = doc.NewPackageDoc(pkg, pathutil.Clean(relpath)) // no trailing '/' in importpath } } // get directory information var dir *Directory if tree, _ := fsTree.get(); tree != nil && tree.(*Directory) != nil { // directory tree is present; lookup respective directory // (may still fail if the file system was updated and the // new directory tree has not yet been computed) // TODO(gri) Need to build directory tree for fsMap entries dir = tree.(*Directory).lookup(abspath) } if dir == nil { // no directory tree present (either early after startup // or command-line mode, or we don't build a tree for the // directory; e.g. google3); compute one level for this page dir = newDirectory(abspath, 1) } return PageInfo{abspath, plist, past, pdoc, dir.listing(true), h.isPkg} }
// main performs a breadth-limited traversal of the package named by
// *filename (and its imports, up to *maxDepth levels deep) under
// $<envVar>/src, collects struct and interface declarations, resolves
// which structs implement which interfaces, and prints a DOT graph.
func main() {
	flag.Parse()
	fset := token.NewFileSet()
	var pkgs map[string]*ast.Package
	var err error
	gopath := os.Getenv(*envVar) + "/src/"
	var structList []*structNode
	var interfaceList []*interfaceNode
	//var funcList []*function
	// directories and depth act as a pair of parallel stacks: the
	// directory to search next and how many import hops away it is.
	var directories []string
	var depth []int
	directories = append(directories, *filename)
	depth = append(depth, 0)
	var searchedDirectories []string
	//loop until directories to search is empty
	for len(directories) > 0 {
		pkgs, err = parser.ParseDir(fset, gopath+directories[len(directories)-1], legalFile, 0)
		// currentDepth is the import depth of the directory just popped;
		// imports found below inherit currentDepth+1.
		currentDepth := depth[len(depth)-1]
		searchedDirectories = append(searchedDirectories, directories[len(directories)-1])
		depth = depth[:len(depth)-1]
		directories = directories[:len(directories)-1]
		//skip this folder if there was an error parsing, usually meaning the directory is not found
		if err != nil {
			continue
		}
		for _, pkg := range pkgs {
			//remove unexported types/functions if onlyExports
			if *onlyExports {
				hasExports := ast.PackageExports(pkg)
				if !hasExports {
					continue
				}
			}
			typeMap.currentPkg = pkg.Name
			for _, file := range pkg.Files {
				//add imports to directories to check if not at maxDepth yet
				if currentDepth < *maxDepth {
					for _, impor := range file.Imports {
						importName := strings.Trim(impor.Path.Value, "\"")
						// only enqueue paths not already searched or queued
						if !containsString(searchedDirectories, importName) && !containsString(directories, importName) {
							depth = append(depth, currentDepth+1)
							directories = append(directories, importName)
						}
					}
				}
				//add all types to master list before processing declarations
				//minimizes creation of unknown types
				for key := range file.Scope.Objects {
					typeMap.lookupOrAdd(key)
				}
				//processes all structs, interfaces, and embedded types
				for key, scope := range file.Scope.Objects {
					//non-receiver functions are found in scope
					if scope.Kind == ast.Typ {
						typ := typeMap.lookupOrAdd(key)
						processTypeDecl(scope, typ, &structList, &interfaceList)
					}
				}
				//processes all the function declarations
				for _, decl := range file.Decls {
					processFuncDecl(decl)
				}
			}
		}
	}
	//set all interfaces' implementedByCache
	for _, i := range interfaceList {
		i.setImplementedBy(structList)
	}
	//Create the dot graph and print it out
	s := createDOT(interfaceList, structList)
	fmt.Println(s)
}
func buildDoc(importPath string, files []string) (*Package, error) { b := &builder{ fset: token.NewFileSet(), importPaths: make(map[string]map[string]string), } pkgs := make(map[string]*ast.Package) for _, f := range files { if strings.HasSuffix(f, "_test.go") { continue } if src, err := parser.ParseFile(b.fset, f, nil, parser.ParseComments); err == nil { name := src.Name.Name pkg, found := pkgs[name] if !found { pkg = &ast.Package{Name: name, Files: make(map[string]*ast.File)} pkgs[name] = pkg } pkg.Files[f] = src } } score := 0 for _, pkg := range pkgs { switch { case score < 3 && strings.HasSuffix(importPath, pkg.Name): b.pkg = pkg score = 3 case score < 2 && pkg.Name != "main": b.pkg = pkg score = 2 case score < 1: b.pkg = pkg score = 1 } } if b.pkg == nil { return nil, fmt.Errorf("Package %s not found", importPath) } ast.PackageExports(b.pkg) pdoc := doc.New(b.pkg, importPath, 0) pdoc.Doc = strings.TrimRight(pdoc.Doc, " \t\n\r") // Collect examples. for _, f := range files { if !strings.HasSuffix(f, "_test.go") { continue } src, err := parser.ParseFile(b.fset, f, nil, parser.ParseComments) if err != nil { continue } if src.Name.Name != pdoc.Name && src.Name.Name != pdoc.Name+"_test" { continue } b.examples = append(b.examples, doc.Examples(src)...) } return &Package{ Consts: b.values(pdoc.Consts), Doc: pdoc.Doc, Examples: b.getExamples(""), Files: b.files(pdoc.Filenames), Funcs: b.funcs(pdoc.Funcs), ImportPath: pdoc.ImportPath, Name: pdoc.Name, Types: b.types(pdoc.Types), Updated: time.Now(), Vars: b.values(pdoc.Vars), }, nil }