Example #1
func TestFilterDuplicates(t *testing.T) {
	// parse input
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "", input, 0)
	if err != nil {
		t.Fatal(err)
	}

	// create package
	files := map[string]*ast.File{"": file}
	pkg, err := ast.NewPackage(fset, files, nil, nil)
	if err != nil {
		t.Fatal(err)
	}

	// filter
	merged := ast.MergePackageFiles(pkg, ast.FilterFuncDuplicates)

	// pretty-print
	var buf bytes.Buffer
	if err := format.Node(&buf, fset, merged); err != nil {
		t.Fatal(err)
	}
	output := buf.String()

	if output != golden {
		t.Errorf("incorrect output:\n%s", output)
	}
}
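The test above relies on package-level input and golden fixtures that this excerpt does not show. A hypothetical minimal pair (illustrative only; the exact golden text depends on go/format's output) might be:

// Hypothetical fixtures, not the originals: input declares f twice, and
// golden is the expected pretty-printed result once FilterFuncDuplicates
// has dropped the duplicate declaration.
const input = `package p

func f() {}

func f() {}
`

const golden = `package p

func f() {}
`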
Example #2
// parsePackages inspects Go AST packages to determine whether their files
// are intended to register Tasks with, or otherwise use, the gofer package.
func parsePackages(packages map[string]*ast.Package, dir string) {
	for _, pkg := range packages {
		file := ast.MergePackageFiles(pkg, ast.FilterImportDuplicates)

		if isGoferTaskFile(file) {
			imprtPath := strings.TrimPrefix(strings.Replace(dir, goPath, "", 1), SourcePrefix)
			templateData.Imports = append(templateData.Imports, imprt{imprtPath})
		}
	}
}
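isGoferTaskFile is a helper defined elsewhere in the project. A hypothetical sketch (imports assumed: go/ast, strconv, strings; the import-path check is a guess, the real test may inspect call sites instead):

// Hypothetical sketch: treat a merged file as a gofer task file if it
// imports a package whose path ends in "/gofer".
func isGoferTaskFile(file *ast.File) bool {
	for _, imp := range file.Imports {
		if path, err := strconv.Unquote(imp.Path.Value); err == nil &&
			strings.HasSuffix(path, "/gofer") {
			return true
		}
	}
	return false
}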
Example #3
File: pkg.go Project: sreis/go
// parsePackage turns the build package we found into a parsed package
// we can then use to generate documentation.
func parsePackage(writer io.Writer, pkg *build.Package, userPath string) *Package {
	fs := token.NewFileSet()
	// include tells parser.ParseDir which files to include.
	// That means the file must be in the build package's GoFiles or CgoFiles
	// list only (no tag-ignored files, tests, swig or other non-Go files).
	include := func(info os.FileInfo) bool {
		for _, name := range pkg.GoFiles {
			if name == info.Name() {
				return true
			}
		}
		for _, name := range pkg.CgoFiles {
			if name == info.Name() {
				return true
			}
		}
		return false
	}
	pkgs, err := parser.ParseDir(fs, pkg.Dir, include, parser.ParseComments)
	if err != nil {
		log.Fatal(err)
	}
	// Make sure they are all in one package.
	if len(pkgs) != 1 {
		log.Fatalf("multiple packages in directory %s", pkg.Dir)
	}
	astPkg := pkgs[pkg.Name]

	// TODO: go/doc does not include typed constants in the constants
	// list, which is what we want. For instance, time.Sunday is of type
	// time.Weekday, so it is defined in the type but not in the
	// Consts list for the package. This prevents
	//	go doc time.Sunday
	// from finding the symbol. Work around this for now, but we
	// should fix it in go/doc.
	// A similar story applies to factory functions.
	docPkg := doc.New(astPkg, pkg.ImportPath, doc.AllDecls)
	for _, typ := range docPkg.Types {
		docPkg.Consts = append(docPkg.Consts, typ.Consts...)
		docPkg.Vars = append(docPkg.Vars, typ.Vars...)
		docPkg.Funcs = append(docPkg.Funcs, typ.Funcs...)
	}

	return &Package{
		writer:   writer,
		name:     pkg.Name,
		userPath: userPath,
		pkg:      astPkg,
		file:     ast.MergePackageFiles(astPkg, 0),
		doc:      docPkg,
		build:    pkg,
		fs:       fs,
	}
}
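The Package type returned here is defined elsewhere in the project; from the composite literal above, its definition is presumably close to this sketch (field types inferred from usage; the real struct may carry additional fields):

// Sketch of the Package struct implied by parsePackage's return value.
// Imports assumed: go/ast, go/build, go/doc, go/token, io.
type Package struct {
	writer   io.Writer
	name     string
	userPath string
	pkg      *ast.Package
	file     *ast.File
	doc      *doc.Package
	build    *build.Package
	fs       *token.FileSet
}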
Example #4
// InlineDotImports displays Go package source code with dot imports inlined.
func InlineDotImports(w io.Writer, importPath string) {
	/*imp2 := importer.New()
	imp2.Config.UseGcFallback = true
	cfg := types.Config{Import: imp2.Import}
	_ = cfg*/

	conf := loader.Config{
	//TypeChecker:   cfg,
	}

	conf.Import(importPath)

	prog, err := conf.Load()
	if err != nil {
		panic(err)
	}

	/*pi, err := imp.ImportPackage(importPath)
	if err != nil {
		panic(err)
	}
	_ = pi*/

	pi := prog.Imported[importPath]

	findDotImports(prog, pi)

	files := make(map[string]*ast.File)
	{
		// This package
		for _, file := range pi.Files {
			filename := prog.Fset.File(file.Package).Name()
			files[filename] = file
		}

		// All dot imports
		for _, pi := range dotImports {
			for _, file := range pi.Files {
				filename := prog.Fset.File(file.Package).Name()
				files[filename] = file
			}
		}
	}

	apkg := &ast.Package{Name: pi.Pkg.Name(), Files: files}

	merged := ast.MergePackageFiles(apkg, astMergeMode)

	WriteMergedPackage(w, prog.Fset, merged)
}
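findDotImports, dotImports, and astMergeMode are defined elsewhere in the project. A hypothetical sketch of what they could look like (imports assumed: go/ast, strconv, golang.org/x/tools/go/loader; the merge mode chosen here is an assumption):

// dotImports collects the packages pulled in via dot imports.
var dotImports []*loader.PackageInfo

// astMergeMode is a guess at the merge mode used for the final file.
const astMergeMode = ast.FilterImportDuplicates | ast.FilterUnassociatedComments

// findDotImports records every package that pi dot-imports, following
// transitive dot imports (the import graph is acyclic, so this terminates).
func findDotImports(prog *loader.Program, pi *loader.PackageInfo) {
	for _, file := range pi.Files {
		for _, imp := range file.Imports {
			if imp.Name == nil || imp.Name.Name != "." {
				continue // not a dot import
			}
			if path, err := strconv.Unquote(imp.Path.Value); err == nil {
				if dep := prog.Package(path); dep != nil {
					dotImports = append(dotImports, dep)
					findDotImports(prog, dep)
				}
			}
		}
	}
}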
Example #5
func (b *builder) merge() ([]byte, error) {
	var buf bytes.Buffer
	if err := b.tpl.Execute(&buf, b); err != nil {
		return nil, err
	}

	f, err := parser.ParseFile(b.fset, "", &buf, 0)
	if err != nil {
		return nil, err
	}
	// b.imports(f)
	b.deleteImports(f)
	b.files["main.go"] = f

	pkg, _ := ast.NewPackage(b.fset, b.files, nil, nil)
	pkg.Name = "main"

	ret := ast.MergePackageFiles(pkg, 0)

	// TODO: we re-parse the formatted output because the node positions otherwise seem to go wrong
	buf.Reset()
	if err = format.Node(&buf, b.fset, ret); err != nil {
		return nil, err
	}

	ret, err = parser.ParseFile(b.fset, "", buf.Bytes(), 0)
	if err != nil {
		return nil, err
	}

	for _, spec := range b.imports {
		var name string
		if spec.Name != nil {
			name = spec.Name.Name
		}
		ipath, _ := strconv.Unquote(spec.Path.Value)
		addImport(b.fset, ret, name, ipath)
	}

	buf.Reset()
	if err := format.Node(&buf, b.fset, ret); err != nil {
		return nil, err
	}

	return buf.Bytes(), nil
}
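addImport is not shown in this excerpt. A plausible sketch simply delegates to golang.org/x/tools/go/ast/astutil, which inserts the spec into the file's import declaration and keeps it sorted (imports assumed: go/ast, go/token, astutil):

// Hypothetical sketch of the addImport helper used above.
func addImport(fset *token.FileSet, f *ast.File, name, ipath string) {
	if name != "" {
		astutil.AddNamedImport(fset, f, name, ipath)
		return
	}
	astutil.AddImport(fset, f, ipath)
}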
Example #6
// Package parses the package at the given path and returns information about its functions.
func Package(path string) ([]Function, error) {
	fset := token.NewFileSet()
	pkg, err := getPackage(path, fset)
	if err != nil {
		return nil, err
	}

	f := ast.MergePackageFiles(
		pkg,
		ast.FilterImportDuplicates|ast.FilterUnassociatedComments)

	info, err := makeInfo(path, fset, f)
	if err != nil {
		return nil, err
	}

	return functions(f, info, fset)
}
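getPackage is a helper defined elsewhere. A hypothetical sketch (imports assumed: fmt, go/ast, go/parser, go/token, strings) that parses the directory and returns its single non-test package:

// Hypothetical sketch: parse path and return the first package that is not
// an external test package.
func getPackage(path string, fset *token.FileSet) (*ast.Package, error) {
	pkgs, err := parser.ParseDir(fset, path, nil, parser.ParseComments)
	if err != nil {
		return nil, err
	}
	for name, pkg := range pkgs {
		if !strings.HasSuffix(name, "_test") {
			return pkg, nil
		}
	}
	return nil, fmt.Errorf("no package found in %s", path)
}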
Example #7
// syntaxTree retrieves the AST for the given package by merging all its files
// and constructing a global syntax tree.
func syntaxTree(pkgDir string) (*ast.File, error) {
	fset := token.NewFileSet()
	packages, err := parser.ParseDir(fset, pkgDir, nil, 0)
	if err != nil {
		return nil, err
	}

	var pkgAst *ast.Package
	pkgName := filepath.Base(pkgDir)
	if p, found := packages["main"]; found {
		pkgAst = p
	} else if p, found := packages[pkgName]; found {
		pkgAst = p
	}
	if pkgAst == nil {
		return nil, fmt.Errorf("cannot find package main or %s in %s", pkgName, pkgDir)
	}

	return ast.MergePackageFiles(pkgAst, 0), nil
}
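As a usage sketch (same package as syntaxTree, with go/ast imported), the merged file can be walked like any single-file AST, for example to list the package's top-level functions:

// listFuncs is a hypothetical caller of syntaxTree: it returns the names of
// all top-level functions declared anywhere in the package.
func listFuncs(pkgDir string) ([]string, error) {
	file, err := syntaxTree(pkgDir)
	if err != nil {
		return nil, err
	}
	var names []string
	for _, decl := range file.Decls {
		if fn, ok := decl.(*ast.FuncDecl); ok {
			names = append(names, fn.Name.Name)
		}
	}
	return names, nil
}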
Example #8
File: main.go Project: regorov/go
func main() {
	p := argparse.New("A minimal Go compiler for K750")
	p.Argument("Dir", 1, argparse.Store, "dir", "The package directory to compile. It should contain a 'main' package.")

	args := &Args{}
	err := p.Parse(args)
	if err != nil {
		if cmdLineErr, ok := err.(argparse.CommandLineError); ok {
			ansi.Fprintln(os.Stderr, ansi.RedBold, string(cmdLineErr))
			p.Usage()
			os.Exit(2)

		} else {
			ansi.Fprintf(os.Stderr, ansi.RedBold, "Error: %s\n", err.Error())
			os.Exit(1)
		}
	}

	fset := token.NewFileSet()
	pkgs, err := parser.ParseDir(fset, args.Dir, nil, parser.DeclarationErrors)
	if err != nil {
		ansi.Fprintf(os.Stderr, ansi.RedBold, "Error: %s\n", err.Error())
		os.Exit(1)
	}

	pkg, ok := pkgs["main"]
	if !ok {
		ansi.Fprintf(os.Stderr, ansi.RedBold, "Error: main package was not found.")
		os.Exit(1)
	}

	file := ast.MergePackageFiles(pkg, ast.FilterFuncDuplicates|ast.FilterImportDuplicates)

	emitHeader()
	defer emitFooter()

	ast.Walk(NewFileVisitor(), file)
}
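NewFileVisitor, emitHeader, and emitFooter belong to the compiler itself and are not shown. A heavily simplified, hypothetical skeleton of the visitor (the code-generation details are unknown here and left as comments):

// fileVisitor is a hypothetical skeleton of the visitor driven by ast.Walk
// above; the real implementation presumably emits code for the K750 target.
type fileVisitor struct{}

func NewFileVisitor() ast.Visitor { return fileVisitor{} }

func (v fileVisitor) Visit(n ast.Node) ast.Visitor {
	if fn, ok := n.(*ast.FuncDecl); ok {
		_ = fn // emit code for the function here
	}
	return v // keep walking into child nodes
}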
Example #9
// findVersion returns the positions of the beginning and end of the Version
// constant's value. If the value is uninitialized, the token.Pos of the end
// of `Version` is returned in both places.
func findVersion(pkg string) (f *token.FileSet, pos, end token.Pos, err error) {
	// TODO consider benchmarking other approaches to parsing.
	f = token.NewFileSet()
	pkgs, err := parser.ParseDir(f, pkg, nil, 0)
	if err != nil {
		return nil, pos, end, err
	}

	// TODO: measure how much scheduler overhead making this concurrent would add

	for _, pkg := range pkgs {
		pkgf := ast.MergePackageFiles(pkg, 0) // TODO could exclude as much as possible
		for _, d := range pkgf.Decls {
			switch d := d.(type) {
			case *ast.GenDecl:
				switch d.Tok {
				case token.CONST:
					for _, spec := range d.Specs {
						switch spec := spec.(type) {
						case *ast.ValueSpec:
							for i, n := range spec.Names {
								if n.Name == "Version" {
									if spec.Values != nil {
										expr := spec.Values[i]
										return f, expr.Pos(), expr.End(), nil // we need to go deeper
									}
									return f, n.End(), n.End(), nil // return end of names, `= value` gets added
								}
							}
						}
					}
				}
			}
		}
	}
	return nil, pos, end, errors.New("Didn't find 'Version' const in package " + pkg)
}
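A usage sketch (same package as findVersion, with fmt imported): converting the returned token.Pos values through the FileSet gives the file name and byte offsets where a new version value could be spliced in.

// reportVersion is a hypothetical caller of findVersion that prints where
// the Version value lives.
func reportVersion(pkg string) error {
	fset, pos, end, err := findVersion(pkg)
	if err != nil {
		return err
	}
	p, e := fset.Position(pos), fset.Position(end)
	fmt.Printf("%s: bytes %d-%d (line %d)\n", p.Filename, p.Offset, e.Offset, p.Line)
	return nil
}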
Example #10
// getPageInfo returns the PageInfo for a package directory abspath. If the
// mode parameter has the showSource flag set, a merged AST of the package
// (reduced to its exports unless noFiltering is set) is computed
// (PageInfo.PAst), otherwise package documentation (PageInfo.PDoc)
// is extracted from the AST. If there is no corresponding package in the
// directory, PageInfo.PAst and PageInfo.PDoc are nil. If there are no sub-
// directories, PageInfo.Dirs is nil. If a directory read error occurred,
// PageInfo.Err is set to the respective error but the error is not logged.
//
func (h *docServer) getPageInfo(abspath, relpath, pkgname string, mode PageInfoMode) PageInfo {
	var pkgFiles []string

	// If we're showing the default package, restrict to the ones
	// that would be used when building the package on this
	// system.  This makes sure that if there are separate
	// implementations for, say, Windows vs Unix, we don't
	// jumble them all together.
	if pkgname == "" {
		// Note: Uses current binary's GOOS/GOARCH.
		// To use different pair, such as if we allowed the user
		// to choose, set ctxt.GOOS and ctxt.GOARCH before
		// calling ctxt.ScanDir.
		ctxt := build.Default
		ctxt.IsAbsPath = pathpkg.IsAbs
		ctxt.ReadDir = fsReadDir
		ctxt.OpenFile = fsOpenFile
		dir, err := ctxt.ImportDir(abspath, 0)
		if err == nil {
			pkgFiles = append(dir.GoFiles, dir.CgoFiles...)
		}
	}

	// filter function to select the desired .go files
	filter := func(d os.FileInfo) bool {
		// Only Go files.
		if !isPkgFile(d) {
			return false
		}
		// If we are looking at cmd documentation, only accept
		// the special fakePkgFile containing the documentation.
		if !h.isPkg {
			return d.Name() == fakePkgFile
		}
		// Also restrict file list to pkgFiles.
		return pkgFiles == nil || inList(d.Name(), pkgFiles)
	}

	// get package ASTs
	fset := token.NewFileSet()
	pkgs, err := parseDir(fset, abspath, filter)
	if err != nil && pkgs == nil {
		// only report directory read errors, ignore parse errors
		// (may be able to extract partial package information)
		return PageInfo{Dirname: abspath, Err: err}
	}

	// select package
	var pkg *ast.Package // selected package
	var plist []string   // list of other package (names), if any
	if len(pkgs) == 1 {
		// Exactly one package - select it.
		for _, p := range pkgs {
			pkg = p
		}

	} else if len(pkgs) > 1 {
		// Multiple packages - select the best matching package: The
		// 1st choice is the package with pkgname, the 2nd choice is
		// the package with dirname, and the 3rd choice is a package
		// that is not called "main" if there is exactly one such
		// package. Otherwise, don't select a package.
		dirpath, dirname := pathpkg.Split(abspath)

		// If the dirname is "go" we might be in a sub-directory for
		// .go files - use the outer directory name instead for better
		// results.
		if dirname == "go" {
			_, dirname = pathpkg.Split(pathpkg.Clean(dirpath))
		}

		var choice3 *ast.Package
	loop:
		for _, p := range pkgs {
			switch {
			case p.Name == pkgname:
				pkg = p
				break loop // 1st choice; we are done
			case p.Name == dirname:
				pkg = p // 2nd choice
			case p.Name != "main":
				choice3 = p
			}
		}
		if pkg == nil && len(pkgs) == 2 {
			pkg = choice3
		}

		// Compute the list of other packages
		// (excluding the selected package, if any).
		plist = make([]string, len(pkgs))
		i := 0
		for name := range pkgs {
			if pkg == nil || name != pkg.Name {
				plist[i] = name
				i++
			}
		}
		plist = plist[0:i]
		sort.Strings(plist)
	}

	// get examples from *_test.go files
	var examples []*doc.Example
	filter = func(d os.FileInfo) bool {
		return isGoFile(d) && strings.HasSuffix(d.Name(), "_test.go")
	}
	if testpkgs, err := parseDir(fset, abspath, filter); err != nil {
		log.Println("parsing test files:", err)
	} else {
		for _, testpkg := range testpkgs {
			var files []*ast.File
			for _, f := range testpkg.Files {
				files = append(files, f)
			}
			examples = append(examples, doc.Examples(files...)...)
		}
	}

	// compute package documentation
	var past *ast.File
	var pdoc *doc.Package
	if pkg != nil {
		if mode&showSource == 0 {
			// show extracted documentation
			var m doc.Mode
			if mode&noFiltering != 0 {
				m = doc.AllDecls
			}
			if mode&allMethods != 0 {
				m |= doc.AllMethods
			}
			pdoc = doc.New(pkg, pathpkg.Clean(relpath), m) // no trailing '/' in importpath
		} else {
			// show source code
			// TODO(gri) Consider eliminating export filtering in this mode,
			//           or perhaps eliminating the mode altogether.
			if mode&noFiltering == 0 {
				ast.PackageExports(pkg)
			}
			past = ast.MergePackageFiles(pkg, ast.FilterUnassociatedComments)
		}
	}

	// get directory information
	var dir *Directory
	var timestamp time.Time
	if tree, ts := fsTree.get(); tree != nil && tree.(*Directory) != nil {
		// directory tree is present; lookup respective directory
		// (may still fail if the file system was updated and the
		// new directory tree has not yet been computed)
		dir = tree.(*Directory).lookup(abspath)
		timestamp = ts
	}
	if dir == nil {
		// no directory tree present (too early after startup or
		// command-line mode); compute one level for this page
		// note: cannot use path filter here because in general
		//       it doesn't contain the fsTree path
		dir = newDirectory(abspath, 1)
		timestamp = time.Now()
	}

	return PageInfo{
		Dirname:  abspath,
		PList:    plist,
		FSet:     fset,
		PAst:     past,
		PDoc:     pdoc,
		Examples: examples,
		Dirs:     dir.listing(true),
		DirTime:  timestamp,
		DirFlat:  mode&flatDir != 0,
		IsPkg:    h.isPkg,
		Err:      nil,
	}
}
Example #11
File: godoc.go Project: hfeeki/go
// getPageInfo returns the PageInfo for a package directory abspath. If the
// mode parameter has the showSource flag set, a merged AST of the package
// (reduced to its exports unless noFiltering is set) is computed
// (PageInfo.PAst), otherwise package documentation (PageInfo.PDoc)
// is extracted from the AST. If there is no corresponding package in the
// directory, PageInfo.PAst and PageInfo.PDoc are nil. If there are no sub-
// directories, PageInfo.Dirs is nil. If a directory read error occurred,
// PageInfo.Err is set to the respective error but the error is not logged.
//
func (h *docServer) getPageInfo(abspath, relpath string, mode PageInfoMode) PageInfo {
	var pkgFiles []string

	// Restrict to the package files
	// that would be used when building the package on this
	// system.  This makes sure that if there are separate
	// implementations for, say, Windows vs Unix, we don't
	// jumble them all together.
	// Note: Uses current binary's GOOS/GOARCH.
	// To use different pair, such as if we allowed the user
	// to choose, set ctxt.GOOS and ctxt.GOARCH before
	// calling ctxt.ScanDir.
	ctxt := build.Default
	ctxt.IsAbsPath = pathpkg.IsAbs
	ctxt.ReadDir = fsReadDir
	ctxt.OpenFile = fsOpenFile
	if dir, err := ctxt.ImportDir(abspath, 0); err == nil {
		pkgFiles = append(dir.GoFiles, dir.CgoFiles...)
	}

	// filter function to select the desired .go files
	filter := func(d os.FileInfo) bool {
		// Only Go files.
		if !isPkgFile(d) {
			return false
		}
		// If we are looking at cmd documentation, only accept
		// the special fakePkgFile containing the documentation.
		if !h.isPkg {
			return d.Name() == fakePkgFile
		}
		// Also restrict file list to pkgFiles.
		return pkgFiles == nil || inList(d.Name(), pkgFiles)
	}

	// get package ASTs
	fset := token.NewFileSet()
	pkgs, err := parseDir(fset, abspath, filter)
	if err != nil {
		return PageInfo{Dirname: abspath, Err: err}
	}

	// select package
	var pkg *ast.Package // selected package
	if len(pkgs) == 1 {
		// Exactly one package - select it.
		for _, p := range pkgs {
			pkg = p
		}

	} else if len(pkgs) > 1 {
		// More than one package - report an error.
		var buf bytes.Buffer
		for _, p := range pkgs {
			if buf.Len() > 0 {
				fmt.Fprintf(&buf, ", ")
			}
			fmt.Fprintf(&buf, p.Name)
		}
		return PageInfo{
			Dirname: abspath,
			Err:     fmt.Errorf("%s contains more than one package: %s", abspath, buf.Bytes()),
		}
	}

	// get examples from *_test.go files
	var examples []*doc.Example
	filter = func(d os.FileInfo) bool {
		return isGoFile(d) && strings.HasSuffix(d.Name(), "_test.go")
	}
	if testpkgs, err := parseDir(fset, abspath, filter); err != nil {
		log.Println("parsing test files:", err)
	} else {
		for _, testpkg := range testpkgs {
			var files []*ast.File
			for _, f := range testpkg.Files {
				files = append(files, f)
			}
			examples = append(examples, doc.Examples(files...)...)
		}
	}

	// compute package documentation
	var past *ast.File
	var pdoc *doc.Package
	if pkg != nil {
		if mode&showSource == 0 {
			// show extracted documentation
			var m doc.Mode
			if mode&noFiltering != 0 {
				m = doc.AllDecls
			}
			if mode&allMethods != 0 {
				m |= doc.AllMethods
			}
			pdoc = doc.New(pkg, pathpkg.Clean(relpath), m) // no trailing '/' in importpath
		} else {
			// show source code
			// TODO(gri) Consider eliminating export filtering in this mode,
			//           or perhaps eliminating the mode altogether.
			if mode&noFiltering == 0 {
				packageExports(fset, pkg)
			}
			past = ast.MergePackageFiles(pkg, 0)
		}
	}

	// get directory information
	var dir *Directory
	var timestamp time.Time
	if tree, ts := fsTree.get(); tree != nil && tree.(*Directory) != nil {
		// directory tree is present; lookup respective directory
		// (may still fail if the file system was updated and the
		// new directory tree has not yet been computed)
		dir = tree.(*Directory).lookup(abspath)
		timestamp = ts
	}
	if dir == nil {
		// no directory tree present (too early after startup or
		// command-line mode); compute one level for this page
		// note: cannot use path filter here because in general
		//       it doesn't contain the fsTree path
		dir = newDirectory(abspath, 1)
		timestamp = time.Now()
	}

	return PageInfo{
		Dirname:  abspath,
		FSet:     fset,
		PAst:     past,
		PDoc:     pdoc,
		Examples: examples,
		Dirs:     dir.listing(true),
		DirTime:  timestamp,
		DirFlat:  mode&flatDir != 0,
		IsPkg:    h.isPkg,
		Err:      nil,
	}
}
Example #12
func bundle() error {
	tmp, err := ioutil.TempFile(".", ".tmp.asset-")
	if err != nil {
		return err
	}
	defer func() {
		if tmp != nil {
			_ = os.Remove(tmp.Name())
		}
	}()
	defer tmp.Close()

	if _, err := io.WriteString(tmp, "// +build ignore\n\n"); err != nil {
		return err
	}

	fset := token.NewFileSet()

	pkgs, err := parser.ParseDir(fset, ".", filter, parser.ParseComments)
	if err != nil {
		return err
	}
	if len(pkgs) != 1 {
		return errors.New("more than one package found in files to be bundled")
	}
	pkg := pkgs["main"]

	// Need to move all import statements so they're at the front of
	// the new file; first, remove all existing import declarations.
	for _, f := range pkg.Files {
		decls := f.Decls[:0]
		for _, d := range f.Decls {
			if d, ok := d.(*ast.GenDecl); ok && d.Tok == token.IMPORT {
				continue
			}
			decls = append(decls, d)
		}
		f.Decls = decls
	}

	merged := ast.MergePackageFiles(pkg, ast.FilterUnassociatedComments|ast.FilterImportDuplicates)

	// Put imports back, using the parsed information.
	dec := &ast.GenDecl{
		Tok:    token.IMPORT,
		Lparen: 1, // kludge: a non-zero Lparen makes go/format print a factored import block
	}
	for _, f := range pkg.Files {
		for _, i := range f.Imports {
			dec.Specs = append(dec.Specs, i)
		}
	}
	merged.Decls = append([]ast.Decl{dec}, merged.Decls...)

	if err := format.Node(tmp, fset, merged); err != nil {
		return err
	}

	if err := os.Rename(tmp.Name(), "asset.go"); err != nil {
		return err
	}
	tmp = nil
	return nil
}
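filter is a package-level function not included in this excerpt. A plausible sketch (imports assumed: os, strings; exactly which files the real tool skips is an assumption) excludes the generated output file and tests:

// Hypothetical sketch of the filter passed to parser.ParseDir above.
func filter(fi os.FileInfo) bool {
	name := fi.Name()
	return name != "asset.go" && !strings.HasSuffix(name, "_test.go")
}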
Example #13
// getPageInfo returns the PageInfo for a package directory abspath. If the
// mode parameter has the showSource flag set, a merged AST of the package
// (reduced to its exports unless noFiltering is set) is computed
// (PageInfo.PAst), otherwise package documentation (PageInfo.PDoc)
// is extracted from the AST. If there is no corresponding package in the
// directory, PageInfo.PAst and PageInfo.PDoc are nil. If there are no sub-
// directories, PageInfo.Dirs is nil. If an error occurred, PageInfo.Err is
// set to the respective error but the error is not logged.
//
func (h *docServer) getPageInfo(abspath, relpath string, mode PageInfoMode) (info PageInfo) {
	info.Dirname = abspath

	// Restrict to the package files that would be used when building
	// the package on this system.  This makes sure that if there are
	// separate implementations for, say, Windows vs Unix, we don't
	// jumble them all together.
	// Note: Uses current binary's GOOS/GOARCH.
	// To use different pair, such as if we allowed the user to choose,
	// set ctxt.GOOS and ctxt.GOARCH before calling ctxt.ImportDir.
	ctxt := build.Default
	ctxt.IsAbsPath = pathpkg.IsAbs
	ctxt.ReadDir = fsReadDir
	ctxt.OpenFile = fsOpenFile
	pkginfo, err := ctxt.ImportDir(abspath, 0)
	// continue if there are no Go source files; we still want the directory info
	if _, nogo := err.(*build.NoGoError); err != nil && !nogo {
		info.Err = err
		return
	}

	// collect package files
	pkgname := pkginfo.Name
	pkgfiles := append(pkginfo.GoFiles, pkginfo.CgoFiles...)
	if len(pkgfiles) == 0 {
		// Commands written in C have no .go files in the build.
		// Instead, documentation may be found in an ignored file.
		// The file may be ignored via an explicit +build ignore
		// constraint (recommended), or by defining the package
		// documentation (historic).
		pkgname = "main" // assume package main since pkginfo.Name == ""
		pkgfiles = pkginfo.IgnoredGoFiles
	}

	// get package information, if any
	if len(pkgfiles) > 0 {
		// build package AST
		fset := token.NewFileSet()
		files, err := parseFiles(fset, abspath, pkgfiles)
		if err != nil {
			info.Err = err
			return
		}
		pkg := &ast.Package{Name: pkgname, Files: files}

		// extract package documentation
		info.FSet = fset
		if mode&showSource == 0 {
			// show extracted documentation
			var m doc.Mode
			if mode&noFiltering != 0 {
				m = doc.AllDecls
			}
			if mode&allMethods != 0 {
				m |= doc.AllMethods
			}
			info.PDoc = doc.New(pkg, pathpkg.Clean(relpath), m) // no trailing '/' in importpath

			// collect examples
			testfiles := append(pkginfo.TestGoFiles, pkginfo.XTestGoFiles...)
			files, err = parseFiles(fset, abspath, testfiles)
			if err != nil {
				log.Println("parsing examples:", err)
			}
			info.Examples = collectExamples(pkg, files)

			// collect any notes that we want to show
			if info.PDoc.Notes != nil {
				info.Notes = make(map[string][]string)
				for _, m := range notesToShow {
					if n := info.PDoc.Notes[m]; n != nil {
						info.Notes[m] = n
					}
				}
			}

		} else {
			// show source code
			// TODO(gri) Consider eliminating export filtering in this mode,
			//           or perhaps eliminating the mode altogether.
			if mode&noFiltering == 0 {
				packageExports(fset, pkg)
			}
			info.PAst = ast.MergePackageFiles(pkg, 0)
		}
		info.IsMain = pkgname == "main"
	}

	// get directory information, if any
	var dir *Directory
	var timestamp time.Time
	if tree, ts := fsTree.get(); tree != nil && tree.(*Directory) != nil {
		// directory tree is present; lookup respective directory
		// (may still fail if the file system was updated and the
		// new directory tree has not yet been computed)
		dir = tree.(*Directory).lookup(abspath)
		timestamp = ts
	}
	if dir == nil {
		// no directory tree present (too early after startup or
		// command-line mode); compute one level for this page
		// note: cannot use path filter here because in general
		//       it doesn't contain the fsTree path
		dir = newDirectory(abspath, 1)
		timestamp = time.Now()
	}
	info.Dirs = dir.listing(true)
	info.DirTime = timestamp
	info.DirFlat = mode&flatDir != 0

	return
}
Example #14
0
func main() {
	if len(os.Args) != 4 {
		fmt.Println("Usage: tickdoc absPath path/to/golang/package output/dir")
		fmt.Println()
		fmt.Println("absPath - the absolute path of rendered documentation, used to generate links.")
		os.Exit(1)
	}

	absPath = os.Args[1]
	dir := os.Args[2]
	out := os.Args[3]

	fset := token.NewFileSet() // positions are relative to fset

	skipTest := func(fi os.FileInfo) bool {
		return !strings.HasSuffix(fi.Name(), "_test.go")
	}

	pkgs, err := parser.ParseDir(fset, dir, skipTest, parser.ParseComments)
	if err != nil {
		log.Fatal(err)
	}

	nodes := make(map[string]*Node)
	for _, pkg := range pkgs {
		f := ast.MergePackageFiles(pkg, ast.FilterFuncDuplicates|ast.FilterUnassociatedComments|ast.FilterImportDuplicates)
		ast.Inspect(f, func(n ast.Node) bool {
			switch decl := n.(type) {
			case *ast.GenDecl:
				handleGenDecl(nodes, decl)
			case *ast.FuncDecl:
				handleFuncDecl(nodes, decl)
			}
			return true
		})
	}

	ordered := make([]string, 0, len(nodes))
	for name, node := range nodes {
		if name == "" || !ast.IsExported(name) {
			continue
		}
		if node.Embedded {
			err := node.Embed(nodes)
			if err != nil {
				log.Fatal(err)
			}
		} else {
			ordered = append(ordered, name)
			node.Flatten(nodes)
		}
	}
	sort.Strings(ordered)

	r := markdown.NewRenderer()
	for i, name := range ordered {
		var buf bytes.Buffer
		n := nodes[name]
		n.Render(&buf, r, nodes, i)
		filename := path.Join(out, snaker.CamelToSnake(name)+".md")
		log.Println("Writing file:", filename, i)
		f, err := os.Create(filename)
		if err != nil {
			log.Fatal(err)
		}
		f.Write(buf.Bytes())
		f.Close() // close inside the loop rather than deferring until main exits
	}
}
Example #15
// getPageInfo returns the PageInfo for a package directory abspath. If the
// parameter genAST is set, an AST containing only the package exports is
// computed (PageInfo.PAst), otherwise package documentation (PageInfo.Doc)
// is extracted from the AST. If the parameter try is set, no errors are
// logged if getPageInfo fails. If there is no corresponding package in the
// directory, PageInfo.PDoc and PageInfo.PAst are nil. If there are no sub-
// directories, PageInfo.Dirs is nil.
//
func (h *httpHandler) getPageInfo(abspath, relpath, pkgname string, genAST, try bool) PageInfo {
	// filter function to select the desired .go files
	filter := func(d *os.Dir) bool {
		// If we are looking at cmd documentation, only accept
		// the special fakePkgFile containing the documentation.
		return isPkgFile(d) && (h.isPkg || d.Name == fakePkgFile)
	}

	// get package ASTs
	pkgs, err := parser.ParseDir(abspath, filter, parser.ParseComments)
	if err != nil && !try {
		// TODO: errors should be shown instead of an empty directory
		log.Stderrf("parser.parseDir: %s", err)
	}

	// select package
	var pkg *ast.Package // selected package
	var plist []string   // list of other package (names), if any
	if len(pkgs) == 1 {
		// Exactly one package - select it.
		for _, p := range pkgs {
			pkg = p
		}

	} else if len(pkgs) > 1 {
		// Multiple packages - select the best matching package: The
		// 1st choice is the package with pkgname, the 2nd choice is
		// the package with dirname, and the 3rd choice is a package
		// that is not called "main" if there is exactly one such
		// package. Otherwise, don't select a package.
		dirpath, dirname := pathutil.Split(abspath)

		// If the dirname is "go" we might be in a sub-directory for
		// .go files - use the outer directory name instead for better
		// results.
		if dirname == "go" {
			_, dirname = pathutil.Split(pathutil.Clean(dirpath))
		}

		var choice3 *ast.Package
	loop:
		for _, p := range pkgs {
			switch {
			case p.Name == pkgname:
				pkg = p
				break loop // 1st choice; we are done
			case p.Name == dirname:
				pkg = p // 2nd choice
			case p.Name != "main":
				choice3 = p
			}
		}
		if pkg == nil && len(pkgs) == 2 {
			pkg = choice3
		}

		// Compute the list of other packages
		// (excluding the selected package, if any).
		plist = make([]string, len(pkgs))
		i := 0
		for name := range pkgs {
			if pkg == nil || name != pkg.Name {
				plist[i] = name
				i++
			}
		}
		plist = plist[0:i]
	}

	// compute package documentation
	var past *ast.File
	var pdoc *doc.PackageDoc
	if pkg != nil {
		ast.PackageExports(pkg)
		if genAST {
			past = ast.MergePackageFiles(pkg, false)
		} else {
			pdoc = doc.NewPackageDoc(pkg, pathutil.Clean(relpath)) // no trailing '/' in importpath
		}
	}

	// get directory information
	var dir *Directory
	if tree, _ := fsTree.get(); tree != nil && tree.(*Directory) != nil {
		// directory tree is present; lookup respective directory
		// (may still fail if the file system was updated and the
		// new directory tree has not yet been computed)
		// TODO(gri) Need to build directory tree for fsMap entries
		dir = tree.(*Directory).lookup(abspath)
	}
	if dir == nil {
		// no directory tree present (either early after startup
		// or command-line mode, or we don't build a tree for the
		// directory; e.g. google3); compute one level for this page
		dir = newDirectory(abspath, 1)
	}

	return PageInfo{abspath, plist, past, pdoc, dir.listing(true), h.isPkg}
}
Example #16
// getPageInfo returns the PageInfo for a package directory abspath. If the
// mode parameter does not have the genDoc flag set, a merged AST of the
// package (reduced to its exports if exportsOnly is set) is computed
// (PageInfo.PAst), otherwise package documentation (PageInfo.PDoc)
// is extracted from the AST. If there is no corresponding package in the
// directory, PageInfo.PAst and PageInfo.PDoc are nil. If there are no sub-
// directories, PageInfo.Dirs is nil. If a directory read error occurred,
// PageInfo.Err is set to the respective error but the error is not logged.
//
func (h *httpHandler) getPageInfo(abspath, relpath, pkgname string, mode PageInfoMode) PageInfo {
	// filter function to select the desired .go files
	filter := func(d FileInfo) bool {
		// If we are looking at cmd documentation, only accept
		// the special fakePkgFile containing the documentation.
		return isPkgFile(d) && (h.isPkg || d.Name() == fakePkgFile)
	}

	// get package ASTs
	fset := token.NewFileSet()
	pkgs, err := parseDir(fset, abspath, filter)
	if err != nil && pkgs == nil {
		// only report directory read errors, ignore parse errors
		// (may be able to extract partial package information)
		return PageInfo{Dirname: abspath, Err: err}
	}

	// select package
	var pkg *ast.Package // selected package
	var plist []string   // list of other package (names), if any
	if len(pkgs) == 1 {
		// Exactly one package - select it.
		for _, p := range pkgs {
			pkg = p
		}

	} else if len(pkgs) > 1 {
		// Multiple packages - select the best matching package: The
		// 1st choice is the package with pkgname, the 2nd choice is
		// the package with dirname, and the 3rd choice is a package
		// that is not called "main" if there is exactly one such
		// package. Otherwise, don't select a package.
		dirpath, dirname := filepath.Split(abspath)

		// If the dirname is "go" we might be in a sub-directory for
		// .go files - use the outer directory name instead for better
		// results.
		if dirname == "go" {
			_, dirname = filepath.Split(filepath.Clean(dirpath))
		}

		var choice3 *ast.Package
	loop:
		for _, p := range pkgs {
			switch {
			case p.Name == pkgname:
				pkg = p
				break loop // 1st choice; we are done
			case p.Name == dirname:
				pkg = p // 2nd choice
			case p.Name != "main":
				choice3 = p
			}
		}
		if pkg == nil && len(pkgs) == 2 {
			pkg = choice3
		}

		// Compute the list of other packages
		// (excluding the selected package, if any).
		plist = make([]string, len(pkgs))
		i := 0
		for name := range pkgs {
			if pkg == nil || name != pkg.Name {
				plist[i] = name
				i++
			}
		}
		plist = plist[0:i]
	}

	// compute package documentation
	var past *ast.File
	var pdoc *doc.PackageDoc
	if pkg != nil {
		if mode&exportsOnly != 0 {
			ast.PackageExports(pkg)
		}
		if mode&genDoc != 0 {
			pdoc = doc.NewPackageDoc(pkg, path.Clean(relpath)) // no trailing '/' in importpath
		} else {
			past = ast.MergePackageFiles(pkg, ast.FilterUnassociatedComments)
		}
	}

	// get directory information
	var dir *Directory
	var timestamp int64
	if tree, ts := fsTree.get(); tree != nil && tree.(*Directory) != nil {
		// directory tree is present; lookup respective directory
		// (may still fail if the file system was updated and the
		// new directory tree has not yet been computed)
		dir = tree.(*Directory).lookup(abspath)
		timestamp = ts
	}
	if dir == nil {
		// the path may refer to a user-specified file system mapped
		// via fsMap; lookup that mapping and corresponding RWValue
		// if any
		var v *RWValue
		fsMap.Iterate(func(path string, value *RWValue) bool {
			if isParentOf(path, abspath) {
				// mapping found
				v = value
				return false
			}
			return true
		})
		if v != nil {
			// found a RWValue associated with a user-specified file
			// system; a non-nil RWValue stores a (possibly out-of-date)
			// directory tree for that file system
			if tree, ts := v.get(); tree != nil && tree.(*Directory) != nil {
				dir = tree.(*Directory).lookup(abspath)
				timestamp = ts
			}
		}
	}
	if dir == nil {
		// no directory tree present (too early after startup or
		// command-line mode); compute one level for this page
		// note: cannot use path filter here because in general
		//       it doesn't contain the fsTree path
		dir = newDirectory(abspath, nil, 1)
		timestamp = time.Seconds()
	}

	return PageInfo{abspath, plist, fset, past, pdoc, dir.listing(true), timestamp, h.isPkg, nil}
}
Example #17
// getPageInfo returns the PageInfo for a package directory abspath. If the
// mode parameter has the ShowSource flag set, a merged AST of the package
// (reduced to its exports unless NoFiltering is set) is computed
// (PageInfo.PAst), otherwise package documentation (PageInfo.PDoc)
// is extracted from the AST. If there is no corresponding package in the
// directory, PageInfo.PAst and PageInfo.PDoc are nil. If there are no sub-
// directories, PageInfo.Dirs is nil. If an error occurred, PageInfo.Err is
// set to the respective error but the error is not logged.
//
func (h *handlerServer) GetPageInfo(abspath, relpath string, mode PageInfoMode) *PageInfo {
	info := &PageInfo{Dirname: abspath}

	// Restrict to the package files that would be used when building
	// the package on this system.  This makes sure that if there are
	// separate implementations for, say, Windows vs Unix, we don't
	// jumble them all together.
	// Note: Uses current binary's GOOS/GOARCH.
	// To use different pair, such as if we allowed the user to choose,
	// set ctxt.GOOS and ctxt.GOARCH before calling ctxt.ImportDir.
	ctxt := build.Default
	ctxt.IsAbsPath = pathpkg.IsAbs
	ctxt.ReadDir = func(dir string) ([]os.FileInfo, error) {
		return h.c.fs.ReadDir(filepath.ToSlash(dir))
	}
	ctxt.OpenFile = func(name string) (r io.ReadCloser, err error) {
		data, err := vfs.ReadFile(h.c.fs, filepath.ToSlash(name))
		if err != nil {
			return nil, err
		}
		return ioutil.NopCloser(bytes.NewReader(data)), nil
	}

	pkginfo, err := ctxt.ImportDir(abspath, 0)
	// continue if there are no Go source files; we still want the directory info
	if _, nogo := err.(*build.NoGoError); err != nil && !nogo {
		info.Err = err
		return info
	}

	// collect package files
	pkgname := pkginfo.Name
	pkgfiles := append(pkginfo.GoFiles, pkginfo.CgoFiles...)
	if len(pkgfiles) == 0 {
		// Commands written in C have no .go files in the build.
		// Instead, documentation may be found in an ignored file.
		// The file may be ignored via an explicit +build ignore
		// constraint (recommended), or by defining the package
		// documentation (historic).
		pkgname = "main" // assume package main since pkginfo.Name == ""
		pkgfiles = pkginfo.IgnoredGoFiles
	}

	// get package information, if any
	if len(pkgfiles) > 0 {
		// build package AST
		fset := token.NewFileSet()
		files, err := h.c.parseFiles(fset, abspath, pkgfiles)
		if err != nil {
			info.Err = err
			return info
		}

		// ignore any errors - they are due to unresolved identifiers
		pkg, _ := ast.NewPackage(fset, files, poorMansImporter, nil)

		// extract package documentation
		info.FSet = fset
		if mode&ShowSource == 0 {
			// show extracted documentation
			var m doc.Mode
			if mode&NoFiltering != 0 {
				m |= doc.AllDecls
			}
			if mode&AllMethods != 0 {
				m |= doc.AllMethods
			}
			info.PDoc = doc.New(pkg, pathpkg.Clean(relpath), m) // no trailing '/' in importpath
			if mode&NoFactoryFuncs != 0 {
				for _, t := range info.PDoc.Types {
					info.PDoc.Funcs = append(info.PDoc.Funcs, t.Funcs...)
					t.Funcs = nil
				}
				sort.Sort(funcsByName(info.PDoc.Funcs))
			}

			// collect examples
			testfiles := append(pkginfo.TestGoFiles, pkginfo.XTestGoFiles...)
			files, err = h.c.parseFiles(fset, abspath, testfiles)
			if err != nil {
				log.Println("parsing examples:", err)
			}
			info.Examples = collectExamples(pkg, files)

			// collect any notes that we want to show
			if info.PDoc.Notes != nil {
				// could regexp.Compile only once per godoc, but probably not worth it
				if rx := h.p.NotesRx; rx != nil {
					for m, n := range info.PDoc.Notes {
						if rx.MatchString(m) {
							if info.Notes == nil {
								info.Notes = make(map[string][]*doc.Note)
							}
							info.Notes[m] = n
						}
					}
				}
			}

		} else {
			// show source code
			// TODO(gri) Consider eliminating export filtering in this mode,
			//           or perhaps eliminating the mode altogether.
			if mode&NoFiltering == 0 {
				packageExports(fset, pkg)
			}
			info.PAst = ast.MergePackageFiles(pkg, 0)
		}
		info.IsMain = pkgname == "main"
	}

	// get directory information, if any
	var dir *Directory
	var timestamp time.Time
	if tree, ts := h.c.fsTree.Get(); tree != nil && tree.(*Directory) != nil {
		// directory tree is present; lookup respective directory
		// (may still fail if the file system was updated and the
		// new directory tree has not yet been computed)
		dir = tree.(*Directory).lookup(abspath)
		timestamp = ts
	}
	if dir == nil {
		// no directory tree present (too early after startup or
		// command-line mode); compute one level for this page
		// note: cannot use path filter here because in general
		//       it doesn't contain the FSTree path
		dir = h.c.newDirectory(abspath, 1)
		timestamp = time.Now()
	}
	info.Dirs = dir.listing(true)
	info.DirTime = timestamp
	info.DirFlat = mode&FlatDir != 0

	return info
}
Example #18
func main() {
	flag.Usage = usage
	flag.Parse()
	args := flag.Args()

	if len(args) != 2 {
		flag.Usage()
		os.Exit(1)
	}

	dir := args[0]
	out := args[1]
	// Decode config
	err := decodeConfig(*configPath)
	if err != nil {
		log.Fatal(err)
	}

	fset := token.NewFileSet() // positions are relative to fset

	skipTest := func(fi os.FileInfo) bool {
		return !strings.HasSuffix(fi.Name(), "_test.go")
	}

	pkgs, err := parser.ParseDir(fset, dir, skipTest, parser.ParseComments)
	if err != nil {
		log.Fatal(err)
	}

	nodes := make(map[string]*Node)
	for _, pkg := range pkgs {
		f := ast.MergePackageFiles(pkg, ast.FilterFuncDuplicates|ast.FilterUnassociatedComments|ast.FilterImportDuplicates)
		ast.Inspect(f, func(n ast.Node) bool {
			switch decl := n.(type) {
			case *ast.GenDecl:
				handleGenDecl(nodes, decl)
			case *ast.FuncDecl:
				handleFuncDecl(nodes, decl)
			}
			return true
		})
	}

	ordered := make([]string, 0, len(nodes))
	for name, node := range nodes {
		if name == "" || !ast.IsExported(name) || node.Name == "" {
			continue
		}
		if node.Embedded {
			err := node.Embed(nodes)
			if err != nil {
				log.Fatal(err)
			}
		} else {
			ordered = append(ordered, name)
			node.Flatten(nodes)
		}
	}
	sort.Strings(ordered)

	r := markdown.NewRenderer(nil)
	for i, name := range ordered {
		var buf bytes.Buffer
		n := nodes[name]
		weight := (i + 1) * config.IndexWidth
		if w, ok := config.Weights[name]; ok {
			weight = w
		}
		n.Render(&buf, r, nodes, weight)
		filename := path.Join(out, snaker.CamelToSnake(name)+".md")
		log.Println("Writing file:", filename, i)
		f, err := os.Create(filename)
		if err != nil {
			log.Fatal(err)
		}
		f.Write(buf.Bytes())
		f.Close() // close inside the loop rather than deferring until main exits
	}
}