// ParseAST parses and type-checks the named file and returns its *ast.File.
func (w *Weave) ParseAST(fname string) *ast.File {
	var err error
	fset := token.NewFileSet()
	af, err := parser.ParseFile(fset, fname, nil, 0)
	if err != nil {
		w.flog.Println(err)
	}
	loadcfg := loader.Config{}
	loadcfg.CreateFromFilenames(fname)
	info := types.Info{
		Types: make(map[ast.Expr]types.TypeAndValue),
		Defs:  make(map[*ast.Ident]types.Object),
	}
	var conf types.Config
	_, err = conf.Check(af.Name.Name, fset, []*ast.File{af}, &info)
	if err != nil {
		if w.warnAST {
			w.flog.Println(err)
		}
	}
	return af
}
func ObjectOf(filename string, cursor int) (types.Object, *types.Selection, error) {
	text, off, err := readSourceOffset(filename, cursor, nil)
	if err != nil {
		return nil, nil, err
	}
	if err := checkSelection(text, off); err != nil {
		return nil, nil, err
	}
	af, fset, err := parseFile(filename, text)
	if err != nil {
		return nil, nil, err
	}
	node, err := nodeAtOffset(af, fset, cursor)
	if err != nil {
		return nil, nil, err
	}
	ctx := newContext(filename, af, fset, &build.Default)
	info := &types.Info{
		Defs: make(map[*ast.Ident]types.Object),
		Uses: make(map[*ast.Ident]types.Object),
	}
	if _, ok := node.(*ast.SelectorExpr); ok {
		info.Selections = make(map[*ast.SelectorExpr]*types.Selection)
	}
	conf := types.Config{}
	if _, err := conf.Check(ctx.dirname, ctx.fset, ctx.files, info); err != nil {
		// Return the error only if type info is missing entirely.
		if len(info.Defs) == 0 && len(info.Uses) == 0 {
			return nil, nil, err
		}
	}
	return lookupType(node, info)
}
func (pkg *Package) check(fs *token.FileSet, astFiles []*ast.File) error {
	pkg.defs = make(map[*ast.Ident]types.Object)
	pkg.uses = make(map[*ast.Ident]types.Object)
	pkg.spans = make(map[types.Object]Span)
	pkg.types = make(map[ast.Expr]types.TypeAndValue)
	config := types.Config{
		// We provide the same packages map for all imports to ensure
		// that everybody sees identical packages for the given paths.
		Packages: imports,
		// By providing a Config with our own error function, it will continue
		// past the first error. There is no need for that function to do anything.
		Error: func(error) {},
	}
	info := &types.Info{
		Types: pkg.types,
		Defs:  pkg.defs,
		Uses:  pkg.uses,
	}
	typesPkg, err := config.Check(pkg.path, fs, astFiles, info)
	pkg.typesPkg = typesPkg
	// update spans
	for id, obj := range pkg.defs {
		pkg.growSpan(id, obj)
	}
	for id, obj := range pkg.uses {
		pkg.growSpan(id, obj)
	}
	return err
}
func checkPkgFiles(files []*ast.File) {
	type bailout struct{}
	conf := types.Config{
		FakeImportC: true,
		Error: func(err error) {
			if !*allErrors && errorCount >= 10 {
				panic(bailout{})
			}
			report(err)
		},
		Sizes: sizes,
	}
	if *gccgo {
		var inst gccgoimporter.GccgoInstallation
		inst.InitFromDriver("gccgo")
		conf.Import = inst.GetImporter(nil, nil)
	}
	defer func() {
		switch p := recover().(type) {
		case nil, bailout:
			// normal return or early exit
		default:
			// re-panic
			panic(p)
		}
	}()

	const path = "pkg" // any non-empty string will do for now
	conf.Check(path, fset, files, nil)
}
func (c *Config) Object(filename string, cursor int, src interface{}) (*Object, []byte, error) {
	// TODO: Refactor this mess!
	text, err := readSource(filename, src)
	if err != nil {
		return nil, nil, err
	}
	if err := checkSelection(text, cursor); err != nil {
		return nil, nil, err
	}
	af, fset, err := parseFile(filename, text)
	if err != nil {
		return nil, nil, err
	}
	node, err := nodeAtOffset(af, fset, cursor)
	if err != nil {
		return nil, nil, err
	}
	ctx := newContext(filename, af, fset, &c.Context)
	info := &types.Info{
		Defs: make(map[*ast.Ident]types.Object),
		Uses: make(map[*ast.Ident]types.Object),
	}
	if _, ok := node.(*ast.SelectorExpr); ok {
		info.Selections = make(map[*ast.SelectorExpr]*types.Selection)
	}
	conf := types.Config{}
	if _, err := conf.Check(ctx.dirname, ctx.fset, ctx.files, info); err != nil {
		// Return the error only if type info is missing entirely.
		if len(info.Defs) == 0 && len(info.Uses) == 0 {
			return nil, nil, err
		}
	}
	obj, sel, err := lookupType(node, info)
	if err != nil {
		return nil, nil, err
	}
	o, err := newObject(obj, sel)
	if err != nil {
		return nil, nil, err
	}
	f, err := o.Finder()
	if err != nil {
		return nil, nil, err
	}
	tp, objSrc, err := ctx.objectPosition(o.PkgPath, f)
	if err != nil {
		if o.pos.IsValid() {
			if p := positionFor(o.pos, fset); p != nil {
				o.Position = Position(*p)
				return o, objSrc, nil
			}
		}
		return nil, nil, err
	}
	if tp != nil {
		o.Position = Position(*tp)
	}
	return o, objSrc, nil
}
func main() {
	flag.Parse()
	exitStatus := 0
	importPaths := gotool.ImportPaths(flag.Args())
	if len(importPaths) == 0 {
		importPaths = []string{"."}
	}
	for _, pkgPath := range importPaths {
		visitor := &visitor{
			info: types.Info{
				Types:      make(map[ast.Expr]types.TypeAndValue),
				Defs:       make(map[*ast.Ident]types.Object),
				Selections: make(map[*ast.SelectorExpr]*types.Selection),
			},
			m:    make(map[types.Type]map[string]int),
			skip: make(map[types.Type]struct{}),
		}
		fset, astFiles := check.ASTFilesForPackage(pkgPath, *loadTestFiles)
		imp := importer.New()
		// Preliminary cgo support.
		imp.Config = importer.Config{UseGcFallback: true}
		config := types.Config{Import: imp.Import}
		var err error
		visitor.pkg, err = config.Check(pkgPath, fset, astFiles, &visitor.info)
		if err != nil {
			fmt.Fprintf(os.Stderr, "%s: %v\n", pkgPath, err)
			continue
		}
		for _, f := range astFiles {
			ast.Walk(visitor, f)
		}
		for t := range visitor.m {
			if _, skip := visitor.skip[t]; skip {
				continue
			}
			for fieldName, v := range visitor.m[t] {
				if !*reportExported && ast.IsExported(fieldName) {
					continue
				}
				if v == 0 {
					field, _, _ := types.LookupFieldOrMethod(t, false, visitor.pkg, fieldName)
					if fieldName == "XMLName" {
						if named, ok := field.Type().(*types.Named); ok && named.Obj().Pkg().Path() == "encoding/xml" {
							continue
						}
					}
					pos := fset.Position(field.Pos())
					fmt.Printf("%s: %s:%d:%d: %s.%s\n",
						pkgPath, pos.Filename, pos.Line, pos.Column,
						types.TypeString(t, nil), fieldName,
					)
					exitStatus = 1
				}
			}
		}
	}
	os.Exit(exitStatus)
}
func (ctxt *Context) grind(pkg *Package) {
Loop:
	for loop := 0; ; loop++ {
		println(loop)
		pkg.FileSet = token.NewFileSet()
		pkg.Files = nil
		for _, name := range pkg.Filenames {
			f, err := parser.ParseFile(pkg.FileSet, name, pkg.Src(name), 0)
			if err != nil {
				if loop > 0 {
					ctxt.Errorf("%s: error parsing rewritten file: %v", pkg.ImportPath, err)
					return
				}
				ctxt.Errorf("%s: %v", pkg.ImportPath, err)
				return
			}
			pkg.Files = append(pkg.Files, f)
		}
		conf := new(types.Config)
		// conf.DisableUnusedImportCheck = true
		pkg.Info = types.Info{}
		pkg.Info.Types = make(map[ast.Expr]types.TypeAndValue)
		pkg.Info.Scopes = make(map[ast.Node]*types.Scope)
		pkg.Info.Defs = make(map[*ast.Ident]types.Object)
		pkg.Info.Uses = make(map[*ast.Ident]types.Object)
		typesPkg, err := conf.Check(pkg.ImportPath, pkg.FileSet, pkg.Files, &pkg.Info)
		if err != nil && typesPkg == nil {
			if loop > 0 {
				ctxt.Errorf("%s: error type checking rewritten package: %v", pkg.ImportPath, err)
				for _, name := range pkg.Filenames {
					if pkg.Modified(name) {
						ctxt.Errorf("%s <<<\n%s\n>>>", name, pkg.Src(name))
					}
				}
				return
			}
			ctxt.Errorf("%s: %v", pkg.ImportPath, err)
			return
		}
		pkg.Types = typesPkg
		pkg.TypesError = err
		for _, g := range ctxt.Grinders {
			pkg.clean = true
			g(ctxt, pkg)
			if !pkg.clean {
				continue Loop
			}
		}
		break
	}
}
// check type-checks the package. The package must be OK to proceed.
func (pkg *Package) check(fs *token.FileSet, astFiles []*ast.File) {
	pkg.defs = make(map[*ast.Ident]types.Object)
	config := types.Config{FakeImportC: true}
	info := &types.Info{
		Defs: pkg.defs,
	}
	typesPkg, err := config.Check(pkg.dir, fs, astFiles, info)
	if err != nil {
		log.Fatalf("checking package: %s", err)
	}
	pkg.typesPkg = typesPkg
}
func ExampleMap() {
	const source = `package P

var X []string
var Y []string

const p, q = 1.0, 2.0

func f(offset int32) (value byte, ok bool)
func g(rune) (uint8, bool)
`

	// Parse and type-check the package.
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "P.go", source, 0)
	if err != nil {
		panic(err)
	}
	pkg, err := new(types.Config).Check("P", fset, []*ast.File{f}, nil)
	if err != nil {
		panic(err)
	}

	scope := pkg.Scope()

	// Group names of package-level objects by their type.
	var namesByType typeutil.Map // value is []string
	for _, name := range scope.Names() {
		T := scope.Lookup(name).Type()
		names, _ := namesByType.At(T).([]string)
		names = append(names, name)
		namesByType.Set(T, names)
	}

	// Format, sort, and print the map entries.
	var lines []string
	namesByType.Iterate(func(T types.Type, names interface{}) {
		lines = append(lines, fmt.Sprintf("%s %s", names, T))
	})
	sort.Strings(lines)
	for _, line := range lines {
		fmt.Println(line)
	}

	// Output:
	// [X Y] []string
	// [f g] func(offset int32) (value byte, ok bool)
	// [p q] untyped float
}
func (p *Processor) parseSourceFiles(filenames []string) (*types.Package, error) {
	var files []*ast.File
	fs := token.NewFileSet()
	for _, filename := range filenames {
		file, err := parser.ParseFile(fs, filename, nil, 0)
		if err != nil {
			return nil, fmt.Errorf("parsing package: %s: %s", filename, err)
		}
		files = append(files, file)
	}
	config := types.Config{FakeImportC: true, Error: func(error) {}}
	info := &types.Info{}
	return config.Check(p.Path, fs, files, info)
}
func typeCheck(t *testing.T, filename string) *types.Package {
	f, err := parser.ParseFile(fset, filename, nil, parser.AllErrors)
	if err != nil {
		t.Fatalf("%s: %v", filename, err)
	}
	pkgName := filepath.Base(filename)
	pkgName = strings.TrimSuffix(pkgName, ".go")

	// typecheck and collect typechecker errors
	var conf types.Config
	conf.Error = func(err error) {
		t.Error(err)
	}
	pkg, err := conf.Check(pkgName, fset, []*ast.File{f}, nil)
	if err != nil {
		t.Fatal(err)
	}
	return pkg
}
// ParsePackage parses the package in the given directory and returns it.
func ParsePackage(directory, skipPrefix, skipSuffix string) (*Package, error) {
	pkgDir, err := build.Default.ImportDir(directory, 0)
	if err != nil {
		return nil, fmt.Errorf("cannot process directory %s: %s", directory, err)
	}
	var files []*ast.File
	fs := token.NewFileSet()
	for _, name := range pkgDir.GoFiles {
		if !strings.HasSuffix(name, ".go") ||
			(skipSuffix != "" && strings.HasPrefix(name, skipPrefix) &&
				strings.HasSuffix(name, skipSuffix)) {
			continue
		}
		if directory != "." {
			name = filepath.Join(directory, name)
		}
		f, err := parser.ParseFile(fs, name, nil, 0)
		if err != nil {
			return nil, fmt.Errorf("parsing file %v: %v", name, err)
		}
		files = append(files, f)
	}
	if len(files) == 0 {
		return nil, fmt.Errorf("%s: no buildable Go files", directory)
	}

	// type-check the package
	defs := make(map[*ast.Ident]types.Object)
	config := types.Config{FakeImportC: true}
	info := &types.Info{Defs: defs}
	if _, err := config.Check(directory, fs, files, info); err != nil {
		return nil, fmt.Errorf("type-checking package: %v", err)
	}

	return &Package{
		Name:  files[0].Name.Name,
		files: files,
		defs:  defs,
	}, nil
}
func (p *Parser) parsePackage(directory string, fileNames []string) (*PackageInfo, error) {
	var files FileInfos
	pkg := &PackageInfo{}
	fs := token.NewFileSet()
	for _, fileName := range fileNames {
		if !strings.HasSuffix(fileName, ".go") {
			continue
		}
		parsedFile, err := parser.ParseFile(fs, fileName, nil, parser.ParseComments)
		if err != nil {
			return nil, fmt.Errorf("parsing package: %s: %s", fileName, err)
		}
		files = append(files, (*FileInfo)(parsedFile))
	}
	if len(files) == 0 {
		return nil, fmt.Errorf("%s: no buildable Go files", directory)
	}
	pkg.Files = files
	pkg.Dir = directory

	// resolve types
	config := types.Config{
		FakeImportC:              true,
		IgnoreFuncBodies:         true,
		DisableUnusedImportCheck: true,
	}
	info := &types.Info{
		Defs: make(map[*ast.Ident]types.Object),
	}
	typesPkg, err := config.Check(pkg.Dir, fs, files.AstFiles(), info)
	if p.SkipSemanticsCheck && err != nil {
		return pkg, nil
	} else if err != nil {
		return nil, err
	}
	pkg.Types = typesPkg
	return pkg, nil
}
func importPkg(pkgname string) (*types.Package, *ast.Package, error) {
	pkg, err := build.Import(pkgname, "", 0)
	if err != nil {
		return nil, nil, err
	}
	fset := token.NewFileSet()
	pkgmap, err := parser.ParseDir(fset, pkg.Dir, nil, parser.ParseComments)
	if err != nil {
		return nil, nil, err
	}
	var filelist []*ast.File
	for _, f := range pkgmap[pkg.Name].Files {
		filelist = append(filelist, f)
	}
	config := types.Config{}
	typpkg, err := config.Check(pkg.Dir, fset, filelist, nil)
	return typpkg, pkgmap[pkg.Name], err
}
func TestDependencies(t *testing.T) {
	packages := make(map[string]*types.Package)
	conf := types.Config{
		Packages: packages,
		Import: func(_ map[string]*types.Package, path string) (*types.Package, error) {
			return packages[path], nil
		},
	}
	fset := token.NewFileSet()

	// All edges go to the right.
	//  /--D--B--A
	// F    \_C_/
	//  \__E_/
	for i, content := range []string{
		`package a`,
		`package c; import (_ "a")`,
		`package b; import (_ "a")`,
		`package e; import (_ "c")`,
		`package d; import (_ "b"; _ "c")`,
		`package f; import (_ "d"; _ "e")`,
	} {
		f, err := parser.ParseFile(fset, fmt.Sprintf("%d.go", i), content, 0)
		if err != nil {
			t.Fatal(err)
		}
		pkg, err := conf.Check(f.Name.Name, fset, []*ast.File{f}, nil)
		if err != nil {
			t.Fatal(err)
		}
		packages[pkg.Path()] = pkg
	}

	for _, test := range []struct {
		roots, want string
	}{
		{"a", "a"},
		{"b", "ab"},
		{"c", "ac"},
		{"d", "abcd"},
		{"e", "ace"},
		{"f", "abcdef"},
		{"be", "abce"},
		{"eb", "aceb"},
		{"de", "abcde"},
		{"ed", "acebd"},
		{"ef", "acebdf"},
	} {
		var pkgs []*types.Package
		for _, r := range test.roots {
			pkgs = append(pkgs, packages[string(r)])
		}
		var got string
		for _, p := range typeutil.Dependencies(pkgs...) {
			got += p.Path()
		}
		if got != test.want {
			t.Errorf("Dependencies(%q) = %q, want %q", test.roots, got, test.want)
		}
	}
}
// doPackage analyzes the single package constructed from the named files, looking for
// the definition of ident.
func doPackage(pkg *ast.Package, fset *token.FileSet, ident string) {
	var files []*File
	found := false
	for name, astFile := range pkg.Files {
		if packageFlag && astFile.Doc == nil {
			continue
		}
		file := &File{
			fset:       fset,
			name:       name,
			ident:      ident,
			lowerIdent: strings.ToLower(ident),
			file:       astFile,
			comments:   ast.NewCommentMap(fset, astFile, astFile.Comments),
		}
		if regexpFlag && regexp.QuoteMeta(ident) != ident {
			// It's a regular expression.
			var err error
			file.regexp, err = regexp.Compile("^(?i:" + ident + ")$")
			if err != nil {
				fmt.Fprintf(os.Stderr, "regular expression `%s`:", err)
				os.Exit(2)
			}
		}
		switch {
		case strings.HasPrefix(name, goRootSrcPkg):
			file.urlPrefix = "http://golang.org/pkg"
			file.pathPrefix = goRootSrcPkg
		case strings.HasPrefix(name, goRootSrcCmd):
			file.urlPrefix = "http://golang.org/cmd"
			file.pathPrefix = goRootSrcCmd
		default:
			file.urlPrefix = "http://godoc.org"
			for _, path := range goPaths {
				p := filepath.Join(path, "src")
				if strings.HasPrefix(name, p) {
					file.pathPrefix = p
					break
				}
			}
		}
		file.urlPrefix = urlHeadTag + file.urlPrefix
		files = append(files, file)
		if found {
			continue
		}
		file.doPrint = false
		if packageFlag {
			file.pkgComments()
		} else {
			ast.Walk(file, file.file)
			if file.found {
				found = true
			}
		}
	}
	if !found {
		return
	}

	// By providing the Config with our own error function, it will continue
	// past the first error. There is no need for that function to do anything.
	config := types.Config{
		Error: func(error) {},
	}
	info := &types.Info{
		Defs: make(map[*ast.Ident]types.Object),
	}
	path := ""
	var astFiles []*ast.File
	for name, astFile := range pkg.Files {
		if path == "" {
			path = name
		}
		astFiles = append(astFiles, astFile)
	}
	config.Check(path, fset, astFiles, info) // Ignore errors.

	// We need to search all files for methods, so record the full list in each file.
	for _, file := range files {
		file.allFiles = files
	}
	for _, file := range files {
		file.doPrint = true
		file.defs = info.Defs
		if packageFlag {
			file.pkgComments()
		} else {
			ast.Walk(file, file.file)
		}
	}
}
func TestDependencies(t *testing.T) {
	packages := make(map[string]*types.Package)
	conf := types.Config{
		Packages: packages,
		Import: func(_ map[string]*types.Package, path string) (*types.Package, error) {
			return packages[path], nil
		},
	}
	fset := token.NewFileSet()

	// All edges go to the right.
	//  /--D--B--A
	// F    \_C_/
	//  \__E_/
	for i, content := range []string{
		`package A`,
		`package C; import (_ "A")`,
		`package B; import (_ "A")`,
		`package E; import (_ "C")`,
		`package D; import (_ "B"; _ "C")`,
		`package F; import (_ "D"; _ "E")`,
	} {
		f, err := parser.ParseFile(fset, fmt.Sprintf("%d.go", i), content, 0)
		if err != nil {
			t.Fatal(err)
		}
		pkg, err := conf.Check(f.Name.Name, fset, []*ast.File{f}, nil)
		if err != nil {
			t.Fatal(err)
		}
		packages[pkg.Path()] = pkg
	}

	for _, test := range []struct {
		roots, want string
	}{
		{"A", "A"},
		{"B", "AB"},
		{"C", "AC"},
		{"D", "ABCD"},
		{"E", "ACE"},
		{"F", "ABCDEF"},
		{"BE", "ABCE"},
		{"EB", "ACEB"},
		{"DE", "ABCDE"},
		{"ED", "ACEBD"},
		{"EF", "ACEBDF"},
	} {
		var pkgs []*types.Package
		for _, r := range test.roots {
			pkgs = append(pkgs, conf.Packages[string(r)])
		}
		var got string
		for _, p := range typeutil.Dependencies(pkgs...) {
			got += p.Path()
		}
		if got != test.want {
			t.Errorf("Dependencies(%q) = %q, want %q", test.roots, got, test.want)
		}
	}
}
func parseAndCheck(fset *token.FileSet, pkgDir, filename string, src []byte, opt *Options) (*ast.File, func(orig, src []byte) []byte, *types.Info, error) {
	var pkgFiles []*ast.File // all package files

	// Parse the named file using `parse`, which handles fragments and reads
	// from the src byte array.
	file, adjust, err := parse(fset, filename, src, opt)
	if err != nil {
		return nil, nil, nil, err
	}
	pkgFiles = append(pkgFiles, file)

	var importPath string
	if pkgDir != "" {
		// Parse other package files by reading from the filesystem.
		dir := filepath.Dir(filename)
		buildPkg, err := build.ImportDir(dir, 0)
		if err != nil {
			// TODO(sqs): support parser-only mode (that doesn't require
			// files passed to goreturns to be part of a valid package)
			return nil, nil, nil, err
		}
		importPath = buildPkg.ImportPath
		for _, files := range [...][]string{buildPkg.GoFiles, buildPkg.CgoFiles} {
			for _, file := range files {
				if file == filepath.Base(filename) {
					// already parsed this file above
					continue
				}
				f, err := parser.ParseFile(fset, filepath.Join(dir, file), nil, 0)
				if err != nil {
					fmt.Fprintf(os.Stderr, "could not parse %q: %v\n", file, err)
					continue
				}
				pkgFiles = append(pkgFiles, f)
			}
		}
	}

	var nerrs int
	cfg := types.Config{
		Error: func(err error) {
			if opt.PrintErrors && (opt.AllErrors || nerrs == 0) {
				fmt.Fprintln(os.Stderr, err)
			}
			nerrs++
		},
	}
	info := &types.Info{
		Types: map[ast.Expr]types.TypeAndValue{},
		Uses:  map[*ast.Ident]types.Object{},
		Defs:  map[*ast.Ident]types.Object{},
	}
	if _, err := cfg.Check(importPath, fset, pkgFiles, info); err != nil {
		if terr, ok := err.(types.Error); ok && strings.HasPrefix(terr.Msg, "wrong number of return values") {
			// ignore "wrong number of return values" errors
		} else {
			if opt.PrintErrors {
				fmt.Fprintf(os.Stderr, "%s: typechecking failed (continuing without type info)\n", filename)
			}
			// proceed, but without type info
			return file, adjust, nil, nil
		}
	}
	return file, adjust, info, nil
}
func (w *PkgWalker) Import(parentDir string, name string, conf *PkgConfig) (pkg *types.Package, err error) {
	defer func() {
		err := recover()
		if err != nil && typesVerbose {
			log.Println(err)
		}
	}()

	if strings.HasPrefix(name, ".") && parentDir != "" {
		name = filepath.Join(parentDir, name)
	}
	pkg = w.imported[name]
	if pkg != nil {
		// if pkg == &w.importing {
		// 	return nil, fmt.Errorf("cycle importing package %q", name)
		// }
		return pkg, nil
	}
	if typesVerbose {
		log.Println("parser pkg", name)
	}
	bp, err := w.importPath(name, 0)
	if err != nil {
		return nil, err
	}
	checkName := name
	if bp.ImportPath == "." {
		checkName = bp.Name
	} else {
		checkName = bp.ImportPath
	}
	if w.importingName[checkName] {
		return nil, fmt.Errorf("cycle importing package %q", name)
	}
	w.importingName[checkName] = true
	// if err != nil {
	// 	return nil, err
	// 	//if _, nogo := err.(*build.NoGoError); nogo {
	// 	//	return
	// 	//}
	// 	//return
	// 	//log.Fatalf("pkg %q, dir %q: ScanDir: %v", name, info.Dir, err)
	// }
	filenames := append(append([]string{}, bp.GoFiles...), bp.CgoFiles...)
	if conf.WithTestFiles {
		filenames = append(filenames, bp.TestGoFiles...)
	}
	if name == "runtime" {
		n := fmt.Sprintf("zgoos_%s.go", w.context.GOOS)
		if !contains(filenames, n) {
			filenames = append(filenames, n)
		}
		n = fmt.Sprintf("zgoarch_%s.go", w.context.GOARCH)
		if !contains(filenames, n) {
			filenames = append(filenames, n)
		}
	}
	parserFiles := func(filenames []string, cursor *FileCursor, xtest bool) (files []*ast.File) {
		for _, file := range filenames {
			var f *ast.File
			if cursor != nil && cursor.fileName == file {
				f, err = w.parseFile(bp.Dir, file, cursor.src)
				cursor.pos = token.Pos(w.fset.File(f.Pos()).Base()) + token.Pos(cursor.cursorPos)
				cursor.fileDir = bp.Dir
				cursor.xtest = xtest
			} else {
				f, err = w.parseFile(bp.Dir, file, nil)
			}
			if err != nil && typesVerbose {
				log.Printf("error parsing package %s: %s\n", name, err)
			}
			files = append(files, f)
		}
		return
	}
	files := parserFiles(filenames, conf.Cursor, false)
	xfiles := parserFiles(bp.XTestGoFiles, conf.Cursor, true)

	typesConf := types.Config{
		IgnoreFuncBodies: conf.IgnoreFuncBodies,
		FakeImportC:      true,
		Packages:         w.gcimporter,
		Import: func(imports map[string]*types.Package, name string) (pkg *types.Package, err error) {
			if pkg != nil {
				return pkg, nil
			}
			if conf.AllowBinary && w.isBinaryPkg(name) {
				pkg = w.gcimporter[name]
				if pkg != nil && pkg.Complete() {
					return
				}
				pkg, err = gcimporter.Import(imports, name)
				if pkg != nil && pkg.Complete() {
					w.gcimporter[name] = pkg
					return
				}
			}
			return w.Import(bp.Dir, name, &PkgConfig{IgnoreFuncBodies: true, AllowBinary: true, WithTestFiles: false})
		},
		Error: func(err error) {
			if typesVerbose {
				log.Println(err)
			}
		},
	}
	if pkg == nil {
		pkg, err = typesConf.Check(checkName, w.fset, files, conf.Info)
		conf.Pkg = pkg
	}
	w.importingName[checkName] = false
	w.imported[name] = pkg
	if len(xfiles) > 0 {
		xpkg, _ := typesConf.Check(checkName+"_test", w.fset, xfiles, conf.XInfo)
		w.imported[checkName+"_test"] = xpkg
		conf.XPkg = xpkg
	}
	return
}
func (imp *Importer) realImport(imports map[string]*types.Package, path string) (pkg *types.Package, err error) {
	// types.Importer does not seem to be designed for recursive
	// parsing like we're doing here. Specifically, each nested import
	// will maintain its own imports map. This will lead to duplicate
	// imports and in turn packages, which will lead to funny errors
	// such as "cannot pass argument ip (variable of type net.IP) to
	// variable of type net.IP"
	//
	// To work around this, we keep a global imports map, allImports,
	// to which we add all nested imports, and which we use as the
	// cache, instead of imports.
	//
	// Since all nested imports will also use this importer, there
	// should be no way to end up with duplicate imports.

	// We first try to use GcImport directly. This has the downside of
	// using possibly out-of-date packages, but it has the upside of
	// not having to parse most of the Go standard library.

	imported := func(pkg *types.Package) {
		// We don't use imports, but per API we have to add the package.
		imports[pkg.Path()] = pkg
		imp.Imports[pkg.Path()] = pkg
	}

	buildPkg, buildErr := build.Import(path, ".", 0)

	// If we found no build dir, assume we're dealing with installed
	// but no source. If we found a build dir, only use GcImport if
	// it's in GOROOT. This way we always use up-to-date code for
	// normal packages but avoid parsing the standard library.
	if (buildErr == nil && buildPkg.Goroot) || buildErr != nil {
		pkg, err = gcimporter.Import(imp.Imports, path)
		if err == nil {
			imported(pkg)
			return pkg, nil
		}
	}

	// See if we already imported this package
	if pkg = imp.Imports[path]; pkg != nil && pkg.Complete() {
		return pkg, nil
	}

	// allImports failed, try to use go/build
	if buildErr != nil {
		return nil, fmt.Errorf("build.Import failed: %s", buildErr)
	}

	// TODO check if the .a file is up to date and use it instead
	fileSet := token.NewFileSet()

	isGoFile := func(d os.FileInfo) bool {
		allFiles := make([]string, 0, len(buildPkg.GoFiles)+len(buildPkg.CgoFiles))
		allFiles = append(allFiles, buildPkg.GoFiles...)
		allFiles = append(allFiles, buildPkg.CgoFiles...)
		for _, file := range allFiles {
			if file == d.Name() {
				return true
			}
		}
		return false
	}
	pkgs, err := parser.ParseDir(fileSet, buildPkg.Dir, isGoFile, 0)
	if err != nil {
		return nil, err
	}
	delete(pkgs, "documentation")

	var astPkg *ast.Package
	var name string
	for name, astPkg = range pkgs {
		// Use the first non-main package, or the only package we
		// found.
		//
		// NOTE(dh) I can't think of a reason why there should be
		// multiple packages in a single directory, but ParseDir
		// accommodates for that possibility.
		if len(pkgs) == 1 || name != "main" {
			break
		}
	}
	if astPkg == nil {
		return nil, fmt.Errorf("can't find import: %s", name)
	}

	var ff []*ast.File
	for _, f := range astPkg.Files {
		ff = append(ff, f)
	}

	if imp.cycleSeen[path] {
		return nil, fmt.Errorf("import cycle %s -> %s", strings.Join(imp.cyclesStack, " -> "), path)
	}
	imp.cycleSeen[path] = true
	imp.cyclesStack = append(imp.cyclesStack, path)

	context := types.Config{
		Import: imp.realImport,
	}
	pkg, err = context.Check(name, fileSet, ff, nil)
	if err != nil {
		// As a special case, if type checking failed due to cgo, try
		// again by using GcImport. That way we can extract all
		// required type information, but we risk importing an
		// outdated version.
		if imp.Config.UseGcFallback && strings.Contains(err.Error(), `cannot find package "C" in`) {
			gcPkg, gcErr := gcimporter.Import(imp.Imports, path)
			if gcErr == nil {
				imported(gcPkg)
				imp.Fallbacks = append(imp.Fallbacks, path)
				return gcPkg, nil
			}
		}
		return pkg, err
	}
	imports[path] = pkg
	imp.Imports[path] = pkg
	return pkg, nil
}