// parseFiles parses exactly two files, server.go and client.go, found under
// path, type-checks them together as one package, and returns the FileSet,
// the checked package, the collected type info, and the client.go AST
// (the second file parsed). Any parse or type-check failure is fatal.
func parseFiles(path string) (*token.FileSet, *types.Package, types.Info, *ast.File) {
	fset := token.NewFileSet()
	var parsed []*ast.File
	for _, base := range []string{"server.go", "client.go"} {
		file, err := parser.ParseFile(fset, filepath.Join(path, base), nil, 0)
		if err != nil {
			log.Fatal(err)
		}
		parsed = append(parsed, file)
	}

	// Record types, definitions, and uses discovered during checking.
	info := types.Info{
		Types: map[ast.Expr]types.TypeAndValue{},
		Defs:  map[*ast.Ident]types.Object{},
		Uses:  map[*ast.Ident]types.Object{},
	}
	conf := types.Config{Importer: importer.Default()}
	pkg, err := conf.Check(path, fset, parsed, &info)
	if err != nil {
		log.Fatal(err)
	}
	// parsed[1] is the client.go AST.
	return fset, pkg, info, parsed[1]
}
// Collect type-checks _importer.Pkg with conf.Check and gathers type
// information into _importer.Info. This importer instance is installed as
// the checker's Importer so it resolves all inner (in-project) package
// imports itself; built-in packages without sources are handled by
// go/importer's default importer elsewhere.
//
// Returns the checked package and the FileSet used to parse it, and caches
// the package in _importer.packages under its import path.
func (_importer *CollectInfoImporter) Collect() (*types.Package, *token.FileSet, error) {
	var conf types.Config
	conf.Importer = _importer           // recurse through this importer for inner packages
	conf.Error = _importer.errorHandler // route type errors to the shared handler
	// Lazily create the package cache on first use.
	if _importer.packages == nil {
		_importer.packages = make(map[string]*types.Package)
	}
	var pkg *types.Package
	var err error
	var files []string
	// Enumerate the package's source files.
	// NOTE(review): presumably the second argument excludes test files —
	// confirm against fs.SourceFiles' definition.
	if files, err = fs.SourceFiles(_importer.Pkg, false); err != nil {
		return nil, nil, err
	}
	// Parse into the shared FileSet so positions stay comparable across packages.
	if _importer.fset, _importer.astFiles, err = doParseFiles(files, _importer.fset); err != nil {
		return nil, nil, err
	}
	//XXX: return positive result if check() returns error.
	// The Check error is deliberately discarded: a partially-checked package
	// is still useful to callers, per the XXX note above.
	pkg, _ = conf.Check(_importer.Pkg, _importer.fset, _importer.astFiles, _importer.Info)
	// if pkg, err = conf.Check(_importer.Pkg, _importer.fset, _importer.astFiles, _importer.Info); err != nil {
	// 	return pkg, _importer.fset, err
	// }
	_importer.packages[_importer.Pkg] = pkg
	util.Debug("package [%s] successfully parsed\n", pkg.Name())
	return pkg, _importer.fset, nil
}
func typeCheck(t *testing.T, filename string, gopath string) *types.Package { f, err := parser.ParseFile(fset, filename, nil, parser.AllErrors) if err != nil { t.Fatalf("%s: %v", filename, err) } pkgName := filepath.Base(filename) pkgName = strings.TrimSuffix(pkgName, ".go") // typecheck and collect typechecker errors var conf types.Config conf.Error = func(err error) { t.Error(err) } if gopath != "" { conf.Importer = importer.Default() oldDefault := build.Default defer func() { build.Default = oldDefault }() build.Default.GOPATH = gopath } pkg, err := conf.Check(pkgName, fset, []*ast.File{f}, nil) if err != nil { t.Fatal(err) } return pkg }
func main() { var pkg = flag.String("pkg", "", "help message for flagname") var dir = flag.String("dir", "", "help message for dir") var out = flag.String("o", "", "help message for out") flag.Parse() var pkgName = *pkg var absDirPath = *dir if !filepath.IsAbs(absDirPath) { absDirPath = filepath.Join(os.Getenv("PWD"), absDirPath) } fset := token.NewFileSet() pkgs, err := parser.ParseDir(fset, absDirPath, nil, parser.ParseComments|parser.AllErrors) info := types.Info{ Types: make(map[ast.Expr]types.TypeAndValue), Defs: make(map[*ast.Ident]types.Object), Uses: make(map[*ast.Ident]types.Object), } var conf types.Config conf.Importer = importer.Default() files := make([]*ast.File, 0, len(pkgs[pkgName].Files)) for _, file := range pkgs[pkgName].Files { files = append(files, file) } _, err = conf.Check(pkgName, fset, files, &info) if err != nil { fmt.Println("checked", err) return } b := builder{} for _, astFile := range pkgs[pkgName].Files { err = b.extractString(astFile, &info, fset) if err != nil { fmt.Println(err) } } err = b.write(*out) if err != nil { fmt.Errorf("got error during write (%v)\n", err) } }
// check type-checks the package. The package must be OK to proceed. func (f *File) Check() { // TODO typs := make(map[ast.Expr]types.TypeAndValue) defs := make(map[*ast.Ident]types.Object) uses := make(map[*ast.Ident]types.Object) config := types.Config{FakeImportC: true} config.Importer = importer.Default() info := &types.Info{Types: typs, Defs: defs, Uses: uses} astFiles := []*ast.File{f.ast} typesPkg, err := config.Check(fileDir(f), f.fs, astFiles, info) if err != nil { log.Fatalf("checking package: %s", err) } f.Info = info f.Pkg = typesPkg }
func (c *Suggester) analyzePackage(importer types.Importer, filename string, data []byte, cursor int) (*token.FileSet, token.Pos, *types.Package) { // If we're in trailing white space at the end of a scope, // sometimes go/types doesn't recognize that variables should // still be in scope there. filesemi := bytes.Join([][]byte{data[:cursor], []byte(";"), data[cursor:]}, nil) fset := token.NewFileSet() fileAST, err := parser.ParseFile(fset, filename, filesemi, parser.AllErrors) if err != nil && c.debug { logParseError("Error parsing input file (outer block)", err) } pos := fset.File(fileAST.Pos()).Pos(cursor) var otherASTs []*ast.File for _, otherName := range c.findOtherPackageFiles(filename, fileAST.Name.Name) { ast, err := parser.ParseFile(fset, otherName, nil, 0) if err != nil && c.debug { logParseError("Error parsing other file", err) } otherASTs = append(otherASTs, ast) } var cfg types.Config cfg.Importer = importer cfg.Error = func(err error) {} var info types.Info info.Scopes = make(map[ast.Node]*types.Scope) pkg, _ := cfg.Check("", fset, append(otherASTs, fileAST), &info) // Workaround golang.org/issue/15686. for node, scope := range info.Scopes { switch node := node.(type) { case *ast.RangeStmt: for _, name := range scope.Names() { setScopePos(scope.Lookup(name).(*types.Var), node.X.End()) } } } return fset, pos, pkg }
func (_importer *CollectInfoImporter) doImport(path string, collectInfo bool) (*types.Package, error) { var pkg *types.Package var err error var conf types.Config conf.Importer = _importer conf.Error = _importer.errorHandler files, err := fs.SourceFiles(path, false) if err != nil { return nil, err } fset, astFiles, err := doParseFiles(files, _importer.fset) if err != nil { return nil, err } if collectInfo { pkg, err = conf.Check(path, fset, astFiles, _importer.Info) } else { pkg, err = conf.Check(path, fset, astFiles, nil) } return pkg, err }
func twoPass(myPkg string, fs []*ast.File, target *ast.Ident) types.Object { // first pass to find out the package of target obj, otherPkg := findInMyPkg(myPkg, fs, target) if obj != nil { lg("find in mypkg") return obj } if otherPkg == "" { return nil } // second pass to find out the object of target in otherPkg cfg := types.Config{ Importer: newHybridImporter(otherPkg), Error: func(err error) {}, DisableUnusedImportCheck: true, } cfg.Importer = newHybridImporter(otherPkg) info := types.Info{ Uses: make(map[*ast.Ident]types.Object), } cfg.Check(myPkg, fset, fs, &info) return info.Uses[target] }
// do runs the unexport tool: it parses the package at the path given in
// args, builds "queries" describing exported identifiers, filters them
// against the user's regexp, and either prints what would change or writes
// the renames back to the source files (-w), optionally simulating (-s).
// Kept as a function (rather than inline in main) so it is testable.
func do(writer io.Writer, flags *flag.FlagSet, args []string) error {
	var pkgPath string
	var rematchUser string
	flags.Usage = usage
	writeToFilesFlag := flags.Bool("w", false, "write to files")
	simulateFlag := flags.Bool("s", false, "simulate updates")
	// NOTE(review): flags.Parse's error is ignored — acceptable only if the
	// FlagSet uses ExitOnError; confirm at the call site.
	flags.Parse(args)
	pkgPath, rematchUser = parseArgs(flags)
	packages, err := parser.ParseDir(fileSet, pkgPath, isGoFile, 0)
	if err != nil {
		return fmt.Errorf("parserDir: %s", err.Error())
	}
	// Defs/Uses are enough here: we only need identifier resolution.
	info := types.Info{
		Defs: make(map[*ast.Ident]types.Object),
		Uses: make(map[*ast.Ident]types.Object),
	}
	var files []*ast.File
	fileByPath := make(map[string]*os.File) // cache of open files, keyed by path
	for _, pkg := range packages {
		for filePath, file := range pkg.Files {
			// Open read-write: unexportOnFile later rewrites these files in place.
			fp, err := os.OpenFile(filePath, os.O_RDWR, 0777)
			if err != nil {
				return err
			}
			// Deliberately deferred to function exit: the handles must stay
			// open for the writer goroutine below.
			defer fp.Close()
			files = append(files, file)
			fileByPath[filePath] = fp
		}
	}
	// Type-check the package; errors are swallowed so a partially-broken
	// package can still be processed.
	var conf types.Config
	conf.Importer = importer.Default()
	conf.DisableUnusedImportCheck = true
	conf.Error = func(err error) {
		// omit errors; try to parse what we can
	}
	pkg, _ := conf.Check(pkgPath, fileSet, files, &info)
	// Queries for user matching (exported identifiers).
	queries := make(chan *queryIdent)
	// Queries for already-unexported fields, used to detect name collisions.
	chqueriesUnexported := make(chan *queryIdent)
	// NOTE(review): assumes buildQueryStrings closes its channel when done,
	// otherwise the range loops below never terminate — confirm.
	go buildQueryStrings(queries, pkg, &info, true)
	go buildQueryStrings(chqueriesUnexported, pkg, &info, false)
	// Index the unexported-name queries for O(1) collision lookups.
	queriesUnexported := make(map[string]*ast.Ident)
	for queryUnexport := range chqueriesUnexported {
		queriesUnexported[queryUnexport.Query] = queryUnexport.Ident
	}
	// Channel plumbing for the file-updating goroutine.
	doneSave := make(chan bool)
	unexportPositions := make(chan token.Position, 1)
	unexports := make(map[string]bool)
	// Start the updater that applies (or simulates) edits at sent positions.
	go unexportOnFile(doneSave, fileByPath, unexportPositions, *simulateFlag)
	// Compile the user's match pattern.
	var where func(string) bool
	if rematchUser == "" {
		// NOTE(review): "******" is not a valid regexp (leading '*' has no
		// operand), so MustCompile panics when no pattern is given — was
		// ".*" (match everything) intended?
		rematchUser = "******"
	}
	where = regexp.MustCompile(rematchUser).MatchString
	// Accumulated only for display mode (no -w).
	var generalQueries []*queryIdent
	for query := range queries {
		if query.Query == "" {
			continue
		}
		if where(query.Query) {
			// Collision check: the lowercased name must not already exist
			// and must not be a Go keyword.
			_, isKeyword := goKeywords[nameUnexported(query.Ident.Name)]
			if queriesUnexported[query.Query] != nil || isKeyword {
				if !*writeToFilesFlag {
					generalQueries = append(generalQueries, &queryIdent{query.Query + " !!collision", query.Ident})
				} else {
					fmt.Fprintln(writer, "sorry collision detected for", query.Query)
				}
				continue
			}
			pos := fileSet.Position(query.Ident.Pos())
			unexports[query.Ident.Name] = true
			if !*writeToFilesFlag {
				generalQueries = append(generalQueries, query)
				continue
			}
			fmt.Fprintln(writer, "Unexported", query.Ident.Name, "from", query.Query)
			unexportPositions <- pos
		}
	}
	// Second sweep: every use site of a renamed identifier must be updated
	// (or listed, in display mode).
	displayUses := make(map[string]map[string][]string)
	for ident := range info.Uses {
		if _, ok := unexports[ident.Name]; ok {
			pos := fileSet.Position(ident.Pos())
			if !*writeToFilesFlag {
				if _, ok := displayUses[pos.Filename]; !ok {
					displayUses[pos.Filename] = make(map[string][]string)
				}
				posLine := fmt.Sprintf("\t\t\t\t%s:%d:%d", pos.Filename, pos.Line, pos.Column)
				displayUses[pos.Filename][ident.Name] = append(displayUses[pos.Filename][ident.Name], posLine)
				continue
			}
			unexportPositions <- pos
		}
	}
	if !*writeToFilesFlag {
		// Display mode: sorted report, nothing written.
		sort.Sort(byQuery(generalQueries))
		prettyQueries(writer, generalQueries, displayUses)
		return nil
	}
	// Write mode: signal the updater we're done and wait for it to finish saving.
	close(unexportPositions)
	<-doneSave
	return nil
}
func main() { var pkgName = flag.String("pkg", "", "input file package name") var f = flag.String("f", "", "input file with function definitions") var fn = flag.String("fn", "", "function name") flag.Parse() file := os.ExpandEnv("$GOFILE") log.SetFlags(log.Lshortfile) if *f != "" { file = *f } if *fn == "" { log.Fatalf("Error no function name(s) provided") } if *pkgName == "" { *pkgName = filePath(file) } var conf types.Config conf.Importer = importer.Default() fset := token.NewFileSet() fileAst, err := parser.ParseFile(fset, file, nil, parser.AllErrors) fileTok := fset.File(fileAst.Pos()) if err != nil { fmt.Printf("Error parsing %v, error message: %v\n", file, err) return } files := []*ast.File{fileAst} info := types.Info{ Types: make(map[ast.Expr]types.TypeAndValue), Defs: make(map[*ast.Ident]types.Object), Uses: make(map[*ast.Ident]types.Object), } pkg, err := conf.Check(*pkgName, fset, files, &info) if err != nil { fmt.Printf("Error type checking %v, error message: %v\n", file, err) return } fmt.Println("pkg: ", pkg) fmt.Println("pkg.Complete:", pkg.Complete()) scope := pkg.Scope() obj := scope.Lookup(*fn) if obj == nil { fmt.Println("Couldnt lookup function: ", *fn) return } function, ok := obj.(*types.Func) if !ok { fmt.Printf("%v is a %v, not a function\n", *fn, obj.Type().String()) } var fnDecl *ast.FuncDecl for _, decl := range fileAst.Decls { if fdecl, ok := decl.(*ast.FuncDecl); ok { if fdecl.Name.Name == *fn { fnDecl = fdecl break } } } if fnDecl == nil { fmt.Println("couldn't find function: ", *fn) return } ssafn, ok := gcssa.BuildSSA(fileTok, fileAst, fnDecl, function, &info) if ssafn == nil || !ok { fmt.Println("Error building SSA form") } else { fmt.Println("ssa:\n", ssafn) } if ssafn != nil && ok { fnProgs, ok := gcssa.GenSSA(ssafn) if !ok { fmt.Println("Error creating assembly for SSA") } else { fmt.Printf("ssa assembly:\n%s", gcssa.Assemble(fnProgs)) } } }
// ParseSSA parses the function, fn, which must be in ssa form and returns // the corresponding ssa.Func func ParseSSA(file, pkgName, fn string) (ssafn *ssa.Func, usessa bool) { var conf types.Config conf.Importer = importer.Default() conf.Error = func(err error) { fmt.Println("terror:", err) } fset := token.NewFileSet() fileAst, err := parser.ParseFile(fset, file, nil, parser.AllErrors) fileTok := fset.File(fileAst.Pos()) var terrors string if err != nil { fmt.Printf("Error parsing %v, error message: %v\n", file, err) terrors += fmt.Sprintf("err: %v\n", err) return } ast.FilterFile(fileAst, func(declName string) bool { return declName == fn }) var fnDcl *ast.FuncDecl for _, decl := range fileAst.Decls { if fdecl, ok := decl.(*ast.FuncDecl); ok { fnDcl = fdecl } } if fnDcl == nil { fmt.Printf("Error \"%v\" not found", fn) return } fnSSA := fnSSA{decl: fnDcl, removedPhi: []phi{}, vars: []ssaVar{}} if !fnSSA.removePhi() { fmt.Printf("Error rewriting phi vars") return } if !fnSSA.rewriteAssign() { fmt.Printf("Error rewriting assignments") return } if !fnSSA.restorePhi() { fmt.Printf("Error rewriting phi vars") return } files := []*ast.File{fileAst} info := types.Info{ Types: make(map[ast.Expr]types.TypeAndValue), Defs: make(map[*ast.Ident]types.Object), Uses: make(map[*ast.Ident]types.Object), } pkg, err := conf.Check(pkgName, fset, files, &info) if err != nil { if terrors != fmt.Sprintf("err: %v\n", err) { fmt.Printf("Type error (%v) message: %v\n", file, err) return } } fmt.Println("pkg: ", pkg) fmt.Println("pkg.Complete:", pkg.Complete()) scope := pkg.Scope() obj := scope.Lookup(fn) if obj == nil { fmt.Println("Couldnt lookup function: ", fn) return } function, ok := obj.(*types.Func) if !ok { fmt.Printf("%v is a %v, not a function\n", fn, obj.Type().String()) } var fnDecl *ast.FuncDecl for _, decl := range fileAst.Decls { if fdecl, ok := decl.(*ast.FuncDecl); ok { if fdecl.Name.Name == fn { fnDecl = fdecl break } } } if fnDecl == nil { fmt.Println("couldn't find 
function: ", fn) return } ssafn, ok = parseSSA(fileTok, fileAst, fnDecl, function, &info) if ssafn == nil || !ok { fmt.Println("Error building SSA form") } else { fmt.Println("ssa:\n", ssafn) } if ssafn != nil && ok { fmt.Println("ssafn:", ssafn) } return ssafn, ok }