//Import parses the package or returns it from cache if it was //already imported. Also, it collects information if path is under //Pkg package func (_importer *CollectInfoImporter) Import(path string) (*types.Package, error) { if _importer.packages[path] != nil { return _importer.packages[path], nil } util.Info("importing package [%s]", path) var pkg *types.Package var err error if strings.Contains(path, _importer.Pkg) { if pkg, err = _importer.doImport(path, true); err != nil { return pkg, err } } pkg, err = defaultImporter.Import(path) if err != nil { pkg, err = _importer.doImport(path, true) } if pkg != nil { _importer.packages[path] = pkg } util.Info("package [%s] imported: [%v] [%v]", path, pkg, err) return pkg, err }
func getSourceFiles(pkgPath string, deep bool) ([]string, error) { pkgDir, err := os.Open(pkgPath) if err != nil { util.Err("error while opening package at path [%s]\n%v", pkgPath, err) return nil, err } filesInfos, err := pkgDir.Readdir(0) if err != nil { util.Err("error reading opening package at path [%s]\n%v", pkgPath, err) return nil, err } var files []string for _, f := range filesInfos { if f.IsDir() && deep && isValidSourceDir(f.Name()) { util.Debug("append folder [%s]", f.Name()) dirFiles, err := getSourceFiles(pkgPath+"/"+f.Name(), true) if err != nil { return dirFiles, err } files = append(files, dirFiles...) } else if isValidSourceFile(f.Name()) { util.Debug("append file [%s]", f.Name()) if strings.HasSuffix(f.Name(), "_test.go") { files = append(files, pkgPath+"/"+f.Name()) } else { files = append([]string{pkgPath + "/" + f.Name()}, files...) } } } util.Info("FILES [%v]", files) return files, nil }
//ExampleGetUnusedDefitions shows how to use gounexport package //to find all definition that not used in a package. As the result, //all unused definitions will be printed in console. func Example() { //package to check pkg := "github.com/dooman87/gounexport" //Regular expression to exclude //tests methods from the result. regex, _ := regexp.Compile("Test*") excludes := []*regexp.Regexp{regex} //Internal info structure that required for //ParsePackage call info := types.Info{ Types: make(map[ast.Expr]types.TypeAndValue), Defs: make(map[*ast.Ident]types.Object), Uses: make(map[*ast.Ident]types.Object), } //Parsing package to fill info struct and //get file set. _, fset, err := gounexport.ParsePackage(pkg, &info) if err != nil { util.Err("error while parsing package %v", err) } //Analyze info and extract all definitions with usages. defs := gounexport.GetDefinitions(&info, fset) //Find all definitions that not used unusedDefs := gounexport.FindUnusedDefinitions(pkg, defs, excludes) //Print all unused definition to stdout. for _, d := range unusedDefs { util.Info("DEFINITION %s", d.Name) } }
func doParseFiles(filePathes []string, fset *token.FileSet) (*token.FileSet, []*ast.File, error) { if fset == nil { fset = token.NewFileSet() } util.Info("parsing files %v", filePathes) astFiles := make([]*ast.File, 0, len(filePathes)) for _, f := range filePathes { //XXX: Ignoring files with packages ends with _test. //XXX: Doing that because getting error in check() //XXX: cause source file is still going to current //XXX: packages. Need to analyze package before //XXX: and check both packages separately. tempFset := token.NewFileSet() astFile, err := parser.ParseFile(tempFset, f, nil, 0) if !strings.HasSuffix(astFile.Name.Name, "_test") { if err != nil { return nil, nil, err } astFile, _ := parser.ParseFile(fset, f, nil, 0) astFiles = append(astFiles, astFile) } } iterateFunc := func(f *token.File) bool { util.Debug("\t%s", f.Name()) return true } fset.Iterate(iterateFunc) return fset, astFiles, nil }
func logDefinition(def *Definition, obj types.Object, ident *ast.Ident, ctx *getDefinitionsContext) { if ident == nil { return } util.Info("definition [%s] [%s], exported [%v], position %s", ident.Name, def.TypeOf.String(), obj.Exported(), posToStr(ctx.fset, ident.Pos())) switch obj.(type) { case *types.TypeName: t := obj.(*types.TypeName) underlyingType := t.Type().Underlying() util.Info("\t [%s] [%s] [%s]", t.Type().String(), t.Type().Underlying().String(), reflect.TypeOf(t.Type().Underlying()).String()) switch underlyingType.(type) { case *types.Struct: s := underlyingType.(*types.Struct) util.Info("\t\t[%d] fields", s.NumFields()) for i := 0; i < s.NumFields(); i++ { field := s.Field(i) util.Info("\t\t\t[%s]", posToStr(ctx.fset, field.Pos())) } } case *types.Func: f := obj.(*types.Func) underlyingType := f.Type().Underlying() util.Info("\t full name: [%s] [%s] [%s]", f.FullName(), underlyingType.String(), reflect.TypeOf(underlyingType)) } util.Info("\tinterfaces [%d]", len(def.InterfacesDefs)) for _, i := range def.InterfacesDefs { util.Info("\tinterface [%s]", i.Name) } }
func TestGetDefinitionsToHideThis(t *testing.T) { pkg := "github.com/dooman87/gounexport" regex, _ := regexp.Compile("Test*") excludes := []*regexp.Regexp{regex} _, fset, info := parsePackage(pkg, t) defs := gounexport.GetDefinitions(info, fset) unusedDefs := gounexport.FindUnusedDefinitions(pkg, defs, excludes) log.Print("<<<<<<<<<<<<<<<<<<<<<<<<<<<") for _, d := range unusedDefs { util.Info("DEFINITION %s", d.Name) util.Info("\t%s:%d:%d", d.File, d.Line, d.Col) } log.Print("<<<<<<<<<<<<<<<<<<<<<<<<<<<") if len(unusedDefs) != 22 { t.Errorf("expected %d unused exported definitions, but found %d", 22, len(unusedDefs)) } }
func isExcluded(def *Definition, excludes []*regexp.Regexp) bool { if excludes == nil || len(excludes) == 0 { return false } for _, exc := range excludes { if exc.MatchString(def.Name) { util.Info("definition [%s] excluded, because matched [%s]", def.Name, exc.String()) return true } } return false }
//FindUnusedDefinitions returns list of definitions that could be //moved to private e.g. renamed. Criteria for renaming: // - Definition should be exported // - Definition should be in target package // - Definition is not implementing external interfaces // - Definition is not used in external packages func FindUnusedDefinitions(pkg string, defs map[string]*Definition, excludes []*regexp.Regexp) []*Definition { var unused []*Definition for _, def := range defs { if !def.Exported { continue } if strings.HasPrefix(def.Name, pkg) && !isExcluded(def, excludes) && !isUsed(def) { util.Info("adding [%s] to unexport list", def.Name) unused = append(unused, def) } } return unused }
//ParsePackage parses package and filling info structure.
//It's filling info about all internal packages even if they
//are not imported in the root package.
func ParsePackage(pkgName string, info *types.Info) (*types.Package, *token.FileSet, error) {
	collectImporter := new(importer.CollectInfoImporter)
	collectImporter.Info = info
	var resultPkg *types.Package
	var resultFset *token.FileSet
	// Packages already run through the importer; guarantees the loop
	// below terminates even when packages reference each other.
	parsedPackages := make(map[string]bool)
	notParsedPackage := pkgName
	for len(notParsedPackage) > 0 {
		collectImporter.Pkg = notParsedPackage
		pkg, fset, err := collectImporter.Collect()
		if err != nil {
			return nil, nil, err
		}
		//Filling results only from first package
		//that was passed as argument to function
		if resultPkg == nil {
			resultPkg = pkg
			resultFset = fset
		}
		parsedPackages[notParsedPackage] = true
		//Searching for a new package that was not parsed before
		notParsedPackage = ""
		// NOTE(review): unused sources are always checked against the
		// root pkgName, not against the package just collected —
		// presumably intentional so internal packages keep being
		// discovered from the root; confirm against
		// fs.GetUnusedSources semantics.
		files, err := fs.GetUnusedSources(pkgName, fset)
		if err != nil {
			return nil, nil, err
		}
		for _, f := range files {
			newNotParsedPackage := fs.GetPackagePath(f)
			if !parsedPackages[newNotParsedPackage] {
				// Pick the first not-yet-parsed package and go around
				// the outer loop again.
				notParsedPackage = newNotParsedPackage
				break
			} else {
				util.Info("package %s has been already parsed, however %s file is still unused", newNotParsedPackage, f)
			}
		}
	}
	return resultPkg, resultFset, nil
}
func logType(t types.TypeAndValue) { if t.Type != nil { util.Debug("type [%s] [%s] [%s] [%s]", reflect.TypeOf(t.Type), t.Type.String(), reflect.TypeOf(t.Type.Underlying()), t.Type.Underlying().String()) switch t.Type.(type) { case *types.Signature: s := t.Type.(*types.Signature) if s.Recv() != nil { util.Info("\t\t[%s] [%s]", s.Recv(), s.Recv().Type().String()) } if tuple := s.Params(); tuple != nil { for i := 0; i < tuple.Len(); i++ { v := tuple.At(i) util.Debug("\t\t%s", v.Name()) if types.IsInterface(v.Type()) { util.Debug("\t\t\t<------interface") } } } } } }
//Unexport hides definition by changing first letter //to lower case. It won't rename if there is already existing //unexported symbol with the same name. //renameFunc is a func that accepts four arguments: full path to file, //offset in a file to replace, original string, string to replace. It will //be called when renaming is possible. func Unexport(def *Definition, allDefs map[string]*Definition, renameFunc func(string, int, string, string) error) error { util.Info("unexporting %s in %s:%d:%d", def.SimpleName, def.File, def.Line, def.Col) newName := strings.ToLower(def.SimpleName[0:1]) + def.SimpleName[1:] //Searching for conflict lastIdx := strings.LastIndex(def.Name, def.SimpleName) newFullName := def.Name[0:lastIdx] + newName + def.Name[lastIdx+len(newName):] if allDefs[newFullName] != nil { return fmt.Errorf("can't unexport %s because it conflicts with existing member", def.Name) } //rename definitions and usages err := renameFunc(def.File, def.Offset, def.SimpleName, newName) for _, u := range def.Usages { if err != nil { break } err = renameFunc(u.Pos.Filename, u.Pos.Offset, def.SimpleName, newName) } return err }
// logInterfaces dumps the name of every collected interface
// definition held by the context at info level.
func logInterfaces(ctx *getDefinitionsContext) {
	for idx := range ctx.interfaces {
		util.Info("interface [%s]", ctx.interfaces[idx].def.Name)
	}
}
// logInterfaces prints each interface definition known to the
// context at info level.
func logInterfaces(ctx *context) {
	for _, iface := range ctx.interfaces {
		util.Info("interface [%s]", iface.def.Name)
	}
}