// UsesImport reports whether a given import is used.
func UsesImport(f *ast.File, path string) (used bool) {
	spec := importSpec(f, path)
	if spec == nil {
		return
	}

	name := spec.Name.String()
	switch name {
	case "<nil>":
		// If the package name is not explicitly specified,
		// make an educated guess. This is not guaranteed to be correct.
		lastSlash := strings.LastIndex(path, "/")
		if lastSlash == -1 {
			name = path
		} else {
			name = path[lastSlash+1:]
		}
	case "_", ".":
		// Not sure if this import is used - err on the side of caution.
		return true
	}

	ast.Walk(visitFn(func(n ast.Node) {
		sel, ok := n.(*ast.SelectorExpr)
		if ok && isTopName(sel.X, name) {
			used = true
		}
	}), f)

	return
}
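
// Illustrative only: a minimal sketch of how UsesImport might be driven,
// assuming "go/parser" and "go/token" are imported alongside "go/ast".
// The filename and source text below are hypothetical.
func exampleUsesImport() bool {
	const src = `package main

import "fmt"

func main() { fmt.Println("hi") }
`
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "example.go", src, 0)
	if err != nil {
		return false
	}
	// fmt.Println references the "fmt" import, so this reports true.
	return UsesImport(f, "fmt")
}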
// idents is an iterator that returns all idents in f via the result channel.
func idents(f *ast.File) <-chan *ast.Ident {
	v := make(visitor)
	go func() {
		ast.Walk(v, f)
		close(v)
	}()
	return v
}
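
// Illustrative only: a sketch of consuming the idents iterator, assuming
// "fmt" is imported and that the visitor channel type above yields every
// *ast.Ident it encounters during the walk.
func printIdents(f *ast.File) {
	for id := range idents(f) {
		fmt.Println(id.Name, id.Pos())
	}
}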
func simplify(f *ast.File) {
	var s simplifier

	// determine if f contains dot imports
	for _, imp := range f.Imports {
		if imp.Name != nil && imp.Name.Name == "." {
			s.hasDotImport = true
			break
		}
	}

	// remove empty declarations such as "const ()", etc
	removeEmptyDeclGroups(f)

	ast.Walk(&s, f)
}
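
// Illustrative only: a sketch of running simplify over a parsed file and
// printing the result, assuming "go/parser", "go/printer", "go/token", and
// "os" are imported. The filename is hypothetical.
func simplifyFile(filename string) error {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, filename, nil, parser.ParseComments)
	if err != nil {
		return err
	}
	simplify(f)
	return printer.Fprint(os.Stdout, fset, f)
}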
func fixImports(fset *token.FileSet, f *ast.File, filename string) (added []string, err error) {
	// refs are a set of possible package references currently unsatisfied by imports.
	// first key: either base package (e.g. "fmt") or renamed package
	// second key: referenced package symbol (e.g. "Println")
	refs := make(map[string]map[string]bool)

	// decls are the current package imports. key is base package or renamed package.
	decls := make(map[string]*ast.ImportSpec)

	// collect potential uses of packages.
	var visitor visitFn
	visitor = visitFn(func(node ast.Node) ast.Visitor {
		if node == nil {
			return visitor
		}
		switch v := node.(type) {
		case *ast.ImportSpec:
			if v.Name != nil {
				decls[v.Name.Name] = v
			} else {
				local := importPathToName(strings.Trim(v.Path.Value, `"`))
				decls[local] = v
			}
		case *ast.SelectorExpr:
			xident, ok := v.X.(*ast.Ident)
			if !ok {
				break
			}
			if xident.Obj != nil {
				// if the parser can resolve it, it's not a package ref
				break
			}
			pkgName := xident.Name
			if refs[pkgName] == nil {
				refs[pkgName] = make(map[string]bool)
			}
			if decls[pkgName] == nil {
				refs[pkgName][v.Sel.Name] = true
			}
		}
		return visitor
	})
	ast.Walk(visitor, f)

	// Nil out any unused ImportSpecs, to be removed in following passes
	unusedImport := map[string]string{}
	for pkg, is := range decls {
		if refs[pkg] == nil && pkg != "_" && pkg != "." {
			name := ""
			if is.Name != nil {
				name = is.Name.Name
			}
			unusedImport[strings.Trim(is.Path.Value, `"`)] = name
		}
	}
	for ipath, name := range unusedImport {
		if ipath == "C" {
			// Don't remove cgo stuff.
			continue
		}
		astutil.DeleteNamedImport(fset, f, name, ipath)
	}

	// Search for imports matching potential package references.
	searches := 0
	type result struct {
		ipath string
		name  string
		err   error
	}
	results := make(chan result)
	for pkgName, symbols := range refs {
		if len(symbols) == 0 {
			continue // skip over packages already imported
		}
		go func(pkgName string, symbols map[string]bool) {
			ipath, rename, err := findImport(pkgName, symbols, filename)
			r := result{ipath: ipath, err: err}
			if rename {
				r.name = pkgName
			}
			results <- r
		}(pkgName, symbols)
		searches++
	}
	for i := 0; i < searches; i++ {
		result := <-results
		if result.err != nil {
			return nil, result.err
		}
		if result.ipath != "" {
			if result.name != "" {
				astutil.AddNamedImport(fset, f, result.name, result.ipath)
			} else {
				astutil.AddImport(fset, f, result.ipath)
			}
			added = append(added, result.ipath)
		}
	}
	return added, nil
}
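
// Illustrative only: a sketch of driving fixImports on a source buffer,
// assuming "go/parser" and "bytes" are imported. The filename is passed
// through, presumably so findImport can resolve packages relative to the
// file being fixed.
func fixImportsInSource(filename string, src []byte) ([]byte, error) {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, filename, src, parser.ParseComments)
	if err != nil {
		return nil, err
	}
	if _, err := fixImports(fset, f, filename); err != nil {
		return nil, err
	}
	var buf bytes.Buffer
	if err := printer.Fprint(&buf, fset, f); err != nil {
		return nil, err
	}
	return buf.Bytes(), nil
}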
func (s *simplifier) Visit(node ast.Node) ast.Visitor {
	switch n := node.(type) {
	case *ast.CompositeLit:
		// array, slice, and map composite literals may be simplified
		outer := n
		var eltType ast.Expr
		switch typ := outer.Type.(type) {
		case *ast.ArrayType:
			eltType = typ.Elt
		case *ast.MapType:
			eltType = typ.Value
		}

		if eltType != nil {
			typ := reflect.ValueOf(eltType)
			for i, x := range outer.Elts {
				px := &outer.Elts[i]
				// look at value of indexed/named elements
				if t, ok := x.(*ast.KeyValueExpr); ok {
					x = t.Value
					px = &t.Value
				}
				ast.Walk(s, x) // simplify x
				// if the element is a composite literal and its literal type
				// matches the outer literal's element type exactly, the inner
				// literal type may be omitted
				if inner, ok := x.(*ast.CompositeLit); ok {
					if match(nil, typ, reflect.ValueOf(inner.Type)) {
						inner.Type = nil
					}
				}
				// if the outer literal's element type is a pointer type *T
				// and the element is & of a composite literal of type T,
				// the inner &T may be omitted.
				if ptr, ok := eltType.(*ast.StarExpr); ok {
					if addr, ok := x.(*ast.UnaryExpr); ok && addr.Op == token.AND {
						if inner, ok := addr.X.(*ast.CompositeLit); ok {
							if match(nil, reflect.ValueOf(ptr.X), reflect.ValueOf(inner.Type)) {
								inner.Type = nil // drop T
								*px = inner      // drop &
							}
						}
					}
				}
			}

			// node was simplified - stop walk (there are no subnodes to simplify)
			return nil
		}

	case *ast.SliceExpr:
		// a slice expression of the form: s[a:len(s)]
		// can be simplified to: s[a:]
		// if s is "simple enough" (for now we only accept identifiers)
		if n.Max != nil || s.hasDotImport {
			// - 3-index slices always require the 2nd and 3rd index
			// - if dot imports are present, we cannot be certain that an
			//   unresolved "len" identifier refers to the predefined len()
			break
		}
		if s, _ := n.X.(*ast.Ident); s != nil && s.Obj != nil {
			// the array/slice object is a single, resolved identifier
			if call, _ := n.High.(*ast.CallExpr); call != nil && len(call.Args) == 1 && !call.Ellipsis.IsValid() {
				// the high expression is a function call with a single argument
				if fun, _ := call.Fun.(*ast.Ident); fun != nil && fun.Name == "len" && fun.Obj == nil {
					// the function called is "len" and it is not locally defined; and
					// because we don't have dot imports, it must be the predefined len()
					if arg, _ := call.Args[0].(*ast.Ident); arg != nil && arg.Obj == s.Obj {
						// the len argument is the array/slice object
						n.High = nil
					}
				}
			}
		}
		// Note: We could also simplify slice expressions of the form s[0:b] to s[:b]
		//       but we leave them as is since sometimes we want to be very explicit
		//       about the lower bound.
		// An example where the 0 helps:
		//       x, y, z := b[0:2], b[2:4], b[4:6]
		// An example where it does not:
		//       x, y := b[:n], b[n:]

	case *ast.RangeStmt:
		// - a range of the form: for x, _ = range v {...}
		//   can be simplified to: for x = range v {...}
		// - a range of the form: for _ = range v {...}
		//   can be simplified to: for range v {...}
		if isBlank(n.Value) {
			n.Value = nil
		}
		if isBlank(n.Key) && n.Value == nil {
			n.Key = nil
		}
	}

	return s
}
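
// Illustrative only: the rewrites Visit performs, shown on hypothetical
// input (left) and its simplified form (right).
//
//	[]T{T{1}, T{2}}           ->  []T{{1}, {2}}         // inner literal type dropped
//	[]*T{&T{1}, &T{2}}        ->  []*T{{1}, {2}}        // inner &T dropped
//	s[a:len(s)]               ->  s[a:]                 // redundant len(s) high bound dropped
//	for x, _ = range v {...}  ->  for x = range v {...} // blank range value dropped
//	for _ = range v {...}     ->  for range v {...}     // blank range key dropped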
func fileWithAnnotationComments(file *ast.File, fset, oldFset *token.FileSet, src []byte) ([]byte, *ast.File, error) {
	// TODO: So this is an extremely hacky way of doing this. We're going to
	// add the comments directly to the source comments, as text, and then
	// we're going to re-parse it. This is because I tried manipulating the
	// AST, adding the comments there and shifting the nodes' positions, but
	// doing that right is very very convoluted; you need to be tracking all
	// the time where you are, where you _were_, figure out where's a line
	// break, etc. So, well, this will do for now.
	var err error
	var dstChunks [][]byte
	var lastChunkEnd int
	skipNextSpec := false
	addDoc := func(node ast.Node, name *ast.Ident, typ ast.Expr) {
		if typ == nil {
			return
		}
		if name != nil && len(name.Name) > 0 {
			c := name.Name[0]
			if !(c >= 'A' && c <= 'Z') {
				return
			}
		}
		buf := &bytes.Buffer{}
		err = printer.Fprint(buf, token.NewFileSet(), typ)
		if err != nil {
			return
		}
		pos := int(node.Pos()) - oldFset.File(file.Pos()).Base()
		var space []byte
		for i := pos - 1; i >= 0 && (src[i] == ' ' || src[i] == '\t'); i-- {
			space = append([]byte{src[i]}, space...)
		}
		text := append([]byte("// For SGo: "+buf.String()+"\n"), space...)
		dstChunks = append(dstChunks, src[lastChunkEnd:pos], text)
		lastChunkEnd = pos
	}
	var visitor visitorFunc
	visitor = visitorFunc(func(node ast.Node) (w ast.Visitor) {
		var typ ast.Expr
		var name *ast.Ident
		switch node := node.(type) {
		case *ast.FuncDecl:
			typ = node.Type
			name = node.Name
		case *ast.GenDecl:
			if node.Lparen != 0 || node.Tok == token.IMPORT || node.Tok == token.CONST {
				return visitor
			}
			switch spec := node.Specs[0].(type) {
			case *ast.TypeSpec:
				skipNextSpec = true
				typ = spec.Type
				name = spec.Name
			case *ast.ValueSpec:
				skipNextSpec = true
				typ = spec.Type
				if len(spec.Names.List) > 0 {
					name = spec.Names.List[0]
				}
			}
			switch typ.(type) {
			case *ast.InterfaceType, *ast.StructType:
				return visitor
			}
		case *ast.InterfaceType:
			for i := 0; i < len(node.Methods.List); i++ {
				item := node.Methods.List[i]
				if len(item.Names) > 0 {
					name = item.Names[0]
				}
				addDoc(item, name, item.Type)
			}
			return visitor
		case *ast.StructType:
			for i := 0; i < len(node.Fields.List); i++ {
				item := node.Fields.List[i]
				if len(item.Names) > 0 {
					name = item.Names[0]
				}
				addDoc(item, name, item.Type)
			}
			return visitor
		case *ast.TypeSpec:
			if skipNextSpec {
				skipNextSpec = false
				return visitor
			}
			typ = node.Type
			name = node.Name
		case *ast.ValueSpec:
			if skipNextSpec {
				skipNextSpec = false
				return visitor
			}
			typ = node.Type
			if len(node.Names.List) > 0 {
				name = node.Names.List[0]
			}
		default:
			return visitor
		}
		addDoc(node, name, typ)
		return visitor
	})
	ast.Walk(visitor, file)
	if err != nil {
		return nil, nil, err
	}

	dst := append(
		[]byte("// Autogenerated by SGo revision: "+SGoRevision+"\n// DO NOT EDIT!\n\n"),
		bytes.Join(append(dstChunks, src[lastChunkEnd:]), nil)...)
	dstFile, err := parser.ParseFile(fset, file.Name.Name, dst, parser.ParseComments)
	return dst, dstFile, err
}
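
// Illustrative only: a sketch of how fileWithAnnotationComments might be
// invoked. oldFset is the FileSet the input file was parsed with; fset is a
// fresh FileSet that receives the re-parsed, annotated output. The wrapper
// name is hypothetical.
func annotateForSGo(oldFset *token.FileSet, file *ast.File, src []byte) ([]byte, *ast.File, error) {
	fset := token.NewFileSet()
	return fileWithAnnotationComments(file, fset, oldFset, src)
}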