// addFile adds the AST for a source file to the docReader.
// Adding the same AST multiple times is a no-op.
//
// NOTE(review): this uses pre-Go1 APIs (linked-list src.Comments with
// .Next, vector-style doc.bugs.Push, regexp ExecuteString) — presumably
// written for a very old Go release; verify against the toolchain in use.
func (doc *docReader) addFile(src *ast.File) {
	// add package documentation
	if src.Doc != nil {
		// TODO(gri) This won't do the right thing if there is more
		//           than one file with package comments. Consider
		//           using ast.MergePackageFiles which handles these
		//           comments correctly (but currently loses BUG(...)
		//           comments).
		doc.doc = src.Doc
		src.Doc = nil // doc consumed - remove from ast.File node
	}
	// add all declarations
	for _, decl := range src.Decls {
		doc.addDecl(decl)
	}
	// collect BUG(...) comments; only the first comment of each group
	// is inspected for a BUG marker
	for c := src.Comments; c != nil; c = c.Next {
		text := c.List[0].Text
		cstr := string(text)
		if m := bug_markers.ExecuteString(cstr); len(m) > 0 {
			// found a BUG comment; maybe empty
			if bstr := cstr[m[1]:]; bug_content.MatchString(bstr) {
				// non-empty BUG comment; collect comment without BUG prefix
				list := copyCommentList(c.List)
				list[0].Text = text[m[1]:]
				doc.bugs.Push(&ast.CommentGroup{list, nil})
			}
		}
	}
	src.Comments = nil // consumed unassociated comments - remove from ast.File node
}
// addFile adds the AST for a source file to the docReader. // Adding the same AST multiple times is a no-op. // func (doc *docReader) addFile(src *ast.File) { // add package documentation if src.Doc != nil { doc.addDoc(src.Doc) src.Doc = nil // doc consumed - remove from ast.File node } // add all declarations for _, decl := range src.Decls { doc.addDecl(decl) } // collect BUG(...) comments for _, c := range src.Comments { text := c.List[0].Text if m := bug_markers.FindStringIndex(text); m != nil { // found a BUG comment; maybe empty if btxt := text[m[1]:]; bug_content.MatchString(btxt) { // non-empty BUG comment; collect comment without BUG prefix list := copyCommentList(c.List) list[0].Text = text[m[1]:] doc.bugs = append(doc.bugs, &ast.CommentGroup{list}) } } } src.Comments = nil // consumed unassociated comments - remove from ast.File node }
// readFile adds the AST for a source file to the reader. // func (r *reader) readFile(src *ast.File) { // add package documentation if src.Doc != nil { r.readDoc(src.Doc) src.Doc = nil // doc consumed - remove from AST } // add all declarations for _, decl := range src.Decls { switch d := decl.(type) { case *ast.GenDecl: switch d.Tok { case token.IMPORT: // imports are handled individually for _, spec := range d.Specs { if s, ok := spec.(*ast.ImportSpec); ok { if import_, err := strconv.Unquote(s.Path.Value); err == nil { r.imports[import_] = 1 } } } case token.CONST, token.VAR: // constants and variables are always handled as a group r.readValue(d) case token.TYPE: // types are handled individually for _, spec := range d.Specs { if s, ok := spec.(*ast.TypeSpec); ok { // use an individual (possibly fake) declaration // for each type; this also ensures that each type // gets to (re-)use the declaration documentation // if there's none associated with the spec itself fake := &ast.GenDecl{ d.Doc, d.Pos(), token.TYPE, token.NoPos, []ast.Spec{s}, token.NoPos, } r.readType(fake, s) } } } case *ast.FuncDecl: r.readFunc(d) } } // collect BUG(...) comments for _, c := range src.Comments { text := c.List[0].Text if m := bug_markers.FindStringIndex(text); m != nil { // found a BUG comment; maybe empty if btxt := text[m[1]:]; bug_content.MatchString(btxt) { // non-empty BUG comment; collect comment without BUG prefix list := append([]*ast.Comment(nil), c.List...) // make a copy list[0].Text = text[m[1]:] r.bugs = append(r.bugs, (&ast.CommentGroup{list}).Text()) } } } src.Comments = nil // consumed unassociated comments - remove from AST }
// Inline replaces each instance of identifier k with v.Ident in ast.File f,
// for k, v := range m.
// For all inlines that were triggered it also adds imports from v.Imports to f.
// In addition, it removes top level type declarations of the form
//	type k ...
// for all k in m.
//
// Every k in m should be a valid identifier.
// Every v.Ident should be a valid expression.
func Inline(fset *token.FileSet, f *ast.File, m map[string]Target) error {
	// Build the inline map: identifier name -> replacement node.
	im := map[string]reflect.Value{}
	for k, v := range m {
		expr, err := parser.ParseExpr(k)
		if err != nil {
			return fmt.Errorf("failed to parse `%s`: %s", k, err)
		}
		if _, ok := expr.(*ast.Ident); !ok {
			return fmt.Errorf("expected identifier, got %s which is %T", k, expr)
		}
		expr, err = parser.ParseExpr(v.Ident)
		if err != nil {
			return fmt.Errorf("failed to parse `%s`: %s", v.Ident, err)
		}
		s := v.Ident
		// Parenthesize pointer types so the substituted text stays
		// syntactically valid wherever the identifier appeared.
		if _, ok := expr.(*ast.StarExpr); ok {
			s = fmt.Sprintf("(%s)", s)
		}
		// NOTE(review): the replacement is stored as an ast.Ident whose
		// Name holds a whole expression string; presumably the printer
		// downstream emits it verbatim — confirm against inline().
		im[k] = reflect.ValueOf(ast.Ident{Name: s})
	}
	// Filter `type XXX ...` declarations out if we are inlining XXX.
	cmap := ast.NewCommentMap(fset, f, f.Comments)
	to := 0
	for _, d := range f.Decls {
		skip := false
		if t, ok := d.(*ast.GenDecl); ok {
			for _, s := range t.Specs {
				ts, ok := s.(*ast.TypeSpec)
				if !ok {
					continue
				}
				if _, ok = im[ts.Name.String()]; ok {
					skip = true
				}
			}
		}
		if !skip {
			// compact the kept declarations in place
			f.Decls[to] = d
			to++
		}
	}
	if to != len(f.Decls) {
		f.Decls = f.Decls[:to]
		// Remove comments for the declarations that were filtered out.
		f.Comments = cmap.Filter(f).Comments()
	}
	// Add imports for the inlines that were triggered.
	for k := range inline(im, f) {
		for _, imp := range m[k].Imports {
			astutil.AddImport(fset, f, imp)
		}
	}
	return nil
}
// RewriteImportComments rewrites package import comments (https://golang.org/s/go14customimport) func RewriteImportComments(f *ast.File, fset *token.FileSet, prefix string, remove bool) (changed bool, err error) { pkgpos := fset.Position(f.Package) // Print the AST. // ast.Print(fset, f) newcommentgroups := make([]*ast.CommentGroup, 0) for _, c := range f.Comments { commentpos := fset.Position(c.Pos()) // keep the comment if we are not on the "package <X>" line // or the comment after the package statement does not look like import comment if commentpos.Line != pkgpos.Line || !strings.HasPrefix(c.Text(), `import "`) { newcommentgroups = append(newcommentgroups, c) continue } parts := strings.Split(strings.Trim(c.Text(), "\n\r\t "), " ") oldimp, err := strconv.Unquote(parts[1]) if err != nil { log.Fatalf("Error unquoting import value [%v] - %s\n", parts[1], err) } if remove { // the prefix is not there = nothing to remove, keep the comment if !strings.HasPrefix(oldimp, prefix) { newcommentgroups = append(newcommentgroups, c) continue } } else { // the prefix is already in the import path, keep the comment if strings.HasPrefix(oldimp, prefix) { newcommentgroups = append(newcommentgroups, c) continue } } newimp := "" if remove { newimp = oldimp[len(prefix):] } else { newimp = prefix + oldimp } changed = true c2 := ast.Comment{Slash: c.Pos(), Text: `// import ` + strconv.Quote(newimp)} cg := ast.CommentGroup{List: []*ast.Comment{&c2}} newcommentgroups = append(newcommentgroups, &cg) } // change the AST only if there are pending mods if changed { f.Comments = newcommentgroups } return changed, nil }
func deleteCommentsInRange(file *ast.File, posStart, posEnd token.Pos) { idxs := make(map[int]bool) for i, cg := range file.Comments { if cg.Pos() >= posStart && cg.End() <= posEnd { idxs[i] = true } } if len(idxs) > 0 { newComments := make([]*ast.CommentGroup, len(file.Comments)-len(idxs)) i, j := 0, 0 for i < len(file.Comments) { if _, ok := idxs[i]; !ok { newComments[j] = file.Comments[i] j++ } i++ } file.Comments = newComments } }
// DeleteNamedImport deletes the import with the given name and path from the file f, if present.
//
// It removes the matching ImportSpec(s) from the declaration, drops the
// enclosing GenDecl when it becomes empty, un-parenthesizes a decl left
// with a single spec, and merges source lines so no blank hole remains.
// Matching entries are also removed from f.Imports and f.Comments.
func DeleteNamedImport(fset *token.FileSet, f *ast.File, name, path string) (deleted bool) {
	var delspecs []*ast.ImportSpec
	var delcomments []*ast.CommentGroup
	// Find the import nodes that import path, if any.
	for i := 0; i < len(f.Decls); i++ {
		decl := f.Decls[i]
		gen, ok := decl.(*ast.GenDecl)
		if !ok || gen.Tok != token.IMPORT {
			continue
		}
		for j := 0; j < len(gen.Specs); j++ {
			spec := gen.Specs[j]
			impspec := spec.(*ast.ImportSpec)
			// both the local name and the path must match
			if impspec.Name == nil && name != "" {
				continue
			}
			if impspec.Name != nil && impspec.Name.Name != name {
				continue
			}
			if importPath(impspec) != path {
				continue
			}
			// We found an import spec that imports path.
			// Delete it.
			delspecs = append(delspecs, impspec)
			deleted = true
			copy(gen.Specs[j:], gen.Specs[j+1:])
			gen.Specs = gen.Specs[:len(gen.Specs)-1]
			// If this was the last import spec in this decl,
			// delete the decl, too.
			if len(gen.Specs) == 0 {
				copy(f.Decls[i:], f.Decls[i+1:])
				f.Decls = f.Decls[:len(f.Decls)-1]
				i-- // compensate for the outer loop increment
				break
			} else if len(gen.Specs) == 1 {
				// one spec left: drop the parentheses and any comments
				// that belonged to the deleted spec
				if impspec.Doc != nil {
					delcomments = append(delcomments, impspec.Doc)
				}
				if impspec.Comment != nil {
					delcomments = append(delcomments, impspec.Comment)
				}
				for _, cg := range f.Comments {
					// Found comment on the same line as the import spec.
					if cg.End() < impspec.Pos() && fset.Position(cg.End()).Line == fset.Position(impspec.Pos()).Line {
						delcomments = append(delcomments, cg)
						break
					}
				}
				gen.Lparen = token.NoPos // drop parens
				spec := gen.Specs[0].(*ast.ImportSpec)
				if spec.Doc != nil {
					// Move the documentation above the import statement.
					gen.TokPos = spec.Doc.End() + 1
				}
				for _, cg := range f.Comments {
					// comment on the same line as the surviving spec:
					// merge lines until the token and the spec share one
					if cg.End() < spec.Pos() && fset.Position(cg.End()).Line == fset.Position(spec.Pos()).Line {
						for fset.Position(gen.TokPos).Line != fset.Position(spec.Pos()).Line {
							fset.File(gen.TokPos).MergeLine(fset.Position(gen.TokPos).Line)
						}
						break
					}
				}
			}
			if j > 0 {
				lastImpspec := gen.Specs[j-1].(*ast.ImportSpec)
				lastLine := fset.Position(lastImpspec.Path.ValuePos).Line
				line := fset.Position(impspec.Path.ValuePos).Line
				// We deleted an entry but now there may be
				// a blank line-sized hole where the import was.
				if line-lastLine > 1 {
					// There was a blank line immediately preceding the deleted import,
					// so there's no need to close the hole.
					// Do nothing.
				} else {
					// There was no blank line. Close the hole.
					fset.File(gen.Rparen).MergeLine(line)
				}
			}
			j-- // re-examine index j, which now holds the next spec
		}
	}
	// Delete imports from f.Imports.
	for i := 0; i < len(f.Imports); i++ {
		imp := f.Imports[i]
		for j, del := range delspecs {
			if imp == del {
				copy(f.Imports[i:], f.Imports[i+1:])
				f.Imports = f.Imports[:len(f.Imports)-1]
				copy(delspecs[j:], delspecs[j+1:])
				delspecs = delspecs[:len(delspecs)-1]
				i--
				break
			}
		}
	}
	// Delete comments from f.Comments.
	for i := 0; i < len(f.Comments); i++ {
		cg := f.Comments[i]
		for j, del := range delcomments {
			if cg == del {
				copy(f.Comments[i:], f.Comments[i+1:])
				f.Comments = f.Comments[:len(f.Comments)-1]
				copy(delcomments[j:], delcomments[j+1:])
				delcomments = delcomments[:len(delcomments)-1]
				i--
				break
			}
		}
	}
	// every deleted spec must also have been present in f.Imports;
	// anything left over indicates an inconsistent AST
	if len(delspecs) > 0 {
		panic(fmt.Sprintf("deleted specs from Decls but not Imports: %v", delspecs))
	}
	return
}
// instrumentFile rewrites the AST of f in place, inserting coverage
// registration variable declarations after the import declarations and,
// for a newly registered package, an import of the gocov package.
func (in *instrumenter) instrumentFile(f *ast.File, fset *token.FileSet, pkgpath string) error {
	pkgObj := in.instrumented[pkgpath]
	pkgCreated := false
	if pkgObj == nil {
		pkgCreated = true
		pkgObj = gocov.RegisterPackage(f.Name.Name) // FIXME(axw) use full package path
		in.instrumented[pkgpath] = pkgObj
	}
	// walk the AST, collecting functions/statements into state
	state := &state{fset, f, pkgObj, nil}
	ast.Walk(&funcVisitor{state}, f)
	// Count the number of import GenDecl's. They're always first.
	nImportDecls := 0
	for _, decl := range f.Decls {
		if decl, ok := decl.(*ast.GenDecl); !ok || decl.Tok != token.IMPORT {
			break
		}
		nImportDecls++
	}
	// Redirect imports of instrumented packages.
	in.redirectImports(f)
	// Insert variable declarations for registered objects.
	var vardecls []ast.Decl
	pkgvarname := fmt.Sprint(pkgObj)
	if pkgCreated {
		// FIXME(axw) use full package path
		value := makeCall("gocov.RegisterPackage", makeLit(f.Name.Name))
		vardecls = append(vardecls, makeVarDecl(pkgvarname, value))
	}
	for _, fn := range state.functions {
		fnvarname := fmt.Sprint(fn)
		value := makeCall(pkgvarname+".RegisterFunction",
			makeLit(fn.Name), makeLit(fn.File),
			makeLit(fn.Start), makeLit(fn.End))
		vardecls = append(vardecls, makeVarDecl(fnvarname, value))
		for _, stmt := range fn.Statements {
			varname := fmt.Sprint(stmt)
			value := makeCall(
				fnvarname+".RegisterStatement",
				makeLit(stmt.Start), makeLit(stmt.End))
			vardecls = append(vardecls, makeVarDecl(varname, value))
		}
	}
	// splice the generated declarations in right after the imports
	if len(f.Decls) > 0 {
		vardecls = append(vardecls, f.Decls[nImportDecls:]...)
		f.Decls = append(f.Decls[:nImportDecls], vardecls...)
	} else {
		f.Decls = vardecls
	}
	// Add a "gocov" import.
	if pkgCreated {
		gocovImportSpec := &ast.ImportSpec{
			Path: makeLit(gocovPackagePath).(*ast.BasicLit)}
		gocovImportGenDecl := &ast.GenDecl{
			Tok: token.IMPORT, Specs: []ast.Spec{gocovImportSpec}}
		// copy the tail first: the append into head below may
		// otherwise clobber the shared backing array
		tail := make([]ast.Decl, len(f.Decls)-nImportDecls)
		copy(tail, f.Decls[nImportDecls:])
		head := append(f.Decls[:nImportDecls], gocovImportGenDecl)
		f.Decls = append(head, tail...)
	}
	// Clear out all cached comments. This forces the AST printer to use
	// node comments instead, repositioning them correctly.
	f.Comments = nil
	return nil
}
// readFile adds the AST for a source file to the reader.
func (r *reader) readFile(src *ast.File) {
	// add package documentation
	if src.Doc != nil {
		r.readDoc(src.Doc)
		src.Doc = nil // doc consumed - remove from AST
	}
	// add all declarations
	for _, decl := range src.Decls {
		switch d := decl.(type) {
		case *ast.GenDecl:
			switch d.Tok {
			case token.IMPORT:
				// imports are handled individually
				for _, spec := range d.Specs {
					if s, ok := spec.(*ast.ImportSpec); ok {
						if import_, err := strconv.Unquote(s.Path.Value); err == nil {
							r.imports[import_] = 1
						}
					}
				}
			case token.CONST, token.VAR:
				// constants and variables are always handled as a group
				r.readValue(d)
			case token.TYPE:
				// types are handled individually
				if len(d.Specs) == 1 && !d.Lparen.IsValid() {
					// common case: single declaration w/o parentheses
					// (if a single declaration is parenthesized,
					// create a new fake declaration below, so that
					// go/doc type declarations always appear w/o
					// parentheses)
					if s, ok := d.Specs[0].(*ast.TypeSpec); ok {
						r.readType(d, s)
					}
					break
				}
				for _, spec := range d.Specs {
					if s, ok := spec.(*ast.TypeSpec); ok {
						// use an individual (possibly fake) declaration
						// for each type; this also ensures that each type
						// gets to (re-)use the declaration documentation
						// if there's none associated with the spec itself
						fake := &ast.GenDecl{
							Doc: d.Doc,
							// don't use the existing TokPos because it
							// will lead to the wrong selection range for
							// the fake declaration if there are more
							// than one type in the group (this affects
							// src/cmd/godoc/godoc.go's posLink_urlFunc)
							TokPos: s.Pos(),
							Tok:    token.TYPE,
							Specs:  []ast.Spec{s},
						}
						r.readType(fake, s)
					}
				}
			}
		case *ast.FuncDecl:
			r.readFunc(d)
		}
	}
	// collect BUG(...) comments; only the first comment of each group
	// is inspected for a BUG marker
	for _, c := range src.Comments {
		text := c.List[0].Text
		if m := bug_markers.FindStringIndex(text); m != nil {
			// found a BUG comment; maybe empty
			if btxt := text[m[1]:]; bug_content.MatchString(btxt) {
				// non-empty BUG comment; collect comment without BUG prefix
				list := append([]*ast.Comment(nil), c.List...) // make a copy
				list[0].Text = text[m[1]:]
				r.bugs = append(r.bugs, (&ast.CommentGroup{List: list}).Text())
			}
		}
	}
	src.Comments = nil // consumed unassociated comments - remove from AST
}
// instrumentFile rewrites the AST of f in place, inserting coverage
// registration declarations after the imports, then prints and reparses
// the file (twice) to reattach function doc comments at their new
// positions. On success *f is replaced with the reparsed AST.
func (in *instrumenter) instrumentFile(filename string, f *ast.File, fset *token.FileSet, pkgpath string) error {
	pkgObj := in.instrumented[pkgpath]
	pkgCreated := false
	if pkgObj == nil {
		pkgCreated = true
		pkgObj = gocov.RegisterPackage(f.Name.Name) // FIXME(axw) use full package path
		in.instrumented[pkgpath] = pkgObj
	}
	// walk the AST, collecting functions/statements into state
	state := &state{fset, f, pkgObj, nil}
	ast.Walk(&funcVisitor{state}, f)
	// Count the number of import GenDecl's. They're always first.
	nImportDecls := 0
	for _, decl := range f.Decls {
		if decl, ok := decl.(*ast.GenDecl); !ok || decl.Tok != token.IMPORT {
			break
		}
		nImportDecls++
	}
	// Redirect imports of instrumented packages.
	in.redirectImports(f)
	// Insert variable declarations for registered objects.
	var vardecls []ast.Decl
	pkgvarname := fmt.Sprint(pkgObj)
	if pkgCreated {
		// FIXME(axw) use full package path
		value := makeCall("gocov.RegisterPackage", makeLit(f.Name.Name))
		vardecls = append(vardecls, makeVarDecl(pkgvarname, value))
	}
	for _, fn := range state.functions {
		fnvarname := fmt.Sprint(fn)
		value := makeCall(pkgvarname+".RegisterFunction",
			makeLit(fn.Name), makeLit(fn.File),
			makeLit(fn.Start), makeLit(fn.End))
		vardecls = append(vardecls, makeVarDecl(fnvarname, value))
		for _, stmt := range fn.Statements {
			varname := fmt.Sprint(stmt)
			value := makeCall(
				fnvarname+".RegisterStatement",
				makeLit(stmt.Start), makeLit(stmt.End))
			vardecls = append(vardecls, makeVarDecl(varname, value))
		}
	}
	// splice the generated declarations in right after the imports
	if len(f.Decls) > 0 {
		vardecls = append(vardecls, f.Decls[nImportDecls:]...)
		f.Decls = append(f.Decls[:nImportDecls], vardecls...)
	} else {
		f.Decls = vardecls
	}
	// Add a "gocov" import.
	if pkgCreated {
		gocovImportSpec := &ast.ImportSpec{
			Path: makeLit(gocovPackagePath).(*ast.BasicLit)}
		gocovImportGenDecl := &ast.GenDecl{
			Tok: token.IMPORT, Specs: []ast.Spec{gocovImportSpec}}
		// copy the tail first: the append into head below may
		// otherwise clobber the shared backing array
		tail := make([]ast.Decl, len(f.Decls)-nImportDecls)
		copy(tail, f.Decls[nImportDecls:])
		head := append(f.Decls[:nImportDecls], gocovImportGenDecl)
		f.Decls = append(head, tail...)
	}
	// Record function comment associations
	var funcComments = make(map[string]*ast.FuncDecl, 0)
	for _, decl := range f.Decls {
		if n, ok := decl.(*ast.FuncDecl); ok && n.Doc != nil {
			funcComments[n.Name.String()] = n
		}
	}
	// Clear out all comments except for the comments attached to
	// existing import specs.
	if nImportDecls > 0 {
		end := f.Decls[nImportDecls-1].Pos()
		comments := make([]*ast.CommentGroup, 0, len(f.Comments))
		for _, group := range f.Comments {
			// f.Comments is position-ordered, so stop at the first
			// group past the last import decl
			if group.End() < end {
				comments = append(comments, group)
			} else {
				break
			}
		}
		f.Comments = comments
	} else {
		f.Comments = []*ast.CommentGroup{}
	}
	// Print and reparse to reinsert comments
	o := bytes.NewBuffer(make([]byte, 0, 512))
	printer.Fprint(o, fset, f)
	parserMode := parser.DeclarationErrors | parser.ParseComments
	reparsed, err := parser.ParseFile(fset, filename, o, parserMode)
	if err != nil {
		return errors.New(fmt.Sprint("second pass parse error:", err))
	}
	// walk the reparsed file, splicing the recorded doc comments back
	// into ric.content, then parse the final content
	ric := &reinsertComments{funcs: funcComments, fileSet: fset, content: o.Bytes()}
	ast.Walk(ric, reparsed)
	reparsed, err = parser.ParseFile(fset, filename, ric.content, parserMode)
	*f = *reparsed
	return err
}
// readFile adds the AST for a source file to the reader. // func (r *reader) readFile(src *ast.File) { // add package documentation if src.Doc != nil { r.readDoc(src.Doc) src.Doc = nil // doc consumed - remove from AST } // add all declarations for _, decl := range src.Decls { switch d := decl.(type) { case *ast.GenDecl: switch d.Tok { case token.IMPORT: // imports are handled individually for _, spec := range d.Specs { if s, ok := spec.(*ast.ImportSpec); ok { if import_, err := strconv.Unquote(s.Path.Value); err == nil { r.imports[import_] = 1 } } } case token.CONST, token.VAR: // constants and variables are always handled as a group r.readValue(d) case token.TYPE: // types are handled individually if len(d.Specs) == 1 && !d.Lparen.IsValid() { // common case: single declaration w/o parentheses // (if a single declaration is parenthesized, // create a new fake declaration below, so that // go/doc type declarations always appear w/o // parentheses) if s, ok := d.Specs[0].(*ast.TypeSpec); ok { r.readType(d, s) } break } for _, spec := range d.Specs { if s, ok := spec.(*ast.TypeSpec); ok { // use an individual (possibly fake) declaration // for each type; this also ensures that each type // gets to (re-)use the declaration documentation // if there's none associated with the spec itself fake := &ast.GenDecl{ Doc: d.Doc, // don't use the existing TokPos because it // will lead to the wrong selection range for // the fake declaration if there are more // than one type in the group (this affects // src/cmd/godoc/godoc.go's posLink_urlFunc) TokPos: s.Pos(), Tok: token.TYPE, Specs: []ast.Spec{s}, } r.readType(fake, s) } } } case *ast.FuncDecl: r.readFunc(d) } } // collect MARKER(...): annotations r.readNotes(src.Comments) src.Comments = nil // consumed unassociated comments - remove from AST }