func (vis *pointerTransformVisitor) makeNewNode(i *ast.Ident, depth int) ast.Expr { d := depth i.NamePos += token.Pos(d) switch { case depth > 0: res := &ast.UnaryExpr{i.NamePos - token.Pos(depth), token.AND, nil} e := res for depth > 1 { e.X = &ast.UnaryExpr{i.NamePos - token.Pos(depth), token.AND, nil} e = e.X.(*ast.UnaryExpr) depth-- } e.X = i return res case depth < 0: res := &ast.StarExpr{i.NamePos - token.Pos(depth), nil} e := res for depth < -1 { e.X = &ast.StarExpr{i.NamePos - token.Pos(depth), nil} e = e.X.(*ast.StarExpr) depth++ } e.X = i return res } return i }
func (p *parser) parsePackageClause(n *parse.Node) (token.Pos, *ast.Ident) { pac, name := n.Child(0), n.Child(1) return token.Pos(pac.Pos()), &ast.Ident{ NamePos: token.Pos(name.Pos()), Name: string(name.Value()), } }
func (p *parser) parseIndex(n *parse.Node) ast.Expr { index := n.Child(1) return &ast.IndexExpr{ X: p.parsePrimaryExpr(n.Child(0)), Lbrack: token.Pos(index.Child(0).Pos()), Index: p.parseExpr(index.Child(1)), Rbrack: token.Pos(index.Child(2).Pos()), } }
func (p *parser) parseCompositeLit(n *parse.Node) ast.Expr { litValue := n.Child(1) return &ast.CompositeLit{ Type: p.parseLiteralType(n.Child(0)), Lbrace: token.Pos(litValue.Child(0).Pos()), Elts: p.parseLiteralValue(litValue), Rbrace: token.Pos(litValue.LastChild().Pos()), } }
func (p *parser) parseBlock(n *parse.Node, scope *ast.Scope) *ast.BlockStmt { block := ast.BlockStmt{ Lbrace: token.Pos(n.Child(0).Pos()), Rbrace: token.Pos(n.LastChild().Pos()), } eachListItem(stmt, n.Child(1), func(item *parse.Node) { block.List = append(block.List, p.parseStmt(item)) }) return &block }
func GetLines(f *token.File) []int { lines := make([]int, 0, 20) l := -1 for i := f.Base(); i < f.Base()+f.Size(); i++ { if f.Line(token.Pos(i)) > l { l = f.Line(token.Pos(i)) lines = append(lines, f.Offset(token.Pos(i))) } } return lines }
func (p *parser) parseCallExpr(n *parse.Node) *ast.CallExpr { call := n.Child(1) callExpr := ast.CallExpr{ Fun: p.parsePrimaryExpr(n.Child(0)), Lparen: token.Pos(call.Child(0).Pos()), Rparen: token.Pos(call.LastChild().Pos()), } if call.ChildCount() > 2 { callExpr.Args, callExpr.Ellipsis = p.parseArgs(call.Child(1)) } return &callExpr }
func (p *parser) parseParams(n *parse.Node, scope *ast.Scope) *ast.FieldList { fieldList := ast.FieldList{ Opening: token.Pos(n.Child(0).Pos()), Closing: token.Pos(n.LastChild().Pos()), } if n.Child(1).Is(parameterList) { eachListItem(parameterDecl, n.Child(1), func(item *parse.Node) { fieldList.List = append(fieldList.List, p.parseParamDecl(item, scope)) }) } return &fieldList }
func (p *parser) parseVarDecl(n *parse.Node) ast.Decl { var specs []ast.Spec lParen, rParen := declListEach(n.Child(1), func(item *parse.Node) { specs = append(specs, p.parseVarSpec(item)) }) return &ast.GenDecl{ TokPos: token.Pos(n.Child(0).Pos()), Tok: token.Token(n.Child(0).ID()), Lparen: token.Pos(lParen), Specs: specs, Rparen: token.Pos(rParen), } }
func makeStmtList(block *vector.Vector) []ast.Stmt { stmtList := make([]ast.Stmt, len(*block)) for i, stmt := range *block { switch el := stmt.(type) { case ast.Stmt: stmtList[i] = el case []ast.Expr: // i == 0 stmtList[i] = &ast.ReturnStmt{el[0].Pos() - token.Pos(7), el} case ast.Expr: // i == 0 stmtList[i] = &ast.ReturnStmt{el.Pos() - token.Pos(7), []ast.Expr{el}} } } return stmtList }
func commentsBetween(f *ast.File, begin token.Pos, end token.Pos) string { var buf bytes.Buffer var ignore token.Pos ignoreUntilNextGroup := func(c *ast.Comment) { ignore = c.Slash + token.Pos(len(c.Text)) + 3 // allow for newlines, tabs, etc.. } for _, group := range f.Comments { for _, v := range group.List { if v.Slash > begin && v.Slash < end { if v.Slash < ignore { ignoreUntilNextGroup(v) } text := strings.TrimSpace(strings.TrimPrefix(v.Text, "//")) if text != "" { if buf.Len() == 0 && text[0] == '!' { // Marked as non-doc comment ignoreUntilNextGroup(v) continue } if buf.Len() > 0 { buf.WriteByte(' ') } buf.WriteString(text) } } } } return buf.String() }
// BeforeComments rewinds start past any blank lines or line comments
// and return the result. It does not rewind past leading blank lines:
// the returned position, if changed, is always the start of a non-blank line.
func (b *EditBuffer) BeforeComments(start token.Pos) token.Pos {
	// i is the buffer offset for start (b.tx maps token.Pos -> text offset).
	i := b.tx(start)
	// Back up to newline.
	for i > 0 && (b.text[i-1] == ' ' || b.text[i-1] == '\t') {
		i--
	}
	// start was not at the beginning of a line (ignoring indentation):
	// nothing to rewind past.
	if i > 0 && b.text[i-1] != '\n' {
		return start
	}
	// Go backward by lines. lastNonBlank tracks the start offset of the
	// most recent line that was a "//" comment rather than blank.
	lastNonBlank := i
	for i > 0 {
		// j ends up at the start of the line preceding offset i.
		j := i - 1
		for j > 0 && b.text[j-1] != '\n' {
			j--
		}
		trim := strings.TrimSpace(b.text[j:i])
		// Stop at the first line that is neither blank nor a line comment.
		if len(trim) > 0 && !strings.HasPrefix(trim, "//") {
			break
		}
		if len(trim) > 0 {
			lastNonBlank = j
		}
		i = j
	}
	// Translate the offset delta back into token.Pos space.
	return start - token.Pos(b.tx(start)-lastNonBlank)
}
// parseBasicLit converts a terminal parse node into an *ast.BasicLit,
// mapping the node's ID directly to the token kind.
func (p *parser) parseBasicLit(n *parse.Node) *ast.BasicLit {
	lit := ast.BasicLit{
		ValuePos: token.Pos(n.Pos()),
		Kind:     token.Token(n.ID()),
		Value:    string(n.Value()),
	}
	return &lit
}
func getImports(node *parser.CallNode) ast.Decl { if len(node.Args) < 2 { return nil } imports := node.Args[1:] specs := make([]ast.Spec, len(imports)) for i, imp := range imports { if t := imp.Type(); t == parser.NodeVector { specs[i] = makeImportSpecFromVector(imp.(*parser.VectorNode)) } else if t == parser.NodeString { importPath := imp.(*parser.StringNode).Value if importPath[1] == '/' { importPath = "\"" + standardLibrary + importPath[1:] } path := makeBasicLit(token.STRING, importPath) specs[i] = makeImportSpec(path, nil) } else { panic("invalid import!") } } decl := makeGeneralDecl(token.IMPORT, specs) decl.Lparen = token.Pos(1) // Need this so we can have multiple imports return decl }
func (p *parser) parseForStmt(n *parse.Node) (r ast.Stmt) { p.openScope() forPos := token.Pos(n.Child(0).Pos()) n = n.Child(1) if n.Is(block) { return &ast.ForStmt{ For: forPos, Body: p.parseBlock(n, p.topScope), } } option := n.Child(0).Child(0) body := p.parseBlock(n.Child(1), p.topScope) switch option.Rule() { case condition: fmt.Println(option) case forClause: fmt.Println(option) case rangeClause: forStmt := p.parseRangeStmt(option) forStmt.For, forStmt.Body = forPos, body r = forStmt } p.closeScope() return }
// makeCallExpr builds a call expression "name(arg1, arg2, ...)" — or
// "recv.name(...)" when recvSym is non-nil — from the symbols in params,
// and returns it together with the textual length of the generated call.
//
//   name           — function (or method) name to call.
//   params         — symbols that become the call arguments, in table order.
//   pointerSymbols — per-symbol count of '&' operators to wrap its argument in.
//   pos            — source position for the start of the expression.
//   recvSym        — optional receiver symbol; selects recv.name over name.
//   pack, filename — context passed to Symbol.ToAstExpr for each argument.
func makeCallExpr(name string, params *st.SymbolTable, pointerSymbols map[st.Symbol]int, pos token.Pos, recvSym *st.VariableSymbol, pack *st.Package, filename string) (*ast.CallExpr, int) {
	var Fun ast.Expr
	if recvSym != nil {
		x := ast.NewIdent(recvSym.Name())
		x.NamePos = pos
		Fun = &ast.SelectorExpr{x, ast.NewIdent(name)}
	} else {
		x := ast.NewIdent(name)
		x.NamePos = pos
		Fun = x
	}
	// l accumulates the printed length: function expression plus "()".
	l, _ := utils.GetNodeLength(Fun)
	l += 2
	args, i := make([]ast.Expr, params.Count()), 0
	params.ForEachNoLock(func(sym st.Symbol) {
		args[i] = sym.ToAstExpr(pack, filename)
		// Wrap the argument in the requested number of address-of operators.
		if depth, ok := pointerSymbols[sym]; ok {
			for depth > 0 {
				args[i] = &ast.UnaryExpr{token.NoPos, token.AND, args[i]}
				depth--
			}
		}
		// Each argument contributes its own length plus a ", " separator.
		ll, _ := utils.GetNodeLength(args[i])
		l += ll + 2
		i++
	})
	// The final argument has no trailing separator.
	l -= 2
	return &ast.CallExpr{Fun, token.NoPos, args, token.NoPos, pos + token.Pos(l-1)}, l
}
func (p *parser) parseStructType(n *parse.Node) ast.Expr { struct_ := n.Child(0) return &ast.StructType{ Struct: token.Pos(struct_.Pos()), // TODO: Fields } }
// statementBoundary finds the location in s that terminates the current basic // block in the source. func (f *File) statementBoundary(s ast.Stmt) token.Pos { // Control flow statements are easy. switch s := s.(type) { case *ast.BlockStmt: // Treat blocks like basic blocks to avoid overlapping counters. return s.Lbrace case *ast.IfStmt: return s.Body.Lbrace case *ast.ForStmt: return s.Body.Lbrace case *ast.LabeledStmt: return f.statementBoundary(s.Stmt) case *ast.RangeStmt: return s.Body.Lbrace case *ast.SwitchStmt: return s.Body.Lbrace case *ast.SelectStmt: return s.Body.Lbrace case *ast.TypeSwitchStmt: return s.Body.Lbrace } // If not a control flow statement, it is a declaration, expression, call, etc. and it may have a function literal. // If it does, that's tricky because we want to exclude the body of the function from this block. // Draw a line at the start of the body of the first function literal we find. // TODO: what if there's more than one? Probably doesn't matter much. var literal funcLitFinder ast.Walk(&literal, s) if literal.found() { return token.Pos(literal) } return s.End() }
// parseKeyValue builds an *ast.KeyValueExpr from a three-child node:
// key expression, ":", value.
func (p *parser) parseKeyValue(n *parse.Node) *ast.KeyValueExpr {
	key := p.parseExpr(n.Child(0))
	colon := token.Pos(n.Child(1).Pos())
	return &ast.KeyValueExpr{
		Key:   key,
		Colon: colon,
		Value: p.parseValue(n.Child(2)),
	}
}
func (w *PkgWalker) LookupImport(pkg *types.Package, pkgInfo *types.Info, cursor *FileCursor, is *ast.ImportSpec) { fpath, err := strconv.Unquote(is.Path.Value) if err != nil { return } fbase := fpath pos := strings.LastIndexAny(fpath, "./-\\") if pos != -1 { fbase = fpath[pos+1:] } fid := fpath + "." + fbase //kind := ObjPkgName //fmt.Println(kind, true) if typeFindDef { fmt.Println(w.fset.Position(is.Pos())) } if typeFindInfo { fmt.Println("package", fpath) } if !typeFindUse { return } var usages []int for id, obj := range pkgInfo.Uses { if obj != nil && obj.Id() == fid { //!= nil && cursorObj.Pos() == obj.Pos() { usages = append(usages, int(id.Pos())) } } (sort.IntSlice(usages)).Sort() for _, pos := range usages { fmt.Println(w.fset.Position(token.Pos(pos))) } }
func (m *FileBuilder) Build() *ast.File { file := &ast.File{ Name: ast.NewIdent(m.filePackageName), } if len(m.aliasToImport) > 0 { importDeclaration := &ast.GenDecl{ Tok: token.IMPORT, Lparen: token.Pos(1), Specs: []ast.Spec{}, } for alias, location := range m.aliasToImport { importDeclaration.Specs = append(importDeclaration.Specs, &ast.ImportSpec{ Name: ast.NewIdent(alias), Path: &ast.BasicLit{ Kind: token.STRING, Value: fmt.Sprintf("\"%s\"", location), }, }) } file.Decls = append(file.Decls, importDeclaration) } for _, builder := range m.generalDeclarationBuilders { file.Decls = append(file.Decls, builder.Build()) } return file }
func (p *parser) parseArgs(n *parse.Node) ([]ast.Expr, token.Pos) { n = n.Child(0) if n.Is(exprList) { return p.parseExprList(n), 0 } return p.parseExprList(n.Child(0)), token.Pos(n.Child(1).Pos()) }
// fixImportCheck rewrites the import-path check comment on the package
// clause of body, if present, so it refers to importPath. The comment must
// start with "import" and sit on the same line as the package clause; the
// possibly modified source is returned.
//
// Fix: a parse failure is returned to the caller instead of terminating the
// whole process via log.Fatal.
func fixImportCheck(body []byte, importPath string) ([]byte, error) {
	fset := token.NewFileSet()
	// todo: see if we can restrict the mode some more
	f, err := parser.ParseFile(fset, "", body, parser.ParseComments)
	if err != nil {
		return nil, err
	}

	// Find the first comment after the package keyword that looks like an
	// import comment.
	var after *ast.CommentGroup
	pos := token.Pos(len(body))
	for _, v := range f.Comments {
		text := strings.TrimSpace(v.Text())
		if v.Pos() > f.Package && v.Pos() < pos && strings.HasPrefix(text, "import") {
			pos = v.Pos()
			after = v
		}
	}

	// Only rewrite when the comment is on the package clause's own line
	// (no newline between the package keyword and the comment).
	if after != nil && bytes.IndexByte(body[f.Package:pos], '\n') == -1 {
		comment := fmt.Sprintf(`// import "%s"`, importPath)
		buf := new(bytes.Buffer)
		buf.Write(body[:after.Pos()-1]) // token.Pos is 1-based: offset = Pos-1
		buf.WriteString(comment)
		buf.Write(body[after.End()-1:])
		body = buf.Bytes()
	}
	return body, nil
}
// ModifyLine shifts the line-offset table of filename's token.File so that
// every line starting after Pos moves by mod bytes, reparsing the file first
// when mod > 0. It returns the (possibly new) fset and file plus the file's
// base offset as it was before reparsing (1 when mod <= 0).
//
// NOTE(review): the two Printf loops dump the before/after line tables and
// look like leftover debugging output — confirm before relying on stdout.
func ModifyLine(fset *token.FileSet, file *ast.File, filename string, identMap st.IdentifierMap, Pos token.Pos, mod int) (*token.FileSet, *ast.File, int) {
	baseMod := 1
	if mod > 0 {
		tokFile := GetFileFromFileSet(fset, filename)
		baseMod = tokFile.Base()
		// Rebase Pos into the 1-based coordinate space of the reparsed file.
		Pos -= token.Pos(tokFile.Base()) - 1
		fset, file = ReparseFile(file, filename, mod, identMap)
		tokFile = GetFileFromFileSet(fset, filename)
		lines := GetLines(tokFile)
		// Drop the trailing mod entries — presumably the reparse appended
		// mod extra line starts; TODO confirm against ReparseFile.
		tokFile.SetLines(lines[:len(lines)-(mod)])
	}
	tokFile := GetFileFromFileSet(fset, filename)
	lines := GetLines(tokFile)
	for i, offset := range lines {
		fmt.Printf("%d -> %s(%d)\n", i+1, fset.Position(tokFile.Pos(offset)), offset)
	}
	// Find the first line that starts after Pos ...
	var li int
	for i, l := range lines {
		if l > tokFile.Offset(Pos) {
			li = i
			break
		}
	}
	// ... and shift it and every following line start by mod bytes.
	for i := li; i < len(lines); i++ {
		lines[i] += mod
	}
	tokFile.SetLines(lines)
	for i, offset := range lines {
		fmt.Printf("%d -> %s(%d)\n", i+1, fset.Position(tokFile.Pos(offset)), offset)
	}
	return fset, file, baseMod
}
// parseSliceType builds an *ast.ArrayType with no length expression (i.e. a
// slice type) from a node of the form "[" "]" ElementType.
func (p *parser) parseSliceType(n *parse.Node) ast.Expr {
	elem := p.parseType(n.Child(2))
	return &ast.ArrayType{
		Lbrack: token.Pos(n.Child(0).Pos()),
		Len:    nil, // nil length marks a slice rather than an array
		Elt:    elem,
	}
}
// Write forwards p to the underlying writer while tracking the current
// line/column, and interprets embedded source-map markers: a '\b' byte
// followed by a 4-byte big-endian value used as a token.Pos. For each marker
// it invokes MappingCallback (if set) with the generated position and the
// decoded original position, then resumes after the 5 marker bytes.
func (f *SourceMapFilter) Write(p []byte) (n int, err error) {
	var n2 int
	for {
		// i is the index of the next marker byte in p; w is the plain text
		// before it (or all of p when no marker remains).
		i := bytes.IndexByte(p, '\b')
		w := p
		if i != -1 {
			w = p[:i]
		}

		n2, err = f.Writer.Write(w)
		n += n2

		// Advance the line/column counters over the chunk just written.
		// Note: this inner i shadows the outer marker index on purpose.
		for {
			i := bytes.IndexByte(w, '\n')
			if i == -1 {
				f.column += len(w)
				break
			}
			f.line++
			f.column = 0
			w = w[i+1:]
		}

		// Stop on write error or when no marker was found.
		if err != nil || i == -1 {
			return
		}
		if f.MappingCallback != nil {
			f.MappingCallback(f.line+1, f.column, f.fileSet.Position(token.Pos(binary.BigEndian.Uint32(p[i+1:i+5]))))
		}
		// Skip the marker (1 flag byte + 4 position bytes); the marker
		// bytes count toward the reported write length.
		p = p[i+5:]
		n += 5
	}
}
// TestDevendorizeImportPaths checks if vendored // import paths are devendorized correctly. func TestDevendorizeImportPaths(t *testing.T) { i := imports.New("github.com/ernesto-jimenez/gogen/imports") pkg := types.NewPackage("github.com/ernesto-jimenez/gogen/vendor/github.com/stretchr/testify/mock", "mock") named := types.NewNamed(types.NewTypeName(token.Pos(0), pkg, "", &types.Array{}), &types.Array{}, nil) i.AddImportsFrom(named) require.Equal(t, map[string]string{"github.com/stretchr/testify/mock": "mock"}, i.Imports()) }
// chanOps returns a slice of all the channel operations in the instruction. // Derived from cmd/guru/peers.go. func chanOps(instr ssa.Instruction) []chanOp { fn := instr.Parent() var ops []chanOp switch instr := instr.(type) { case *ssa.UnOp: if instr.Op == token.ARROW { // TODO(adonovan): don't assume <-ch; could be 'range ch'. ops = append(ops, chanOp{instr.X, "received", instr.Pos(), len("<-"), fn}) } case *ssa.Send: ops = append(ops, chanOp{instr.Chan, "sent", instr.Pos(), len("<-"), fn}) case *ssa.Select: for _, st := range instr.States { mode := "received" if st.Dir == types.SendOnly { mode = "sent" } ops = append(ops, chanOp{st.Chan, mode, st.Pos, len("<-"), fn}) } case ssa.CallInstruction: call := instr.Common() if blt, ok := call.Value.(*ssa.Builtin); ok && blt.Name() == "close" { pos := instr.Common().Pos() ops = append(ops, chanOp{call.Args[0], "closed", pos - token.Pos(len("close")), len("close("), fn}) } } return ops }
// hasFuncLiteral reports the existence and position of the first func literal // in the node, if any. If a func literal appears, it usually marks the termination // of a basic block because the function body is itself a block. // Therefore we draw a line at the start of the body of the first function literal we find. // TODO: what if there's more than one? Probably doesn't matter much. func hasFuncLiteral(n ast.Node) (bool, token.Pos) { if n == nil { return false, 0 } var literal funcLitFinder ast.Walk(&literal, n) return literal.found(), token.Pos(literal) }
func offsetLine(fset *token.FileSet, af *ast.File, offset int) (line int) { defer func() { if err := recover(); err != nil { line = 0 } }() return fset.File(af.Pos()).Position(token.Pos(offset)).Line }