// TestLineComments, using a simple test case, checks that consecutive line
// comments are properly terminated with a newline even if the AST position
// information is incorrect.
//
func TestLineComments(t *testing.T) {
	const src = `// comment 1
// comment 2
// comment 3
package main
`
	fset := token.NewFileSet()
	ast1, err1 := parser.ParseFile(fset, "", src, parser.ParseComments)
	if err1 != nil {
		panic(err1)
	}

	var buf bytes.Buffer
	fset = token.NewFileSet() // use the wrong file set
	Fprint(&buf, fset, ast1)

	nlines := 0
	for _, ch := range buf.Bytes() {
		if ch == '\n' {
			nlines++
		}
	}

	const expected = 3
	if nlines < expected {
		t.Errorf("got %d, expected %d\n", nlines, expected)
	}
}
func NewContext() *Context {
	ctxt := &Context{
		pkgCache:     make(map[string]*ast.Package),
		FileSet:      token.NewFileSet(),
		ChangedFiles: make(map[string]*ast.File),
	}
	ctxt.importer = ctxt.importerFunc()
	return ctxt
}
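// exampleContextSetup is an illustrative sketch, not part of the original
// source: it shows the intended flow around NewContext — create a Context,
// obtain an *ast.File whose positions are recorded against ctxt.FileSet, and
// register it in ChangedFiles so later rewriting passes pick it up. The parse
// callback and the "example.go" name are assumptions for illustration only.
func exampleContextSetup(parse func(fset *token.FileSet, filename string) (*ast.File, error)) (*Context, error) {
	ctxt := NewContext()
	f, err := parse(ctxt.FileSet, "example.go")
	if err != nil {
		return nil, err
	}
	ctxt.ChangedFiles["example.go"] = f
	return ctxt, nil
}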
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package parser

import (
	"code.google.com/p/rog-go/exp/go/token"
	"os"
	"testing"
)

var fset = token.NewFileSet()

var illegalInputs = []interface{}{
	nil,
	3.14,
	[]byte(nil),
	"foo!",
	`package p; func f() { if /* should have condition */ {} };`,
	`package p; func f() { if ; /* should have condition */ {} };`,
	`package p; func f() { if f(); /* should have condition */ {} };`,
}

func TestParseIllegalInputs(t *testing.T) {
	for _, src := range illegalInputs {
		_, err := ParseFile(fset, "", src, 0, nil)
		if err == nil {
			t.Errorf("ParseFile(%v) should have failed", src)
		}
	}
var falseIdent = predecl("false")
var trueIdent = predecl("true")
var iotaIdent = predecl("iota")
var boolIdent = predecl("bool")
var intIdent = predecl("int")
var floatIdent = predecl("float")
var stringIdent = predecl("string")

func predecl(name string) *ast.Ident {
	return &ast.Ident{Name: name, Obj: parser.Universe.Lookup(name)}
}

type Importer func(path string) *ast.Package

// When DefaultImporter is called, it adds any files to FileSet.
var FileSet = token.NewFileSet()

// GoPath is used by DefaultImporter to find packages.
var GoPath = []string{filepath.Join(os.Getenv("GOROOT"), "src", "pkg")}

// DefaultImporter looks for the package; if it finds it,
// it parses and returns it. If no package was found, it returns nil.
func DefaultImporter(path string) *ast.Package {
	bpkg, err := build.Default.Import(path, "", 0)
	if err != nil {
		return nil
	}
	pkgs, err := parser.ParseDir(FileSet, bpkg.Dir, isGoFile, 0)
	if err != nil {
		if Debug {
			switch err := err.(type) {
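// Usage sketch (illustrative, not from the original source): DefaultImporter
// satisfies the Importer type above and returns nil when the package cannot
// be found or parsed, so callers should check for nil before using the
// result. The "fmt" import path is an arbitrary example.
//
//	var imp Importer = DefaultImporter
//	if pkg := imp("fmt"); pkg != nil {
//		// pkg.Files holds the parsed files, positioned against FileSet.
//	}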
	if err != nil {
		panic("cannot unquote")
	}
	return v
}

type astVisitor func(n ast.Node) bool

func (f astVisitor) Visit(n ast.Node) ast.Visitor {
	if f(n) {
		return f
	}
	return nil
}

var emptyFileSet = token.NewFileSet()

func pretty(n ast.Node) string {
	var b bytes.Buffer
	printer.Fprint(&b, emptyFileSet, n)
	return b.String()
}

var printConfig = &printer.Config{
	Mode:     printer.TabIndent | printer.UseSpaces,
	Tabwidth: 8,
}

func (ctxt *Context) gofmtFile(f *ast.File) ([]byte, error) {
	var buf bytes.Buffer
	_, err := printConfig.Fprint(&buf, ctxt.FileSet, f)
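// Usage sketch (illustrative, not from the original source): pretty renders
// any AST fragment, including synthesized nodes that carry no real position
// information, because it prints against an empty file set. The expression
// built below is an arbitrary example and renders as "pkg.Name".
//
//	sel := &ast.SelectorExpr{
//		X:   &ast.Ident{Name: "pkg"},
//		Sel: &ast.Ident{Name: "Name"},
//	}
//	s := pretty(sel)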
import ( "fmt" "os" //"go/token" "code.google.com/p/rog-go/exp/go/token" //"go/ast" "code.google.com/p/rog-go/exp/go/ast" "code.google.com/p/rog-go/exp/go/parser" "code.google.com/p/rog-go/exp/go/types" "path/filepath" "strings" //"go/parser" ) var ( AllSourceTops = token.NewFileSet() AllSources = token.NewFileSet() ImportedBy = make(map[string][]string) PackageTops = make(map[string]*ast.Package) Packages = make(map[string]*ast.Package) ) func LocalImporter(path string) (pkg *ast.Package) { path = filepath.Clean(path) //fmt.Printf("Importing %s\n", path) var ok bool var pkgtop *ast.Package if pkgtop, ok = PackageTops[path]; !ok { pkg = types.DefaultImporter(path) return
func (this *SingleMover) RemoveUpdatePkg() (err error) {
	for fpath, file := range this.pkg.Files {
		urw := ReferenceWalker{
			UnexportedObjs:       this.unexportedObjs,
			SkipNodes:            this.moveNodes,
			MoveObjs:             this.moveObjs,
			SkipNodeParents:      make(map[ast.Node]ast.Node),
			GoodReferenceParents: make(map[ast.Node]ast.Node),
			BadReferences:        new([]ast.Node),
		}
		ast.Walk(&urw, file)

		if len(*urw.BadReferences) != 0 {
			fmt.Printf("Cannot move some objects:\n")
			for node := range this.moveNodes {
				printer.Fprint(os.Stdout, token.NewFileSet(), node)
				fmt.Println()
			}
			fmt.Println("Unexported objects referenced:")
			for _, node := range *urw.BadReferences {
				position := AllSources.Position(node.Pos())
				fmt.Printf("At %v ", position)
				printer.Fprint(os.Stdout, token.NewFileSet(), node)
				fmt.Println()
			}
			return MakeErr("Objects to be moved in '%s' contain unexported objects referenced elsewhere in the package", this.oldpath)
		}

		removedStuff := false

		// remove the old definitions
		for node, parent := range urw.SkipNodeParents {
			removedStuff = true
			//fmt.Printf("%T %v\n", parent, parent)
			switch pn := parent.(type) {
			case *ast.File:
				for i, n := range pn.Decls {
					if n == node {
						if len(pn.Decls) > 1 {
							pn.Decls[i], pn.Decls[len(pn.Decls)-1] = pn.Decls[len(pn.Decls)-1], pn.Decls[i]
						}
						pn.Decls = pn.Decls[:len(pn.Decls)-1]
						break
					}
				}
			case *ast.GenDecl:
				for i, n := range pn.Specs {
					if n == node {
						if pn.Lparen == 0 {
							pn.Lparen = n.Pos()
							pn.Rparen = n.End()
						}
						if len(pn.Specs) > 1 {
							pn.Specs[i], pn.Specs[len(pn.Specs)-1] = pn.Specs[len(pn.Specs)-1], pn.Specs[i]
						}
						pn.Specs = pn.Specs[:len(pn.Specs)-1]
						break
					}
				}
			default:
				return MakeErr("Unanticipated parent type: %T", pn)
			}
		}

		// strip out imports that are unnecessary because things are no longer here
		if removedStuff {
			for _, file := range this.pkg.Files {
				iuc := make(ImportUseCollector)
				ast.Walk(iuc, file)
				ast.Walk(ImportFilterWalker(iuc), file)
			}
		}

		// if this file references things that are moving, import the new package
		if len(urw.GoodReferenceParents) != 0 {
			if this.referenceBack {
				return MakeErr("Moving objects from %s would create a cycle", this.oldpath)
			}

			newpkgname := GetUniqueIdent([]*ast.File{file}, this.pkg.Name)

			// construct the import
			is := &ast.ImportSpec{
				Name: &ast.Ident{Name: newpkgname},
				Path: &ast.BasicLit{
					Kind:  token.STRING,
					Value: QuotePath(this.newpath),
				},
			}
			gd := &ast.GenDecl{
				Tok:   token.IMPORT,
				Specs: []ast.Spec{is},
			}

			// stick it in there
			file.Decls = append([]ast.Decl{gd}, file.Decls...)

			// change the old references to talk about the new package, using our unique name
			for node, parent := range urw.GoodReferenceParents {
				getSel := func(idn *ast.Ident) *ast.SelectorExpr {
					return &ast.SelectorExpr{
						X: &ast.Ident{
							Name:    newpkgname,
							NamePos: idn.NamePos,
						},
						Sel: idn,
					}
				}
				switch p := parent.(type) {
				case *ast.CallExpr:
					if idn, ok := node.(*ast.Ident); ok {
						p.Fun = getSel(idn)
					} else {
						return MakeErr("CallExpr w/ unexpected type %T\n", node)
					}
				case *ast.AssignStmt:
					for i, x := range p.Lhs {
						if x == node {
							if idn, ok := x.(*ast.Ident); ok {
								p.Lhs[i] = getSel(idn)
							}
						}
					}
					for i, x := range p.Rhs {
						if x == node {
							if idn, ok := x.(*ast.Ident); ok {
								p.Rhs[i] = getSel(idn)
							}
						}
					}
				case *ast.StarExpr:
					if p.X == node {
						if idn, ok := p.X.(*ast.Ident); ok {
							p.X = getSel(idn)
						}
					}
				default:
					return MakeErr("Unexpected local parent %T\n", parent)
				}
			}
		}

		if removedStuff {
			err = RewriteSource(fpath, file)
			if err != nil {
				return
			}
		}
	}
	return
}
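// The sketch below is a minimal, standalone illustration (not part of the
// original source) of the rewrite RemoveUpdatePkg performs on call sites: a
// bare reference to a moved function is requalified as newpkg.F by swapping
// the *ast.Ident for an *ast.SelectorExpr. It uses the standard library go/*
// packages rather than the forked ones above, and the names demo, F, and
// newpkg are assumptions; a real move would also add the newpkg import, as
// RemoveUpdatePkg does.

package main

import (
	"go/ast"
	"go/parser"
	"go/printer"
	"go/token"
	"os"
)

func main() {
	const src = `package demo

func F() {}

func caller() { F() }
`
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "demo.go", src, 0)
	if err != nil {
		panic(err)
	}
	// Requalify every call to F as newpkg.F, mirroring the *ast.CallExpr case
	// in RemoveUpdatePkg.
	ast.Inspect(f, func(n ast.Node) bool {
		call, ok := n.(*ast.CallExpr)
		if !ok {
			return true
		}
		if idn, ok := call.Fun.(*ast.Ident); ok && idn.Name == "F" {
			call.Fun = &ast.SelectorExpr{
				X:   &ast.Ident{Name: "newpkg", NamePos: idn.NamePos},
				Sel: idn,
			}
		}
		return true
	})
	printer.Fprint(os.Stdout, fset, f) // the call site now prints as newpkg.F()
}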
func (this *SingleMover) CreateNewSource() (err error) {
	liw := make(ListImportWalker)
	for n := range this.moveNodes {
		ast.Walk(liw, n)
	}
	finalImports := make(map[*ast.ImportSpec]bool)
	for obj, is := range liw {
		if _, ok := this.moveObjs[obj]; !ok {
			finalImports[is] = true
		}
	}

	newfile := &ast.File{
		Name: &ast.Ident{Name: this.pkg.Name},
	}

	if len(finalImports) != 0 {
		for is := range finalImports {
			gdl := &ast.GenDecl{
				Tok:   token.IMPORT,
				Specs: []ast.Spec{is},
			}
			newfile.Decls = append(newfile.Decls, gdl)
		}
	}

	var sortedNodes NodeSorter
	for mn := range this.moveNodes {
		sortedNodes = append(sortedNodes, mn)
	}
	sort.Sort(sortedNodes)

	for _, mn := range sortedNodes {
		switch m := mn.(type) {
		case ast.Decl:
			newfile.Decls = append(newfile.Decls, m)
		case *ast.TypeSpec:
			gdl := &ast.GenDecl{
				Tok:   token.TYPE,
				Specs: []ast.Spec{m},
			}
			newfile.Decls = append(newfile.Decls, gdl)
		}
	}

	npf := ExprParentFinder{
		ExprParents: make(map[ast.Expr]ast.Node),
	}
	for n := range this.moveNodes {
		ast.Walk(&npf, n)
	}

	var pkgfiles []*ast.File
	for _, pkgfile := range this.pkg.Files {
		pkgfiles = append(pkgfiles, pkgfile)
	}
	oldPkgNewName := GetUniqueIdent(pkgfiles, this.pkg.Name)

	needOldImport := false
	this.referenceBack = false

	for expr, parent := range npf.ExprParents {
		obj, _ := types.ExprType(expr, LocalImporter)
		if _, ok := this.moveObjs[obj]; ok {
			continue
		}
		if _, ok := this.allObjs[obj]; !ok {
			continue
		}
		if !unicode.IsUpper([]rune(obj.Name)[0] /*utf8.NewString(obj.Name).At(0)*/) {
			position := AllSources.Position(expr.Pos())
			fmt.Printf("At %v ", position)
			printer.Fprint(os.Stdout, token.NewFileSet(), expr)
			fmt.Println()
			err = MakeErr("Can't move code that references unexported objects")
			return
		}

		needOldImport = true
		this.referenceBack = true

		getSel := func(idn *ast.Ident) *ast.SelectorExpr {
			return &ast.SelectorExpr{
				X: &ast.Ident{
					Name:    oldPkgNewName,
					NamePos: idn.NamePos,
				},
				Sel: idn,
			}
		}
		switch p := parent.(type) {
		case *ast.CallExpr:
			if idn, ok := expr.(*ast.Ident); ok {
				p.Fun = getSel(idn)
			} else {
				err = MakeErr("CallExpr w/ unexpected type %T\n", expr)
				return
			}
		case *ast.AssignStmt:
			for i, x := range p.Lhs {
				if x == expr {
					if idn, ok := x.(*ast.Ident); ok {
						p.Lhs[i] = getSel(idn)
					}
				}
			}
			for i, x := range p.Rhs {
				if x == expr {
					if idn, ok := x.(*ast.Ident); ok {
						p.Rhs[i] = getSel(idn)
					}
				}
			}
		default:
			err = MakeErr("Unexpected parent %T\n", parent)
			return
		}
	}

	if needOldImport {
		is := &ast.ImportSpec{
			Name: &ast.Ident{Name: oldPkgNewName},
			Path: &ast.BasicLit{Value: QuotePath(this.oldpath)},
		}
		gdl := &ast.GenDecl{
			Tok:   token.IMPORT,
			Specs: []ast.Spec{is},
		}
		newfile.Decls = append([]ast.Decl{gdl}, newfile.Decls...)
	}

	err = os.MkdirAll(this.newpath, 0755)
	if err != nil {
		return
	}

	newSourcePath := filepath.Join(this.newpath, this.pkg.Name+".go")

	containedComments := make(CommentCollector)
	for node := range this.moveNodes {
		ast.Walk(containedComments, node)
	}

	for _, file := range this.pkg.Files {
		for i := len(file.Comments) - 1; i >= 0; i-- {
			cg := file.Comments[i]
			add := func() {
				newfile.Comments = append([]*ast.CommentGroup{cg}, newfile.Comments...)
				file.Comments[i] = file.Comments[len(file.Comments)-1]
				file.Comments = file.Comments[:len(file.Comments)-1]
			}
			if containedComments[cg] {
				add()
			} else {
				for node := range this.moveNodes {
					if node.Pos() <= cg.Pos() && node.End() >= cg.End() {
						add()
						break
					}
				}
			}
		}
	}

	err = NewSource(newSourcePath, newfile)
	if err != nil {
		return
	}
	return
}
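// The sketch below is a standalone illustration (not part of the original
// source) of the kind of file CreateNewSource assembles: an *ast.File built
// entirely in memory from a package name and synthesized declarations, then
// rendered with the printer. It uses the standard library go/* packages
// rather than the forked ones, and the newpkg name and "fmt" import are
// assumptions; the original hands its assembled file to NewSource to write
// it to disk instead of printing to stdout.

package main

import (
	"go/ast"
	"go/printer"
	"go/token"
	"os"
)

func main() {
	newfile := &ast.File{
		Name: &ast.Ident{Name: "newpkg"},
		Decls: []ast.Decl{
			&ast.GenDecl{
				Tok: token.IMPORT,
				Specs: []ast.Spec{
					&ast.ImportSpec{
						Path: &ast.BasicLit{Kind: token.STRING, Value: "\"fmt\""},
					},
				},
			},
		},
	}
	// An empty file set suffices because none of the synthesized nodes carry
	// real position information.
	printer.Fprint(os.Stdout, token.NewFileSet(), newfile)
}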