// TestLineComments, using a simple test case, checks that consecutive line
// comments are properly terminated with a newline even if the AST position
// information is incorrect.
//
func TestLineComments(t *testing.T) {
	const src = `// comment 1
	// comment 2
	// comment 3
	package main
	`

	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "", src, parser.ParseComments)
	if err != nil {
		panic(err) // error in test
	}

	var buf bytes.Buffer
	fset = token.NewFileSet() // use the wrong file set
	Fprint(&buf, fset, f)

	nlines := 0
	for _, ch := range buf.Bytes() {
		if ch == '\n' {
			nlines++
		}
	}

	const expected = 3
	if nlines < expected {
		t.Errorf("got %d, expected %d\n", nlines, expected)
		t.Errorf("result:\n%s", buf.Bytes())
	}
}
// For SGo: func(whence string, files ...NamedFile) ([][]byte, []error)
func TranslateFilesFrom(whence string, files ...NamedFile) ([][]byte, []error) {
	var errs []error
	fset := token.NewFileSet()
	cwd, err := os.Getwd()
	if err != nil {
		return nil, []error{err}
	}
	var parsed []*ast.File
	var srcs [][]byte
	for _, named := range files {
		src, err := ioutil.ReadAll(named.File)
		if err != nil {
			errs = append(errs, err)
			continue
		}
		relPath, err := filepath.Rel(cwd, named.Path)
		if err != nil {
			relPath = named.Path
		}
		file, err := parser.ParseFile(fset, relPath, src, parser.ParseComments)
		if err != nil {
			errs = append(errs, err)
			continue
		}
		srcs = append(srcs, src)
		parsed = append(parsed, file)
	}

	// Early typecheck, because fileWithAnnotationComments adds lines and
	// then type errors are reported in the wrong line.
	_, typeErrs := typecheck("translate", fset, whence, parsed...)
	if len(typeErrs) > 0 {
		errs = append(errs, makeErrList(fset, typeErrs))
		return nil, errs
	}

	oldFset := fset
	fset = token.NewFileSet() // fileWithAnnotationComments will reparse.
	for i, p := range parsed {
		var err error
		srcs[i], parsed[i], err = fileWithAnnotationComments(p, fset, oldFset, srcs[i])
		if err != nil {
			errs = append(errs, err)
		}
	}
	if len(errs) > 0 {
		return nil, errs
	}

	info, typeErrs := typecheck("translate", fset, whence, parsed...)
	if len(typeErrs) > 0 {
		errs = append(errs, makeErrList(fset, typeErrs))
		return nil, errs
	}

	return translate(info, srcs, parsed, fset), errs
}
func ExampleConvertAST() {
	fset := token.NewFileSet()
	a, _ := parser.ParseFile(fset, "example.sgo", `
package example

type LinkedList struct {
	Head int
	// For SGo: ?*LinkedList
	Tail *LinkedList
}
`, parser.ParseComments)
	info := &types.Info{
		Defs: map[*ast.Ident]types.Object{},
		Uses: map[*ast.Ident]types.Object{},
	}
	cfg := &types.Config{}
	cfg.Check("", fset, []*ast.File{a}, info)

	importer.ConvertAST(a, info, nil)

	printer.Fprint(os.Stdout, fset, a)
	// Output:
	// package example
	//
	// type LinkedList struct {
	//	Head int
	//	// For SGo: ?*LinkedList
	//	Tail ?*LinkedList
	// }
}
// TestIncompleteSelection ensures that an incomplete selector
// expression is parsed as a (blank) *ast.SelectorExpr, not a
// *ast.BadExpr.
func TestIncompleteSelection(t *testing.T) {
	for _, src := range []string{
		"package p; var _ = fmt.",             // at EOF
		"package p; var _ = fmt.\ntype X int", // not at EOF
	} {
		fset := token.NewFileSet()
		f, err := ParseFile(fset, "", src, 0)
		if err == nil {
			t.Errorf("ParseFile(%s) succeeded unexpectedly", src)
			continue
		}
		const wantErr = "expected selector or type assertion"
		if !strings.Contains(err.Error(), wantErr) {
			t.Errorf("ParseFile returned wrong error %q, want %q", err, wantErr)
		}

		var sel *ast.SelectorExpr
		ast.Inspect(f, func(n ast.Node) bool {
			if n, ok := n.(*ast.SelectorExpr); ok {
				sel = n
			}
			return true
		})
		if sel == nil {
			t.Error("found no *ast.SelectorExpr")
			continue
		}
		const wantSel = "&{fmt _}"
		if fmt.Sprint(sel) != wantSel {
			t.Errorf("found selector %s, want %s", sel, wantSel)
			continue
		}
	}
}
func loadExportsGoPath(dir string) map[string]bool {
	exports := make(map[string]bool)
	buildPkg, err := build.ImportDir(dir, 0)
	if err != nil {
		if strings.Contains(err.Error(), "no buildable Go source files in") {
			return nil
		}
		fmt.Fprintf(os.Stderr, "could not import %q: %v\n", dir, err)
		return nil
	}
	fset := token.NewFileSet()
	for _, files := range [...][]string{buildPkg.GoFiles, buildPkg.CgoFiles} {
		for _, file := range files {
			f, err := parser.ParseFile(fset, filepath.Join(dir, file), nil, 0)
			if err != nil {
				fmt.Fprintf(os.Stderr, "could not parse %q: %v\n", file, err)
				continue
			}
			for name := range f.Scope.Objects {
				if ast.IsExported(name) {
					exports[name] = true
				}
			}
		}
	}
	return exports
}
func TestIssue8518(t *testing.T) {
	fset := token.NewFileSet()
	imports := make(testImporter)
	conf := Config{
		Error:    func(err error) { t.Log(err) }, // don't exit after first error
		Importer: imports,
	}
	makePkg := func(path, src string) {
		f, err := parser.ParseFile(fset, path, src, 0)
		if err != nil {
			t.Fatal(err)
		}
		pkg, _ := conf.Check(path, fset, []*ast.File{f}, nil) // errors logged via conf.Error
		imports[path] = pkg
	}

	const libSrc = `
package a
import "missing"
const C1 = foo
const C2 = missing.C
`

	const mainSrc = `
package main
import "a"
var _ = a.C1
var _ = a.C2
`

	makePkg("a", libSrc)
	makePkg("main", mainSrc) // don't crash when type-checking this package
}
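// The testImporter type used above is a test helper, not shown in this
// listing. A minimal sketch of such a helper (an assumption, not the
// original implementation): a map from import paths to already-checked
// packages that satisfies the Importer interface.
type testImporter map[string]*Package

// Import returns a previously checked package, or an error if it is unknown.
func (m testImporter) Import(path string) (*Package, error) {
	if pkg := m[path]; pkg != nil {
		return pkg, nil
	}
	return nil, fmt.Errorf("package %q not found", path)
}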
func TestFilterDuplicates(t *testing.T) {
	// parse input
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "", input, 0)
	if err != nil {
		t.Fatal(err)
	}

	// create package
	files := map[string]*ast.File{"": file}
	pkg, err := ast.NewPackage(fset, files, nil, nil)
	if err != nil {
		t.Fatal(err)
	}

	// filter
	merged := ast.MergePackageFiles(pkg, ast.FilterFuncDuplicates)

	// pretty-print
	var buf bytes.Buffer
	if err := format.Node(&buf, fset, merged); err != nil {
		t.Fatal(err)
	}
	output := buf.String()

	if output != golden {
		t.Errorf("incorrect output:\n%s", output)
	}
}
func TestExamples(t *testing.T) {
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "test.go", strings.NewReader(exampleTestFile), parser.ParseComments)
	if err != nil {
		t.Fatal(err)
	}
	for i, e := range doc.Examples(file) {
		c := exampleTestCases[i]
		if e.Name != c.Name {
			t.Errorf("got Name == %q, want %q", e.Name, c.Name)
		}
		if w := c.Play; w != "" {
			var g string // hah
			if e.Play == nil {
				g = "<nil>"
			} else {
				var buf bytes.Buffer
				if err := format.Node(&buf, fset, e.Play); err != nil {
					t.Fatal(err)
				}
				g = buf.String()
			}
			if g != w {
				t.Errorf("%s: got Play == %q, want %q", c.Name, g, w)
			}
		}
		if g, w := e.Output, c.Output; g != w {
			t.Errorf("%s: got Output == %q, want %q", c.Name, g, w)
		}
	}
}
func TestUnresolved(t *testing.T) {
	f, err := ParseFile(token.NewFileSet(), "", `
package p
//
func f1a(int)
func f2a(byte, int, float)
func f3a(a, b int, c float)
func f4a(...complex)
func f5a(a s1a, b ...complex)
//
func f1b(*int)
func f2b([]byte, (int), *float)
func f3b(a, b *int, c []float)
func f4b(...*complex)
func f5b(a s1a, b ...[]complex)
//
type s1a struct { int }
type s2a struct { byte; int; s1a }
type s3a struct { a, b int; c float }
//
type s1b struct { *int }
type s2b struct { byte; int; *float }
type s3b struct { a, b *s3b; c []float }
`, 0)
	if err != nil {
		t.Fatal(err)
	}

	want := "int " + // f1a
		"byte int float " + // f2a
		"int float " + // f3a
		"complex " + // f4a
		"complex " + // f5a
		//
		"int " + // f1b
		"byte int float " + // f2b
		"int float " + // f3b
		"complex " + // f4b
		"complex " + // f5b
		//
		"int " + // s1a
		"byte int " + // s2a
		"int float " + // s3a
		//
		"int " + // s1b
		"byte int float " + // s2b
		"float " // s3b

	// collect unresolved identifiers
	var buf bytes.Buffer
	for _, u := range f.Unresolved {
		buf.WriteString(u.Name)
		buf.WriteByte(' ')
	}
	got := buf.String()

	if got != want {
		t.Errorf("\ngot: %s\nwant: %s", got, want)
	}
}
func TestLeadAndLineComments(t *testing.T) {
	f, err := ParseFile(token.NewFileSet(), "", `
package p
type T struct {
	/* F1 lead comment */
	//
	F1 int /* F1 */ // line comment
	// F2 lead
	// comment
	F2 int // F2 line comment
	// f3 lead comment
	f3 int // f3 line comment
}
`, ParseComments)
	if err != nil {
		t.Fatal(err)
	}
	checkFieldComments(t, f, "T.F1", "/* F1 lead comment *///", "/* F1 */// line comment")
	checkFieldComments(t, f, "T.F2", "// F2 lead// comment", "// F2 line comment")
	checkFieldComments(t, f, "T.f3", "// f3 lead comment", "// f3 line comment")
	ast.FileExports(f)
	checkFieldComments(t, f, "T.F1", "/* F1 lead comment *///", "/* F1 */// line comment")
	checkFieldComments(t, f, "T.F2", "// F2 lead// comment", "// F2 line comment")
	if getField(f, "T.f3") != nil {
		t.Error("not expected to find T.f3")
	}
}
func ExampleScanner_Scan() {
	// src is the input that we want to tokenize.
	src := []byte("cos(x) + 1i*sin(x) // Euler")

	// Initialize the scanner.
	var s scanner.Scanner
	fset := token.NewFileSet()                      // positions are relative to fset
	file := fset.AddFile("", fset.Base(), len(src)) // register input "file"
	s.Init(file, src, nil /* no error handler */, scanner.ScanComments)

	// Repeated calls to Scan yield the token sequence found in the input.
	for {
		pos, tok, lit := s.Scan()
		if tok == token.EOF {
			break
		}
		fmt.Printf("%s\t%s\t%q\n", fset.Position(pos), tok, lit)
	}

	// output:
	// 1:1	IDENT	"cos"
	// 1:4	(	""
	// 1:5	IDENT	"x"
	// 1:6	)	""
	// 1:8	+	""
	// 1:10	IMAG	"1i"
	// 1:12	*	""
	// 1:13	IDENT	"sin"
	// 1:16	(	""
	// 1:17	IDENT	"x"
	// 1:18	)	""
	// 1:20	;	"\n"
	// 1:20	COMMENT	"// Euler"
}
func TestMultiFileInitOrder(t *testing.T) {
	fset := token.NewFileSet()
	mustParse := func(src string) *ast.File {
		f, err := parser.ParseFile(fset, "main", src, 0)
		if err != nil {
			t.Fatal(err)
		}
		return f
	}

	fileA := mustParse(`package main; var a = 1`)
	fileB := mustParse(`package main; var b = 2`)

	// The initialization order must not depend on the parse
	// order of the files, only on the presentation order to
	// the type-checker.
	for _, test := range []struct {
		files []*ast.File
		want  string
	}{
		{[]*ast.File{fileA, fileB}, "[a = 1 b = 2]"},
		{[]*ast.File{fileB, fileA}, "[b = 2 a = 1]"},
	} {
		var info Info
		if _, err := new(Config).Check("main", fset, test.files, &info); err != nil {
			t.Fatal(err)
		}
		if got := fmt.Sprint(info.InitOrder); got != test.want {
			t.Fatalf("got %s; want %s", got, test.want)
		}
	}
}
func TestParse(t *testing.T) {
	for _, filename := range validFiles {
		_, err := ParseFile(token.NewFileSet(), filename, nil, DeclarationErrors)
		if err != nil {
			t.Fatalf("ParseFile(%s): %v", filename, err)
		}
	}
}
func BenchmarkParse(b *testing.B) {
	b.SetBytes(int64(len(src)))
	for i := 0; i < b.N; i++ {
		if _, err := ParseFile(token.NewFileSet(), "", src, ParseComments); err != nil {
			b.Fatalf("benchmark failed due to parse error: %s", err)
		}
	}
}
// This example illustrates how to remove a variable declaration
// in a Go program while maintaining correct comment association
// using an ast.CommentMap.
func ExampleCommentMap() {
	// src is the input for which we create the AST that we
	// are going to manipulate.
	src := `
// This is the package comment.
package main

// This comment is associated with the hello constant.
const hello = "Hello, World!" // line comment 1

// This comment is associated with the foo variable.
var foo = hello // line comment 2

// This comment is associated with the main function.
func main() {
	fmt.Println(hello) // line comment 3
}
`

	// Create the AST by parsing src.
	fset := token.NewFileSet() // positions are relative to fset
	f, err := parser.ParseFile(fset, "src.go", src, parser.ParseComments)
	if err != nil {
		panic(err)
	}

	// Create an ast.CommentMap from the ast.File's comments.
	// This helps keep the association between comments
	// and AST nodes.
	cmap := ast.NewCommentMap(fset, f, f.Comments)

	// Remove the first variable declaration from the list of declarations.
	f.Decls = removeFirstVarDecl(f.Decls)

	// Use the comment map to filter comments that don't belong anymore
	// (the comments associated with the variable declaration), and create
	// the new comments list.
	f.Comments = cmap.Filter(f).Comments()

	// Print the modified AST.
	var buf bytes.Buffer
	if err := format.Node(&buf, fset, f); err != nil {
		panic(err)
	}
	fmt.Printf("%s", buf.Bytes())

	// output:
	// // This is the package comment.
	// package main
	//
	// // This comment is associated with the hello constant.
	// const hello = "Hello, World!" // line comment 1
	//
	// // This comment is associated with the main function.
	// func main() {
	//	fmt.Println(hello) // line comment 3
	// }
}
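// removeFirstVarDecl is not shown in this listing. A minimal sketch of what
// such a helper might do (an assumption, not the original implementation):
// drop the first GenDecl whose token is VAR and keep everything else.
func removeFirstVarDecl(decls []ast.Decl) []ast.Decl {
	for i, decl := range decls {
		if gen, ok := decl.(*ast.GenDecl); ok && gen.Tok == token.VAR {
			// Remove element i; the comment map filtering above takes care
			// of the comments that were attached to it.
			return append(decls[:i], decls[i+1:]...)
		}
	}
	return decls
}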
func makePkg(t *testing.T, src string) (*Package, error) {
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, filename, src, parser.DeclarationErrors)
	if err != nil {
		return nil, err
	}
	// use the package name as package path
	conf := Config{Importer: importer.Default([]*ast.File{file})}
	return conf.Check(file.Name.Name, fset, []*ast.File{file}, nil)
}
func pkgFor(path, source string, info *Info) (*Package, error) {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, path, source, 0)
	if err != nil {
		return nil, err
	}
	conf := Config{
		Importer:                  importer.Default([]*ast.File{f}),
		AllowUseUninitializedVars: true,
		AllowUninitializedExprs:   true,
	}
	return conf.Check(f.Name.Name, fset, []*ast.File{f}, info)
}
func parseFunc(filename, functionname string) (fun *ast.FuncDecl, fset *token.FileSet) {
	fset = token.NewFileSet()
	if file, err := parser.ParseFile(fset, filename, nil, 0); err == nil {
		for _, d := range file.Decls {
			if f, ok := d.(*ast.FuncDecl); ok && f.Name.Name == functionname {
				fun = f
				return
			}
		}
	}
	panic("function not found")
}
func TestImports(t *testing.T) {
	for path, isValid := range imports {
		src := fmt.Sprintf("package p; import %s", path)
		_, err := ParseFile(token.NewFileSet(), "", src, 0)
		switch {
		case err != nil && isValid:
			t.Errorf("ParseFile(%s): got %v; expected no error", src, err)
		case err == nil && !isValid:
			t.Errorf("ParseFile(%s): got no error; expected one", src)
		}
	}
}
// Source formats src in canonical gofmt style and returns the result
// or an (I/O or syntax) error. src is expected to be a syntactically
// correct Go source file, or a list of Go declarations or statements.
//
// If src is a partial source file, the leading and trailing space of src
// is applied to the result (such that it has the same leading and trailing
// space as src), and the result is indented by the same amount as the first
// line of src containing code. Imports are not sorted for partial source files.
//
func Source(src []byte) ([]byte, error) {
	fset := token.NewFileSet()
	file, sourceAdj, indentAdj, err := parse(fset, "", src, true)
	if err != nil {
		return nil, err
	}

	if sourceAdj == nil {
		// Complete source file.
		// TODO(gri) consider doing this always.
		ast.SortImports(fset, file)
	}

	return format(fset, file, sourceAdj, indentAdj, src, config)
}
func main() {
	flag.Parse()
	fset := token.NewFileSet()
	nheadings := 0
	err := filepath.Walk(*root, func(path string, fi os.FileInfo, err error) error {
		if !fi.IsDir() {
			return nil
		}
		pkgs, err := parser.ParseDir(fset, path, isGoFile, parser.ParseComments)
		if err != nil {
			if *verbose {
				fmt.Fprintln(os.Stderr, err)
			}
			return nil
		}
		for _, pkg := range pkgs {
			d := doc.New(pkg, path, doc.Mode(0))
			list := appendHeadings(nil, d.Doc)
			for _, d := range d.Consts {
				list = appendHeadings(list, d.Doc)
			}
			for _, d := range d.Types {
				list = appendHeadings(list, d.Doc)
			}
			for _, d := range d.Vars {
				list = appendHeadings(list, d.Doc)
			}
			for _, d := range d.Funcs {
				list = appendHeadings(list, d.Doc)
			}
			if len(list) > 0 {
				// directories may contain multiple packages;
				// print path and package name
				fmt.Printf("%s (package %s)\n", path, pkg.Name)
				for _, h := range list {
					fmt.Printf("\t%s\n", h)
				}
				nheadings += len(list)
			}
		}
		return nil
	})
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	fmt.Println(nheadings, "headings found")
}
func BenchmarkScan(b *testing.B) {
	b.StopTimer()
	fset := token.NewFileSet()
	file := fset.AddFile("", fset.Base(), len(source))
	var s Scanner
	b.StartTimer()
	for i := 0; i < b.N; i++ {
		s.Init(file, source, nil, ScanComments)
		for {
			_, tok, _ := s.Scan()
			if tok == token.EOF {
				break
			}
		}
	}
}
func TestEvalArith(t *testing.T) {
	var tests = []string{
		`true`,
		`false == false`,
		`12345678 + 87654321 == 99999999`,
		`10 * 20 == 200`,
		`(1<<1000)*2 >> 100 == 2<<900`,
		`"foo" + "bar" == "foobar"`,
		`"abc" <= "bcd"`,
		`len([10]struct{}{}) == 2*5`,
	}
	fset := token.NewFileSet()
	for _, test := range tests {
		testEval(t, fset, nil, token.NoPos, test, Typ[UntypedBool], "", "true")
	}
}
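// testEval is a helper defined elsewhere in the test file. A minimal sketch of
// what it might do (an assumption about the helper, built on the Eval API of
// this package): evaluate expr at the given position and compare the resulting
// type and constant value against the expectations.
func testEval(t *testing.T, fset *token.FileSet, pkg *Package, pos token.Pos, expr string, typ Type, typStr, valStr string) {
	tv, err := Eval(fset, pkg, pos, expr)
	if err != nil {
		t.Errorf("Eval(%q) failed: %s", expr, err)
		return
	}
	if typ != nil && !Identical(tv.Type, typ) {
		t.Errorf("Eval(%q) got type %s, want %s", expr, tv.Type, typ)
	} else if typStr != "" && tv.Type.String() != typStr {
		t.Errorf("Eval(%q) got type %s, want %s", expr, tv.Type, typStr)
	}
	if valStr != "" && tv.Value.String() != valStr {
		t.Errorf("Eval(%q) got value %s, want %s", expr, tv.Value, valStr)
	}
}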
func TestCommentGroups(t *testing.T) {
	f, err := ParseFile(token.NewFileSet(), "", `
package p /* 1a */ /* 1b */ /* 1c */ // 1d
/* 2a
*/
// 2b
const pi = 3.1415
/* 3a */ // 3b
/* 3c */
const e = 2.7182

// Example from issue 3139
func ExampleCount() {
	fmt.Println(strings.Count("cheese", "e"))
	fmt.Println(strings.Count("five", "")) // before & after each rune

	// Output:
	// 3
	// 5
}
`, ParseComments)
	if err != nil {
		t.Fatal(err)
	}
	expected := [][]string{
		{"/* 1a */", "/* 1b */", "/* 1c */", "// 1d"},
		{"/* 2a\n*/", "// 2b"},
		{"/* 3a */", "// 3b", "/* 3c */"},
		{"// Example from issue 3139"},
		{"// before & after each rune"},
		{"// Output:", "// 3", "// 5"},
	}
	if len(f.Comments) != len(expected) {
		t.Fatalf("got %d comment groups; expected %d", len(f.Comments), len(expected))
	}
	for i, exp := range expected {
		got := f.Comments[i].List
		if len(got) != len(exp) {
			t.Errorf("got %d comments in group %d; expected %d", len(got), i, len(exp))
			continue
		}
		for j, exp := range exp {
			got := got[j].Text
			if got != exp {
				t.Errorf("got %q in group %d; expected %q", got, i, exp)
			}
		}
	}
}
func testFile(t *testing.T, b1, b2 *bytes.Buffer, filename string) {
	// open file
	f, err := os.Open(filename)
	if err != nil {
		t.Error(err)
		return
	}

	// read file
	b1.Reset()
	_, err = io.Copy(b1, f)
	f.Close()
	if err != nil {
		t.Error(err)
		return
	}

	// exclude files w/ syntax errors (typically test cases)
	fset := token.NewFileSet()
	if _, _, _, err = parse(fset, filename, b1.Bytes(), false); err != nil {
		if *verbose {
			fmt.Fprintf(os.Stderr, "ignoring %s\n", err)
		}
		return
	}

	// gofmt file
	if err = gofmt(fset, filename, b1); err != nil {
		t.Errorf("1st gofmt failed: %v", err)
		return
	}

	// make a copy of the result
	b2.Reset()
	b2.Write(b1.Bytes())

	// gofmt result again
	if err = gofmt(fset, filename, b2); err != nil {
		t.Errorf("2nd gofmt failed: %v", err)
		return
	}

	// the first and 2nd result should be identical
	if !bytes.Equal(b1.Bytes(), b2.Bytes()) {
		t.Errorf("gofmt %s not idempotent", filename)
	}
}
func TestObjects(t *testing.T) {
	const src = `
package p
import fmt "fmt"
const pi = 3.14
type T struct{}
var x int
func f() { L: }
`

	f, err := ParseFile(token.NewFileSet(), "", src, 0)
	if err != nil {
		t.Fatal(err)
	}

	objects := map[string]ast.ObjKind{
		"p":   ast.Bad, // not in a scope
		"fmt": ast.Bad, // not resolved yet
		"pi":  ast.Con,
		"T":   ast.Typ,
		"x":   ast.Var,
		"int": ast.Bad, // not resolved yet
		"f":   ast.Fun,
		"L":   ast.Lbl,
	}

	ast.Inspect(f, func(n ast.Node) bool {
		if ident, ok := n.(*ast.Ident); ok {
			obj := ident.Obj
			if obj == nil {
				if objects[ident.Name] != ast.Bad {
					t.Errorf("no object for %s", ident.Name)
				}
				return true
			}
			if obj.Name != ident.Name {
				t.Errorf("names don't match: obj.Name = %s, ident.Name = %s", obj.Name, ident.Name)
			}
			kind := objects[ident.Name]
			if obj.Kind != kind {
				t.Errorf("%s: obj.Kind = %s; want %s", ident.Name, obj.Kind, kind)
			}
		}
		return true
	})
}
// TestIssue9979 verifies that empty statements are contained within their enclosing blocks.
func TestIssue9979(t *testing.T) {
	for _, src := range []string{
		"package p; func f() {;}",
		"package p; func f() {L:}",
		"package p; func f() {L:;}",
		"package p; func f() {L:\n}",
		"package p; func f() {L:\n;}",
		"package p; func f() { ; }",
		"package p; func f() { L: }",
		"package p; func f() { L: ; }",
		"package p; func f() { L: \n}",
		"package p; func f() { L: \n; }",
	} {
		fset := token.NewFileSet()
		f, err := ParseFile(fset, "", src, 0)
		if err != nil {
			t.Fatal(err)
		}

		var pos, end token.Pos
		ast.Inspect(f, func(x ast.Node) bool {
			switch s := x.(type) {
			case *ast.BlockStmt:
				pos, end = s.Pos()+1, s.End()-1 // exclude "{", "}"
			case *ast.LabeledStmt:
				pos, end = s.Pos()+2, s.End() // exclude "L:"
			case *ast.EmptyStmt:
				// check containment
				if s.Pos() < pos || s.End() > end {
					t.Errorf("%s: %T[%d, %d] not inside [%d, %d]", src, s, s.Pos(), s.End(), pos, end)
				}
				// check semicolon
				offs := fset.Position(s.Pos()).Offset
				if ch := src[offs]; ch != ';' != s.Implicit {
					want := "want ';'"
					if s.Implicit {
						want = "but ';' is implicit"
					}
					t.Errorf("%s: found %q at offset %d; %s", src, ch, offs, want)
				}
			}
			return true
		})
	}
}
// ExampleMethodSet prints the method sets of various types.
func ExampleMethodSet() {
	// Parse a single source file.
	const input = `
package temperature
import "fmt"
type Celsius float64
func (c Celsius) String() string  { return fmt.Sprintf("%g°C", c) }
func (c *Celsius) SetF(f float64) { *c = Celsius(f - 32 / 9 * 5) }
`
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "celsius.go", input, 0)
	if err != nil {
		log.Fatal(err)
	}

	// Type-check a package consisting of this file.
	// Type information for the imported packages
	// comes from $GOROOT/pkg/$GOOS_$GOARCH/fmt.a.
	conf := types.Config{Importer: importer.Default([]*ast.File{f})}
	pkg, err := conf.Check("temperature", fset, []*ast.File{f}, nil)
	if err != nil {
		log.Fatal(err)
	}

	// Print the method sets of Celsius and *Celsius.
	celsius := pkg.Scope().Lookup("Celsius").Type()
	for _, t := range []types.Type{celsius, types.NewPointer(celsius)} {
		fmt.Printf("Method set of %s:\n", t)
		mset := types.NewMethodSet(t)
		for i := 0; i < mset.Len(); i++ {
			fmt.Println(mset.At(i))
		}
		fmt.Println()
	}

	// Output:
	// Method set of temperature.Celsius:
	// method (temperature.Celsius) String() string
	//
	// Method set of *temperature.Celsius:
	// method (*temperature.Celsius) SetF(f float64)
	// method (*temperature.Celsius) String() string
}
func TestNode(t *testing.T) {
	src, err := ioutil.ReadFile(testfile)
	if err != nil {
		t.Fatal(err)
	}

	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, testfile, src, parser.ParseComments)
	if err != nil {
		t.Fatal(err)
	}

	var buf bytes.Buffer
	if err = Node(&buf, fset, file); err != nil {
		t.Fatal("Node failed:", err)
	}

	diff(t, buf.Bytes(), src)
}
func TestImports(t *testing.T) {
	fset := token.NewFileSet()
	for _, test := range importsTests {
		f, err := parser.ParseFile(fset, "test.go", test.in, 0)
		if err != nil {
			t.Errorf("%s: %v", test.name, err)
			continue
		}
		var got [][]string
		for _, group := range Imports(fset, f) {
			var b []string
			for _, spec := range group {
				b = append(b, unquote(spec.Path.Value))
			}
			got = append(got, b)
		}
		if !reflect.DeepEqual(got, test.want) {
			t.Errorf("Imports(%s)=%v, want %v", test.name, got, test.want)
		}
	}
}