// TestLineComments, using a simple test case, checks that consecutive line
// comments are properly terminated with a newline even if the AST position
// information is incorrect.
//
func TestLineComments(t *testing.T) {
	const src = `// comment 1
// comment 2
// comment 3
package main
`

	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "", src, parser.ParseComments)
	if err != nil {
		panic(err) // error in test
	}

	var buf bytes.Buffer
	fset = token.NewFileSet() // use the wrong file set
	Fprint(&buf, fset, f)

	// Count printed newlines: each of the three line comments must still
	// terminate its own line even with bogus position information.
	nlines := 0
	for _, ch := range buf.Bytes() {
		if ch == '\n' {
			nlines++
		}
	}

	const expected = 3
	if nlines < expected {
		t.Errorf("got %d, expected %d\n", nlines, expected)
		t.Errorf("result:\n%s", buf.Bytes())
	}
}
// TestLeadAndLineComments checks that a struct field's lead and line
// comments are captured by the parser and that they survive (or are
// dropped with their field by) ast.FileExports.
func TestLeadAndLineComments(t *testing.T) {
	f, err := ParseFile(token.NewFileSet(), "", `
package p
type T struct {
	/* F1 lead comment */
	//
	F1 int /* F1 */ // line comment
	// F2 lead
	// comment
	F2 int // F2 line comment
	// f3 lead comment
	f3 int // f3 line comment
}
`, ParseComments)
	if err != nil {
		t.Fatal(err)
	}
	checkFieldComments(t, f, "T.F1", "/* F1 lead comment *///", "/* F1 */// line comment")
	checkFieldComments(t, f, "T.F2", "// F2 lead// comment", "// F2 line comment")
	checkFieldComments(t, f, "T.f3", "// f3 lead comment", "// f3 line comment")
	ast.FileExports(f)
	// exported fields keep their comments ...
	checkFieldComments(t, f, "T.F1", "/* F1 lead comment *///", "/* F1 */// line comment")
	checkFieldComments(t, f, "T.F2", "// F2 lead// comment", "// F2 line comment")
	// ... while the unexported field is removed entirely
	if getField(f, "T.f3") != nil {
		t.Error("not expected to find T.f3")
	}
}
// TestUnresolved checks that identifiers the parser cannot resolve
// (predeclared-looking names used in declarations) are collected in
// f.Unresolved, in source order.
func TestUnresolved(t *testing.T) {
	f, err := ParseFile(token.NewFileSet(), "", `
package p
//
func f1a(int)
func f2a(byte, int, float)
func f3a(a, b int, c float)
func f4a(...complex)
func f5a(a s1a, b ...complex)
//
func f1b(*int)
func f2b([]byte, (int), *float)
func f3b(a, b *int, c []float)
func f4b(...*complex)
func f5b(a s1a, b ...[]complex)
//
type s1a struct { int }
type s2a struct { byte; int; s1a }
type s3a struct { a, b int; c float }
//
type s1b struct { *int }
type s2b struct { byte; int; *float }
type s3b struct { a, b *s3b; c []float }
`, 0)
	if err != nil {
		t.Fatal(err)
	}

	want := "int " + // f1a
		"byte int float " + // f2a
		"int float " + // f3a
		"complex " + // f4a
		"complex " + // f5a
		//
		"int " + // f1b
		"byte int float " + // f2b
		"int float " + // f3b
		"complex " + // f4b
		"complex " + // f5b
		//
		"int " + // s1a
		"byte int " + // s2a
		"int float " + // s3a
		//
		"int " + // s1b
		"byte int float " + // s2b
		"float " // s3b

	// collect unresolved identifiers
	var buf bytes.Buffer
	for _, u := range f.Unresolved {
		buf.WriteString(u.Name)
		buf.WriteByte(' ')
	}
	got := buf.String()

	if got != want {
		t.Errorf("\ngot: %s\nwant: %s", got, want)
	}
}
// ExampleScanner_Scan demonstrates tokenizing a source snippet, including
// its trailing comment, with a Scanner.
func ExampleScanner_Scan() {
	// src is the input that we want to tokenize.
	src := []byte("cos(x) + 1i*sin(x) // Euler")

	// Initialize the scanner.
	var s scanner.Scanner
	fset := token.NewFileSet()                      // positions are relative to fset
	file := fset.AddFile("", fset.Base(), len(src)) // register input "file"
	s.Init(file, src, nil /* no error handler */, scanner.ScanComments)

	// Repeated calls to Scan yield the token sequence found in the input.
	for {
		pos, tok, lit := s.Scan()
		if tok == token.EOF {
			break
		}
		fmt.Printf("%s\t%s\t%q\n", fset.Position(pos), tok, lit)
	}

	// output:
	// 1:1	IDENT	"cos"
	// 1:4	(	""
	// 1:5	IDENT	"x"
	// 1:6	)	""
	// 1:8	+	""
	// 1:10	IMAG	"1i"
	// 1:12	*	""
	// 1:13	IDENT	"sin"
	// 1:16	(	""
	// 1:17	IDENT	"x"
	// 1:18	)	""
	// 1:20	;	"\n"
	// 1:20	COMMENT	"// Euler"
}
// TestIncompleteSelection ensures that an incomplete selector
// expression is parsed as a (blank) *ast.SelectorExpr, not a
// *ast.BadExpr.
func TestIncompleteSelection(t *testing.T) {
	for _, src := range []string{
		"package p; var _ = fmt.",             // at EOF
		"package p; var _ = fmt.\ntype X int", // not at EOF
	} {
		fset := token.NewFileSet()
		f, err := ParseFile(fset, "", src, 0)
		// the dangling selector must be reported as an error ...
		if err == nil {
			t.Errorf("ParseFile(%s) succeeded unexpectedly", src)
			continue
		}

		const wantErr = "expected selector or type assertion"
		if !strings.Contains(err.Error(), wantErr) {
			t.Errorf("ParseFile returned wrong error %q, want %q", err, wantErr)
		}

		// ... but the AST must still contain a SelectorExpr node
		var sel *ast.SelectorExpr
		ast.Inspect(f, func(n ast.Node) bool {
			if n, ok := n.(*ast.SelectorExpr); ok {
				sel = n
			}
			return true
		})
		if sel == nil {
			t.Error("found no *ast.SelectorExpr")
			continue
		}
		const wantSel = "&{fmt _}"
		if fmt.Sprint(sel) != wantSel {
			t.Errorf("found selector %s, want %s", sel, wantSel)
			continue
		}
	}
}
func TestParse(t *testing.T) { for _, filename := range validFiles { _, err := ParseFile(token.NewFileSet(), filename, nil, DeclarationErrors) if err != nil { t.Fatalf("ParseFile(%s): %v", filename, err) } } }
func BenchmarkParse(b *testing.B) { b.SetBytes(int64(len(src))) for i := 0; i < b.N; i++ { if _, err := ParseFile(token.NewFileSet(), "", src, ParseComments); err != nil { b.Fatalf("benchmark failed due to parse error: %s", err) } } }
func TestImports(t *testing.T) { for path, isValid := range imports { src := fmt.Sprintf("package p; import %s", path) _, err := ParseFile(token.NewFileSet(), "", src, 0) switch { case err != nil && isValid: t.Errorf("ParseFile(%s): got %v; expected no error", src, err) case err == nil && !isValid: t.Errorf("ParseFile(%s): got no error; expected one", src) } } }
func parseFunc(filename, functionname string) (fun *ast.FuncDecl, fset *token.FileSet) { fset = token.NewFileSet() if file, err := parser.ParseFile(fset, filename, nil, 0); err == nil { for _, d := range file.Decls { if f, ok := d.(*ast.FuncDecl); ok && f.Name.Name == functionname { fun = f return } } } panic("function not found") }
// main walks the file tree rooted at *root, parses the documentation of
// every Go package it finds, and prints each section heading discovered
// in the package docs together with the package's location.
func main() {
	flag.Parse()
	fset := token.NewFileSet()
	nheadings := 0
	err := filepath.Walk(*root, func(path string, fi os.FileInfo, err error) error {
		if !fi.IsDir() {
			return nil
		}
		pkgs, err := parser.ParseDir(fset, path, isGoFile, parser.ParseComments)
		if err != nil {
			if *verbose {
				fmt.Fprintln(os.Stderr, err)
			}
			// directories that don't parse are skipped, not fatal
			return nil
		}
		for _, pkg := range pkgs {
			d := doc.New(pkg, path, doc.Mode(0))
			// collect headings from the package doc and all declaration docs
			list := appendHeadings(nil, d.Doc)
			for _, d := range d.Consts {
				list = appendHeadings(list, d.Doc)
			}
			for _, d := range d.Types {
				list = appendHeadings(list, d.Doc)
			}
			for _, d := range d.Vars {
				list = appendHeadings(list, d.Doc)
			}
			for _, d := range d.Funcs {
				list = appendHeadings(list, d.Doc)
			}
			if len(list) > 0 {
				// directories may contain multiple packages;
				// print path and package name
				fmt.Printf("%s (package %s)\n", path, pkg.Name)
				for _, h := range list {
					fmt.Printf("\t%s\n", h)
				}
				nheadings += len(list)
			}
		}
		return nil
	})
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	fmt.Println(nheadings, "headings found")
}
// RewriteTypesWithProtobufStructTags rewrites the named Go source file in
// place, applying structTags as "protobuf" struct tags via updateStructTags
// and reformatting the result. It returns an aggregate error if any tag
// update fails; in that case the file is left unmodified.
func RewriteTypesWithProtobufStructTags(name string, structTags map[string]map[string]string) error {
	fset := token.NewFileSet()
	src, err := ioutil.ReadFile(name)
	if err != nil {
		return err
	}
	file, err := parser.ParseFile(fset, name, src, parser.DeclarationErrors|parser.ParseComments)
	if err != nil {
		return err
	}

	allErrs := []error{}

	// set any new struct tags
	for _, d := range file.Decls {
		if errs := updateStructTags(d, structTags, []string{"protobuf"}); len(errs) > 0 {
			allErrs = append(allErrs, errs...)
		}
	}

	if len(allErrs) > 0 {
		var s string
		for _, err := range allErrs {
			s += err.Error() + "\n"
		}
		return errors.New(s)
	}

	b := &bytes.Buffer{}
	if err := printer.Fprint(b, fset, file); err != nil {
		return err
	}

	// gofmt the printed AST; failure here indicates the rewrite produced
	// syntactically invalid code, so include the text in the error
	body, err := format.Source(b.Bytes())
	if err != nil {
		return fmt.Errorf("%s\n---\nunable to format %q: %v", b, name, err)
	}

	f, err := os.OpenFile(name, os.O_WRONLY|os.O_TRUNC, 0644)
	if err != nil {
		return err
	}
	defer f.Close()
	if _, err := f.Write(body); err != nil {
		return err
	}
	return f.Close()
}
// BenchmarkScan measures raw scanning speed (including comment scanning)
// over the source fixture; scanner setup is excluded from the timing.
func BenchmarkScan(b *testing.B) {
	b.StopTimer()
	fset := token.NewFileSet()
	file := fset.AddFile("", fset.Base(), len(source))
	var s Scanner
	b.StartTimer()
	for i := 0; i < b.N; i++ {
		// Init resets the scanner, so the same file can be rescanned
		s.Init(file, source, nil, ScanComments)
		for {
			_, tok, _ := s.Scan()
			if tok == token.EOF {
				break
			}
		}
	}
}
// TestCommentGroups checks that the parser groups adjacent comments into
// the expected comment groups (line-adjacency and blank lines determine
// the grouping).
func TestCommentGroups(t *testing.T) {
	f, err := ParseFile(token.NewFileSet(), "", `
package p /* 1a */ /* 1b */ /* 1c */ // 1d
/* 2a
*/
// 2b
const pi = 3.1415
/* 3a */ // 3b
/* 3c */
const e = 2.7182

// Example from issue 3139
func ExampleCount() {
	fmt.Println(strings.Count("cheese", "e"))
	fmt.Println(strings.Count("five", "")) // before & after each rune
	// Output:
	// 3
	// 5
}
`, ParseComments)
	if err != nil {
		t.Fatal(err)
	}
	expected := [][]string{
		{"/* 1a */", "/* 1b */", "/* 1c */", "// 1d"},
		{"/* 2a\n*/", "// 2b"},
		{"/* 3a */", "// 3b", "/* 3c */"},
		{"// Example from issue 3139"},
		{"// before & after each rune"},
		{"// Output:", "// 3", "// 5"},
	}
	if len(f.Comments) != len(expected) {
		t.Fatalf("got %d comment groups; expected %d", len(f.Comments), len(expected))
	}
	for i, exp := range expected {
		got := f.Comments[i].List
		if len(got) != len(exp) {
			t.Errorf("got %d comments in group %d; expected %d", len(got), i, len(exp))
			continue
		}
		for j, exp := range exp {
			got := got[j].Text
			if got != exp {
				t.Errorf("got %q in group %d; expected %q", got, i, exp)
			}
		}
	}
}
// TestObjects checks that identifiers in parsed source resolve to objects
// of the expected kind, and that names the parser cannot resolve (package
// names, imports, predeclared identifiers) have no object.
func TestObjects(t *testing.T) {
	const src = `
package p
import fmt "fmt"
const pi = 3.14
type T struct{}
var x int
func f() { L: }
`

	f, err := ParseFile(token.NewFileSet(), "", src, 0)
	if err != nil {
		t.Fatal(err)
	}

	objects := map[string]ast.ObjKind{
		"p":   ast.Bad, // not in a scope
		"fmt": ast.Bad, // not resolved yet
		"pi":  ast.Con,
		"T":   ast.Typ,
		"x":   ast.Var,
		"int": ast.Bad, // not resolved yet
		"f":   ast.Fun,
		"L":   ast.Lbl,
	}

	ast.Inspect(f, func(n ast.Node) bool {
		if ident, ok := n.(*ast.Ident); ok {
			obj := ident.Obj
			if obj == nil {
				// unresolved identifiers must be the ones we expect to be Bad
				if objects[ident.Name] != ast.Bad {
					t.Errorf("no object for %s", ident.Name)
				}
				return true
			}
			if obj.Name != ident.Name {
				t.Errorf("names don't match: obj.Name = %s, ident.Name = %s", obj.Name, ident.Name)
			}
			kind := objects[ident.Name]
			if obj.Kind != kind {
				t.Errorf("%s: obj.Kind = %s; want %s", ident.Name, obj.Kind, kind)
			}
		}
		return true
	})
}
// TestIssue9979 verifies that empty statements are contained within their enclosing blocks.
func TestIssue9979(t *testing.T) {
	for _, src := range []string{
		"package p; func f() {;}",
		"package p; func f() {L:}",
		"package p; func f() {L:;}",
		"package p; func f() {L:\n}",
		"package p; func f() {L:\n;}",
		"package p; func f() { ; }",
		"package p; func f() { L: }",
		"package p; func f() { L: ; }",
		"package p; func f() { L: \n}",
		"package p; func f() { L: \n; }",
	} {
		fset := token.NewFileSet()
		f, err := ParseFile(fset, "", src, 0)
		if err != nil {
			t.Fatal(err)
		}

		// pos/end track the interior of the innermost enclosing
		// block or labeled statement seen so far during the walk
		var pos, end token.Pos
		ast.Inspect(f, func(x ast.Node) bool {
			switch s := x.(type) {
			case *ast.BlockStmt:
				pos, end = s.Pos()+1, s.End()-1 // exclude "{", "}"
			case *ast.LabeledStmt:
				pos, end = s.Pos()+2, s.End() // exclude "L:"
			case *ast.EmptyStmt:
				// check containment
				if s.Pos() < pos || s.End() > end {
					t.Errorf("%s: %T[%d, %d] not inside [%d, %d]", src, s, s.Pos(), s.End(), pos, end)
				}
				// check semicolon: an explicit empty statement must sit
				// on a ';' in the source; an implicit one must not
				offs := fset.Position(s.Pos()).Offset
				if ch := src[offs]; ch != ';' != s.Implicit {
					want := "want ';'"
					if s.Implicit {
						want = "but ';' is implicit"
					}
					t.Errorf("%s: found %q at offset %d; %s", src, ch, offs, want)
				}
			}
			return true
		})
	}
}
// RewriteGeneratedGogoProtobufFile rewrites the named generated file in
// place: it drops declarations that dropExistingTypeDeclarations flags
// (using typeExistsFn), strips comments no longer attached to any node,
// prepends header, and reformats the result.
//
// NOTE(review): packageName is accepted but unused in this body — confirm
// whether callers rely on it or it can be removed.
func RewriteGeneratedGogoProtobufFile(name string, packageName string, typeExistsFn func(string) bool, header []byte) error {
	fset := token.NewFileSet()
	src, err := ioutil.ReadFile(name)
	if err != nil {
		return err
	}
	file, err := parser.ParseFile(fset, name, src, parser.DeclarationErrors|parser.ParseComments)
	if err != nil {
		return err
	}
	cmap := ast.NewCommentMap(fset, file, file.Comments)

	// remove types that are already declared
	decls := []ast.Decl{}
	for _, d := range file.Decls {
		if !dropExistingTypeDeclarations(d, typeExistsFn) {
			decls = append(decls, d)
		}
	}
	file.Decls = decls

	// remove unmapped comments
	file.Comments = cmap.Filter(file).Comments()

	b := &bytes.Buffer{}
	b.Write(header)
	if err := printer.Fprint(b, fset, file); err != nil {
		return err
	}

	body, err := format.Source(b.Bytes())
	if err != nil {
		return err
	}

	f, err := os.OpenFile(name, os.O_WRONLY|os.O_TRUNC, 0644)
	if err != nil {
		return err
	}
	defer f.Close()
	if _, err := f.Write(body); err != nil {
		return err
	}
	return f.Close()
}
// New constructs a new builder. If $GOROOT is not set in the default build
// context, it attempts to locate it from the `go` binary on $PATH and
// falls back to a warning if that fails.
func New() *Builder {
	c := build.Default
	if c.GOROOT == "" {
		if p, err := exec.Command("which", "go").CombinedOutput(); err == nil {
			// The returned string will have some/path/bin/go, so remove the last two elements.
			c.GOROOT = filepath.Dir(filepath.Dir(strings.Trim(string(p), "\n")))
		} else {
			fmt.Printf("Warning: $GOROOT not set, and unable to run `which go` to find it: %v\n", err)
		}
	}
	return &Builder{
		context:               &c,
		buildInfo:             map[string]*build.Package{},
		fset:                  token.NewFileSet(),
		parsed:                map[string][]parsedFile{},
		userRequested:         map[string]bool{},
		endLineToCommentGroup: map[fileLine]*ast.CommentGroup{},
		importGraph:           map[string]map[string]struct{}{},
	}
}
func TestVarScope(t *testing.T) { f, err := ParseFile(token.NewFileSet(), "", `package p; func f() { var x, y, z = x, y, z }`, 0) if err != nil { t.Fatal(err) } // RHS refers to undefined globals; LHS does not. as := f.Decls[0].(*ast.FuncDecl).Body.List[0].(*ast.DeclStmt).Decl.(*ast.GenDecl).Specs[0].(*ast.ValueSpec) for _, v := range as.Values { id := v.(*ast.Ident) if id.Obj != nil { t.Errorf("rhs %s has Obj, should not", id.Name) } } for _, id := range as.Names { if id.Obj == nil { t.Errorf("lhs %s does not have Obj, should", id.Name) } } }
func TestColonEqualsScope(t *testing.T) { f, err := ParseFile(token.NewFileSet(), "", `package p; func f() { x, y, z := x, y, z }`, 0) if err != nil { t.Fatal(err) } // RHS refers to undefined globals; LHS does not. as := f.Decls[0].(*ast.FuncDecl).Body.List[0].(*ast.AssignStmt) for _, v := range as.Rhs { id := v.(*ast.Ident) if id.Obj != nil { t.Errorf("rhs %s has Obj, should not", id.Name) } } for _, v := range as.Lhs { id := v.(*ast.Ident) if id.Obj == nil { t.Errorf("lhs %s does not have Obj, should", id.Name) } } }
// ExampleParseFile shows how to parse only a file's import declarations.
func ExampleParseFile() {
	fset := token.NewFileSet() // positions are relative to fset

	// Parse the file containing this very example
	// but stop after processing the imports.
	f, err := parser.ParseFile(fset, "example_test.go", nil, parser.ImportsOnly)
	if err != nil {
		fmt.Println(err)
		return
	}

	// Print the imports from the file's AST.
	for _, s := range f.Imports {
		fmt.Println(s.Path.Value)
	}

	// output:
	//
	// "fmt"
	// "k8s.io/kubernetes/third_party/golang/go/parser"
	// "k8s.io/kubernetes/third_party/golang/go/token"
}
// BenchmarkScanFile measures scanning throughput over this package's own
// scanner.go source; file reading and registration are excluded from the
// timed region.
func BenchmarkScanFile(b *testing.B) {
	b.StopTimer()
	const filename = "scanner.go"
	src, err := ioutil.ReadFile(filename)
	if err != nil {
		panic(err)
	}
	fset := token.NewFileSet()
	file := fset.AddFile(filename, fset.Base(), len(src))
	b.SetBytes(int64(len(src)))
	var s Scanner
	b.StartTimer()
	for i := 0; i < b.N; i++ {
		s.Init(file, src, nil, ScanComments)
		for {
			_, tok, _ := s.Scan()
			if tok == token.EOF {
				break
			}
		}
	}
}
// checkErrors parses the source obtained from filename/input and compares
// the error list reported by the parser against the errors expected by
// the source's error-comment annotations.
func checkErrors(t *testing.T, filename string, input interface{}) {
	src, err := readSource(filename, input)
	if err != nil {
		t.Error(err)
		return
	}

	fset := token.NewFileSet()
	_, err = ParseFile(fset, filename, src, DeclarationErrors|AllErrors)
	// any parse error must be a scanner.ErrorList; anything else is a test failure
	found, ok := err.(scanner.ErrorList)
	if err != nil && !ok {
		t.Error(err)
		return
	}
	found.RemoveMultiples()

	// we are expecting the following errors
	// (collect these after parsing a file so that it is found in the file set)
	expected := expectedErrors(t, fset, filename, src)

	// verify errors returned by the parser
	compareErrors(t, fset, expected, found)
}
func TestParseDir(t *testing.T) { path := "." pkgs, err := ParseDir(token.NewFileSet(), path, dirFilter, 0) if err != nil { t.Fatalf("ParseDir(%s): %v", path, err) } if n := len(pkgs); n != 1 { t.Errorf("got %d packages; want 1", n) } pkg := pkgs["parser"] if pkg == nil { t.Errorf(`package "parser" not found`) return } if n := len(pkg.Files); n != 3 { t.Errorf("got %d package files; want 3", n) } for filename := range pkg.Files { if !nameFilter(filename) { t.Errorf("unexpected package file: %s", filename) } } }
// TestCommentMap checks that NewCommentMap associates each comment group
// with the expected node and loses no comment groups. It also contains
// (disabled) support code for regenerating the res table.
func TestCommentMap(t *testing.T) {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "", src, parser.ParseComments)
	if err != nil {
		t.Fatal(err)
	}
	cmap := NewCommentMap(fset, f, f.Comments)

	// verify correct association of comments
	for n, list := range cmap {
		key := fmt.Sprintf("%2d: %T", fset.Position(n.Pos()).Line, n)
		got := ctext(list)
		want := res[key]
		if got != want {
			t.Errorf("%s: got %q; want %q", key, got, want)
		}
	}

	// verify that no comments got lost
	if n := len(cmap.Comments()); n != len(f.Comments) {
		t.Errorf("got %d comment groups in map; want %d", n, len(f.Comments))
	}

	// support code to update test:
	// set genMap to true to generate res map
	const genMap = false
	if genMap {
		out := make([]string, 0, len(cmap))
		for n, list := range cmap {
			out = append(out, fmt.Sprintf("\t\"%2d: %T\":\t%q,", fset.Position(n.Pos()).Line, n, ctext(list)))
		}
		sort.Strings(out)
		for _, s := range out {
			fmt.Println(s)
		}
	}
}
func rewriteFile(name string, header []byte, rewriteFn func(*token.FileSet, *ast.File) error) error { fset := token.NewFileSet() src, err := ioutil.ReadFile(name) if err != nil { return err } file, err := parser.ParseFile(fset, name, src, parser.DeclarationErrors|parser.ParseComments) if err != nil { return err } if err := rewriteFn(fset, file); err != nil { return err } b := &bytes.Buffer{} b.Write(header) if err := printer.Fprint(b, fset, file); err != nil { return err } body, err := format.Source(b.Bytes()) if err != nil { return err } f, err := os.OpenFile(name, os.O_WRONLY|os.O_TRUNC, 0644) if err != nil { return err } defer f.Close() if _, err := f.Write(body); err != nil { return err } return f.Close() }
// Import returns details about the Go package named by the import path,
// interpreting local import paths relative to the srcDir directory.
// If the path is a local import path naming a package that can be imported
// using a standard import path, the returned package will set p.ImportPath
// to that path.
//
// In the directory containing the package, .go, .c, .h, and .s files are
// considered part of the package except for:
//
//	- .go files in package documentation
//	- files starting with _ or . (likely editor temporary files)
//	- files with build constraints not satisfied by the context
//
// If an error occurs, Import returns a non-nil error and a non-nil
// *Package containing partial information.
//
func (ctxt *Context) Import(path string, srcDir string, mode ImportMode) (*Package, error) {
	p := &Package{
		ImportPath: path,
	}
	if path == "" {
		return p, fmt.Errorf("import %q: invalid import path", path)
	}

	// Determine the installed-archive layout for the configured compiler.
	var pkgtargetroot string
	var pkga string
	var pkgerr error
	suffix := ""
	if ctxt.InstallSuffix != "" {
		suffix = "_" + ctxt.InstallSuffix
	}
	switch ctxt.Compiler {
	case "gccgo":
		pkgtargetroot = "pkg/gccgo_" + ctxt.GOOS + "_" + ctxt.GOARCH + suffix
		dir, elem := pathpkg.Split(p.ImportPath)
		pkga = pkgtargetroot + "/" + dir + "lib" + elem + ".a"
	case "gc":
		pkgtargetroot = "pkg/" + ctxt.GOOS + "_" + ctxt.GOARCH + suffix
		pkga = pkgtargetroot + "/" + p.ImportPath + ".a"
	default:
		// Save error for end of function.
		pkgerr = fmt.Errorf("import %q: unknown compiler %q", path, ctxt.Compiler)
	}

	binaryOnly := false
	if IsLocalImport(path) {
		pkga = "" // local imports have no installed path
		if srcDir == "" {
			return p, fmt.Errorf("import %q: import relative to unknown directory", path)
		}
		if !ctxt.isAbsPath(path) {
			p.Dir = ctxt.joinPath(srcDir, path)
		}
		// Determine canonical import path, if any.
		// Exclude results where the import path would include /testdata/.
		inTestdata := func(sub string) bool {
			return strings.Contains(sub, "/testdata/") || strings.HasSuffix(sub, "/testdata") || strings.HasPrefix(sub, "testdata/") || sub == "testdata"
		}
		if ctxt.GOROOT != "" {
			root := ctxt.joinPath(ctxt.GOROOT, "src")
			if sub, ok := ctxt.hasSubdir(root, p.Dir); ok && !inTestdata(sub) {
				p.Goroot = true
				p.ImportPath = sub
				p.Root = ctxt.GOROOT
				goto Found
			}
		}
		all := ctxt.gopath()
		for i, root := range all {
			rootsrc := ctxt.joinPath(root, "src")
			if sub, ok := ctxt.hasSubdir(rootsrc, p.Dir); ok && !inTestdata(sub) {
				// We found a potential import path for dir,
				// but check that using it wouldn't find something
				// else first.
				if ctxt.GOROOT != "" {
					if dir := ctxt.joinPath(ctxt.GOROOT, "src", sub); ctxt.isDir(dir) {
						p.ConflictDir = dir
						goto Found
					}
				}
				for _, earlyRoot := range all[:i] {
					if dir := ctxt.joinPath(earlyRoot, "src", sub); ctxt.isDir(dir) {
						p.ConflictDir = dir
						goto Found
					}
				}
				// sub would not name some other directory instead of this one.
				// Record it.
				p.ImportPath = sub
				p.Root = root
				goto Found
			}
		}
		// It's okay that we didn't find a root containing dir.
		// Keep going with the information we have.
	} else {
		if strings.HasPrefix(path, "/") {
			return p, fmt.Errorf("import %q: cannot import absolute path", path)
		}

		// tried records the location of unsuccessful package lookups
		var tried struct {
			goroot string
			gopath []string
		}

		// Determine directory from import path.
		if ctxt.GOROOT != "" {
			dir := ctxt.joinPath(ctxt.GOROOT, "src", path)
			isDir := ctxt.isDir(dir)
			binaryOnly = !isDir && mode&AllowBinary != 0 && pkga != "" && ctxt.isFile(ctxt.joinPath(ctxt.GOROOT, pkga))
			if isDir || binaryOnly {
				p.Dir = dir
				p.Goroot = true
				p.Root = ctxt.GOROOT
				goto Found
			}
			tried.goroot = dir
		}
		for _, root := range ctxt.gopath() {
			dir := ctxt.joinPath(root, "src", path)
			isDir := ctxt.isDir(dir)
			binaryOnly = !isDir && mode&AllowBinary != 0 && pkga != "" && ctxt.isFile(ctxt.joinPath(root, pkga))
			if isDir || binaryOnly {
				p.Dir = dir
				p.Root = root
				goto Found
			}
			tried.gopath = append(tried.gopath, dir)
		}

		// package was not found
		var paths []string
		if tried.goroot != "" {
			paths = append(paths, fmt.Sprintf("\t%s (from $GOROOT)", tried.goroot))
		} else {
			paths = append(paths, "\t($GOROOT not set)")
		}
		var i int
		var format = "\t%s (from $GOPATH)"
		for ; i < len(tried.gopath); i++ {
			if i > 0 {
				format = "\t%s"
			}
			paths = append(paths, fmt.Sprintf(format, tried.gopath[i]))
		}
		if i == 0 {
			paths = append(paths, "\t($GOPATH not set)")
		}
		return p, fmt.Errorf("cannot find package %q in any of:\n%s", path, strings.Join(paths, "\n"))
	}

Found:
	// Fill in the install locations derived from the chosen root.
	if p.Root != "" {
		p.SrcRoot = ctxt.joinPath(p.Root, "src")
		p.PkgRoot = ctxt.joinPath(p.Root, "pkg")
		p.BinDir = ctxt.joinPath(p.Root, "bin")
		if pkga != "" {
			p.PkgTargetRoot = ctxt.joinPath(p.Root, pkgtargetroot)
			p.PkgObj = ctxt.joinPath(p.Root, pkga)
		}
	}

	if mode&FindOnly != 0 {
		return p, pkgerr
	}
	if binaryOnly && (mode&AllowBinary) != 0 {
		return p, pkgerr
	}

	dirs, err := ctxt.readDir(p.Dir)
	if err != nil {
		return p, err
	}

	// Scan the directory, classifying each file by extension and build
	// constraints, and collecting imports from the Go files.
	var Sfiles []string // files with ".S" (capital S)
	var firstFile, firstCommentFile string
	imported := make(map[string][]token.Position)
	testImported := make(map[string][]token.Position)
	xTestImported := make(map[string][]token.Position)
	allTags := make(map[string]bool)
	fset := token.NewFileSet()
	for _, d := range dirs {
		if d.IsDir() {
			continue
		}

		name := d.Name()
		ext := nameExt(name)

		match, data, filename, err := ctxt.matchFile(p.Dir, name, true, allTags)
		if err != nil {
			return p, err
		}
		if !match {
			if ext == ".go" {
				p.IgnoredGoFiles = append(p.IgnoredGoFiles, name)
			}
			continue
		}

		// Going to save the file. For non-Go files, can stop here.
		switch ext {
		case ".c":
			p.CFiles = append(p.CFiles, name)
			continue
		case ".cc", ".cpp", ".cxx":
			p.CXXFiles = append(p.CXXFiles, name)
			continue
		case ".m":
			p.MFiles = append(p.MFiles, name)
			continue
		case ".h", ".hh", ".hpp", ".hxx":
			p.HFiles = append(p.HFiles, name)
			continue
		case ".s":
			p.SFiles = append(p.SFiles, name)
			continue
		case ".S":
			Sfiles = append(Sfiles, name)
			continue
		case ".swig":
			p.SwigFiles = append(p.SwigFiles, name)
			continue
		case ".swigcxx":
			p.SwigCXXFiles = append(p.SwigCXXFiles, name)
			continue
		case ".syso":
			// binary objects to add to package archive
			// Likely of the form foo_windows.syso, but
			// the name was vetted above with goodOSArchFile.
			p.SysoFiles = append(p.SysoFiles, name)
			continue
		}

		pf, err := parser.ParseFile(fset, filename, data, parser.ImportsOnly|parser.ParseComments)
		if err != nil {
			return p, err
		}

		pkg := pf.Name.Name
		if pkg == "documentation" {
			p.IgnoredGoFiles = append(p.IgnoredGoFiles, name)
			continue
		}

		isTest := strings.HasSuffix(name, "_test.go")
		isXTest := false
		if isTest && strings.HasSuffix(pkg, "_test") {
			isXTest = true
			pkg = pkg[:len(pkg)-len("_test")]
		}

		if p.Name == "" {
			p.Name = pkg
			firstFile = name
		} else if pkg != p.Name {
			return p, &MultiplePackageError{
				Dir:      p.Dir,
				Packages: []string{p.Name, pkg},
				Files:    []string{firstFile, name},
			}
		}
		if pf.Doc != nil && p.Doc == "" {
			p.Doc = doc.Synopsis(pf.Doc.Text())
		}

		if mode&ImportComment != 0 {
			qcom, line := findImportComment(data)
			if line != 0 {
				com, err := strconv.Unquote(qcom)
				if err != nil {
					return p, fmt.Errorf("%s:%d: cannot parse import comment", filename, line)
				}
				if p.ImportComment == "" {
					p.ImportComment = com
					firstCommentFile = name
				} else if p.ImportComment != com {
					return p, fmt.Errorf("found import comments %q (%s) and %q (%s) in %s", p.ImportComment, firstCommentFile, com, name, p.Dir)
				}
			}
		}

		// Record imports and information about cgo.
		isCgo := false
		for _, decl := range pf.Decls {
			d, ok := decl.(*ast.GenDecl)
			if !ok {
				continue
			}
			for _, dspec := range d.Specs {
				spec, ok := dspec.(*ast.ImportSpec)
				if !ok {
					continue
				}
				quoted := spec.Path.Value
				path, err := strconv.Unquote(quoted)
				if err != nil {
					log.Panicf("%s: parser returned invalid quoted string: <%s>", filename, quoted)
				}
				if isXTest {
					xTestImported[path] = append(xTestImported[path], fset.Position(spec.Pos()))
				} else if isTest {
					testImported[path] = append(testImported[path], fset.Position(spec.Pos()))
				} else {
					imported[path] = append(imported[path], fset.Position(spec.Pos()))
				}
				if path == "C" {
					if isTest {
						return p, fmt.Errorf("use of cgo in test %s not supported", filename)
					}
					cg := spec.Doc
					if cg == nil && len(d.Specs) == 1 {
						cg = d.Doc
					}
					if cg != nil {
						if err := ctxt.saveCgo(filename, p, cg); err != nil {
							return p, err
						}
					}
					isCgo = true
				}
			}
		}
		if isCgo {
			allTags["cgo"] = true
			if ctxt.CgoEnabled {
				p.CgoFiles = append(p.CgoFiles, name)
			} else {
				p.IgnoredGoFiles = append(p.IgnoredGoFiles, name)
			}
		} else if isXTest {
			p.XTestGoFiles = append(p.XTestGoFiles, name)
		} else if isTest {
			p.TestGoFiles = append(p.TestGoFiles, name)
		} else {
			p.GoFiles = append(p.GoFiles, name)
		}
	}
	if len(p.GoFiles)+len(p.CgoFiles)+len(p.TestGoFiles)+len(p.XTestGoFiles) == 0 {
		return p, &NoGoError{p.Dir}
	}

	for tag := range allTags {
		p.AllTags = append(p.AllTags, tag)
	}
	sort.Strings(p.AllTags)

	p.Imports, p.ImportPos = cleanImports(imported)
	p.TestImports, p.TestImportPos = cleanImports(testImported)
	p.XTestImports, p.XTestImportPos = cleanImports(xTestImported)

	// add the .S files only if we are using cgo
	// (which means gcc will compile them).
	// The standard assemblers expect .s files.
	if len(p.CgoFiles) > 0 {
		p.SFiles = append(p.SFiles, Sfiles...)
		sort.Strings(p.SFiles)
	}

	return p, pkgerr
}
// test runs the documentation generator in the given mode over every test
// package under dataDir and compares the rendered output against the
// golden files (or regenerates them when -update is set).
func test(t *testing.T, mode Mode) {
	// determine file filter
	filter := isGoFile
	if *files != "" {
		rx, err := regexp.Compile(*files)
		if err != nil {
			t.Fatal(err)
		}
		filter = func(fi os.FileInfo) bool {
			return isGoFile(fi) && rx.MatchString(fi.Name())
		}
	}

	// get packages
	fset := token.NewFileSet()
	pkgs, err := parser.ParseDir(fset, dataDir, filter, parser.ParseComments)
	if err != nil {
		t.Fatal(err)
	}

	// test packages
	for _, pkg := range pkgs {
		importpath := dataDir + "/" + pkg.Name
		doc := New(pkg, importpath, mode)

		// golden files always use / in filenames - canonicalize them
		for i, filename := range doc.Filenames {
			doc.Filenames[i] = filepath.ToSlash(filename)
		}

		// print documentation
		var buf bytes.Buffer
		if err := templateTxt.Execute(&buf, bundle{doc, fset}); err != nil {
			t.Error(err)
			continue
		}
		got := buf.Bytes()

		// update golden file if necessary
		golden := filepath.Join(dataDir, fmt.Sprintf("%s.%d.golden", pkg.Name, mode))
		if *update {
			err := ioutil.WriteFile(golden, got, 0644)
			if err != nil {
				t.Error(err)
			}
			continue
		}

		// get golden file
		want, err := ioutil.ReadFile(golden)
		if err != nil {
			t.Error(err)
			continue
		}

		// compare
		if !bytes.Equal(got, want) {
			t.Errorf("package %s\n\tgot:\n%s\n\twant:\n%s", pkg.Name, got, want)
		}
	}
}
// Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package scanner import ( "io/ioutil" "os" "path/filepath" "runtime" "testing" "k8s.io/kubernetes/third_party/golang/go/token" ) var fset = token.NewFileSet() const /* class */ ( special = iota literal operator keyword ) func tokenclass(tok token.Token) int { switch { case tok.IsLiteral(): return literal case tok.IsOperator(): return operator case tok.IsKeyword():
// ParseExpr is a convenience function for obtaining the AST of an expression x. // The position information recorded in the AST is undefined. The filename used // in error messages is the empty string. // func ParseExpr(x string) (ast.Expr, error) { return ParseExprFrom(token.NewFileSet(), "", []byte(x), 0) }