// File parses a file at the relative path // provided and produces a new *FileSet. // If you pass in a path to a directory, the entire // directory will be parsed. // If unexport is false, only exported identifiers are included in the FileSet. // If the resulting FileSet would be empty, an error is returned. func File(name string, unexported bool) (*FileSet, error) { pushstate(name) defer popstate() fs := &FileSet{ Specs: make(map[string]ast.Expr), Identities: make(map[string]gen.Elem), } fset := token.NewFileSet() finfo, err := os.Stat(name) if err != nil { return nil, err } if finfo.IsDir() { pkgs, err := parser.ParseDir(fset, name, nil, parser.ParseComments) if err != nil { return nil, err } if len(pkgs) != 1 { return nil, fmt.Errorf("multiple packages in directory: %s", name) } var one *ast.Package for _, nm := range pkgs { one = nm break } fs.Package = one.Name for _, fl := range one.Files { pushstate(fl.Name.Name) fs.Directives = append(fs.Directives, yieldComments(fl.Comments)...) if !unexported { ast.FileExports(fl) } fs.getTypeSpecs(fl) popstate() } } else { f, err := parser.ParseFile(fset, name, nil, parser.ParseComments) if err != nil { return nil, err } fs.Package = f.Name.Name fs.Directives = yieldComments(f.Comments) if !unexported { ast.FileExports(f) } fs.getTypeSpecs(f) } if len(fs.Specs) == 0 { return nil, fmt.Errorf("no definitions in %s", name) } fs.process() fs.applyDirectives() fs.propInline() return fs, nil }
// packageExports is a local replacement for ast.PackageExports that also
// keeps each package file's comment list in sync after the export filter
// runs. (The ast.PackageExports signature is frozen, hence the local
// implementation.)
func packageExports(fset *token.FileSet, pkg *ast.Package) {
	for _, file := range pkg.Files {
		comments := ast.NewCommentMap(fset, file, file.Comments)
		ast.FileExports(file)
		// Drop comments whose nodes were filtered away.
		file.Comments = comments.Filter(file).Comments()
	}
}
func TestLeadAndLineComments(t *testing.T) { f, err := StdParseFile(fset, "", ` package p type T struct { /* F1 lead comment */ // F1 int /* F1 */ // line comment // F2 lead // comment F2 int // F2 line comment // f3 lead comment f3 int // f3 line comment } `, ParseComments) if err != nil { t.Fatal(err) } checkFieldComments(t, f, "T.F1", "/* F1 lead comment *///", "/* F1 */// line comment") checkFieldComments(t, f, "T.F2", "// F2 lead// comment", "// F2 line comment") checkFieldComments(t, f, "T.f3", "// f3 lead comment", "// f3 line comment") ast.FileExports(f) checkFieldComments(t, f, "T.F1", "/* F1 lead comment *///", "/* F1 */// line comment") checkFieldComments(t, f, "T.F2", "// F2 lead// comment", "// F2 line comment") if getField(f, "T.f3") != nil { t.Error("not expected to find T.f3") } }
func writeImport(w io.Writer, pkg *ast.Package, pkgPath string) error { var files []*ast.File for _, f := range pkg.Files { if ast.FileExports(f) { files = append(files, f) } } env := eval.MakeSimpleEnv() env.Path = pkgPath for _, f := range files { for _, d := range f.Decls { if gen, ok := d.(*ast.GenDecl); ok { for _, s := range gen.Specs { if v, ok := s.(*ast.ValueSpec); ok { for _, i := range v.Names { if gen.Tok == token.VAR { env.Vars[i.Name] = reflect.Value{} } else { env.Consts[i.Name] = reflect.Value{} } } } else if t, ok := s.(*ast.TypeSpec); ok { env.Types[t.Name.Name] = nil } } } else if fun, ok := d.(*ast.FuncDecl); ok && fun.Recv == nil { env.Funcs[fun.Name.Name] = reflect.Value{} } } } return writePkg(w, env, pkg.Name) }
// format parses src, prints the corresponding AST, verifies the resulting // src is syntactically correct, and returns the resulting src or an error // if any. func format(src []byte, mode checkMode) ([]byte, error) { // parse src f, err := parser.ParseFile(fset, "", src, parser.ParseComments) if err != nil { return nil, fmt.Errorf("parse: %s\n%s", err, src) } // filter exports if necessary if mode&export != 0 { ast.FileExports(f) // ignore result f.Comments = nil // don't print comments that are not in AST } // determine printer configuration cfg := Config{Tabwidth: tabwidth} if mode&rawFormat != 0 { cfg.Mode |= RawFormat } // print AST var buf bytes.Buffer if err := cfg.Fprint(&buf, fset, f); err != nil { return nil, fmt.Errorf("print: %s", err) } // make sure formatted output is syntactically correct res := buf.Bytes() if _, err := parser.ParseFile(fset, "", res, 0); err != nil { return nil, fmt.Errorf("re-parse: %s\n%s", err, buf.Bytes()) } return res, nil }
// getExportedDeclarationsForFile parses the Go source file at path and
// returns the names of its exported top-level declarations (vars,
// consts, types, and functions). On a parse error it returns an empty
// (non-nil) slice and the error, matching the original contract.
//
// Fix: the original inspected only Specs[0] of each GenDecl and only
// Names[0] of that spec, so grouped declarations (`var a, b = ...`,
// parenthesized const/var blocks) were mostly dropped, and exported
// type names were never reported. All specs and names are now walked.
func getExportedDeclarationsForFile(path string) ([]string, error) {
	fset := token.NewFileSet()
	tree, err := parser.ParseFile(fset, path, nil, 0)
	if err != nil {
		return []string{}, err
	}

	// Strip unexported identifiers so everything that remains below is
	// exported by construction.
	ast.FileExports(tree)

	declarations := []string{}
	for _, decl := range tree.Decls {
		switch x := decl.(type) {
		case *ast.GenDecl:
			for _, spec := range x.Specs {
				switch s := spec.(type) {
				case *ast.ValueSpec:
					for _, name := range s.Names {
						declarations = append(declarations, name.Name)
					}
				case *ast.TypeSpec:
					declarations = append(declarations, s.Name.Name)
				}
			}
		case *ast.FuncDecl:
			declarations = append(declarations, x.Name.Name)
		}
	}
	return declarations, nil
}
func Prune(src []byte) ([]byte, error) { fset := token.NewFileSet() file, err := parser.ParseFile(fset, "", src, parser.ParseComments) if err != nil { return nil, err } ast.FileExports(file) pruner := NewPruner() ast.Walk(pruner, file) file.Comments = nil buf := bytes.Buffer{} err = printer.Fprint(&buf, fset, file) if err != nil { return nil, err } pruned, err := ioutil.ReadAll(&buf) return pruned, err }
// check formats source with the printer under the given mode and
// compares the result byte-for-byte against the golden file, reporting
// the first differing position as file:line:column. When the *update
// flag is set, the golden file is rewritten with the new output instead
// of being compared.
//
// NOTE(review): this uses an old API surface — ParseFile/Fprint without
// a *token.FileSet, and io.WriteFile/io.ReadFile — predating the modern
// go/parser, go/printer, and os packages; left untouched on purpose.
func check(t *testing.T, source, golden string, mode checkMode) {
	// parse source
	prog, err := parser.ParseFile(source, nil, parser.ParseComments)
	if err != nil {
		t.Error(err)
		return
	}

	// filter exports if necessary
	if mode&export != 0 {
		ast.FileExports(prog) // ignore result
		prog.Comments = nil   // don't print comments that are not in AST
	}

	// determine printer configuration
	cfg := Config{Tabwidth: tabwidth}
	if mode&rawFormat != 0 {
		cfg.Mode |= RawFormat
	}

	// format source
	var buf bytes.Buffer
	if _, err := cfg.Fprint(&buf, prog); err != nil {
		t.Error(err)
	}
	res := buf.Bytes()

	// update golden files if necessary
	if *update {
		if err := io.WriteFile(golden, res, 0644); err != nil {
			t.Error(err)
		}
		return
	}

	// get golden
	gld, err := io.ReadFile(golden)
	if err != nil {
		t.Error(err)
		return
	}

	// compare lengths
	if len(res) != len(gld) {
		t.Errorf("len = %d, expected %d (= len(%s))", len(res), len(gld), golden)
	}

	// compare contents
	// line is the current 1-based line number; offs is the byte offset
	// of the start of that line, so i-offs+1 is the 1-based column.
	for i, line, offs := 0, 1, 0; i < len(res) && i < len(gld); i++ {
		ch := res[i]
		if ch != gld[i] {
			// Print both the formatted and the golden line at the
			// first mismatch, then stop.
			t.Errorf("%s:%d:%d: %s", source, line, i-offs+1, lineString(res, offs))
			t.Errorf("%s:%d:%d: %s", golden, line, i-offs+1, lineString(gld, offs))
			t.Error()
			return
		}
		if ch == '\n' {
			line++
			offs = i + 1
		}
	}
}
func (sd *SchemaDSL) Parse() (err error) { fset := token.NewFileSet() f, err := parser.ParseFile(fset, sd.config.SchemaFile, nil, parser.ParseComments) if err != nil { return err } ast.FileExports(f) SCHEMA_PARSE_LOOP: for _, decl := range f.Decls { table := "" fields := []*ast.Field{} if genDecl, ok := decl.(*ast.GenDecl); ok { for _, spec := range genDecl.Specs { if typeSpec, ok := spec.(*ast.TypeSpec); ok { table = snaker.CamelToSnake(typeSpec.Name.Name) if structType, ok := typeSpec.Type.(*ast.StructType); ok { fields = structType.Fields.List } } } if genDecl.Doc != nil { for _, doc := range genDecl.Doc.List { if strings.HasPrefix(doc.Text, COMMENT_IGNORE_PREFIX) { continue SCHEMA_PARSE_LOOP } if strings.HasPrefix(doc.Text, COMMENT_TABLE_PREFIX) { table = strings.TrimSpace(strings.TrimPrefix(doc.Text, COMMENT_TABLE_PREFIX)) } } } } sd.Tables = append(sd.Tables, &Table{Name: table, AstFields: fields}) } for _, table := range sd.Tables { var primaryKeys []string for _, field := range table.AstFields { if field.Tag == nil { continue } var typeName string tagMap := parseTag(field.Tag.Value) if t, ok := field.Type.(*ast.Ident); ok { typeName = t.Name } if t, ok := field.Type.(*ast.SelectorExpr); ok { x := t.X.(*ast.Ident).Name sel := t.Sel.Name typeName = fmt.Sprintf("%s.%s", x, sel) } columns, err := sd.Dialect.ConvertSql(typeName, tagMap) if err != nil { return err } table.Fields = append(table.Fields, &Field{ Name: tagMap["sql"], Attribute: strings.Join(columns, " "), }) if _, ok := tagMap["primary"]; ok { primaryKeys = append(primaryKeys, tagMap["sql"]) } if _, ok := tagMap["uniq"]; ok { name := tagMap["uniq"] if name == "" { name = fmt.Sprintf("`%s_%s`", table.Name, strings.Replace(tagMap["sql"], "`", "", -1)) } table.ParseIndex(&table.UniqueIndexes, name, tagMap["sql"]) } if _, ok := tagMap["index"]; ok { table.ParseIndex(&table.Indexes, tagMap["index"], tagMap["sql"]) } } table.PrimaryKey = strings.Join(primaryKeys, ", ") } return nil }