func (c *converter) convertScope(dst *types.Scope, src *gotypes.Scope) { for _, name := range src.Names() { obj := src.Lookup(name) dst.Insert(c.convertObject(obj)) } for i := 0; i < src.NumChildren(); i++ { child := src.Child(i) newScope := types.NewScope(dst, token.Pos(child.Pos()), token.Pos(child.End()), "") c.convertScope(newScope, child) } }
// convertTypeName converts a gotypes *TypeName into this package's
// *TypeName, memoizing both the object and (when applicable) its
// self-referential *Named type in c.converted.
func (c *converter) convertTypeName(v *gotypes.TypeName) *types.TypeName {
	if v == nil {
		return nil
	}
	// Return the cached conversion if we have already seen this object.
	if v, ok := c.converted[v]; ok {
		return v.(*types.TypeName)
	}
	// This part is a bit tricky. gcimport calls NewTypeName with a nil typ
	// argument, and then calls NewNamed on the resulting *TypeName, which
	// sets its typ to a *Named referring to itself. So if we get a *TypeName
	// whose Type() is a *Named whose Obj() is the same *TypeName, we know it
	// was constructed this way, so we do the same. Otherwise we get into an
	// infinite recursion converting the *TypeName's type.
	var typ types.Type
	if named, ok := v.Type().(*gotypes.Named); !ok || named.Obj() != v {
		typ = c.convertType(v.Type())
	}
	ret := types.NewTypeName(
		token.Pos(v.Pos()),
		c.convertPackage(v.Pkg()),
		v.Name(),
		typ,
	)
	// Record the mapping before converting the underlying type so that
	// recursive references back to v resolve to ret instead of looping.
	c.converted[v] = ret
	named := types.NewNamed(ret, c.convertType(v.Type().Underlying()), nil)
	c.converted[v.Type()] = named
	return ret
}
// errMap collects the regular expressions of ERROR comments found // in files and returns them as a map of error positions to error messages. // func errMap(t *testing.T, testname string, files []*ast.File) map[string][]string { // map of position strings to lists of error message patterns errmap := make(map[string][]string) for _, file := range files { filename := fset.Position(file.Package).Filename src, err := ioutil.ReadFile(filename) if err != nil { t.Fatalf("%s: could not read %s", testname, filename) } var s scanner.Scanner s.Init(fset.AddFile(filename, -1, len(src)), src, nil, scanner.ScanComments) var prev token.Pos // position of last non-comment, non-semicolon token var here token.Pos // position immediately after the token at position prev scanFile: for { pos, tok, lit := s.Scan() switch tok { case token.EOF: break scanFile case token.COMMENT: if lit[1] == '*' { lit = lit[:len(lit)-2] // strip trailing */ } if s := errRx.FindStringSubmatch(lit[2:]); len(s) == 3 { pos := prev if s[1] == "HERE" { pos = here } p := fset.Position(pos).String() // TODO: Use also column once we don't mess them up anymore. p = p[:strings.LastIndex(p, ":")] errmap[p] = append(errmap[p], strings.TrimSpace(s[2])) } case token.SEMICOLON: // ignore automatically inserted semicolon if lit == "\n" { continue scanFile } fallthrough default: prev = pos var l int // token length if tok.IsLiteral() { l = len(lit) } else { l = len(tok.String()) } here = prev + token.Pos(l) } } } return errmap }
func (c *converter) convertPkgName(v *gotypes.PkgName) *types.PkgName { if v == nil { return nil } if v, ok := c.converted[v]; ok { return v.(*types.PkgName) } ret := types.NewPkgName( token.Pos(v.Pos()), c.ret, v.Name(), c.convertPackage(v.Imported()), ) c.converted[v] = ret return ret }
func (c *converter) convertVar(v *gotypes.Var) *types.Var { if v == nil { return nil } if v, ok := c.converted[v]; ok { return v.(*types.Var) } ret := types.NewVar( token.Pos(v.Pos()), c.ret, v.Name(), c.convertType(v.Type()), ) c.converted[v] = ret return ret }
func (c *converter) convertFunc(v *gotypes.Func) *types.Func { if v == nil { return nil } if v, ok := c.converted[v]; ok { return v.(*types.Func) } ret := types.NewFunc( token.Pos(v.Pos()), c.ret, v.Name(), c.convertSignature(v.Type().(*gotypes.Signature)), ) c.converted[v] = ret return ret }
func (c *converter) convertConst(v *gotypes.Const) *types.Const { if v == nil { return nil } if v, ok := c.converted[v]; ok { return v.(*types.Const) } ret := types.NewConst( token.Pos(v.Pos()), c.ret, v.Name(), c.convertType(v.Type()), c.convertConstantValue(v.Val()), ) c.converted[v] = ret return ret }
// expectedErrors collects the regular expressions of ERROR comments found // in files and returns them as a map of error positions to error messages. // func expectedErrors(t *testing.T, fset *token.FileSet, filename string, src []byte) map[token.Pos]string { errors := make(map[token.Pos]string) var s scanner.Scanner // file was parsed already - do not add it again to the file // set otherwise the position information returned here will // not match the position information collected by the parser s.Init(getFile(fset, filename), src, nil, scanner.ScanComments) var prev token.Pos // position of last non-comment, non-semicolon token var here token.Pos // position immediately after the token at position prev for { pos, tok, lit := s.Scan() switch tok { case token.EOF: return errors case token.COMMENT: s := errRx.FindStringSubmatch(lit) if len(s) == 3 { pos := prev if s[1] == "HERE" { pos = here } errors[pos] = string(s[2]) } default: prev = pos var l int // token length if tok.IsLiteral() { l = len(lit) } else { l = len(tok.String()) } here = prev + token.Pos(l) } } }
// collectObjects collects all file and package objects and inserts them
// into their respective scopes. It also performs imports and associates
// methods with receiver base type names.
func (check *Checker) collectObjects() {
	pkg := check.pkg

	// pkgImports is the set of packages already imported by any package file seen
	// so far. Used to avoid duplicate entries in pkg.imports. Allocate and populate
	// it (pkg.imports may not be empty if we are checking test files incrementally).
	var pkgImports = make(map[*Package]bool)
	for _, imp := range pkg.imports {
		pkgImports[imp] = true
	}

	// srcDir is the directory used by the Importer to look up packages.
	// The typechecker itself doesn't need this information so it is not
	// explicitly provided. Instead, we extract it from position info of
	// the source files as needed.
	// This is the only place where the type-checker (just the importer)
	// needs to know the actual source location of a file.
	// TODO(gri) can we come up with a better API instead?
	var srcDir string
	if len(check.files) > 0 {
		// FileName may be "" (typically for tests) in which case
		// we get "." as the srcDir which is what we would want.
		srcDir = dir(check.fset.Position(check.files[0].Name.Pos()).Filename)
	}

	for fileNo, file := range check.files {
		// The package identifier denotes the current package,
		// but there is no corresponding package object.
		check.recordDef(file.Name, nil)

		// Use the actual source file extent rather than *ast.File extent since the
		// latter doesn't include comments which appear at the start or end of the file.
		// Be conservative and use the *ast.File extent if we don't have a *token.File.
		pos, end := file.Pos(), file.End()
		if f := check.fset.File(file.Pos()); f != nil {
			pos, end = token.Pos(f.Base()), token.Pos(f.Base()+f.Size())
		}
		fileScope := NewScope(check.pkg.scope, pos, end, check.filename(fileNo))
		check.recordScope(file, fileScope)

		for _, decl := range file.Decls {
			switch d := decl.(type) {
			case *ast.BadDecl:
				// ignore

			case *ast.GenDecl:
				var last *ast.ValueSpec // last ValueSpec with type or init exprs seen
				for iota, spec := range d.Specs {
					switch s := spec.(type) {
					case *ast.ImportSpec:
						// import package
						var imp *Package
						path, err := validatedImportPath(s.Path.Value)
						if err != nil {
							check.errorf(s.Path.Pos(), "invalid import path (%s)", err)
							continue
						}
						if path == "C" && check.conf.FakeImportC {
							// TODO(gri) shouldn't create a new one each time
							imp = NewPackage("C", "C")
							imp.fake = true
						} else {
							// ordinary import
							if importer := check.conf.Importer; importer == nil {
								err = fmt.Errorf("Config.Importer not installed")
							} else if importerFrom, ok := importer.(ImporterFrom); ok {
								imp, err = importerFrom.ImportFrom(path, srcDir, 0)
								if imp == nil && err == nil {
									err = fmt.Errorf("Config.Importer.ImportFrom(%s, %s, 0) returned nil but no error", path, pkg.path)
								}
							} else {
								imp, err = importer.Import(path)
								if imp == nil && err == nil {
									err = fmt.Errorf("Config.Importer.Import(%s) returned nil but no error", path)
								}
							}
							if err != nil {
								check.errorf(s.Path.Pos(), "could not import %s (%s)", path, err)
								continue
							}
						}

						// add package to list of explicit imports
						// (this functionality is provided as a convenience
						// for clients; it is not needed for type-checking)
						if !pkgImports[imp] {
							pkgImports[imp] = true
							if imp != Unsafe {
								pkg.imports = append(pkg.imports, imp)
							}
						}

						// local name overrides imported package name
						name := imp.name
						if s.Name != nil {
							name = s.Name.Name
							if path == "C" {
								// match cmd/compile (not prescribed by spec)
								check.errorf(s.Name.Pos(), `cannot rename import "C"`)
								continue
							}
							if name == "init" {
								check.errorf(s.Name.Pos(), "cannot declare init - must be func")
								continue
							}
						}

						obj := NewPkgName(s.Pos(), pkg, name, imp)
						if s.Name != nil {
							// in a dot-import, the dot represents the package
							check.recordDef(s.Name, obj)
						} else {
							check.recordImplicit(s, obj)
						}

						if path == "C" {
							// match cmd/compile (not prescribed by spec)
							obj.used = true
						}

						// add import to file scope
						if name == "." {
							// merge imported scope with file scope
							for _, obj := range imp.scope.elems {
								// A package scope may contain non-exported objects,
								// do not import them!
								if obj.Exported() {
									// TODO(gri) When we import a package, we create
									// a new local package object. We should do the
									// same for each dot-imported object. That way
									// they can have correct position information.
									// (We must not modify their existing position
									// information because the same package - found
									// via Config.Packages - may be dot-imported in
									// another package!)
									check.declare(fileScope, nil, obj, token.NoPos)
									check.recordImplicit(s, obj)
								}
							}
							// add position to set of dot-import positions for this file
							// (this is only needed for "imported but not used" errors)
							check.addUnusedDotImport(fileScope, imp, s.Pos())
						} else {
							// declare imported package object in file scope
							check.declare(fileScope, nil, obj, token.NoPos)
						}

					case *ast.ValueSpec:
						switch d.Tok {
						case token.CONST:
							// determine which initialization expressions to use
							switch {
							case s.Type != nil || len(s.Values.List) > 0:
								last = s
							case last == nil:
								last = new(ast.ValueSpec) // make sure last exists
							}

							// declare all constants
							for i, name := range s.Names.List {
								obj := NewConst(name.Pos(), pkg, name.Name, nil, constant.MakeInt64(int64(iota)))

								var init ast.Expr
								if last.Values != nil && i < len(last.Values.List) {
									init = last.Values.List[i]
								}

								d := &declInfo{file: fileScope, typ: last.Type, init: init}
								check.declarePkgObj(name, obj, d)
							}

							check.arityMatch(s, last)

						case token.VAR:
							lhsLen := len(s.Names.List)
							if s.Names.EntangledPos > 0 {
								// entangled declarations reserve one name on the lhs
								lhsLen--
							}
							lhs := make([]*Var, lhsLen)

							// If there's exactly one rhs initializer, use
							// the same declInfo d1 for all lhs variables
							// so that each lhs variable depends on the same
							// rhs initializer (n:1 var declaration).
							var d1 *declInfo
							if len(s.Values.List) == 1 {
								// The lhs elements are only set up after the for loop below,
								// but that's ok because declareVar only collects the declInfo
								// for a later phase.
								d1 = &declInfo{file: fileScope, lhs: lhs, typ: s.Type, init: s.Values.List[0]}
							}

							// declare all variables
							for i, name := range s.Names.List {
								obj := NewVar(name.Pos(), pkg, name.Name, nil)

								d := d1
								if d == nil {
									// individual assignments
									var init ast.Expr
									if i < len(s.Values.List) {
										init = s.Values.List[i]
									}
									d = &declInfo{file: fileScope, typ: s.Type, init: init}
								}

								if s.Names.EntangledPos > 0 && i == s.Names.EntangledPos-1 {
									// the entangled name is tracked separately from lhs
									d.entangledLhs = obj
								} else {
									lhs[i] = obj
								}
								check.declarePkgObj(name, obj, d)
							}

							check.arityMatch(s, nil)

						default:
							check.invalidAST(s.Pos(), "invalid token %s", d.Tok)
						}

					case *ast.TypeSpec:
						obj := NewTypeName(s.Name.Pos(), pkg, s.Name.Name, nil)
						check.declarePkgObj(s.Name, obj, &declInfo{file: fileScope, typ: s.Type})

					default:
						check.invalidAST(s.Pos(), "unknown ast.Spec node %T", s)
					}
				}

			case *ast.FuncDecl:
				name := d.Name.Name
				obj := NewFunc(d.Name.Pos(), pkg, name, nil)
				if d.Recv == nil {
					// regular function
					if name == "init" {
						// don't declare init functions in the package scope - they are invisible
						obj.parent = pkg.scope
						check.recordDef(d.Name, obj)
						// init functions must have a body
						if d.Body == nil {
							check.softErrorf(obj.pos, "missing function body")
						}
					} else {
						check.declare(pkg.scope, d.Name, obj, token.NoPos)
					}
				} else {
					// method
					check.recordDef(d.Name, obj)
					// Associate method with receiver base type name, if possible.
					// Ignore methods that have an invalid receiver, or a blank _
					// receiver name. They will be type-checked later, with regular
					// functions.
					if list := d.Recv.List; len(list) > 0 {
						// unwrap optional and pointer receivers to reach the base identifier
						typ := list[0].Type
						if opt, _ := typ.(*ast.OptionalType); opt != nil {
							typ = opt.Elt
						}
						if ptr, _ := typ.(*ast.StarExpr); ptr != nil {
							typ = ptr.X
						}
						if base, _ := typ.(*ast.Ident); base != nil && base.Name != "_" {
							check.assocMethod(base.Name, obj)
						}
					}
				}
				info := &declInfo{file: fileScope, fdecl: d}
				check.objMap[obj] = info
				obj.setOrder(uint32(len(check.objMap)))

			default:
				check.invalidAST(d.Pos(), "unknown ast.Decl node %T", d)
			}
		}
	}

	// verify that objects in package and file scopes have different names
	for _, scope := range check.pkg.scope.children /* file scopes */ {
		for _, obj := range scope.elems {
			if alt := pkg.scope.Lookup(obj.Name()); alt != nil {
				if pkg, ok := obj.(*PkgName); ok {
					check.errorf(alt.Pos(), "%s already declared through import of %s", alt.Name(), pkg.Imported())
					check.reportAltDecl(pkg)
				} else {
					check.errorf(alt.Pos(), "%s already declared through dot-import of %s", alt.Name(), obj.Pkg())
					// TODO(gri) dot-imported objects don't have a position; reportAltDecl won't print anything
					check.reportAltDecl(obj)
				}
			}
		}
	}
}
func (s *BranchStmt) End() token.Pos { if s.Label != nil { return s.Label.End() } return token.Pos(int(s.TokPos) + len(s.Tok.String())) }
// End returns the position of the first character immediately after
// the literal's source text.
func (x *BasicLit) End() token.Pos {
	return x.ValuePos + token.Pos(len(x.Value))
}
// End returns the position of the first character immediately after
// the identifier.
func (x *Ident) End() token.Pos {
	return x.NamePos + token.Pos(len(x.Name))
}
// End returns the position of the first character immediately after
// the comment text.
func (c *Comment) End() token.Pos {
	return c.Slash + token.Pos(len(c.Text))
}