func (d *DIBuilder) descriptorInterface(t *types.Interface, name string) llvm.Metadata { ifaceStruct := types.NewStruct([]*types.Var{ types.NewVar(0, nil, "type", types.NewPointer(types.Typ[types.Uint8])), types.NewVar(0, nil, "data", types.NewPointer(types.Typ[types.Uint8])), }, nil) return d.typeDebugDescriptor(ifaceStruct, name) }
// emitNew emits to f a new (heap Alloc) instruction allocating an // object of type typ. pos is the optional source location. // func emitNew(f *Function, typ types.Type, pos token.Pos) *Alloc { v := &Alloc{Heap: true} v.setType(types.NewPointer(typ)) v.setPos(pos) f.emit(v) return v }
// emitFieldSelection emits to f code to select the index'th field of v. // // If wantAddr, the input must be a pointer-to-struct and the result // will be the field's address; otherwise the result will be the // field's value. // Ident id is used for position and debug info. // func emitFieldSelection(f *Function, v Value, index int, wantAddr bool, id *ast.Ident) Value { fld := deref(v.Type()).Underlying().(*types.Struct).Field(index) if isPointer(v.Type()) { instr := &FieldAddr{ X: v, Field: index, } instr.setPos(id.Pos()) instr.setType(types.NewPointer(fld.Type())) v = f.emit(instr) // Load the field's value iff we don't want its address. if !wantAddr { v = emitLoad(f, v) } } else { instr := &Field{ X: v, Field: index, } instr.setPos(id.Pos()) instr.setType(fld.Type()) v = f.emit(instr) } emitDebugRef(f, id, v, wantAddr) return v }
// emitImplicitSelections emits to f code to apply the sequence of // implicit field selections specified by indices to base value v, and // returns the selected value. // // If v is the address of a struct, the result will be the address of // a field; if it is the value of a struct, the result will be the // value of a field. // func emitImplicitSelections(f *Function, v Value, indices []int) Value { for _, index := range indices { fld := deref(v.Type()).Underlying().(*types.Struct).Field(index) if isPointer(v.Type()) { instr := &FieldAddr{ X: v, Field: index, } instr.setType(types.NewPointer(fld.Type())) v = f.emit(instr) // Load the field's value iff indirectly embedded. if isPointer(fld.Type()) { v = emitLoad(f, v) } } else { instr := &Field{ X: v, Field: index, } instr.setType(fld.Type()) v = f.emit(instr) } } return v }
// PointerType = "*" ("any" | Type) . func (p *parser) parsePointerType(pkg *types.Package) types.Type { p.expect('*') if p.tok == scanner.Ident { p.expectKeyword("any") return types.Typ[types.UnsafePointer] } return types.NewPointer(p.parseType(pkg)) }
func (d *DIBuilder) descriptorSlice(t *types.Slice, name string) llvm.Metadata { sliceStruct := types.NewStruct([]*types.Var{ types.NewVar(0, nil, "ptr", types.NewPointer(t.Elem())), types.NewVar(0, nil, "len", types.Typ[types.Int]), types.NewVar(0, nil, "cap", types.Typ[types.Int]), }, nil) return d.typeDebugDescriptor(sliceStruct, name) }
// addLocal creates an anonymous local variable of type typ, adds it // to function f and returns it. pos is the optional source location. // func (f *Function) addLocal(typ types.Type, pos token.Pos) *Alloc { v := &Alloc{} v.setType(types.NewPointer(typ)) v.setPos(pos) f.Locals = append(f.Locals, v) f.emit(v) return v }
// memberFromObject populates package pkg with a member for the // typechecker object obj. // // For objects from Go source code, syntax is the associated syntax // tree (for funcs and vars only); it will be used during the build // phase. // func memberFromObject(pkg *Package, obj types.Object, syntax ast.Node) { name := obj.Name() switch obj := obj.(type) { case *types.TypeName: pkg.Members[name] = &Type{ object: obj, pkg: pkg, } case *types.Const: c := &NamedConst{ object: obj, Value: NewConst(obj.Val(), obj.Type()), pkg: pkg, } pkg.values[obj] = c.Value pkg.Members[name] = c case *types.Var: g := &Global{ Pkg: pkg, name: name, object: obj, typ: types.NewPointer(obj.Type()), // address pos: obj.Pos(), } pkg.values[obj] = g pkg.Members[name] = g case *types.Func: sig := obj.Type().(*types.Signature) if sig.Recv() == nil && name == "init" { pkg.ninit++ name = fmt.Sprintf("init#%d", pkg.ninit) } fn := &Function{ name: name, object: obj, Signature: sig, syntax: syntax, pos: obj.Pos(), Pkg: pkg, Prog: pkg.Prog, } if syntax == nil { fn.Synthetic = "loaded from gc object file" } pkg.values[obj] = fn if sig.Recv() == nil { pkg.Members[name] = fn // package-level function } default: // (incl. *types.Package) panic("unexpected Object type: " + obj.String()) } }
// mapIterInit creates a map iterator func (fr *frame) mapIterInit(m *govalue) []*govalue { // We represent an iterator as a tuple (map, *bool). The second element // controls whether the code we generate for "next" (below) calls the // runtime function for the first or the next element. We let the // optimizer reorganize this into something more sensible. isinit := fr.allocaBuilder.CreateAlloca(llvm.Int1Type(), "") fr.builder.CreateStore(llvm.ConstNull(llvm.Int1Type()), isinit) return []*govalue{m, newValue(isinit, types.NewPointer(types.Typ[types.Bool]))} }
// addSpilledParam declares a parameter that is pre-spilled to the // stack; the function body will load/store the spilled location. // Subsequent lifting will eliminate spills where possible. // func (f *Function) addSpilledParam(obj types.Object) { param := f.addParamObj(obj) spill := &Alloc{Comment: obj.Name()} spill.setType(types.NewPointer(obj.Type())) spill.setPos(obj.Pos()) f.objects[obj] = spill f.Locals = append(f.Locals, spill) f.emit(spill) f.emit(&Store{Addr: spill, Val: param}) }
// testMainSlice emits to fn code to construct a slice of type slice // (one of []testing.Internal{Test,Benchmark,Example}) for all // functions in testfuncs. It returns the slice value. // func testMainSlice(fn *Function, testfuncs []*Function, slice types.Type) Value { if testfuncs == nil { return nilConst(slice) } tElem := slice.(*types.Slice).Elem() tPtrString := types.NewPointer(tString) tPtrElem := types.NewPointer(tElem) tPtrFunc := types.NewPointer(funcField(slice)) // Emit: array = new [n]testing.InternalTest tArray := types.NewArray(tElem, int64(len(testfuncs))) array := emitNew(fn, tArray, token.NoPos) array.Comment = "test main" for i, testfunc := range testfuncs { // Emit: pitem = &array[i] ia := &IndexAddr{X: array, Index: intConst(int64(i))} ia.setType(tPtrElem) pitem := fn.emit(ia) // Emit: pname = &pitem.Name fa := &FieldAddr{X: pitem, Field: 0} // .Name fa.setType(tPtrString) pname := fn.emit(fa) // Emit: *pname = "testfunc" emitStore(fn, pname, stringConst(testfunc.Name()), token.NoPos) // Emit: pfunc = &pitem.F fa = &FieldAddr{X: pitem, Field: 1} // .F fa.setType(tPtrFunc) pfunc := fn.emit(fa) // Emit: *pfunc = testfunc emitStore(fn, pfunc, testfunc, token.NoPos) } // Emit: slice array[:] sl := &Slice{X: array} sl.setType(slice) return fn.emit(sl) }
// Type = // BasicType | TypeName | ArrayType | SliceType | StructType | // PointerType | FuncType | InterfaceType | MapType | ChanType | // "(" Type ")" . // // BasicType = ident . // TypeName = ExportedName . // SliceType = "[" "]" Type . // PointerType = "*" Type . // FuncType = "func" Signature . // func (p *parser) parseType() types.Type { switch p.tok { case scanner.Ident: switch p.lit { default: return p.parseBasicType() case "struct": return p.parseStructType() case "func": // FuncType p.next() return p.parseSignature(nil) case "interface": return p.parseInterfaceType() case "map": return p.parseMapType() case "chan": return p.parseChanType() } case '@': // TypeName pkg, name := p.parseExportedName() return declTypeName(pkg, name).Type() case '[': p.next() // look ahead if p.tok == ']' { // SliceType p.next() return types.NewSlice(p.parseType()) } return p.parseArrayType() case '*': // PointerType p.next() return types.NewPointer(p.parseType()) case '<': return p.parseChanType() case '(': // "(" Type ")" p.next() typ := p.parseType() p.expect(')') return typ } p.errorf("expected type, got %s (%q)", scanner.TokenString(p.tok), p.lit) return nil }
// callInstruction translates function call instructions.
//
// Builtins are dispatched to callBuiltin; all other calls are lowered
// to an LLVM call via createCall, after resolving the callee (static
// function, interface method, or first-class function value) and
// materializing the argument list.
func (fr *frame) callInstruction(instr ssa.CallInstruction) []*govalue {
	call := instr.Common()
	if builtin, ok := call.Value.(*ssa.Builtin); ok {
		// instr.Value() is nil for call instructions with no result
		// (e.g. a *ssa.Call used as a statement has one; others may not).
		var typ types.Type
		if v := instr.Value(); v != nil {
			typ = v.Type()
		}
		return fr.callBuiltin(typ, builtin, call.Args)
	}

	// Evaluate the explicit arguments.
	args := make([]*govalue, len(call.Args))
	for i, arg := range call.Args {
		args[i] = fr.value(arg)
	}

	var fn *govalue
	var chain llvm.Value
	if call.IsInvoke() {
		// Interface method invocation: look up the method and prepend
		// the receiver to the argument list.
		var recv *govalue
		fn, recv = fr.interfaceMethod(fr.llvmvalue(call.Value), call.Value.Type(), call.Method)
		args = append([]*govalue{recv}, args...)
	} else {
		if ssafn, ok := call.Value.(*ssa.Function); ok {
			// Statically known callee: take its address directly.
			llfn := fr.resolveFunctionGlobal(ssafn)
			llfn = llvm.ConstBitCast(llfn, llvm.PointerType(llvm.Int8Type(), 0))
			fn = newValue(llfn, ssafn.Type())
		} else {
			// First-class function values are stored as *{*fnptr}, so
			// we must extract the function pointer. We must also
			// set the chain, in case the function is a closure.
			fn = fr.value(call.Value)
			chain = fn.value
			fnptr := fr.builder.CreateBitCast(fn.value, llvm.PointerType(fn.value.Type(), 0), "")
			fnptr = fr.builder.CreateLoad(fnptr, "")
			fn = newValue(fnptr, fn.Type())
		}
		if recv := call.Signature().Recv(); recv != nil {
			// A value (non-pointer) receiver is spilled to an alloca
			// and passed by address.
			if _, ok := recv.Type().Underlying().(*types.Pointer); !ok {
				recvalloca := fr.allocaBuilder.CreateAlloca(args[0].value.Type(), "")
				fr.builder.CreateStore(args[0].value, recvalloca)
				args[0] = newValue(recvalloca, types.NewPointer(args[0].Type()))
			}
		}
	}
	return fr.createCall(fn, chain, args)
}
func (tm *llvmTypeMap) getSignatureInfo(sig *types.Signature) functionTypeInfo { var args, results []types.Type if sig.Recv() != nil { recvtype := sig.Recv().Type() if _, ok := recvtype.Underlying().(*types.Pointer); !ok && recvtype != types.Typ[types.UnsafePointer] { recvtype = types.NewPointer(recvtype) } args = []types.Type{recvtype} } for i := 0; i != sig.Params().Len(); i++ { args = append(args, sig.Params().At(i).Type()) } for i := 0; i != sig.Results().Len(); i++ { results = append(results, sig.Results().At(i).Type()) } return tm.getFunctionTypeInfo(args, results) }
// IntuitiveMethodSet returns the intuitive method set of a type, T. // // The result contains MethodSet(T) and additionally, if T is a // concrete type, methods belonging to *T if there is no identically // named method on T itself. This corresponds to user intuition about // method sets; this function is intended only for user interfaces. // // The order of the result is as for types.MethodSet(T). // func IntuitiveMethodSet(T types.Type, msets *types.MethodSetCache) []*types.Selection { var result []*types.Selection mset := msets.MethodSet(T) if _, ok := T.Underlying().(*types.Interface); ok { for i, n := 0, mset.Len(); i < n; i++ { result = append(result, mset.At(i)) } } else { pmset := msets.MethodSet(types.NewPointer(T)) for i, n := 0, pmset.Len(); i < n; i++ { meth := pmset.At(i) if m := mset.Lookup(meth.Obj().Pkg(), meth.Obj().Name()); m != nil { meth = m } result = append(result, meth) } } return result }
// Smoke test to ensure that imported methods get the correct package. func TestCorrectMethodPackage(t *testing.T) { // This package does not handle gccgo export data. if runtime.Compiler == "gccgo" { return } imports := make(map[string]*types.Package) _, err := Import(imports, "net/http") if err != nil { t.Fatal(err) } mutex := imports["sync"].Scope().Lookup("Mutex").(*types.TypeName).Type() mset := types.NewMethodSet(types.NewPointer(mutex)) // methods of *sync.Mutex sel := mset.Lookup(nil, "Lock") lock := sel.Obj().(*types.Func) if got, want := lock.Pkg().Path(), "sync"; got != want { t.Errorf("got package path %q; want %q", got, want) } }
// findNamedFunc returns the named function whose FuncDecl.Ident is at // position pos. // func findNamedFunc(pkg *Package, pos token.Pos) *Function { // Look at all package members and method sets of named types. // Not very efficient. for _, mem := range pkg.Members { switch mem := mem.(type) { case *Function: if mem.Pos() == pos { return mem } case *Type: mset := pkg.Prog.MethodSets.MethodSet(types.NewPointer(mem.Type())) for i, n := 0, mset.Len(); i < n; i++ { // Don't call Program.Method: avoid creating wrappers. obj := mset.At(i).Obj().(*types.Func) if obj.Pos() == pos { return pkg.values[obj].(*Function) } } } } return nil }
func (d *DIBuilder) descriptorBasic(t *types.Basic, name string) llvm.Metadata { switch t.Kind() { case types.String: return d.typeDebugDescriptor(types.NewStruct([]*types.Var{ types.NewVar(0, nil, "ptr", types.NewPointer(types.Typ[types.Uint8])), types.NewVar(0, nil, "len", types.Typ[types.Int]), }, nil), name) case types.UnsafePointer: return d.builder.CreateBasicType(llvm.DIBasicType{ Name: name, SizeInBits: uint64(d.sizes.Sizeof(t) * 8), AlignInBits: uint64(d.sizes.Alignof(t) * 8), Encoding: llvm.DW_ATE_unsigned, }) default: bt := llvm.DIBasicType{ Name: t.String(), SizeInBits: uint64(d.sizes.Sizeof(t) * 8), AlignInBits: uint64(d.sizes.Alignof(t) * 8), } switch bi := t.Info(); { case bi&types.IsBoolean != 0: bt.Encoding = llvm.DW_ATE_boolean case bi&types.IsUnsigned != 0: bt.Encoding = llvm.DW_ATE_unsigned case bi&types.IsInteger != 0: bt.Encoding = llvm.DW_ATE_signed case bi&types.IsFloat != 0: bt.Encoding = llvm.DW_ATE_float case bi&types.IsComplex != 0: bt.Encoding = llvm.DW_ATE_imaginary_float case bi&types.IsUnsigned != 0: bt.Encoding = llvm.DW_ATE_unsigned default: panic(fmt.Sprintf("unhandled: %#v", t)) } return d.builder.CreateBasicType(bt) } }
func checkVarValue(t *testing.T, prog *ssa.Program, pkg *ssa.Package, ref []ast.Node, obj *types.Var, expKind string, wantAddr bool) { // The prefix of all assertions messages. prefix := fmt.Sprintf("VarValue(%s @ L%d)", obj, prog.Fset.Position(ref[0].Pos()).Line) v, gotAddr := prog.VarValue(obj, pkg, ref) // Kind is the concrete type of the ssa Value. gotKind := "nil" if v != nil { gotKind = fmt.Sprintf("%T", v)[len("*ssa."):] } // fmt.Printf("%s = %v (kind %q; expect %q) wantAddr=%t gotAddr=%t\n", prefix, v, gotKind, expKind, wantAddr, gotAddr) // debugging // Check the kinds match. // "nil" indicates expected failure (e.g. optimized away). if expKind != gotKind { t.Errorf("%s concrete type == %s, want %s", prefix, gotKind, expKind) } // Check the types match. // If wantAddr, the expected type is the object's address. if v != nil { expType := obj.Type() if wantAddr { expType = types.NewPointer(expType) if !gotAddr { t.Errorf("%s: got value, want address", prefix) } } else if gotAddr { t.Errorf("%s: got address, want value", prefix) } if !types.Identical(v.Type(), expType) { t.Errorf("%s.Type() == %s, want %s", prefix, v.Type(), expType) } } }
// Tests that programs partially loaded from gc object files contain
// functions with no code for the external portions, but are otherwise ok.
func TestImportFromBinary(t *testing.T) {
	// The input exercises each kind of cross-package call:
	// static, promoted, package-level, and interface invoke.
	test := `
package main

import (
	"bytes"
	"io"
	"testing"
)

func main() {
	var t testing.T
	t.Parallel()    // static call to external declared method
	t.Fail()        // static call to promoted external declared method
	testing.Short() // static call to external package-level function

	var w io.Writer = new(bytes.Buffer)
	w.Write(nil) // interface invoke of external declared method
}
`

	// Create a single-file main package.
	conf := loader.Config{ImportFromBinary: true}
	f, err := conf.ParseFile("<input>", test)
	if err != nil {
		t.Error(err)
		return
	}
	conf.CreateFromFiles("main", f)

	iprog, err := conf.Load()
	if err != nil {
		t.Error(err)
		return
	}

	prog := ssa.Create(iprog, ssa.SanityCheckFunctions)
	mainPkg := prog.Package(iprog.Created[0].Pkg)
	mainPkg.Build()

	// The main package, its direct and indirect dependencies are loaded.
	deps := []string{
		// directly imported dependencies:
		"bytes", "io", "testing",
		// indirect dependencies (partial list):
		"errors", "fmt", "os", "runtime",
	}
	all := prog.AllPackages()
	if len(all) <= len(deps) {
		t.Errorf("unexpected set of loaded packages: %q", all)
	}
	for _, path := range deps {
		pkg := prog.ImportedPackage(path)
		if pkg == nil {
			t.Errorf("package not loaded: %q", path)
			continue
		}

		// External packages should have no function bodies (except for wrappers).
		isExt := pkg != mainPkg

		// init()
		if isExt && !isEmpty(pkg.Func("init")) {
			t.Errorf("external package %s has non-empty init", pkg)
		} else if !isExt && isEmpty(pkg.Func("init")) {
			t.Errorf("main package %s has empty init", pkg)
		}

		for _, mem := range pkg.Members {
			switch mem := mem.(type) {
			case *ssa.Function:
				// Functions at package level.
				if isExt && !isEmpty(mem) {
					t.Errorf("external function %s is non-empty", mem)
				} else if !isExt && isEmpty(mem) {
					t.Errorf("function %s is empty", mem)
				}

			case *ssa.Type:
				// Methods of named types T.
				// (In this test, all exported methods belong to *T not T.)
				if !isExt {
					t.Fatalf("unexpected name type in main package: %s", mem)
				}
				mset := prog.MethodSets.MethodSet(types.NewPointer(mem.Type()))
				for i, n := 0, mset.Len(); i < n; i++ {
					m := prog.Method(mset.At(i))
					// For external types, only synthetic wrappers have code.
					expExt := !strings.Contains(m.Synthetic, "wrapper")
					if expExt && !isEmpty(m) {
						t.Errorf("external method %s is non-empty: %s", m, m.Synthetic)
					} else if !expExt && isEmpty(m) {
						t.Errorf("method function %s is empty: %s", m, m.Synthetic)
					}
				}
			}
		}
	}

	// Check that each call site in main.main resolved to the expected
	// static callee ("N/A" marks the dynamic interface invoke).
	expectedCallee := []string{
		"(*testing.T).Parallel",
		"(*testing.common).Fail",
		"testing.Short",
		"N/A",
	}
	callNum := 0
	for _, b := range mainPkg.Func("main").Blocks {
		for _, instr := range b.Instrs {
			switch instr := instr.(type) {
			case ssa.CallInstruction:
				call := instr.Common()
				if want := expectedCallee[callNum]; want != "N/A" {
					got := call.StaticCallee().String()
					if want != got {
						t.Errorf("call #%d from main.main: got callee %s, want %s", callNum, got, want)
					}
				}
				callNum++
			}
		}
	}
	if callNum != 4 {
		t.Errorf("in main.main: got %d calls, want %d", callNum, 4)
	}
}
// addRuntimeType is called for each concrete type that can be the
// dynamic type of some interface or reflect.Value.
// Adapted from needMethods in go/ssa/builder.go
func (r *rta) addRuntimeType(T types.Type, skip bool) {
	// Memoization: RuntimeTypes doubles as the visited set, recording
	// the 'skip' flag seen. Only upgrade a previously-false entry.
	if prev, ok := r.result.RuntimeTypes.At(T).(bool); ok {
		if skip && !prev {
			r.result.RuntimeTypes.Set(T, skip)
		}
		return
	}
	r.result.RuntimeTypes.Set(T, skip)

	mset := r.prog.MethodSets.MethodSet(T)

	if _, ok := T.Underlying().(*types.Interface); !ok {
		// T is a new concrete type.
		for i, n := 0, mset.Len(); i < n; i++ {
			sel := mset.At(i)
			m := sel.Obj()
			if m.Exported() {
				// Exported methods are always potentially callable via reflection.
				r.addReachable(r.prog.Method(sel), true)
			}
		}

		// Add callgraph edge for each existing dynamic
		// "invoke"-mode call via that interface.
		for _, I := range r.interfaces(T) {
			sites, _ := r.invokeSites.At(I).([]ssa.CallInstruction)
			for _, site := range sites {
				r.addInvokeEdge(site, T)
			}
		}
	}

	// Precondition: T is not a method signature (*Signature with Recv()!=nil).
	// Recursive case: skip => don't call makeMethods(T).
	// Each package maintains its own set of types it has visited.

	// Find the named type (directly or behind a pointer), if any.
	var n *types.Named
	switch T := T.(type) {
	case *types.Named:
		n = T
	case *types.Pointer:
		n, _ = T.Elem().(*types.Named)
	}
	if n != nil {
		owner := n.Obj().Pkg()
		if owner == nil {
			return // built-in error type
		}
	}

	// Recursion over signatures of each exported method.
	for i := 0; i < mset.Len(); i++ {
		if mset.At(i).Obj().Exported() {
			sig := mset.At(i).Type().(*types.Signature)
			r.addRuntimeType(sig.Params(), true)  // skip the Tuple itself
			r.addRuntimeType(sig.Results(), true) // skip the Tuple itself
		}
	}

	// Structural recursion over T's component types.
	switch t := T.(type) {
	case *types.Basic:
		// nop
	case *types.Interface:
		// nop---handled by recursion over method set.
	case *types.Pointer:
		r.addRuntimeType(t.Elem(), false)
	case *types.Slice:
		r.addRuntimeType(t.Elem(), false)
	case *types.Chan:
		r.addRuntimeType(t.Elem(), false)
	case *types.Map:
		r.addRuntimeType(t.Key(), false)
		r.addRuntimeType(t.Elem(), false)
	case *types.Signature:
		if t.Recv() != nil {
			panic(fmt.Sprintf("Signature %s has Recv %s", t, t.Recv()))
		}
		r.addRuntimeType(t.Params(), true)  // skip the Tuple itself
		r.addRuntimeType(t.Results(), true) // skip the Tuple itself
	case *types.Named:
		// A pointer-to-named type can be derived from a named
		// type via reflection. It may have methods too.
		r.addRuntimeType(types.NewPointer(T), false)

		// Consider 'type T struct{S}' where S has methods.
		// Reflection provides no way to get from T to struct{S},
		// only to S, so the method set of struct{S} is unwanted,
		// so set 'skip' flag during recursion.
		r.addRuntimeType(t.Underlying(), true)
	case *types.Array:
		r.addRuntimeType(t.Elem(), false)
	case *types.Struct:
		for i, n := 0, t.NumFields(); i < n; i++ {
			r.addRuntimeType(t.Field(i).Type(), false)
		}
	case *types.Tuple:
		for i, n := 0, t.Len(); i < n; i++ {
			r.addRuntimeType(t.At(i).Type(), false)
		}
	default:
		panic(T)
	}
}
func ext۰reflect۰New(fr *frame, args []value) value { // Signature: func (t reflect.Type) reflect.Value t := args[0].(iface).v.(rtype).t alloc := zero(t) return makeReflectValue(types.NewPointer(t), &alloc) }
// Analyze runs the pointer analysis with the scope and options
// specified by config, and returns the (synthetic) root of the callgraph.
//
// Pointer analysis of a transitively closed well-typed program should
// always succeed. An error can occur only due to an internal bug.
func Analyze(config *Config) (result *Result, err error) {
	if config.Mains == nil {
		return nil, fmt.Errorf("no main/test packages to analyze (check $GOROOT/$GOPATH)")
	}
	// Convert internal panics into an error result so callers get a
	// diagnosable failure rather than a crash.
	defer func() {
		if p := recover(); p != nil {
			err = fmt.Errorf("internal error in pointer analysis: %v (please report this bug)", p)
			fmt.Fprintln(os.Stderr, "Internal panic in pointer analysis:")
			debug.PrintStack()
		}
	}()

	a := &analysis{
		config:      config,
		log:         config.Log,
		prog:        config.prog(),
		globalval:   make(map[ssa.Value]nodeid),
		globalobj:   make(map[ssa.Value]nodeid),
		flattenMemo: make(map[types.Type][]*fieldInfo),
		trackTypes:  make(map[types.Type]bool),
		atFuncs:     make(map[*ssa.Function]bool),
		hasher:      typeutil.MakeHasher(),
		intrinsics:  make(map[*ssa.Function]intrinsic),
		result: &Result{
			Queries:         make(map[ssa.Value]Pointer),
			IndirectQueries: make(map[ssa.Value]Pointer),
		},
		deltaSpace: make([]int, 0, 100),
	}

	if false {
		a.log = os.Stderr // for debugging crashes; extremely verbose
	}

	if a.log != nil {
		fmt.Fprintln(a.log, "==== Starting analysis")
	}

	// Pointer analysis requires a complete program for soundness.
	// Check to prevent accidental misconfiguration.
	for _, pkg := range a.prog.AllPackages() {
		// (This only checks that the package scope is complete,
		// not that func bodies exist, but it's a good signal.)
		if !pkg.Object.Complete() {
			return nil, fmt.Errorf(`pointer analysis requires a complete program yet package %q was incomplete (don't set loader.Config.ImportFromBinary during loading)`, pkg.Object.Path())
		}
	}

	// Cache key objects from the reflect package, if loaded, and
	// override flattening/tracking policy for reflect.Value and
	// *reflect.rtype.
	if reflect := a.prog.ImportedPackage("reflect"); reflect != nil {
		rV := reflect.Object.Scope().Lookup("Value")
		a.reflectValueObj = rV
		a.reflectValueCall = a.prog.LookupMethod(rV.Type(), nil, "Call")
		a.reflectType = reflect.Object.Scope().Lookup("Type").Type().(*types.Named)
		a.reflectRtypeObj = reflect.Object.Scope().Lookup("rtype")
		a.reflectRtypePtr = types.NewPointer(a.reflectRtypeObj.Type())

		// Override flattening of reflect.Value, treating it like a basic type.
		tReflectValue := a.reflectValueObj.Type()
		a.flattenMemo[tReflectValue] = []*fieldInfo{{typ: tReflectValue}}

		// Override shouldTrack of reflect.Value and *reflect.rtype.
		// Always track pointers of these types.
		a.trackTypes[tReflectValue] = true
		a.trackTypes[a.reflectRtypePtr] = true

		a.rtypes.SetHasher(a.hasher)
		a.reflectZeros.SetHasher(a.hasher)
	}
	if runtime := a.prog.ImportedPackage("runtime"); runtime != nil {
		a.runtimeSetFinalizer = runtime.Func("SetFinalizer")
	}
	a.computeTrackBits()

	a.generate()
	a.showCounts()

	if optRenumber {
		a.renumber()
	}

	N := len(a.nodes) // excludes solver-created nodes

	if optHVN {
		if debugHVNCrossCheck {
			// Cross-check: run the solver once without
			// optimization, once with, and compare the
			// solutions.
			savedConstraints := a.constraints

			a.solve()
			a.dumpSolution("A.pts", N)

			// Restore.
			a.constraints = savedConstraints
			for _, n := range a.nodes {
				n.solve = new(solverState)
			}
			a.nodes = a.nodes[:N]

			// rtypes is effectively part of the solver state.
			a.rtypes = typeutil.Map{}
			a.rtypes.SetHasher(a.hasher)
		}

		a.hvn()
	}

	if debugHVNCrossCheck {
		runtime.GC()
		runtime.GC()
	}

	a.solve()

	// Compare solutions.
	if optHVN && debugHVNCrossCheck {
		a.dumpSolution("B.pts", N)

		if !diff("A.pts", "B.pts") {
			return nil, fmt.Errorf("internal error: optimization changed solution")
		}
	}

	// Create callgraph.Nodes in deterministic order.
	if cg := a.result.CallGraph; cg != nil {
		for _, caller := range a.cgnodes {
			cg.CreateNode(caller.fn)
		}
	}

	// Add dynamic edges to call graph.
	var space [100]int
	for _, caller := range a.cgnodes {
		for _, site := range caller.sites {
			for _, callee := range a.nodes[site.targets].solve.pts.AppendTo(space[:0]) {
				a.callEdge(caller, site, nodeid(callee))
			}
		}
	}

	return a.result, nil
}
// instruction emits LLVM IR for a single SSA instruction. Results of
// single-valued instructions are recorded in fr.env; multi-valued
// results go in fr.tuples; elided loads leave an address in fr.ptr.
func (fr *frame) instruction(instr ssa.Instruction) {
	fr.logf("[%T] %v @ %s\n", instr, instr, fr.pkg.Prog.Fset.Position(instr.Pos()))
	if fr.GenerateDebug {
		fr.debug.SetLocation(fr.builder, instr.Pos())
	}

	switch instr := instr.(type) {
	case *ssa.Alloc:
		typ := deref(instr.Type())
		llvmtyp := fr.llvmtypes.ToLLVM(typ)
		var value llvm.Value
		if !instr.Heap {
			// Stack allocation: the slot was created up front; just zero it.
			value = fr.env[instr].value
			fr.memsetZero(value, llvm.SizeOf(llvmtyp))
		} else if fr.isInit && fr.shouldStaticallyAllocate(instr) {
			// If this is the init function and we think it may be beneficial,
			// allocate memory statically in the object file rather than on the
			// heap. This allows us to optimize constant stores into such
			// variables as static initializations.
			global := llvm.AddGlobal(fr.module.Module, llvmtyp, "")
			global.SetLinkage(llvm.InternalLinkage)
			fr.addGlobal(global, typ)
			ptr := llvm.ConstBitCast(global, llvm.PointerType(llvm.Int8Type(), 0))
			fr.env[instr] = newValue(ptr, instr.Type())
		} else {
			value = fr.createTypeMalloc(typ)
			value.SetName(instr.Comment)
			value = fr.builder.CreateBitCast(value, llvm.PointerType(llvm.Int8Type(), 0), "")
			fr.env[instr] = newValue(value, instr.Type())
		}

	case *ssa.BinOp:
		lhs, rhs := fr.value(instr.X), fr.value(instr.Y)
		fr.env[instr] = fr.binaryOp(lhs, instr.Op, rhs)

	case *ssa.Call:
		tuple := fr.callInstruction(instr)
		if len(tuple) == 1 {
			fr.env[instr] = tuple[0]
		} else {
			fr.tuples[instr] = tuple
		}

	case *ssa.ChangeInterface:
		x := fr.value(instr.X)
		// The source type must be a non-empty interface,
		// as ChangeInterface cannot fail (E2I may fail).
		if instr.Type().Underlying().(*types.Interface).NumMethods() > 0 {
			x = fr.changeInterface(x, instr.Type(), false)
		} else {
			x = fr.convertI2E(x)
		}
		fr.env[instr] = x

	case *ssa.ChangeType:
		value := fr.llvmvalue(instr.X)
		// Only pointer representations require a bitcast.
		if _, ok := instr.Type().Underlying().(*types.Pointer); ok {
			value = fr.builder.CreateBitCast(value, fr.llvmtypes.ToLLVM(instr.Type()), "")
		}
		fr.env[instr] = newValue(value, instr.Type())

	case *ssa.Convert:
		v := fr.value(instr.X)
		fr.env[instr] = fr.convert(v, instr.Type())

	case *ssa.Defer:
		fn, arg := fr.createThunk(instr)
		fr.runtime.Defer.call(fr, fr.frameptr, fn, arg)

	case *ssa.Extract:
		var elem llvm.Value
		if t, ok := fr.tuples[instr.Tuple]; ok {
			elem = t[instr.Index].value
		} else {
			tuple := fr.llvmvalue(instr.Tuple)
			elem = fr.builder.CreateExtractValue(tuple, instr.Index, instr.Name())
		}
		elemtyp := instr.Type()
		fr.env[instr] = newValue(elem, elemtyp)

	case *ssa.Field:
		fieldtyp := instr.Type()
		if p, ok := fr.ptr[instr.X]; ok {
			// The operand's load was elided; select via GEP.
			field := fr.builder.CreateStructGEP(p, instr.Field, instr.Name())
			if fr.canAvoidElementLoad(instr) {
				fr.ptr[instr] = field
			} else {
				fr.env[instr] = newValue(fr.builder.CreateLoad(field, ""), fieldtyp)
			}
		} else {
			value := fr.llvmvalue(instr.X)
			field := fr.builder.CreateExtractValue(value, instr.Field, instr.Name())
			fr.env[instr] = newValue(field, fieldtyp)
		}

	case *ssa.FieldAddr:
		ptr := fr.llvmvalue(instr.X)
		fr.nilCheck(instr.X, ptr)
		xtyp := instr.X.Type().Underlying().(*types.Pointer).Elem()
		ptrtyp := llvm.PointerType(fr.llvmtypes.ToLLVM(xtyp), 0)
		ptr = fr.builder.CreateBitCast(ptr, ptrtyp, "")
		fieldptr := fr.builder.CreateStructGEP(ptr, instr.Field, instr.Name())
		fieldptr = fr.builder.CreateBitCast(fieldptr, llvm.PointerType(llvm.Int8Type(), 0), "")
		fieldptrtyp := instr.Type()
		fr.env[instr] = newValue(fieldptr, fieldptrtyp)

	case *ssa.Go:
		fn, arg := fr.createThunk(instr)
		fr.runtime.Go.call(fr, fn, arg)

	case *ssa.If:
		cond := fr.llvmvalue(instr.Cond)
		block := instr.Block()
		trueBlock := fr.block(block.Succs[0])
		falseBlock := fr.block(block.Succs[1])
		cond = fr.builder.CreateTrunc(cond, llvm.Int1Type(), "")
		fr.builder.CreateCondBr(cond, trueBlock, falseBlock)

	case *ssa.Index:
		// Get the array's address, spilling it to an alloca if it is
		// held only as a value.
		var arrayptr llvm.Value
		if ptr, ok := fr.ptr[instr.X]; ok {
			arrayptr = ptr
		} else {
			array := fr.llvmvalue(instr.X)
			arrayptr = fr.allocaBuilder.CreateAlloca(array.Type(), "")
			fr.builder.CreateStore(array, arrayptr)
		}
		index := fr.llvmvalue(instr.Index)
		arraytyp := instr.X.Type().Underlying().(*types.Array)
		arraylen := llvm.ConstInt(fr.llvmtypes.inttype, uint64(arraytyp.Len()), false)

		// The index may not have been promoted to int (for example, if it
		// came from a composite literal).
		index = fr.createZExtOrTrunc(index, fr.types.inttype, "")

		// Bounds checking: 0 <= index < len
		zero := llvm.ConstNull(fr.types.inttype)
		i0 := fr.builder.CreateICmp(llvm.IntSLT, index, zero, "")
		li := fr.builder.CreateICmp(llvm.IntSLE, arraylen, index, "")
		cond := fr.builder.CreateOr(i0, li, "")
		fr.condBrRuntimeError(cond, gccgoRuntimeErrorARRAY_INDEX_OUT_OF_BOUNDS)

		addr := fr.builder.CreateGEP(arrayptr, []llvm.Value{zero, index}, "")
		if fr.canAvoidElementLoad(instr) {
			fr.ptr[instr] = addr
		} else {
			fr.env[instr] = newValue(fr.builder.CreateLoad(addr, ""), instr.Type())
		}

	case *ssa.IndexAddr:
		x := fr.llvmvalue(instr.X)
		index := fr.llvmvalue(instr.Index)
		var arrayptr, arraylen llvm.Value
		var elemtyp types.Type
		var errcode uint64
		switch typ := instr.X.Type().Underlying().(type) {
		case *types.Slice:
			elemtyp = typ.Elem()
			arrayptr = fr.builder.CreateExtractValue(x, 0, "")
			arraylen = fr.builder.CreateExtractValue(x, 1, "")
			errcode = gccgoRuntimeErrorSLICE_INDEX_OUT_OF_BOUNDS
		case *types.Pointer: // *array
			arraytyp := typ.Elem().Underlying().(*types.Array)
			elemtyp = arraytyp.Elem()
			fr.nilCheck(instr.X, x)
			arrayptr = x
			arraylen = llvm.ConstInt(fr.llvmtypes.inttype, uint64(arraytyp.Len()), false)
			errcode = gccgoRuntimeErrorARRAY_INDEX_OUT_OF_BOUNDS
		}

		// The index may not have been promoted to int (for example, if it
		// came from a composite literal).
		index = fr.createZExtOrTrunc(index, fr.types.inttype, "")

		// Bounds checking: 0 <= index < len
		zero := llvm.ConstNull(fr.types.inttype)
		i0 := fr.builder.CreateICmp(llvm.IntSLT, index, zero, "")
		li := fr.builder.CreateICmp(llvm.IntSLE, arraylen, index, "")
		cond := fr.builder.CreateOr(i0, li, "")
		fr.condBrRuntimeError(cond, errcode)

		ptrtyp := llvm.PointerType(fr.llvmtypes.ToLLVM(elemtyp), 0)
		arrayptr = fr.builder.CreateBitCast(arrayptr, ptrtyp, "")
		addr := fr.builder.CreateGEP(arrayptr, []llvm.Value{index}, "")
		addr = fr.builder.CreateBitCast(addr, llvm.PointerType(llvm.Int8Type(), 0), "")
		fr.env[instr] = newValue(addr, types.NewPointer(elemtyp))

	case *ssa.Jump:
		succ := instr.Block().Succs[0]
		fr.builder.CreateBr(fr.block(succ))

	case *ssa.Lookup:
		x := fr.value(instr.X)
		index := fr.value(instr.Index)
		if isString(x.Type().Underlying()) {
			fr.env[instr] = fr.stringIndex(x, index)
		} else {
			v, ok := fr.mapLookup(x, index)
			if instr.CommaOk {
				fr.tuples[instr] = []*govalue{v, ok}
			} else {
				fr.env[instr] = v
			}
		}

	case *ssa.MakeChan:
		fr.env[instr] = fr.makeChan(instr.Type(), fr.value(instr.Size))

	case *ssa.MakeClosure:
		llfn := fr.resolveFunctionGlobal(instr.Fn.(*ssa.Function))
		llfn = llvm.ConstBitCast(llfn, llvm.PointerType(llvm.Int8Type(), 0))
		fn := newValue(llfn, instr.Fn.(*ssa.Function).Signature)
		bindings := make([]*govalue, len(instr.Bindings))
		for i, binding := range instr.Bindings {
			bindings[i] = fr.value(binding)
		}
		fr.env[instr] = fr.makeClosure(fn, bindings)

	case *ssa.MakeInterface:
		// fr.ptr[instr.X] will be set if a pointer load was elided by canAvoidLoad
		if ptr, ok := fr.ptr[instr.X]; ok {
			fr.env[instr] = fr.makeInterfaceFromPointer(ptr, instr.X.Type(), instr.Type())
		} else {
			receiver := fr.llvmvalue(instr.X)
			fr.env[instr] = fr.makeInterface(receiver, instr.X.Type(), instr.Type())
		}

	case *ssa.MakeMap:
		fr.env[instr] = fr.makeMap(instr.Type(), fr.value(instr.Reserve))

	case *ssa.MakeSlice:
		length := fr.value(instr.Len)
		capacity := fr.value(instr.Cap)
		fr.env[instr] = fr.makeSlice(instr.Type(), length, capacity)

	case *ssa.MapUpdate:
		m := fr.value(instr.Map)
		k := fr.value(instr.Key)
		v := fr.value(instr.Value)
		fr.mapUpdate(m, k, v)

	case *ssa.Next:
		iter := fr.tuples[instr.Iter]
		if instr.IsString {
			fr.tuples[instr] = fr.stringIterNext(iter)
		} else {
			fr.tuples[instr] = fr.mapIterNext(iter)
		}

	case *ssa.Panic:
		arg := fr.value(instr.X)
		fr.callPanic(arg, true)

	case *ssa.Phi:
		// Incoming values are filled in later (see fr.phis).
		typ := instr.Type()
		phi := fr.builder.CreatePHI(fr.llvmtypes.ToLLVM(typ), instr.Comment)
		fr.env[instr] = newValue(phi, typ)
		fr.phis = append(fr.phis, pendingPhi{instr, phi})

	case *ssa.Range:
		x := fr.value(instr.X)
		switch x.Type().Underlying().(type) {
		case *types.Map:
			fr.tuples[instr] = fr.mapIterInit(x)
		case *types.Basic: // string
			fr.tuples[instr] = fr.stringIterInit(x)
		default:
			panic(fmt.Sprintf("unhandled range for type %T", x.Type()))
		}

	case *ssa.Return:
		vals := make([]llvm.Value, len(instr.Results))
		for i, res := range instr.Results {
			vals[i] = fr.llvmvalue(res)
		}
		fr.retInf.encode(llvm.GlobalContext(), fr.allocaBuilder, fr.builder, vals)

	case *ssa.RunDefers:
		fr.runDefers()

	case *ssa.Select:
		index, recvOk, recvElems := fr.chanSelect(instr)
		tuple := append([]*govalue{index, recvOk}, recvElems...)
		fr.tuples[instr] = tuple

	case *ssa.Send:
		fr.chanSend(fr.value(instr.Chan), fr.value(instr.X))

	case *ssa.Slice:
		x := fr.llvmvalue(instr.X)
		low := fr.llvmvalue(instr.Low)
		high := fr.llvmvalue(instr.High)
		max := fr.llvmvalue(instr.Max)
		slice := fr.slice(x, instr.X.Type(), low, high, max)
		fr.env[instr] = newValue(slice, instr.Type())

	case *ssa.Store:
		addr := fr.llvmvalue(instr.Addr)
		value := fr.llvmvalue(instr.Val)
		addr = fr.builder.CreateBitCast(addr, llvm.PointerType(value.Type(), 0), "")
		// If this is the init function, see if we can simulate the effect
		// of the store in a global's initializer, in which case we can avoid
		// generating code for it.
		if !fr.isInit || !fr.maybeStoreInInitializer(value, addr) {
			fr.nilCheck(instr.Addr, addr)
			fr.builder.CreateStore(value, addr)
		}

	case *switchInstr:
		fr.emitSwitch(instr)

	case *ssa.TypeAssert:
		x := fr.value(instr.X)
		if instr.CommaOk {
			v, ok := fr.interfaceTypeCheck(x, instr.AssertedType)
			fr.tuples[instr] = []*govalue{v, ok}
		} else {
			fr.env[instr] = fr.interfaceTypeAssert(x, instr.AssertedType)
		}

	case *ssa.UnOp:
		operand := fr.value(instr.X)
		switch instr.Op {
		case token.ARROW: // channel receive
			x, ok := fr.chanRecv(operand, instr.CommaOk)
			if instr.CommaOk {
				fr.tuples[instr] = []*govalue{x, ok}
			} else {
				fr.env[instr] = x
			}
		case token.MUL: // pointer load
			fr.nilCheck(instr.X, operand.value)
			if !fr.canAvoidLoad(instr, operand.value) {
				// The bitcast is necessary to handle recursive pointer loads.
				llptr := fr.builder.CreateBitCast(operand.value, llvm.PointerType(fr.llvmtypes.ToLLVM(instr.Type()), 0), "")
				fr.env[instr] = newValue(fr.builder.CreateLoad(llptr, ""), instr.Type())
			}
		default:
			fr.env[instr] = fr.unaryOp(operand, instr.Op)
		}

	default:
		panic(fmt.Sprintf("unhandled: %v", instr))
	}
}
// TODO(adonovan):
// - test use of explicit hasher across two maps.
// - test hashcodes are consistent with equals for a range of types
//   (e.g. all types generated by type-checking some body of real code).

import (
	"testing"

	"llvm.org/llgo/third_party/gotools/go/types"
	"llvm.org/llgo/third_party/gotools/go/types/typeutil"
)

// Shared type fixtures. Pairs such as tPStr1/tPStr2 and
// tChanInt1/tChanInt2 are structurally identical yet distinct Type
// values, so tests can tell type identity (==) apart from type
// equivalence (types.Identical).
var (
	tStr      = types.Typ[types.String]             // string
	tPStr1    = types.NewPointer(tStr)              // *string
	tPStr2    = types.NewPointer(tStr)              // *string, again
	tInt      = types.Typ[types.Int]                // int
	tChanInt1 = types.NewChan(types.RecvOnly, tInt) // <-chan int
	tChanInt2 = types.NewChan(types.RecvOnly, tInt) // <-chan int, again
)

// checkEqualButNotIdentical reports test errors unless x and y are
// equivalent types (types.Identical) that are nonetheless distinct
// Type values (x != y). comment prefixes any failure message.
func checkEqualButNotIdentical(t *testing.T, x, y types.Type, comment string) {
	if !types.Identical(x, y) {
		t.Errorf("%s: not equal: %s, %s", comment, x, y)
	}
	if x == y {
		t.Errorf("%s: identical: %v, %v", comment, x, y)
	}
}
// Precondition: T is not a method signature (*Signature with Recv()!=nil).
// Recursive case: skip => don't create methods for T.
//
// EXCLUSIVE_LOCKS_REQUIRED(prog.methodsMu)
//
func (prog *Program) needMethods(T types.Type, skip bool) {
	// Each package maintains its own set of types it has visited.
	if prevSkip, ok := prog.runtimeTypes.At(T).(bool); ok {
		// needMethods(T) was previously called.
		if !prevSkip || skip {
			return // already seen, with same or false 'skip' value
		}
		// Previously seen with skip=true but now requested with
		// skip=false: fall through and redo the traversal so that
		// T's methods actually get created this time.
	}
	prog.runtimeTypes.Set(T, skip)

	tmset := prog.MethodSets.MethodSet(T)

	if !skip && !isInterface(T) && tmset.Len() > 0 {
		// Create methods of T.
		mset := prog.createMethodSet(T)
		if !mset.complete {
			mset.complete = true
			n := tmset.Len()
			for i := 0; i < n; i++ {
				prog.addMethod(mset, tmset.At(i))
			}
		}
	}

	// Recursion over signatures of each method: parameter and result
	// types may themselves need runtime type information.
	for i := 0; i < tmset.Len(); i++ {
		sig := tmset.At(i).Type().(*types.Signature)
		prog.needMethods(sig.Params(), false)
		prog.needMethods(sig.Results(), false)
	}

	// Recursion over the structural subcomponents of T.
	switch t := T.(type) {
	case *types.Basic:
		// nop

	case *types.Interface:
		// nop---handled by recursion over method set.

	case *types.Pointer:
		prog.needMethods(t.Elem(), false)

	case *types.Slice:
		prog.needMethods(t.Elem(), false)

	case *types.Chan:
		prog.needMethods(t.Elem(), false)

	case *types.Map:
		prog.needMethods(t.Key(), false)
		prog.needMethods(t.Elem(), false)

	case *types.Signature:
		// Precondition from the doc comment: method signatures are
		// the caller's responsibility to exclude.
		if t.Recv() != nil {
			panic(fmt.Sprintf("Signature %s has Recv %s", t, t.Recv()))
		}
		prog.needMethods(t.Params(), false)
		prog.needMethods(t.Results(), false)

	case *types.Named:
		// A pointer-to-named type can be derived from a named
		// type via reflection.  It may have methods too.
		prog.needMethods(types.NewPointer(T), false)

		// Consider 'type T struct{S}' where S has methods.
		// Reflection provides no way to get from T to struct{S},
		// only to S, so the method set of struct{S} is unwanted,
		// so set 'skip' flag during recursion.
		prog.needMethods(t.Underlying(), true)

	case *types.Array:
		prog.needMethods(t.Elem(), false)

	case *types.Struct:
		for i, n := 0, t.NumFields(); i < n; i++ {
			prog.needMethods(t.Field(i).Type(), false)
		}

	case *types.Tuple:
		for i, n := 0, t.Len(); i < n; i++ {
			prog.needMethods(t.At(i).Type(), false)
		}

	default:
		panic(T)
	}
}
// CreatePackage constructs and returns an SSA Package from an // error-free package described by info, and populates its Members // mapping. // // Repeated calls with the same info return the same Package. // // The real work of building SSA form for each function is not done // until a subsequent call to Package.Build(). // func (prog *Program) CreatePackage(info *loader.PackageInfo) *Package { if p := prog.packages[info.Pkg]; p != nil { return p // already loaded } p := &Package{ Prog: prog, Members: make(map[string]Member), values: make(map[types.Object]Value), Object: info.Pkg, info: info, // transient (CREATE and BUILD phases) } // Add init() function. p.init = &Function{ name: "init", Signature: new(types.Signature), Synthetic: "package initializer", Pkg: p, Prog: prog, } p.Members[p.init.name] = p.init // CREATE phase. // Allocate all package members: vars, funcs, consts and types. if len(info.Files) > 0 { // Go source package. for _, file := range info.Files { for _, decl := range file.Decls { membersFromDecl(p, decl) } } } else { // GC-compiled binary package. // No code. // No position information. scope := p.Object.Scope() for _, name := range scope.Names() { obj := scope.Lookup(name) memberFromObject(p, obj, nil) if obj, ok := obj.(*types.TypeName); ok { named := obj.Type().(*types.Named) for i, n := 0, named.NumMethods(); i < n; i++ { memberFromObject(p, named.Method(i), nil) } } } } if prog.mode&BareInits == 0 { // Add initializer guard variable. initguard := &Global{ Pkg: p, name: "init$guard", typ: types.NewPointer(tBool), } p.Members[initguard.Name()] = initguard } if prog.mode&GlobalDebug != 0 { p.SetDebugMode(true) } if prog.mode&PrintPackages != 0 { printMu.Lock() p.WriteTo(os.Stdout) printMu.Unlock() } if info.Importable { prog.imported[info.Pkg.Path()] = p } prog.packages[p.Object] = p return p }