// genBuiltinCall generates constraints for a call to a built-in.
func (a *analysis) genBuiltinCall(instr ssa.CallInstruction, cgn *cgnode) {
	call := instr.Common()
	switch call.Value.(*ssa.Builtin).Name() {
	case "append":
		// Safe cast: append cannot appear in a go or defer statement.
		a.genAppend(instr.(*ssa.Call), cgn)

	case "copy":
		tElem := call.Args[0].Type().Underlying().(*types.Slice).Elem()
		a.copyElems(cgn, tElem, call.Args[0], call.Args[1])

	case "panic":
		a.copy(a.panicNode, a.valueNode(call.Args[0]), 1)

	case "recover":
		if v := instr.Value(); v != nil {
			a.copy(a.valueNode(v), a.panicNode, 1)
		}

	case "print":
		// In the tests, the probe might be the sole reference
		// to its arg, so make sure we create nodes for it.
		if len(call.Args) > 0 {
			a.valueNode(call.Args[0])
		}

	case "ssa:wrapnilchk":
		a.copy(a.valueNode(instr.Value()), a.valueNode(call.Args[0]), 1)

	default:
		// No-ops: close len cap real imag complex print println delete.
	}
}
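// Illustrative sketch (not part of the analysis): ordinary Go source
// showing the data flow that the "panic" and "recover" cases above
// model. The pointer passed to panic in f is exactly the value that
// recover returns in main, so the analysis conservatively funnels
// every panic operand through the single panicNode and copies that
// node into each recover result.
package main

import "fmt"

type T struct{ x int }

func f() { panic(&T{x: 1}) }

func main() {
	defer func() {
		if v := recover(); v != nil {
			fmt.Println(v.(*T).x) // prints 1: &T{} flowed panic -> recover
		}
	}()
	f()
}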
// genCall generates constraints for call instruction instr.
func (a *analysis) genCall(caller *cgnode, instr ssa.CallInstruction) {
	call := instr.Common()

	// Intrinsic implementations of built-in functions.
	if _, ok := call.Value.(*ssa.Builtin); ok {
		a.genBuiltinCall(instr, caller)
		return
	}

	var result nodeid
	if v := instr.Value(); v != nil {
		result = a.valueNode(v)
	}

	site := &callsite{instr: instr}
	if call.StaticCallee() != nil {
		a.genStaticCall(caller, site, call, result)
	} else if call.IsInvoke() {
		a.genInvoke(caller, site, call, result)
	} else {
		a.genDynamicCall(caller, site, call, result)
	}

	caller.sites = append(caller.sites, site)

	if a.log != nil {
		// TODO(adonovan): debug: improve log message.
		fmt.Fprintf(a.log, "\t%s to targets %s from %s\n", site, site.targets, caller)
	}
}
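// Illustrative sketch (assumed example, not from the analysis): the
// three dispatch kinds genCall distinguishes, in ordinary Go source.
// After SSA construction, each call below takes a different branch.
package main

type I interface{ M() }
type T struct{}

func (T) M() {}

func static() {}

func indirect(f func()) {
	f() // dynamic "call" mode: no static callee -> genDynamicCall
}

func main() {
	static() // call.StaticCallee() != nil -> genStaticCall
	var i I = T{}
	i.M() // call.IsInvoke() -> genInvoke
	indirect(static)
}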
// visitInvoke is called each time the algorithm encounters an "invoke"-mode call.
func (r *rta) visitInvoke(site ssa.CallInstruction) {
	I := site.Common().Value.Type().Underlying().(*types.Interface)

	// Record the invoke site.
	sites, _ := r.invokeSites.At(I).([]ssa.CallInstruction)
	r.invokeSites.Set(I, append(sites, site))

	// Add callgraph edge for each existing
	// address-taken concrete type implementing I.
	for _, C := range r.implementations(I) {
		r.addInvokeEdge(site, C)
	}
}
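// Illustrative sketch (assumed example): source containing an
// invoke-mode site that visitInvoke must pair with every concrete
// type implementing I that has been made reachable as an interface
// value, including types discovered only after the site is first
// seen; recording the site under I in invokeSites makes that
// retroactive pairing possible.
package main

type I interface{ M() }

type A struct{}

func (A) M() {}

type B struct{}

func (B) M() {}

func use(i I) { i.M() } // visitInvoke records this site under I

func main() {
	use(A{}) // the A{} -> I conversion makes A address-taken
	use(B{}) // likewise B, possibly visited after the i.M() site
}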
// visitDynCall is called each time we encounter a dynamic "call"-mode call.
func (r *rta) visitDynCall(site ssa.CallInstruction) {
	S := site.Common().Signature()

	// Record the call site.
	sites, _ := r.dynCallSites.At(S).([]ssa.CallInstruction)
	r.dynCallSites.Set(S, append(sites, site))

	// For each function of signature S that we know is address-taken,
	// add a callgraph edge and mark it reachable.
	funcs, _ := r.addrTakenFuncsBySig.At(S).(map[*ssa.Function]bool)
	for g := range funcs {
		r.addEdge(site, g, true)
	}
}
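// Illustrative sketch (assumed example): the symmetric case for
// dynamic "call"-mode sites. A site of signature S must be linked
// with every address-taken function of the same signature, so
// visitDynCall records the site under S and immediately adds edges
// for the functions already known.
package main

func a() {}
func b() {}

func main() {
	fs := []func(){a, b} // a and b become address-taken here
	for _, f := range fs {
		f() // dynamic site of signature func(); candidates: a, b
	}
}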
// callInstruction translates function call instructions.
func (fr *frame) callInstruction(instr ssa.CallInstruction) []*govalue {
	call := instr.Common()
	if builtin, ok := call.Value.(*ssa.Builtin); ok {
		var typ types.Type
		if v := instr.Value(); v != nil {
			typ = v.Type()
		}
		return fr.callBuiltin(typ, builtin, call.Args)
	}

	args := make([]*govalue, len(call.Args))
	for i, arg := range call.Args {
		args[i] = fr.value(arg)
	}

	var fn *govalue
	var chain llvm.Value
	if call.IsInvoke() {
		var recv *govalue
		fn, recv = fr.interfaceMethod(fr.llvmvalue(call.Value), call.Value.Type(), call.Method)
		args = append([]*govalue{recv}, args...)
	} else {
		if ssafn, ok := call.Value.(*ssa.Function); ok {
			llfn := fr.resolveFunctionGlobal(ssafn)
			llfn = llvm.ConstBitCast(llfn, llvm.PointerType(llvm.Int8Type(), 0))
			fn = newValue(llfn, ssafn.Type())
		} else {
			// First-class function values are stored as *{*fnptr}, so
			// we must extract the function pointer. We must also
			// set the chain, in case the function is a closure.
			fn = fr.value(call.Value)
			chain = fn.value
			fnptr := fr.builder.CreateBitCast(fn.value, llvm.PointerType(fn.value.Type(), 0), "")
			fnptr = fr.builder.CreateLoad(fnptr, "")
			fn = newValue(fnptr, fn.Type())
		}
		if recv := call.Signature().Recv(); recv != nil {
			if _, ok := recv.Type().Underlying().(*types.Pointer); !ok {
				recvalloca := fr.allocaBuilder.CreateAlloca(args[0].value.Type(), "")
				fr.builder.CreateStore(args[0].value, recvalloca)
				args[0] = newValue(recvalloca, types.NewPointer(args[0].Type()))
			}
		}
	}
	return fr.createCall(fn, chain, args)
}
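// Illustrative sketch (assumed example): Go source exercising the
// receiver-spill branch at the end of callInstruction. M has a value
// receiver, so the T argument is stored to a fresh alloca and its
// address is passed in args[0] instead of the value itself.
package main

type T struct{ x, y int64 }

func (t T) M() int64 { return t.x + t.y }

func main() {
	var t T
	_ = t.M() // non-pointer receiver -> alloca + store + pass pointer
}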
// createThunk creates a thunk from a given function and arguments,
// suitable for use with "defer" and "go".
func (fr *frame) createThunk(call ssa.CallInstruction) (thunk llvm.Value, arg llvm.Value) {
	seenarg := make(map[ssa.Value]bool)
	var args []ssa.Value
	var argtypes []*types.Var

	packArg := func(arg ssa.Value) {
		switch arg.(type) {
		case *ssa.Builtin, *ssa.Function, *ssa.Const, *ssa.Global:
			// Do nothing: we can generate these in the thunk.
		default:
			if !seenarg[arg] {
				seenarg[arg] = true
				args = append(args, arg)
				field := types.NewField(0, nil, "_", arg.Type(), true)
				argtypes = append(argtypes, field)
			}
		}
	}

	packArg(call.Common().Value)
	for _, arg := range call.Common().Args {
		packArg(arg)
	}

	var isRecoverCall bool
	i8ptr := llvm.PointerType(llvm.Int8Type(), 0)
	var structllptr llvm.Type
	if len(args) == 0 {
		if builtin, ok := call.Common().Value.(*ssa.Builtin); ok {
			isRecoverCall = builtin.Name() == "recover"
		}
		if isRecoverCall {
			// When creating a thunk for recover(), we must pass fr.canRecover.
			arg = fr.builder.CreateZExt(fr.canRecover, fr.target.IntPtrType(), "")
			arg = fr.builder.CreateIntToPtr(arg, i8ptr, "")
		} else {
			arg = llvm.ConstPointerNull(i8ptr)
		}
	} else {
		structtype := types.NewStruct(argtypes, nil)
		arg = fr.createTypeMalloc(structtype)
		structllptr = arg.Type()
		for i, ssaarg := range args {
			argptr := fr.builder.CreateStructGEP(arg, i, "")
			fr.builder.CreateStore(fr.llvmvalue(ssaarg), argptr)
		}
		arg = fr.builder.CreateBitCast(arg, i8ptr, "")
	}

	thunkfntype := llvm.FunctionType(llvm.VoidType(), []llvm.Type{i8ptr}, false)
	thunkfn := llvm.AddFunction(fr.module.Module, "", thunkfntype)
	thunkfn.SetLinkage(llvm.InternalLinkage)
	fr.addCommonFunctionAttrs(thunkfn)

	thunkfr := newFrame(fr.unit, thunkfn)
	defer thunkfr.dispose()

	prologuebb := llvm.AddBasicBlock(thunkfn, "prologue")
	thunkfr.builder.SetInsertPointAtEnd(prologuebb)

	if isRecoverCall {
		thunkarg := thunkfn.Param(0)
		thunkarg = thunkfr.builder.CreatePtrToInt(thunkarg, fr.target.IntPtrType(), "")
		thunkfr.canRecover = thunkfr.builder.CreateTrunc(thunkarg, llvm.Int1Type(), "")
	} else if len(args) > 0 {
		thunkarg := thunkfn.Param(0)
		thunkarg = thunkfr.builder.CreateBitCast(thunkarg, structllptr, "")
		for i, ssaarg := range args {
			thunkargptr := thunkfr.builder.CreateStructGEP(thunkarg, i, "")
			thunkarg := thunkfr.builder.CreateLoad(thunkargptr, "")
			thunkfr.env[ssaarg] = newValue(thunkarg, ssaarg.Type())
		}
	}

	_, isDefer := call.(*ssa.Defer)

	entrybb := llvm.AddBasicBlock(thunkfn, "entry")
	br := thunkfr.builder.CreateBr(entrybb)
	thunkfr.allocaBuilder.SetInsertPointBefore(br)
	thunkfr.builder.SetInsertPointAtEnd(entrybb)
	var exitbb llvm.BasicBlock
	if isDefer {
		exitbb = llvm.AddBasicBlock(thunkfn, "exit")
		thunkfr.runtime.setDeferRetaddr.call(thunkfr, llvm.BlockAddress(thunkfn, exitbb))
	}
	if isDefer && isRecoverCall {
		thunkfr.callRecover(true)
	} else {
		thunkfr.callInstruction(call)
	}
	if isDefer {
		thunkfr.builder.CreateBr(exitbb)
		thunkfr.builder.SetInsertPointAtEnd(exitbb)
	}
	thunkfr.builder.CreateRetVoid()

	thunk = fr.builder.CreateBitCast(thunkfn, i8ptr, "")
	return
}
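// Illustrative sketch (assumed example): the packing rule of packArg
// in ordinary Go terms. show (*ssa.Function), &global (*ssa.Global),
// and 42 (*ssa.Const) can all be regenerated inside the thunk, so
// only the local n must be written into the heap-allocated argument
// struct passed through the thunk's single i8* parameter.
package main

var global int

func show(p *int, k, n int) { *p = k + n }

func f(n int) {
	go show(&global, 42, n) // only n is packed into the struct
}

func main() { f(1) }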
// addInvokeEdge is called for each new pair (site, C) in the matrix.
func (r *rta) addInvokeEdge(site ssa.CallInstruction, C types.Type) {
	// Ascertain the concrete method of C to be called.
	imethod := site.Common().Method
	cmethod := r.prog.Method(r.prog.MethodSets.MethodSet(C).Lookup(imethod.Pkg(), imethod.Name()))
	r.addEdge(site, cmethod, true)
}
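// Standalone go/types sketch (illustrative helper, not part of rta)
// of the resolution step above: Lookup finds the concrete method on
// C whose package and name match the interface method's, which is
// what r.prog.MethodSets.MethodSet(C).Lookup does with caching.
package sketch

import "go/types"

func concreteMethod(C types.Type, imethod *types.Func) *types.Selection {
	return types.NewMethodSet(C).Lookup(imethod.Pkg(), imethod.Name())
}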