// CheckNames exists because of this comment in the SSA code documentation: // "Many objects are nonetheless named to aid in debugging, but it is not essential that the names be either accurate or unambiguous. " func CheckNames(f *ssa.Function) error { names := make(map[string]*ssa.Instruction) //fmt.Println("DEBUG Check Names for ", f.String()) for blk := range f.Blocks { for ins := range f.Blocks[blk].Instrs { instrVal, hasVal := f.Blocks[blk].Instrs[ins].(ssa.Value) if hasVal { register := instrVal.Name() //fmt.Println("DEBUG name ", register) val, found := names[register] if found { if val != &f.Blocks[blk].Instrs[ins] { return fmt.Errorf("internal error, ssa register names not unique in function %s var name %s", f.String(), register) } } else { names[register] = &f.Blocks[blk].Instrs[ins] } } } } return nil }
// findIntrinsic returns the constraint generation function for an // intrinsic function fn, or nil if the function should be handled normally. // func (a *analysis) findIntrinsic(fn *ssa.Function) intrinsic { // Consult the *Function-keyed cache. // A cached nil indicates a normal non-intrinsic function. impl, ok := a.intrinsics[fn] if !ok { impl = intrinsicsByName[fn.String()] // may be nil if a.isReflect(fn) { if !a.config.Reflection { impl = ext۰NoEffect // reflection disabled } else if impl == nil { // Ensure all "reflect" code is treated intrinsically. impl = ext۰NotYetImplemented } } else if impl == nil && fn.Pkg != nil && fn.Pkg.Pkg.Path() == "runtime" { // Ignore "runtime" (except SetFinalizer): // it has few interesting effects on aliasing // and is full of unsafe code we can't analyze. impl = ext۰NoEffect } a.intrinsics[fn] = impl } return impl }
func (fa *FairnessAnalysis) Visit(fn *ssa.Function) { visitedBlk := make(map[*ssa.BasicBlock]bool) fa.logger.Printf("Visiting: %s", fn.String()) for _, blk := range fn.Blocks { if _, visited := visitedBlk[blk]; !visited { visitedBlk[blk] = true fa.logger.Printf(" block %d %s", blk.Index, blk.Comment) // First consider blocks with loop initialisation blocks. if blk.Comment == "rangeindex.loop" { fa.total++ fa.logger.Println(color.GreenString("✓ range loops are fair")) } else if blk.Comment == "rangechan.loop" { fa.total++ hasClose := false for _, ch := range fa.info.FindChan(blk.Instrs[0].(*ssa.UnOp).X) { if ch.Type == ssabuilder.ChanClose { fa.logger.Println(color.GreenString("✓ found corresponding close() - channel range likely fair")) hasClose = true } } if !hasClose { fa.logger.Println(color.RedString("❌ range over channel w/o close() likely unfair (%s)", fa.info.FSet.Position(blk.Instrs[0].Pos()))) fa.unsafe++ } } else if blk.Comment == "for.loop" { fa.total++ if fa.isLikelyUnsafe(blk) { fa.logger.Println(color.RedString("❌ for.loop maybe bad")) fa.unsafe++ } else { fa.logger.Println(color.GreenString("✓ for.loop is ok")) } } else { // Normal blocks (or loops without initialisation blocks). if len(blk.Instrs) > 1 { if ifInst, ok := blk.Instrs[len(blk.Instrs)-1].(*ssa.If); ok { _, thenVisited := visitedBlk[ifInst.Block().Succs[0]] _, elseVisited := visitedBlk[ifInst.Block().Succs[1]] if thenVisited || elseVisited { // there is a loop! fa.total++ if !fa.isCondFair(ifInst.Cond) { fa.logger.Println(color.YellowString("Warning: recurring block condition probably unfair")) fa.unsafe++ } else { fa.logger.Println(color.GreenString("✓ recurring block is ok")) } } } else if jInst, ok := blk.Instrs[len(blk.Instrs)-1].(*ssa.Jump); ok { if _, visited := visitedBlk[jInst.Block().Succs[0]]; visited { fa.total++ fa.unsafe++ fa.logger.Println(color.RedString("❌ infinite loop or recurring block, probably bad (%s)", fa.info.FSet.Position(blk.Instrs[0].Pos()))) } } } } } } }
// prepareCallFn prepares a caller Function to visit performing necessary context switching and returns a new callee Function.
// rcvr is non-nil if invoke call.
//
// The callee inherits channel parameters, locals, and aliasing metadata
// (arrays/structs/maps) from the caller so analysis can continue inside fn.
func (caller *Function) prepareCallFn(common *ssa.CallCommon, fn *ssa.Function, rcvr ssa.Value) *Function {
	callee := NewFunction(caller)
	callee.Fn = fn
	// This function was called before: bump the per-function instance
	// counter; the very first call gets instance id 0.
	if _, ok := callee.Prog.FuncInstance[callee.Fn]; ok {
		callee.Prog.FuncInstance[callee.Fn]++
	} else {
		callee.Prog.FuncInstance[callee.Fn] = 0
	}
	callee.FuncDef.Name = fn.String()
	callee.id = callee.Prog.FuncInstance[callee.Fn]
	// Bind each formal parameter of the callee to the corresponding caller
	// argument. For an invoke call, parameter 0 is the receiver and the
	// remaining parameters are shifted by one in common.Args.
	for i, param := range callee.Fn.Params {
		var argCaller ssa.Value
		if rcvr != nil {
			if i == 0 {
				argCaller = rcvr
			} else {
				argCaller = common.Args[i-1]
			}
		} else {
			argCaller = common.Args[i]
		}
		// Channel-typed arguments become explicit MiGo parameters.
		if _, ok := argCaller.Type().(*types.Chan); ok {
			callee.FuncDef.AddParams(&migo.Parameter{Caller: argCaller, Callee: param})
		}
		if inst, ok := caller.locals[argCaller]; ok {
			callee.locals[param] = inst
			callee.revlookup[argCaller.Name()] = param.Name()
			// Copy array and struct from parent.
			if elems, ok := caller.arrays[inst]; ok {
				callee.arrays[inst] = elems
			}
			if fields, ok := caller.structs[inst]; ok {
				callee.structs[inst] = fields
			}
			if maps, ok := caller.maps[inst]; ok {
				callee.maps[inst] = maps
			}
		} else if c, ok := argCaller.(*ssa.Const); ok {
			// Constant arguments are wrapped so the callee has a local for them.
			callee.locals[param] = &Const{c}
		}
	}
	// If the called value is a closure the caller knows about, bind the
	// captured free variables; channel-typed captures also become MiGo
	// parameters (caller and callee use the same free-var value).
	if inst, ok := caller.locals[common.Value]; ok {
		if cap, ok := caller.Prog.closures[inst]; ok {
			for i, fv := range callee.Fn.FreeVars {
				callee.locals[fv] = cap[i]
				if _, ok := derefType(fv.Type()).(*types.Chan); ok {
					callee.FuncDef.AddParams(&migo.Parameter{Caller: fv, Callee: fv})
				}
			}
		}
	}
	return callee
}
// callSSA interprets a call to function fn with arguments args,
// and lexical environment env, returning its result.
// callpos is the position of the callsite.
func callSSA(i *interpreter, caller *frame, callpos token.Pos, fn *ssa.Function, args []value, env []value) value {
	if i.mode&EnableTracing != 0 {
		fset := fn.Prog.Fset
		// TODO(adonovan): fix: loc() lies for external functions.
		fmt.Fprintf(os.Stderr, "Entering %s%s.\n", fn, loc(fset, fn.Pos()))
		suffix := ""
		if caller != nil {
			suffix = ", resuming " + caller.fn.String() + loc(fset, callpos)
		}
		// suffix is captured now (defer evaluates its args immediately).
		defer fmt.Fprintf(os.Stderr, "Leaving %s%s.\n", fn, suffix)
	}
	fr := &frame{
		i:      i,
		caller: caller, // for panic/recover
		fn:     fn,
	}
	// Top-level (non-nested) functions may be externals: either an
	// interpreter-provided implementation or genuinely missing code.
	if fn.Parent() == nil {
		name := fn.String()
		if ext := externals[name]; ext != nil {
			if i.mode&EnableTracing != 0 {
				fmt.Fprintln(os.Stderr, "\t(external)")
			}
			return ext(fr, args)
		}
		if fn.Blocks == nil {
			panic("no code for function: " + name)
		}
	}
	// Build the frame environment: locals (addressable, so the env holds
	// pointers into fr.locals), then parameters, then free variables.
	fr.env = make(map[ssa.Value]value)
	fr.block = fn.Blocks[0]
	fr.locals = make([]value, len(fn.Locals))
	for i, l := range fn.Locals {
		fr.locals[i] = zero(deref(l.Type()))
		fr.env[l] = &fr.locals[i]
	}
	for i, p := range fn.Params {
		fr.env[p] = args[i]
	}
	for i, fv := range fn.FreeVars {
		fr.env[fv] = env[i]
	}
	// Execute until the frame has no current block (normal return or panic
	// handling clears it).
	for fr.block != nil {
		runFrame(fr)
	}
	// Destroy the locals to avoid accidental use after return.
	for i := range fn.Locals {
		fr.locals[i] = bad{}
	}
	return fr.result
}
func checkFn(fn *ssa.Function, prog *ssa.Program, ptrResult *pointer.Result, roots []*ssa.Function) { nam := fn.String() if strings.HasPrefix(strings.TrimPrefix(nam, "("), *prefix) { hasPath := false usedExternally := false for _, r := range roots { if r != nil { nod, ok := ptrResult.CallGraph.Nodes[r] if ok { //fmt.Println("NODE root", r.Name()) pth := callgraph.PathSearch(nod, func(n *callgraph.Node) bool { if n == nil { return false } if n.Func == fn { for _, ine := range n.In { if ine.Caller.Func.Pkg != fn.Pkg { //fmt.Println("DEBUG diff? ", // ine.Caller.Func.Pkg, fn.Pkg) usedExternally = true break } } return true } return false }) if pth != nil { //fmt.Printf("DEBUG path from %v to %v = %v\n", // r, fn, pth) hasPath = true break } } } } isUpper := unicode.IsUpper(rune(fn.Name()[0])) pos := fn.Pos() //if strings.HasPrefix(nam, "(") && (!hasPath || (!usedExternally && isUpper)) { // fmt.Println("bad Pos", pos, prog.Fset.Position(pos).String()) //} loc := strings.TrimPrefix( prog.Fset.Position(pos).String(), gopath+"/src/"+*prefix+"/") showFuncResult(loc, nam, hasPath, usedExternally, isUpper) } wg.Done() }
// findIntrinsic returns the constraint generation function for an // intrinsic function fn, or nil if the function should be handled normally. // func (a *analysis) findIntrinsic(fn *ssa.Function) intrinsic { // Consult the *Function-keyed cache. // A cached nil indicates a normal non-intrinsic function. impl, ok := a.intrinsics[fn] if !ok { impl = intrinsicsByName[fn.String()] // may be nil if a.isReflect(fn) { if !a.config.Reflection { impl = ext۰NoEffect // reflection disabled } else if impl == nil { // Ensure all "reflect" code is treated intrinsically. impl = ext۰NotYetImplemented } } a.intrinsics[fn] = impl } return impl }
// function recursively marks fn (and every function it references or
// statically calls) in visit.seen, and computes visit.usesGR[fn]: whether
// fn may require goroutine support, either directly (go/defer/chan ops)
// or transitively through a callee or referenced function.
func (visit *visitor) function(fn *ssa.Function, isOvl isOverloaded) {
	if !visit.seen[fn] { // been, exists := visit.seen[fn]; !been || !exists {
		vprintln("DEBUG 1st visit to: ", fn.String())
		visit.seen[fn] = true
		visit.usesGR[fn] = false
		// Overloaded functions are replaced elsewhere; do not analyse them.
		if isOvl(fn) {
			vprintln("DEBUG overloaded: ", fn.String())
			return
		}
		if len(fn.Blocks) == 0 {
			// exclude functions that reference C/assembler code
			// NOTE: not marked as seen, because we don't want to include in output
			// if used, the symbol will be included in the golibruntime replacement packages
			// TODO review
			vprintln("DEBUG no code for: ", fn.String())
			return // external functions cannot use goroutines
		}
		var buf [10]*ssa.Value // avoid alloc in common case
		for _, b := range fn.Blocks {
			for _, instr := range b.Instrs {
				// Pass 1 over this instruction: inspect every operand.
				for _, op := range instr.Operands(buf[:0]) {
					areRecursing := false
					afn, isFn := (*op).(*ssa.Function)
					if isFn {
						if afn == fn {
							areRecursing = true
						}
						// Visit the referenced function first so its usesGR
						// result is available for propagation.
						visit.function(afn, isOvl)
						if visit.usesGR[afn] {
							vprintln("marked as using GR because referenced func uses GR")
							visit.usesGR[fn] = true
						}
						vprintln(fn.Name(), " calls ", afn.Name())
					}
					// TODO, review if this code should be included
					if !visit.usesGR[fn] {
						if _, ok := (*op).(ssa.Value); ok {
							typ := (*op).Type()
							typ = DeRefUl(typ)
							switch typ.(type) {
							// TODO use oracle techniques to determine which interfaces or functions may require GR
							case *types.Chan, *types.Interface:
								visit.usesGR[fn] = true // may be too conservative
								vprintln("marked as using GR because uses Chan/Interface")
							case *types.Signature:
								// A function-typed operand that is not a direct
								// *ssa.Function reference (and not self-recursion)
								// could be any function value, so assume GR use.
								if !areRecursing {
									if !isFn {
										visit.usesGR[fn] = true
										vprintln("marked as using GR because uses Signature")
									}
								}
							}
						}
					}
				}
				// Pass 2: follow static call targets (builtins excluded).
				if _, ok := instr.(*ssa.Call); ok {
					switch instr.(*ssa.Call).Call.Value.(type) {
					case *ssa.Builtin:
						//NoOp
					default:
						cc := instr.(*ssa.Call).Common()
						if cc != nil {
							afn := cc.StaticCallee()
							if afn != nil {
								visit.function(afn, isOvl)
								if visit.usesGR[afn] {
									visit.usesGR[fn] = true
									vprintln("marked as using GR because call target uses GR")
								}
								vprintln(fn.Name(), " calls ", afn.Name())
							}
						}
					}
				}
				// Pass 3: instructions that intrinsically require goroutine
				// support (spawning, channels, defer/panic, select, receive).
				if !visit.usesGR[fn] {
					switch instr.(type) {
					case *ssa.Go, *ssa.MakeChan, *ssa.Defer, *ssa.Panic, *ssa.Send, *ssa.Select:
						vprintln("usesGR because uses Go...", fn.Name())
						visit.usesGR[fn] = true
					case *ssa.UnOp:
						// "<-" is a channel receive.
						if instr.(*ssa.UnOp).Op.String() == "<-" {
							vprintln("usesGR because uses <-", fn.Name())
							visit.usesGR[fn] = true
						}
					}
				}
			}
		}
	}
}
// defineFunction emits LLVM IR for the body of SSA function f: it sets up
// the function descriptor and linkage, builds a prologue (parameters,
// closure free variables, zeroed stack locals), translates each basic
// block in dominator preorder, and finishes with phi fixups, unwind-block
// setup, and init-specific GC-root registration.
func (u *unit) defineFunction(f *ssa.Function) {
	// Only define functions from this package, or synthetic
	// wrappers (which do not have a package).
	if f.Pkg != nil && f.Pkg != u.pkg {
		return
	}

	llfn := u.resolveFunctionGlobal(f)
	linkage := u.getFunctionLinkage(f)

	isMethod := f.Signature.Recv() != nil

	// Methods cannot be referred to via a descriptor.
	if !isMethod {
		llfd := u.resolveFunctionDescriptorGlobal(f)
		llfd.SetInitializer(llvm.ConstBitCast(llfn, llvm.PointerType(llvm.Int8Type(), 0)))
		llfd.SetLinkage(linkage)
	}

	// We only need to emit a descriptor for functions without bodies.
	if len(f.Blocks) == 0 {
		return
	}

	ssaopt.LowerAllocsToStack(f)

	if u.DumpSSA {
		f.WriteTo(os.Stderr)
	}

	fr := newFrame(u, llfn)
	defer fr.dispose()
	fr.addCommonFunctionAttrs(fr.function)
	fr.function.SetLinkage(linkage)

	fr.logf("Define function: %s", f.String())
	fti := u.llvmtypes.getSignatureInfo(f.Signature)
	delete(u.undefinedFuncs, f)
	fr.retInf = fti.retInf

	// Push the compile unit and function onto the debug context.
	if u.GenerateDebug {
		u.debug.PushFunction(fr.function, f.Signature, f.Pos())
		defer u.debug.PopFunction()
		u.debug.SetLocation(fr.builder, f.Pos())
	}

	// If a function calls recover, we create a separate function to
	// hold the real function, and this function calls __go_can_recover
	// and bridges to it.
	if callsRecover(f) {
		fr = fr.bridgeRecoverFunc(fr.function, fti)
	}

	// One LLVM basic block per SSA block, named ".<index>.<comment>".
	fr.blocks = make([]llvm.BasicBlock, len(f.Blocks))
	fr.lastBlocks = make([]llvm.BasicBlock, len(f.Blocks))
	for i, block := range f.Blocks {
		fr.blocks[i] = llvm.AddBasicBlock(fr.function, fmt.Sprintf(".%d.%s", i, block.Comment))
	}
	fr.builder.SetInsertPointAtEnd(fr.blocks[0])

	// The prologue block precedes the entry block and holds parameter
	// decoding, closure unpacking, and local allocations.
	prologueBlock := llvm.InsertBasicBlock(fr.blocks[0], "prologue")
	fr.builder.SetInsertPointAtEnd(prologueBlock)

	// Map parameter positions to indices. We use this
	// when processing locals to map back to parameters
	// when generating debug metadata.
	paramPos := make(map[token.Pos]int)
	for i, param := range f.Params {
		paramPos[param.Pos()] = i
		llparam := fti.argInfos[i].decode(llvm.GlobalContext(), fr.builder, fr.builder)
		if isMethod && i == 0 {
			// Non-pointer receiver: reinterpret the incoming value as a
			// pointer to the receiver type and load it.
			if _, ok := param.Type().Underlying().(*types.Pointer); !ok {
				llparam = fr.builder.CreateBitCast(llparam, llvm.PointerType(fr.types.ToLLVM(param.Type()), 0), "")
				llparam = fr.builder.CreateLoad(llparam, "")
			}
		}
		fr.env[param] = newValue(llparam, param.Type())
	}

	// Load closure, extract free vars.
	if len(f.FreeVars) > 0 {
		// Pre-populate with nulls first (placeholder values).
		for _, fv := range f.FreeVars {
			fr.env[fv] = newValue(llvm.ConstNull(u.llvmtypes.ToLLVM(fv.Type())), fv.Type())
		}
		elemTypes := make([]llvm.Type, len(f.FreeVars)+1)
		elemTypes[0] = llvm.PointerType(llvm.Int8Type(), 0) // function pointer
		for i, fv := range f.FreeVars {
			elemTypes[i+1] = u.llvmtypes.ToLLVM(fv.Type())
		}
		structType := llvm.StructType(elemTypes, false)
		closure := fr.runtime.getClosure.call(fr)[0]
		closure = fr.builder.CreateBitCast(closure, llvm.PointerType(structType, 0), "")
		// Field 0 is the function pointer, so free var i lives at field i+1.
		for i, fv := range f.FreeVars {
			ptr := fr.builder.CreateStructGEP(closure, i+1, "")
			ptr = fr.builder.CreateLoad(ptr, "")
			fr.env[fv] = newValue(ptr, fv.Type())
		}
	}

	// Allocate stack space for locals in the prologue block.
	for _, local := range f.Locals {
		typ := fr.llvmtypes.ToLLVM(deref(local.Type()))
		alloca := fr.builder.CreateAlloca(typ, local.Comment)
		fr.memsetZero(alloca, llvm.SizeOf(typ))
		bcalloca := fr.builder.CreateBitCast(alloca, llvm.PointerType(llvm.Int8Type(), 0), "")
		value := newValue(bcalloca, local.Type())
		fr.env[local] = value
		if fr.GenerateDebug {
			// Locals that shadow parameters (same source position) are
			// declared with the parameter's index; others get -1.
			paramIndex, ok := paramPos[local.Pos()]
			if !ok {
				paramIndex = -1
			}
			fr.debug.Declare(fr.builder, local, alloca, paramIndex)
		}
	}

	// If this is the "init" function, enable init-specific optimizations.
	if !isMethod && f.Name() == "init" {
		fr.isInit = true
	}

	// If the function contains any defers, we must first create
	// an unwind block. We can short-circuit the check for defers with
	// f.Recover != nil.
	if f.Recover != nil || hasDefer(f) {
		fr.unwindBlock = llvm.AddBasicBlock(fr.function, "")
		fr.frameptr = fr.builder.CreateAlloca(llvm.Int8Type(), "")
	}

	// Branch from the prologue into the entry block; allocas created later
	// are inserted before this terminator.
	term := fr.builder.CreateBr(fr.blocks[0])
	fr.allocaBuilder.SetInsertPointBefore(term)

	for _, block := range f.DomPreorder() {
		fr.translateBlock(block, fr.blocks[block.Index])
	}

	fr.fixupPhis()

	if !fr.unwindBlock.IsNil() {
		fr.setupUnwindBlock(f.Recover, f.Signature.Results())
	}

	// The init function needs to register the GC roots first. We do this
	// after generating code for it because allocations may have caused
	// additional GC roots to be created.
	if fr.isInit {
		fr.builder.SetInsertPointBefore(prologueBlock.FirstInstruction())
		fr.registerGcRoots()
	}
}