Example #1
// Switches examines the control-flow graph of fn and returns the
// set of inferred value and type switches.  A value switch tests an
// ssa.Value for equality against two or more compile-time constant
// values.  Switches involving link-time constants (addresses) are
// ignored.  A type switch type-asserts an ssa.Value against two or
// more types.
//
// The switches are returned in dominance order.
//
// The resulting switches do not necessarily correspond to uses of the
// 'switch' keyword in the source: for example, a single source-level
// switch statement with non-constant cases may result in zero, one or
// many Switches, one per plural sequence of constant cases.
// Switches may even be inferred from if/else- or goto-based control flow.
// (In general, the control flow constructs of the source program
// cannot be faithfully reproduced from the SSA representation.)
//
func Switches(fn *ssa.Function) []Switch {
	// Traverse the CFG in dominance order, so we don't
	// enter an if/else-chain in the middle.
	var switches []Switch
	seen := make(map[*ssa.BasicBlock]bool) // TODO(adonovan): opt: use ssa.blockSet
	for _, b := range fn.DomPreorder() {
		if x, k := isComparisonBlock(b); x != nil {
			// Block b starts a switch.
			sw := Switch{Start: b, X: x}
			valueSwitch(&sw, k, seen)
			if len(sw.ConstCases) > 1 {
				switches = append(switches, sw)
			}
		}

		if y, x, T := isTypeAssertBlock(b); y != nil {
			// Block b starts a type switch.
			sw := Switch{Start: b, X: x}
			typeSwitch(&sw, y, T, seen)
			if len(sw.TypeCases) > 1 {
				switches = append(switches, sw)
			}
		}
	}
	return switches
}
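For context, here is a minimal driver sketch, assuming this is the Switches of golang.org/x/tools/go/ssa/ssautil: it builds SSA for a tiny synthetic package with ssautil.BuildPackage and prints whatever switches are inferred from its control flow. The source string and the classify function are invented for illustration.

package main

import (
	"fmt"
	"go/ast"
	"go/importer"
	"go/parser"
	"go/token"
	"go/types"

	"golang.org/x/tools/go/ssa"
	"golang.org/x/tools/go/ssa/ssautil"
)

const src = `package p

func classify(x int) string {
	if x == 1 {
		return "one"
	} else if x == 2 {
		return "two"
	}
	return "many"
}
`

func main() {
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	conf := &types.Config{Importer: importer.Default()}
	prog, _, err := ssautil.BuildPackage(conf, fset,
		types.NewPackage("p", "p"), []*ast.File{file}, ssa.SanityCheckFunctions)
	if err != nil {
		panic(err)
	}
	// Walk every function in the program and report any inferred switches.
	for fn := range ssautil.AllFunctions(prog) {
		for _, sw := range ssautil.Switches(fn) {
			fmt.Printf("%s: switch on %s (%d constant cases, %d type cases)\n",
				fn, sw.X.Name(), len(sw.ConstCases), len(sw.TypeCases))
		}
	}
}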
Example #2
// findIntrinsic returns the constraint generation function for an
// intrinsic function fn, or nil if the function should be handled normally.
//
func (a *analysis) findIntrinsic(fn *ssa.Function) intrinsic {
	// Consult the *Function-keyed cache.
	// A cached nil indicates a normal non-intrinsic function.
	impl, ok := a.intrinsics[fn]
	if !ok {
		impl = intrinsicsByName[fn.String()] // may be nil

		if a.isReflect(fn) {
			if !a.config.Reflection {
				impl = ext۰NoEffect // reflection disabled
			} else if impl == nil {
				// Ensure all "reflect" code is treated intrinsically.
				impl = ext۰NotYetImplemented
			}
		}

		a.intrinsics[fn] = impl
	}
	return impl
}
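The comma-ok lookup is what makes the cache work: a cached nil means "not an intrinsic, handle normally", which is distinct from "not looked up yet". A self-contained sketch of the same idiom, using hypothetical names (handler, byName, find):

package main

import "fmt"

// handler stands in for the intrinsic type above; nil means "handle normally".
type handler func() string

var byName = map[string]handler{
	"special": func() string { return "intrinsic result" },
}

var cache = map[string]handler{}

// find memoizes the lookup, including negative (nil) results, so the
// name table is consulted at most once per key.
func find(name string) handler {
	h, ok := cache[name]
	if !ok {
		h = byName[name] // may be nil
		cache[name] = h
	}
	return h
}

func main() {
	fmt.Println(find("special")())       // "intrinsic result"
	fmt.Println(find("ordinary") == nil) // true, and the nil is now cached
}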
Example #3
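// LowerAllocsToStack demotes heap allocations in f to stack allocations,
// clearing Alloc.Heap and recording them in f.Locals, whenever the package's
// escape-analysis helper escapes (not shown here) reports that the allocated
// value does not outlive the function.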
func LowerAllocsToStack(f *ssa.Function) {
	pending := make([]ssa.Value, 0, 10)

	for _, b := range f.Blocks {
		for _, instr := range b.Instrs {
			if alloc, ok := instr.(*ssa.Alloc); ok && alloc.Heap && !escapes(alloc, alloc.Block(), pending) {
				alloc.Heap = false
				f.Locals = append(f.Locals, alloc)
			}
		}
	}
}
Example #4
func (u *unit) getFunctionLinkage(f *ssa.Function) llvm.Linkage {
	switch {
	case f.Pkg == nil:
		// Synthetic functions outside packages may appear in multiple packages.
		return llvm.LinkOnceODRLinkage

	case f.Parent() != nil:
		// Anonymous.
		return llvm.InternalLinkage

	case f.Signature.Recv() == nil && !ast.IsExported(f.Name()) &&
		!(f.Name() == "main" && f.Pkg.Object.Path() == "main") &&
		f.Name() != "init":
		// This case covers unexported non-method functions, which can be
		// given internal linkage. Unexported methods are excluded because
		// they may be referenced as part of an interface method table in
		// another package. TODO(pcc): detect when this cannot happen.
		return llvm.InternalLinkage

	default:
		return llvm.ExternalLinkage
	}
}
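Read against the cases above, a hypothetical package would have its functions assigned linkage as follows (illustrative comments, not compiler output):

package p

type T struct{}

func Exported() {} // default case: llvm.ExternalLinkage
func helper()   {} // unexported, non-method: llvm.InternalLinkage
func (T) m()    {} // unexported method (Recv != nil): llvm.ExternalLinkage

var fn = func() {} // anonymous function (Parent != nil): llvm.InternalLinkage

Note that main.main and init are also excluded from the internal-linkage case, so they fall through to ExternalLinkage.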
Example #5
// callSSA interprets a call to function fn with arguments args,
// and lexical environment env, returning its result.
// callpos is the position of the callsite.
//
func callSSA(i *interpreter, caller *frame, callpos token.Pos, fn *ssa.Function, args []value, env []value) value {
	if i.mode&EnableTracing != 0 {
		fset := fn.Prog.Fset
		// TODO(adonovan): fix: loc() lies for external functions.
		fmt.Fprintf(os.Stderr, "Entering %s%s.\n", fn, loc(fset, fn.Pos()))
		suffix := ""
		if caller != nil {
			suffix = ", resuming " + caller.fn.String() + loc(fset, callpos)
		}
		defer fmt.Fprintf(os.Stderr, "Leaving %s%s.\n", fn, suffix)
	}
	fr := &frame{
		i:      i,
		caller: caller, // for panic/recover
		fn:     fn,
	}
	if fn.Parent() == nil {
		name := fn.String()
		if ext := externals[name]; ext != nil {
			if i.mode&EnableTracing != 0 {
				fmt.Fprintln(os.Stderr, "\t(external)")
			}
			return ext(fr, args)
		}
		if fn.Blocks == nil {
			panic("no code for function: " + name)
		}
	}
	fr.env = make(map[ssa.Value]value)
	fr.block = fn.Blocks[0]
	fr.locals = make([]value, len(fn.Locals))
	for i, l := range fn.Locals {
		fr.locals[i] = zero(deref(l.Type()))
		fr.env[l] = &fr.locals[i]
	}
	for i, p := range fn.Params {
		fr.env[p] = args[i]
	}
	for i, fv := range fn.FreeVars {
		fr.env[fv] = env[i]
	}
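	// Run the frame's code block by block; runFrame clears fr.block
	// once the function returns.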
	for fr.block != nil {
		runFrame(fr)
	}
	// Destroy the locals to avoid accidental use after return.
	for i := range fn.Locals {
		fr.locals[i] = bad{}
	}
	return fr.result
}
Example #6
func (u *unit) defineFunction(f *ssa.Function) {
	// Only define functions from this package, or synthetic
	// wrappers (which do not have a package).
	if f.Pkg != nil && f.Pkg != u.pkg {
		return
	}

	llfn := u.resolveFunctionGlobal(f)
	linkage := u.getFunctionLinkage(f)

	isMethod := f.Signature.Recv() != nil

	// Methods cannot be referred to via a descriptor.
	if !isMethod {
		llfd := u.resolveFunctionDescriptorGlobal(f)
		llfd.SetInitializer(llvm.ConstBitCast(llfn, llvm.PointerType(llvm.Int8Type(), 0)))
		llfd.SetLinkage(linkage)
	}

	// Functions without bodies (e.g. externally implemented ones) need
	// nothing beyond the declaration and descriptor handled above.
	if len(f.Blocks) == 0 {
		return
	}

	ssaopt.LowerAllocsToStack(f)

	if u.DumpSSA {
		f.WriteTo(os.Stderr)
	}

	fr := newFrame(u, llfn)
	defer fr.dispose()
	fr.addCommonFunctionAttrs(fr.function)
	fr.function.SetLinkage(linkage)

	fr.logf("Define function: %s", f.String())
	fti := u.llvmtypes.getSignatureInfo(f.Signature)
	delete(u.undefinedFuncs, f)
	fr.retInf = fti.retInf

	// Push the compile unit and function onto the debug context.
	if u.GenerateDebug {
		u.debug.PushFunction(fr.function, f.Signature, f.Pos())
		defer u.debug.PopFunction()
		u.debug.SetLocation(fr.builder, f.Pos())
	}

	// If a function calls recover, we create a separate function to
	// hold the real function, and this function calls __go_can_recover
	// and bridges to it.
	if callsRecover(f) {
		fr = fr.bridgeRecoverFunc(fr.function, fti)
	}

	fr.blocks = make([]llvm.BasicBlock, len(f.Blocks))
	fr.lastBlocks = make([]llvm.BasicBlock, len(f.Blocks))
	for i, block := range f.Blocks {
		fr.blocks[i] = llvm.AddBasicBlock(fr.function, fmt.Sprintf(".%d.%s", i, block.Comment))
	}
	fr.builder.SetInsertPointAtEnd(fr.blocks[0])
	fr.transformSwitches(f)

	prologueBlock := llvm.InsertBasicBlock(fr.blocks[0], "prologue")
	fr.builder.SetInsertPointAtEnd(prologueBlock)

	for i, param := range f.Params {
		llparam := fti.argInfos[i].decode(llvm.GlobalContext(), fr.builder, fr.builder)
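		// A non-pointer receiver arrives indirectly: cast the incoming
		// pointer to the receiver's LLVM type and load the value.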
		if isMethod && i == 0 {
			if _, ok := param.Type().Underlying().(*types.Pointer); !ok {
				llparam = fr.builder.CreateBitCast(llparam, llvm.PointerType(fr.types.ToLLVM(param.Type()), 0), "")
				llparam = fr.builder.CreateLoad(llparam, "")
			}
		}
		fr.env[param] = newValue(llparam, param.Type())
	}

	// Load closure, extract free vars.
	if len(f.FreeVars) > 0 {
		for _, fv := range f.FreeVars {
			fr.env[fv] = newValue(llvm.ConstNull(u.llvmtypes.ToLLVM(fv.Type())), fv.Type())
		}
		elemTypes := make([]llvm.Type, len(f.FreeVars)+1)
		elemTypes[0] = llvm.PointerType(llvm.Int8Type(), 0) // function pointer
		for i, fv := range f.FreeVars {
			elemTypes[i+1] = u.llvmtypes.ToLLVM(fv.Type())
		}
		structType := llvm.StructType(elemTypes, false)
		closure := fr.function.Param(fti.chainIndex)
		closure = fr.builder.CreateBitCast(closure, llvm.PointerType(structType, 0), "")
		for i, fv := range f.FreeVars {
			ptr := fr.builder.CreateStructGEP(closure, i+1, "")
			ptr = fr.builder.CreateLoad(ptr, "")
			fr.env[fv] = newValue(ptr, fv.Type())
		}
	}

	// Allocate stack space for locals in the prologue block.
	for _, local := range f.Locals {
		typ := fr.llvmtypes.ToLLVM(deref(local.Type()))
		alloca := fr.builder.CreateAlloca(typ, local.Comment)
		fr.memsetZero(alloca, llvm.SizeOf(typ))
		bcalloca := fr.builder.CreateBitCast(alloca, llvm.PointerType(llvm.Int8Type(), 0), "")
		value := newValue(bcalloca, local.Type())
		fr.env[local] = value
	}

	// If the function contains any defers, we must first create
	// an unwind block. We can short-circuit the check for defers with
	// f.Recover != nil.
	if f.Recover != nil || hasDefer(f) {
		fr.unwindBlock = llvm.AddBasicBlock(fr.function, "")
		fr.frameptr = fr.builder.CreateAlloca(llvm.Int8Type(), "")
	}

	// Keep track of the block into which we need to insert the call
	// to __go_register_gc_roots. This needs to be inserted after the
	// init guard check under the llgo ABI.
	var registerGcBlock llvm.BasicBlock

	// If this is the "init" function, emit the init guard check and
	// enable init-specific optimizations.
	if !isMethod && f.Name() == "init" {
		registerGcBlock = fr.emitInitPrologue()
		fr.isInit = true
	}

	fr.builder.CreateBr(fr.blocks[0])
	fr.allocaBuilder.SetInsertPointBefore(prologueBlock.FirstInstruction())

	for _, block := range f.DomPreorder() {
		llblock := fr.blocks[block.Index]
		if llblock.IsNil() {
			continue
		}
		fr.translateBlock(block, llblock)
	}

	fr.fixupPhis()

	if !fr.unwindBlock.IsNil() {
		fr.setupUnwindBlock(f.Recover, f.Signature.Results())
	}

	// The init function needs to register the GC roots first. We do this
	// after generating code for it because allocations may have caused
	// additional GC roots to be created.
	if fr.isInit {
		fr.builder.SetInsertPointBefore(registerGcBlock.FirstInstruction())
		fr.registerGcRoots()
	}
}