// chanOps returns a slice of all the channel operations in the instruction.
// Derived from oracle/peers.go.
func chanOps(instr ssa.Instruction) []chanOp {
	fn := instr.Parent()
	var ops []chanOp
	switch instr := instr.(type) {
	case *ssa.UnOp:
		if instr.Op == token.ARROW {
			// TODO(adonovan): don't assume <-ch; could be 'range ch'.
			ops = append(ops, chanOp{instr.X, "received", instr.Pos(), len("<-"), fn})
		}
	case *ssa.Send:
		ops = append(ops, chanOp{instr.Chan, "sent", instr.Pos(), len("<-"), fn})
	case *ssa.Select:
		for _, st := range instr.States {
			mode := "received"
			if st.Dir == types.SendOnly {
				mode = "sent"
			}
			ops = append(ops, chanOp{st.Chan, mode, st.Pos, len("<-"), fn})
		}
	case ssa.CallInstruction:
		call := instr.Common()
		if blt, ok := call.Value.(*ssa.Builtin); ok && blt.Name() == "close" {
			pos := instr.Common().Pos()
			ops = append(ops, chanOp{call.Args[0], "closed", pos - token.Pos(len("close")), len("close("), fn})
		}
	}
	return ops
}
// operands returns the values used as operands of inst.
// Instruction.Operands yields pointers to the operand slots;
// this helper dereferences them into a fresh slice.
func operands(inst ssa.Instruction) []ssa.Value {
	ops := inst.Operands(nil)
	vs := make([]ssa.Value, len(ops))
	for i, op := range ops {
		vs[i] = *op
	}
	return vs
}
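A possible usage sketch for the operands helper: gather the channel-typed operands of every instruction in a function. The package name, channelOperands, and the caller-supplied fn are hypothetical, operands refers to the helper above, and the import paths assume the current golang.org/x/tools layout. The nil check matters because some operand slots (for example the Low/High/Max bounds of a Slice) may legitimately hold nil.

// Hypothetical package name; operands (above) is assumed to live here too.
package chanops

import (
	"go/types"

	"golang.org/x/tools/go/ssa"
)

// channelOperands maps each instruction in fn to those of its operands
// whose type is a channel. fn is any *ssa.Function from a built program.
func channelOperands(fn *ssa.Function) map[ssa.Instruction][]ssa.Value {
	out := make(map[ssa.Instruction][]ssa.Value)
	for _, b := range fn.Blocks {
		for _, instr := range b.Instrs {
			for _, v := range operands(instr) {
				if v == nil {
					continue // nil operand slot, e.g. an omitted slice bound
				}
				if _, ok := v.Type().Underlying().(*types.Chan); ok {
					out[instr] = append(out[instr], v)
				}
			}
		}
	}
	return out
}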
// chanOps returns a slice of all the channel operations in the instruction.
func chanOps(instr ssa.Instruction) []chanOp {
	// TODO(adonovan): handle calls to reflect.{Select,Recv,Send,Close} too.
	var ops []chanOp
	switch instr := instr.(type) {
	case *ssa.UnOp:
		if instr.Op == token.ARROW {
			ops = append(ops, chanOp{instr.X, types.RecvOnly, instr.Pos()})
		}
	case *ssa.Send:
		ops = append(ops, chanOp{instr.Chan, types.SendOnly, instr.Pos()})
	case *ssa.Select:
		for _, st := range instr.States {
			ops = append(ops, chanOp{st.Chan, st.Dir, st.Pos})
		}
	}
	return ops
}
// chanOps returns a slice of all the channel operations in the instruction.
func chanOps(instr ssa.Instruction) []chanOp {
	// TODO(adonovan): handle calls to reflect.{Select,Recv,Send,Close} too.
	var ops []chanOp
	switch instr := instr.(type) {
	case *ssa.UnOp:
		if instr.Op == token.ARROW {
			ops = append(ops, chanOp{instr.X, types.RecvOnly, instr.Pos()})
		}
	case *ssa.Send:
		ops = append(ops, chanOp{instr.Chan, types.SendOnly, instr.Pos()})
	case *ssa.Select:
		for _, st := range instr.States {
			ops = append(ops, chanOp{st.Chan, st.Dir, st.Pos})
		}
	case ssa.CallInstruction:
		cc := instr.Common()
		if b, ok := cc.Value.(*ssa.Builtin); ok && b.Name() == "close" {
			ops = append(ops, chanOp{cc.Args[0], types.SendRecv, cc.Pos()})
		}
	}
	return ops
}
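The chanOps variants above are per-instruction helpers; a caller still has to walk every instruction of every function. Below is a minimal driver sketch under that assumption: allChanOps and the package name are hypothetical, chanOp/chanOps refer to the definitions above, and the import paths assume the current golang.org/x/tools layout.

// Hypothetical package name; chanOp and chanOps (above) are assumed to live here.
package chanops

import (
	"golang.org/x/tools/go/ssa"
	"golang.org/x/tools/go/ssa/ssautil"
)

// allChanOps applies chanOps to every instruction of every function
// in a built SSA program and collects the results.
func allChanOps(prog *ssa.Program) []chanOp {
	var all []chanOp
	for fn := range ssautil.AllFunctions(prog) {
		for _, b := range fn.Blocks {
			for _, instr := range b.Instrs {
				all = append(all, chanOps(instr)...)
			}
		}
	}
	return all
}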
// visitInstr interprets a single ssa.Instruction within the activation
// record frame. It returns a continuation value indicating where to
// read the next instruction from.
func visitInstr(fr *frame, instr ssa.Instruction) continuation {
	switch instr := instr.(type) {
	case *ssa.DebugRef:
		// no-op

	case *ssa.UnOp:
		fr.env[instr] = unop(instr, fr.get(instr.X))

	case *ssa.BinOp:
		fr.env[instr] = binop(instr.Op, instr.X.Type(), fr.get(instr.X), fr.get(instr.Y))

	case *ssa.Call:
		fn, args := prepareCall(fr, &instr.Call)
		fr.env[instr] = call(fr.i, fr, instr.Pos(), fn, args)

	case *ssa.ChangeInterface:
		fr.env[instr] = fr.get(instr.X)

	case *ssa.ChangeType:
		fr.env[instr] = fr.get(instr.X) // (can't fail)

	case *ssa.Convert:
		fr.env[instr] = conv(instr.Type(), instr.X.Type(), fr.get(instr.X))

	case *ssa.MakeInterface:
		fr.env[instr] = iface{t: instr.X.Type(), v: fr.get(instr.X)}

	case *ssa.Extract:
		fr.env[instr] = fr.get(instr.Tuple).(tuple)[instr.Index]

	case *ssa.Slice:
		fr.env[instr] = slice(fr.get(instr.X), fr.get(instr.Low), fr.get(instr.High), fr.get(instr.Max))

	case *ssa.Return:
		switch len(instr.Results) {
		case 0:
		case 1:
			fr.result = fr.get(instr.Results[0])
		default:
			var res []value
			for _, r := range instr.Results {
				res = append(res, fr.get(r))
			}
			fr.result = tuple(res)
		}
		fr.block = nil
		return kReturn

	case *ssa.RunDefers:
		fr.runDefers()

	case *ssa.Panic:
		panic(targetPanic{fr.get(instr.X)})

	case *ssa.Send:
		fr.get(instr.Chan).(chan value) <- copyVal(fr.get(instr.X))

	case *ssa.Store:
		*fr.get(instr.Addr).(*value) = copyVal(fr.get(instr.Val))

	case *ssa.If:
		succ := 1
		if fr.get(instr.Cond).(bool) {
			succ = 0
		}
		fr.prevBlock, fr.block = fr.block, fr.block.Succs[succ]
		return kJump

	case *ssa.Jump:
		fr.prevBlock, fr.block = fr.block, fr.block.Succs[0]
		return kJump

	case *ssa.Defer:
		fn, args := prepareCall(fr, &instr.Call)
		fr.defers = &deferred{
			fn:    fn,
			args:  args,
			instr: instr,
			tail:  fr.defers,
		}

	case *ssa.Go:
		fn, args := prepareCall(fr, &instr.Call)
		go call(fr.i, nil, instr.Pos(), fn, args)

	case *ssa.MakeChan:
		fr.env[instr] = make(chan value, asInt(fr.get(instr.Size)))

	case *ssa.Alloc:
		var addr *value
		if instr.Heap {
			// new
			addr = new(value)
			fr.env[instr] = addr
		} else {
			// local
			addr = fr.env[instr].(*value)
		}
		*addr = zero(deref(instr.Type()))

	case *ssa.MakeSlice:
		slice := make([]value, asInt(fr.get(instr.Cap)))
		tElt := instr.Type().Underlying().(*types.Slice).Elem()
		for i := range slice {
			slice[i] = zero(tElt)
		}
		fr.env[instr] = slice[:asInt(fr.get(instr.Len))]

	case *ssa.MakeMap:
		reserve := 0
		if instr.Reserve != nil {
			reserve = asInt(fr.get(instr.Reserve))
		}
		fr.env[instr] = makeMap(instr.Type().Underlying().(*types.Map).Key(), reserve)

	case *ssa.Range:
		fr.env[instr] = rangeIter(fr.get(instr.X), instr.X.Type())

	case *ssa.Next:
		fr.env[instr] = fr.get(instr.Iter).(iter).next()

	case *ssa.FieldAddr:
		x := fr.get(instr.X)
		fr.env[instr] = &(*x.(*value)).(structure)[instr.Field]

	case *ssa.Field:
		fr.env[instr] = copyVal(fr.get(instr.X).(structure)[instr.Field])

	case *ssa.IndexAddr:
		x := fr.get(instr.X)
		idx := fr.get(instr.Index)
		switch x := x.(type) {
		case []value:
			fr.env[instr] = &x[asInt(idx)]
		case *value: // *array
			fr.env[instr] = &(*x).(array)[asInt(idx)]
		default:
			panic(fmt.Sprintf("unexpected x type in IndexAddr: %T", x))
		}

	case *ssa.Index:
		fr.env[instr] = copyVal(fr.get(instr.X).(array)[asInt(fr.get(instr.Index))])

	case *ssa.Lookup:
		fr.env[instr] = lookup(instr, fr.get(instr.X), fr.get(instr.Index))

	case *ssa.MapUpdate:
		m := fr.get(instr.Map)
		key := fr.get(instr.Key)
		v := fr.get(instr.Value)
		switch m := m.(type) {
		case map[value]value:
			m[key] = v
		case *hashmap:
			m.insert(key.(hashable), v)
		default:
			panic(fmt.Sprintf("illegal map type: %T", m))
		}

	case *ssa.TypeAssert:
		fr.env[instr] = typeAssert(fr.i, instr, fr.get(instr.X).(iface))

	case *ssa.MakeClosure:
		var bindings []value
		for _, binding := range instr.Bindings {
			bindings = append(bindings, fr.get(binding))
		}
		fr.env[instr] = &closure{instr.Fn.(*ssa.Function), bindings}

	case *ssa.Phi:
		for i, pred := range instr.Block().Preds {
			if fr.prevBlock == pred {
				fr.env[instr] = fr.get(instr.Edges[i])
				break
			}
		}

	case *ssa.Select:
		var cases []reflect.SelectCase
		if !instr.Blocking {
			cases = append(cases, reflect.SelectCase{
				Dir: reflect.SelectDefault,
			})
		}
		for _, state := range instr.States {
			var dir reflect.SelectDir
			if state.Dir == types.RecvOnly {
				dir = reflect.SelectRecv
			} else {
				dir = reflect.SelectSend
			}
			var send reflect.Value
			if state.Send != nil {
				send = reflect.ValueOf(fr.get(state.Send))
			}
			cases = append(cases, reflect.SelectCase{
				Dir:  dir,
				Chan: reflect.ValueOf(fr.get(state.Chan)),
				Send: send,
			})
		}
		chosen, recv, recvOk := reflect.Select(cases)
		if !instr.Blocking {
			chosen-- // default case should have index -1.
		}
		r := tuple{chosen, recvOk}
		for i, st := range instr.States {
			if st.Dir == types.RecvOnly {
				var v value
				if i == chosen && recvOk {
					// No need to copy since send makes an unaliased copy.
					v = recv.Interface().(value)
				} else {
					v = zero(st.Chan.Type().Underlying().(*types.Chan).Elem())
				}
				r = append(r, v)
			}
		}
		fr.env[instr] = r

	default:
		panic(fmt.Sprintf("unexpected instruction: %T", instr))
	}

	// if val, ok := instr.(ssa.Value); ok {
	// 	fmt.Println(toString(fr.env[val])) // debugging
	// }

	return kNext
}
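visitInstr returns kNext, kJump, or kReturn, so an outer loop has to consume those continuations. The following is an illustrative reconstruction of such a driver loop, not a verbatim copy of the interpreter's own runFrame (which also handles tracing, defers, and panic recovery); frame, continuation, and the k* constants are the ones assumed by the snippet above.

// runFrame is a hedged sketch of the continuation-driven block loop:
// execute the current block's instructions in order, restart at the
// (updated) fr.block on kJump, and unwind on kReturn.
func runFrame(fr *frame) {
	for {
	block:
		for _, instr := range fr.block.Instrs {
			switch visitInstr(fr, instr) {
			case kReturn:
				return // fr.result was set by the *ssa.Return case
			case kNext:
				// fall through to the next instruction in this block
			case kJump:
				break block // fr.block now points at the chosen successor
			}
		}
	}
}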
func (fr *frame) instruction(instr ssa.Instruction) {
	fr.logf("[%T] %v @ %s\n", instr, instr, fr.pkg.Prog.Fset.Position(instr.Pos()))
	if fr.GenerateDebug {
		fr.debug.setLocation(fr.builder, instr.Pos())
	}

	// Check if we'll need to backpatch; see comment
	// in fr.value().
	if v, ok := instr.(ssa.Value); ok {
		if b := fr.backpatcher(v); b != nil {
			defer b()
		}
	}

	switch instr := instr.(type) {
	case *ssa.Alloc:
		typ := fr.llvmtypes.ToLLVM(deref(instr.Type()))
		var value llvm.Value
		if instr.Heap {
			value = fr.createTypeMalloc(typ)
			value.SetName(instr.Comment)
			fr.env[instr] = fr.NewValue(value, instr.Type())
		} else {
			value = fr.env[instr].LLVMValue()
		}
		fr.memsetZero(value, llvm.SizeOf(typ))

	case *ssa.BinOp:
		lhs, rhs := fr.value(instr.X), fr.value(instr.Y)
		fr.env[instr] = lhs.BinaryOp(instr.Op, rhs).(*LLVMValue)

	case *ssa.Call:
		fn, args, result := fr.prepareCall(instr)
		// Some builtins may only be used immediately, and not
		// deferred; in this case, "fn" will be nil, and result
		// may be non-nil (it will be nil for builtins without
		// results.)
		if fn == nil {
			if result != nil {
				fr.env[instr] = result
			}
		} else {
			result = fr.createCall(fn, args)
			fr.env[instr] = result
		}

	case *ssa.ChangeInterface:
		x := fr.value(instr.X)
		// The source type must be a non-empty interface,
		// as ChangeInterface cannot fail (E2I may fail).
		if instr.Type().Underlying().(*types.Interface).NumMethods() > 0 {
			// TODO(axw) optimisation for I2I case where we
			// know statically the methods to carry over.
			x = x.convertI2E()
			x, _ = x.convertE2I(instr.Type())
		} else {
			x = x.convertI2E()
			x = fr.NewValue(x.LLVMValue(), instr.Type())
		}
		fr.env[instr] = x

	case *ssa.ChangeType:
		value := fr.value(instr.X).LLVMValue()
		if _, ok := instr.Type().Underlying().(*types.Pointer); ok {
			value = fr.builder.CreateBitCast(value, fr.llvmtypes.ToLLVM(instr.Type()), "")
		}
		v := fr.NewValue(value, instr.Type())
		if _, ok := instr.X.(*ssa.Phi); ok {
			v = phiValue(fr.compiler, v)
		}
		fr.env[instr] = v

	case *ssa.Convert:
		v := fr.value(instr.X)
		if _, ok := instr.X.(*ssa.Phi); ok {
			v = phiValue(fr.compiler, v)
		}
		fr.env[instr] = v.Convert(instr.Type()).(*LLVMValue)

	//case *ssa.DebugRef:

	case *ssa.Defer:
		fn, args, result := fr.prepareCall(instr)
		if result != nil {
			panic("illegal use of builtin in defer statement")
		}
		fn = fr.indirectFunction(fn, args)
		fr.createCall(fr.runtime.pushdefer, []*LLVMValue{fn})

	case *ssa.Extract:
		tuple := fr.value(instr.Tuple).LLVMValue()
		elem := fr.builder.CreateExtractValue(tuple, instr.Index, instr.Name())
		elemtyp := instr.Type()
		fr.env[instr] = fr.NewValue(elem, elemtyp)

	case *ssa.Field:
		value := fr.value(instr.X).LLVMValue()
		field := fr.builder.CreateExtractValue(value, instr.Field, instr.Name())
		fieldtyp := instr.Type()
		fr.env[instr] = fr.NewValue(field, fieldtyp)

	case *ssa.FieldAddr:
		// TODO: implement nil check and panic.
		// TODO: combine a chain of {Field,Index}Addrs into a single GEP.
		ptr := fr.value(instr.X).LLVMValue()
		fieldptr := fr.builder.CreateStructGEP(ptr, instr.Field, instr.Name())
		fieldptrtyp := instr.Type()
		fr.env[instr] = fr.NewValue(fieldptr, fieldptrtyp)

	case *ssa.Go:
		fn, args, result := fr.prepareCall(instr)
		if result != nil {
			panic("illegal use of builtin in go statement")
		}
		fn = fr.indirectFunction(fn, args)
		fr.createCall(fr.runtime.Go, []*LLVMValue{fn})

	case *ssa.If:
		cond := fr.value(instr.Cond).LLVMValue()
		block := instr.Block()
		trueBlock := fr.block(block.Succs[0])
		falseBlock := fr.block(block.Succs[1])
		fr.builder.CreateCondBr(cond, trueBlock, falseBlock)

	case *ssa.Index:
		// FIXME Surely we should be dealing with an
		// *array, so we can do a GEP?
		array := fr.value(instr.X).LLVMValue()
		arrayptr := fr.builder.CreateAlloca(array.Type(), "")
		fr.builder.CreateStore(array, arrayptr)
		index := fr.value(instr.Index).LLVMValue()
		zero := llvm.ConstNull(index.Type())
		addr := fr.builder.CreateGEP(arrayptr, []llvm.Value{zero, index}, "")
		fr.env[instr] = fr.NewValue(fr.builder.CreateLoad(addr, ""), instr.Type())

	case *ssa.IndexAddr:
		// TODO: implement nil-check and panic.
		// TODO: combine a chain of {Field,Index}Addrs into a single GEP.
		x := fr.value(instr.X).LLVMValue()
		index := fr.value(instr.Index).LLVMValue()
		var addr llvm.Value
		var elemtyp types.Type
		zero := llvm.ConstNull(index.Type())
		switch typ := instr.X.Type().Underlying().(type) {
		case *types.Slice:
			elemtyp = typ.Elem()
			x = fr.builder.CreateExtractValue(x, 0, "")
			addr = fr.builder.CreateGEP(x, []llvm.Value{index}, "")
		case *types.Pointer: // *array
			elemtyp = typ.Elem().Underlying().(*types.Array).Elem()
			addr = fr.builder.CreateGEP(x, []llvm.Value{zero, index}, "")
		}
		fr.env[instr] = fr.NewValue(addr, types.NewPointer(elemtyp))

	case *ssa.Jump:
		succ := instr.Block().Succs[0]
		fr.builder.CreateBr(fr.block(succ))

	case *ssa.Lookup:
		x := fr.value(instr.X)
		index := fr.value(instr.Index)
		if isString(x.Type().Underlying()) {
			fr.env[instr] = fr.stringIndex(x, index)
		} else {
			fr.env[instr] = fr.mapLookup(x, index, instr.CommaOk)
		}

	case *ssa.MakeChan:
		fr.env[instr] = fr.makeChan(instr.Type(), fr.value(instr.Size))

	case *ssa.MakeClosure:
		fn := fr.resolveFunction(instr.Fn.(*ssa.Function))
		bindings := make([]*LLVMValue, len(instr.Bindings))
		for i, binding := range instr.Bindings {
			bindings[i] = fr.value(binding)
		}
		fr.env[instr] = fr.makeClosure(fn, bindings)

	case *ssa.MakeInterface:
		receiver := fr.value(instr.X)
		fr.env[instr] = fr.makeInterface(receiver, instr.Type())

	case *ssa.MakeMap:
		fr.env[instr] = fr.makeMap(instr.Type(), fr.value(instr.Reserve))

	case *ssa.MakeSlice:
		length := fr.value(instr.Len)
		capacity := fr.value(instr.Cap)
		fr.env[instr] = fr.makeSlice(instr.Type(), length, capacity)

	case *ssa.MapUpdate:
		m := fr.value(instr.Map)
		k := fr.value(instr.Key)
		v := fr.value(instr.Value)
		fr.mapUpdate(m, k, v)

	case *ssa.Next:
		iter := fr.value(instr.Iter)
		if !instr.IsString {
			fr.env[instr] = fr.mapIterNext(iter)
			return
		}

		// String range
		//
		// We make some assumptions for now around the
		// current state of affairs in go.tools/ssa.
		//
		// - Range's block is a predecessor of Next's.
		//   (this is currently true, but may change in the future;
		//   adonovan says he will expose the dominator tree
		//   computation in the future, which we can use here).
		// - Next is the first non-Phi instruction in its block.
		//   (this is not strictly necessary; we can move the Phi
		//   to the top of the block, and defer the tuple creation
		//   to Extract).
		assert(instr.Iter.(*ssa.Range).Block() == instr.Block().Preds[0])
		for _, blockInstr := range instr.Block().Instrs {
			if instr == blockInstr {
				break
			}
			_, isphi := blockInstr.(*ssa.Phi)
			assert(isphi)
		}
		preds := instr.Block().Preds
		llpreds := make([]llvm.BasicBlock, len(preds))
		for i, b := range preds {
			llpreds[i] = fr.block(b)
		}
		fr.env[instr] = fr.stringIterNext(iter, llpreds)

	case *ssa.Panic:
		arg := fr.value(instr.X).LLVMValue()
		fr.builder.CreateCall(fr.runtime.panic_.LLVMValue(), []llvm.Value{arg}, "")
		fr.builder.CreateUnreachable()

	case *ssa.Phi:
		typ := instr.Type()
		phi := fr.builder.CreatePHI(fr.llvmtypes.ToLLVM(typ), instr.Comment)
		fr.env[instr] = fr.NewValue(phi, typ)
		values := make([]llvm.Value, len(instr.Edges))
		blocks := make([]llvm.BasicBlock, len(instr.Edges))
		block := instr.Block()
		for i, edge := range instr.Edges {
			values[i] = fr.value(edge).LLVMValue()
			blocks[i] = fr.block(block.Preds[i])
		}
		phi.AddIncoming(values, blocks)

	case *ssa.Range:
		x := fr.value(instr.X)
		switch x.Type().Underlying().(type) {
		case *types.Map:
			fr.env[instr] = fr.mapIterInit(x)
		case *types.Basic: // string
			fr.env[instr] = x
		default:
			panic(fmt.Sprintf("unhandled range for type %T", x.Type()))
		}

	case *ssa.Return:
		switch n := len(instr.Results); n {
		case 0:
			// https://code.google.com/p/go/issues/detail?id=7022
			if r := instr.Parent().Signature.Results(); r != nil && r.Len() > 0 {
				fr.builder.CreateUnreachable()
			} else {
				fr.builder.CreateRetVoid()
			}
		case 1:
			fr.builder.CreateRet(fr.value(instr.Results[0]).LLVMValue())
		default:
			values := make([]llvm.Value, n)
			for i, result := range instr.Results {
				values[i] = fr.value(result).LLVMValue()
			}
			fr.builder.CreateAggregateRet(values)
		}

	case *ssa.RunDefers:
		fr.builder.CreateCall(fr.runtime.rundefers.LLVMValue(), nil, "")

	case *ssa.Select:
		states := make([]selectState, len(instr.States))
		for i, state := range instr.States {
			states[i] = selectState{
				Dir:  state.Dir,
				Chan: fr.value(state.Chan),
				Send: fr.value(state.Send),
			}
		}
		fr.env[instr] = fr.chanSelect(states, instr.Blocking)

	case *ssa.Send:
		fr.chanSend(fr.value(instr.Chan), fr.value(instr.X))

	case *ssa.Slice:
		x := fr.value(instr.X)
		low := fr.value(instr.Low)
		high := fr.value(instr.High)
		fr.env[instr] = fr.slice(x, low, high)

	case *ssa.Store:
		addr := fr.value(instr.Addr).LLVMValue()
		value := fr.value(instr.Val).LLVMValue()
		// The bitcast is necessary to handle recursive pointer stores.
		addr = fr.builder.CreateBitCast(addr, llvm.PointerType(value.Type(), 0), "")
		fr.builder.CreateStore(value, addr)

	case *ssa.TypeAssert:
		x := fr.value(instr.X)
		if iface, ok := x.Type().Underlying().(*types.Interface); ok && iface.NumMethods() > 0 {
			x = x.convertI2E()
		}
		if !instr.CommaOk {
			if _, ok := instr.AssertedType.Underlying().(*types.Interface); ok {
				fr.env[instr] = x.mustConvertE2I(instr.AssertedType)
			} else {
				fr.env[instr] = x.mustConvertE2V(instr.AssertedType)
			}
		} else {
			var result, success *LLVMValue
			if _, ok := instr.AssertedType.Underlying().(*types.Interface); ok {
				result, success = x.convertE2I(instr.AssertedType)
			} else {
				result, success = x.convertE2V(instr.AssertedType)
			}
			resultval := result.LLVMValue()
			okval := success.LLVMValue()
			pairtyp := llvm.StructType([]llvm.Type{resultval.Type(), okval.Type()}, false)
			pair := llvm.Undef(pairtyp)
			pair = fr.builder.CreateInsertValue(pair, resultval, 0, "")
			pair = fr.builder.CreateInsertValue(pair, okval, 1, "")
			fr.env[instr] = fr.NewValue(pair, instr.Type())
		}

	case *ssa.UnOp:
		operand := fr.value(instr.X)
		switch instr.Op {
		case token.ARROW:
			fr.env[instr] = fr.chanRecv(operand, instr.CommaOk)
		case token.MUL:
			// The bitcast is necessary to handle recursive pointer loads.
			llptr := fr.builder.CreateBitCast(operand.LLVMValue(), llvm.PointerType(fr.llvmtypes.ToLLVM(instr.Type()), 0), "")
			fr.env[instr] = fr.NewValue(fr.builder.CreateLoad(llptr, ""), instr.Type())
		default:
			fr.env[instr] = operand.UnaryOp(instr.Op).(*LLVMValue)
		}

	default:
		panic(fmt.Sprintf("unhandled: %v", instr))
	}
}
// genInstr generates constraints for instruction instr in context cgn.
func (a *analysis) genInstr(cgn *cgnode, instr ssa.Instruction) {
	if a.log != nil {
		var prefix string
		if val, ok := instr.(ssa.Value); ok {
			prefix = val.Name() + " = "
		}
		fmt.Fprintf(a.log, "; %s%s\n", prefix, instr)
	}

	switch instr := instr.(type) {
	case *ssa.DebugRef:
		// no-op.

	case *ssa.UnOp:
		switch instr.Op {
		case token.ARROW: // <-x
			// We can ignore instr.CommaOk because the node we're
			// altering is always at zero offset relative to instr
			tElem := instr.X.Type().Underlying().(*types.Chan).Elem()
			a.genLoad(cgn, a.valueNode(instr), instr.X, 0, a.sizeof(tElem))

		case token.MUL: // *x
			a.genLoad(cgn, a.valueNode(instr), instr.X, 0, a.sizeof(instr.Type()))

		default:
			// NOT, SUB, XOR: no-op.
		}

	case *ssa.BinOp:
		// All no-ops.

	case ssa.CallInstruction: // *ssa.Call, *ssa.Go, *ssa.Defer
		a.genCall(cgn, instr)

	case *ssa.ChangeType:
		a.copy(a.valueNode(instr), a.valueNode(instr.X), 1)

	case *ssa.Convert:
		a.genConv(instr, cgn)

	case *ssa.Extract:
		a.copy(a.valueNode(instr),
			a.valueOffsetNode(instr.Tuple, instr.Index),
			a.sizeof(instr.Type()))

	case *ssa.FieldAddr:
		a.genOffsetAddr(cgn, instr, a.valueNode(instr.X),
			a.offsetOf(mustDeref(instr.X.Type()), instr.Field))

	case *ssa.IndexAddr:
		a.genOffsetAddr(cgn, instr, a.valueNode(instr.X), 1)

	case *ssa.Field:
		a.copy(a.valueNode(instr),
			a.valueOffsetNode(instr.X, instr.Field),
			a.sizeof(instr.Type()))

	case *ssa.Index:
		a.copy(a.valueNode(instr), 1+a.valueNode(instr.X), a.sizeof(instr.Type()))

	case *ssa.Select:
		recv := a.valueOffsetNode(instr, 2) // instr : (index, recvOk, recv0, ... recv_n-1)
		for _, st := range instr.States {
			elemSize := a.sizeof(st.Chan.Type().Underlying().(*types.Chan).Elem())
			switch st.Dir {
			case types.RecvOnly:
				a.genLoad(cgn, recv, st.Chan, 0, elemSize)
				recv += nodeid(elemSize)

			case types.SendOnly:
				a.genStore(cgn, st.Chan, a.valueNode(st.Send), 0, elemSize)
			}
		}

	case *ssa.Return:
		results := a.funcResults(cgn.obj)
		for _, r := range instr.Results {
			sz := a.sizeof(r.Type())
			a.copy(results, a.valueNode(r), sz)
			results += nodeid(sz)
		}

	case *ssa.Send:
		a.genStore(cgn, instr.Chan, a.valueNode(instr.X), 0, a.sizeof(instr.X.Type()))

	case *ssa.Store:
		a.genStore(cgn, instr.Addr, a.valueNode(instr.Val), 0, a.sizeof(instr.Val.Type()))

	case *ssa.Alloc, *ssa.MakeSlice, *ssa.MakeChan, *ssa.MakeMap, *ssa.MakeInterface:
		v := instr.(ssa.Value)
		a.addressOf(v.Type(), a.valueNode(v), a.objectNode(cgn, v))

	case *ssa.ChangeInterface:
		a.copy(a.valueNode(instr), a.valueNode(instr.X), 1)

	case *ssa.TypeAssert:
		a.typeAssert(instr.AssertedType, a.valueNode(instr), a.valueNode(instr.X), true)

	case *ssa.Slice:
		a.copy(a.valueNode(instr), a.valueNode(instr.X), 1)

	case *ssa.If, *ssa.Jump:
		// no-op.

	case *ssa.Phi:
		sz := a.sizeof(instr.Type())
		for _, e := range instr.Edges {
			a.copy(a.valueNode(instr), a.valueNode(e), sz)
		}

	case *ssa.MakeClosure:
		fn := instr.Fn.(*ssa.Function)
		a.copy(a.valueNode(instr), a.valueNode(fn), 1)
		// Free variables are treated like global variables.
		for i, b := range instr.Bindings {
			a.copy(a.valueNode(fn.FreeVars[i]), a.valueNode(b), a.sizeof(b.Type()))
		}

	case *ssa.RunDefers:
		// The analysis is flow insensitive, so we just "call"
		// defers as we encounter them.

	case *ssa.Range:
		// Do nothing. Next{Iter: *ssa.Range} handles this case.

	case *ssa.Next:
		if !instr.IsString { // map
			// Assumes that Next is always directly applied to a Range result.
			theMap := instr.Iter.(*ssa.Range).X
			tMap := theMap.Type().Underlying().(*types.Map)
			ksize := a.sizeof(tMap.Key())
			vsize := a.sizeof(tMap.Elem())

			// Load from the map's (k,v) into the tuple's (ok, k, v).
			a.genLoad(cgn, a.valueNode(instr)+1, theMap, 0, ksize+vsize)
		}

	case *ssa.Lookup:
		if tMap, ok := instr.X.Type().Underlying().(*types.Map); ok {
			// CommaOk can be ignored: field 0 is a no-op.
			ksize := a.sizeof(tMap.Key())
			vsize := a.sizeof(tMap.Elem())
			a.genLoad(cgn, a.valueNode(instr), instr.X, ksize, vsize)
		}

	case *ssa.MapUpdate:
		tmap := instr.Map.Type().Underlying().(*types.Map)
		ksize := a.sizeof(tmap.Key())
		vsize := a.sizeof(tmap.Elem())
		a.genStore(cgn, instr.Map, a.valueNode(instr.Key), 0, ksize)
		a.genStore(cgn, instr.Map, a.valueNode(instr.Value), ksize, vsize)

	case *ssa.Panic:
		a.copy(a.panicNode, a.valueNode(instr.X), 1)

	default:
		panic(fmt.Sprintf("unimplemented: %T", instr))
	}
}