// visitInstr interprets a single ssa.Instruction within the activation
// record frame.  It returns a continuation value indicating where to
// read the next instruction from.
func visitInstr(fr *frame, instr ssa.Instruction) continuation {
	switch instr := instr.(type) {
	case *ssa.DebugRef:
		// no-op

	case *ssa.UnOp:
		fr.env[instr] = unop(instr, fr.get(instr.X))

	case *ssa.BinOp:
		fr.env[instr] = binop(instr.Op, instr.X.Type(), fr.get(instr.X), fr.get(instr.Y))

	case *ssa.Call:
		fn, args := prepareCall(fr, &instr.Call)
		fr.env[instr] = call(fr.i, fr, instr.Pos(), fn, args)

	case *ssa.ChangeInterface:
		fr.env[instr] = fr.get(instr.X)

	case *ssa.ChangeType:
		fr.env[instr] = fr.get(instr.X) // (can't fail)

	case *ssa.Convert:
		fr.env[instr] = conv(instr.Type(), instr.X.Type(), fr.get(instr.X))

	case *ssa.MakeInterface:
		fr.env[instr] = iface{t: instr.X.Type(), v: fr.get(instr.X)}

	case *ssa.Extract:
		fr.env[instr] = fr.get(instr.Tuple).(tuple)[instr.Index]

	case *ssa.Slice:
		fr.env[instr] = slice(fr.get(instr.X), fr.get(instr.Low), fr.get(instr.High), fr.get(instr.Max))

	case *ssa.Return:
		switch len(instr.Results) {
		case 0:
		case 1:
			fr.result = fr.get(instr.Results[0])
		default:
			var res []value
			for _, r := range instr.Results {
				res = append(res, fr.get(r))
			}
			fr.result = tuple(res)
		}
		fr.block = nil
		return kReturn

	case *ssa.RunDefers:
		fr.runDefers()

	case *ssa.Panic:
		panic(targetPanic{fr.get(instr.X)})

	case *ssa.Send:
		fr.get(instr.Chan).(chan value) <- fr.get(instr.X)

	case *ssa.Store:
		store(deref(instr.Addr.Type()), fr.get(instr.Addr).(*value), fr.get(instr.Val))

	case *ssa.If:
		succ := 1
		if fr.get(instr.Cond).(bool) {
			succ = 0
		}
		fr.prevBlock, fr.block = fr.block, fr.block.Succs[succ]
		return kJump

	case *ssa.Jump:
		fr.prevBlock, fr.block = fr.block, fr.block.Succs[0]
		return kJump

	case *ssa.Defer:
		fn, args := prepareCall(fr, &instr.Call)
		fr.defers = &deferred{
			fn:    fn,
			args:  args,
			instr: instr,
			tail:  fr.defers,
		}

	case *ssa.Go:
		fn, args := prepareCall(fr, &instr.Call)
		go call(fr.i, nil, instr.Pos(), fn, args)

	case *ssa.MakeChan:
		fr.env[instr] = make(chan value, asInt(fr.get(instr.Size)))

	case *ssa.Alloc:
		var addr *value
		if instr.Heap {
			// new
			addr = new(value)
			fr.env[instr] = addr
		} else {
			// local
			addr = fr.env[instr].(*value)
		}
		*addr = zero(deref(instr.Type()))

	case *ssa.MakeSlice:
		slice := make([]value, asInt(fr.get(instr.Cap)))
		tElt := instr.Type().Underlying().(*types.Slice).Elem()
		for i := range slice {
			slice[i] = zero(tElt)
		}
		fr.env[instr] = slice[:asInt(fr.get(instr.Len))]

	case *ssa.MakeMap:
		reserve := 0
		if instr.Reserve != nil {
			reserve = asInt(fr.get(instr.Reserve))
		}
		fr.env[instr] = makeMap(instr.Type().Underlying().(*types.Map).Key(), reserve)

	case *ssa.Range:
		fr.env[instr] = rangeIter(fr.get(instr.X), instr.X.Type())

	case *ssa.Next:
		fr.env[instr] = fr.get(instr.Iter).(iter).next()

	case *ssa.FieldAddr:
		x := fr.get(instr.X)
		// FIXME wrong! &global.f must not change if we do *global = zero!
		fr.env[instr] = &(*x.(*value)).(structure)[instr.Field]

	case *ssa.Field:
		fr.env[instr] = fr.get(instr.X).(structure)[instr.Field]

	case *ssa.IndexAddr:
		x := fr.get(instr.X)
		idx := fr.get(instr.Index)
		switch x := x.(type) {
		case []value:
			fr.env[instr] = &x[asInt(idx)]
		case *value: // *array
			fr.env[instr] = &(*x).(array)[asInt(idx)]
		default:
			panic(fmt.Sprintf("unexpected x type in IndexAddr: %T", x))
		}

	case *ssa.Index:
		fr.env[instr] = fr.get(instr.X).(array)[asInt(fr.get(instr.Index))]

	case *ssa.Lookup:
		fr.env[instr] = lookup(instr, fr.get(instr.X), fr.get(instr.Index))

	case *ssa.MapUpdate:
		m := fr.get(instr.Map)
		key := fr.get(instr.Key)
		v := fr.get(instr.Value)
		switch m := m.(type) {
		case map[value]value:
			m[key] = v
		case *hashmap:
			m.insert(key.(hashable), v)
		default:
			panic(fmt.Sprintf("illegal map type: %T", m))
		}

	case *ssa.TypeAssert:
		fr.env[instr] = typeAssert(fr.i, instr, fr.get(instr.X).(iface))

	case *ssa.MakeClosure:
		var bindings []value
		for _, binding := range instr.Bindings {
			bindings = append(bindings, fr.get(binding))
		}
		fr.env[instr] = &closure{instr.Fn.(*ssa.Function), bindings}

	case *ssa.Phi:
		for i, pred := range instr.Block().Preds {
			if fr.prevBlock == pred {
				fr.env[instr] = fr.get(instr.Edges[i])
				break
			}
		}

	case *ssa.Select:
		var cases []reflect.SelectCase
		if !instr.Blocking {
			cases = append(cases, reflect.SelectCase{
				Dir: reflect.SelectDefault,
			})
		}
		for _, state := range instr.States {
			var dir reflect.SelectDir
			if state.Dir == types.RecvOnly {
				dir = reflect.SelectRecv
			} else {
				dir = reflect.SelectSend
			}
			var send reflect.Value
			if state.Send != nil {
				send = reflect.ValueOf(fr.get(state.Send))
			}
			cases = append(cases, reflect.SelectCase{
				Dir:  dir,
				Chan: reflect.ValueOf(fr.get(state.Chan)),
				Send: send,
			})
		}
		chosen, recv, recvOk := reflect.Select(cases)
		if !instr.Blocking {
			chosen-- // default case should have index -1.
		}
		r := tuple{chosen, recvOk}
		for i, st := range instr.States {
			if st.Dir == types.RecvOnly {
				var v value
				if i == chosen && recvOk {
					// No need to copy since send makes an unaliased copy.
					v = recv.Interface().(value)
				} else {
					v = zero(st.Chan.Type().Underlying().(*types.Chan).Elem())
				}
				r = append(r, v)
			}
		}
		fr.env[instr] = r

	default:
		panic(fmt.Sprintf("unexpected instruction: %T", instr))
	}

	// if val, ok := instr.(ssa.Value); ok {
	// 	fmt.Println(toString(fr.env[val])) // debugging
	// }

	return kNext
}
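For context, here is a minimal sketch of the loop that might consume these continuations: execute the current block's instructions in order, break out to the updated fr.block on kJump, and unwind on kReturn. The continuation constants and the runFrame name are assumptions for illustration, not the interpreter's actual declarations.

type continuation int

const (
	kNext   continuation = iota // fall through to the next instruction
	kJump                       // control transferred; fr.block already updated
	kReturn                     // the frame has finished executing
)

// runFrame is a hypothetical driver: every well-formed block ends in a
// control-transfer instruction, so the inner loop always exits via kJump
// or kReturn.
func runFrame(fr *frame) {
	for {
	block:
		for _, instr := range fr.block.Instrs {
			switch visitInstr(fr, instr) {
			case kReturn:
				return
			case kJump:
				break block
			case kNext:
				// continue with the next instruction in this block
			}
		}
	}
}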
// genInstr generates constraints for instruction instr in context cgn.
func (a *analysis) genInstr(cgn *cgnode, instr ssa.Instruction) {
	if a.log != nil {
		var prefix string
		if val, ok := instr.(ssa.Value); ok {
			prefix = val.Name() + " = "
		}
		fmt.Fprintf(a.log, "; %s%s\n", prefix, instr)
	}

	switch instr := instr.(type) {
	case *ssa.DebugRef:
		// no-op.

	case *ssa.UnOp:
		switch instr.Op {
		case token.ARROW: // <-x
			// We can ignore instr.CommaOk because the node we're
			// altering is always at zero offset relative to instr
			tElem := instr.X.Type().Underlying().(*types.Chan).Elem()
			a.genLoad(cgn, a.valueNode(instr), instr.X, 0, a.sizeof(tElem))

		case token.MUL: // *x
			a.genLoad(cgn, a.valueNode(instr), instr.X, 0, a.sizeof(instr.Type()))

		default:
			// NOT, SUB, XOR: no-op.
		}

	case *ssa.BinOp:
		// All no-ops.

	case ssa.CallInstruction: // *ssa.Call, *ssa.Go, *ssa.Defer
		a.genCall(cgn, instr)

	case *ssa.ChangeType:
		a.copy(a.valueNode(instr), a.valueNode(instr.X), 1)

	case *ssa.Convert:
		a.genConv(instr, cgn)

	case *ssa.Extract:
		a.copy(a.valueNode(instr),
			a.valueOffsetNode(instr.Tuple, instr.Index),
			a.sizeof(instr.Type()))

	case *ssa.FieldAddr:
		a.genOffsetAddr(cgn, instr, a.valueNode(instr.X),
			a.offsetOf(mustDeref(instr.X.Type()), instr.Field))

	case *ssa.IndexAddr:
		a.genOffsetAddr(cgn, instr, a.valueNode(instr.X), 1)

	case *ssa.Field:
		a.copy(a.valueNode(instr),
			a.valueOffsetNode(instr.X, instr.Field),
			a.sizeof(instr.Type()))

	case *ssa.Index:
		a.copy(a.valueNode(instr), 1+a.valueNode(instr.X), a.sizeof(instr.Type()))

	case *ssa.Select:
		recv := a.valueOffsetNode(instr, 2) // instr : (index, recvOk, recv0, ... recv_n-1)
		for _, st := range instr.States {
			elemSize := a.sizeof(st.Chan.Type().Underlying().(*types.Chan).Elem())
			switch st.Dir {
			case types.RecvOnly:
				a.genLoad(cgn, recv, st.Chan, 0, elemSize)
				recv += nodeid(elemSize)

			case types.SendOnly:
				a.genStore(cgn, st.Chan, a.valueNode(st.Send), 0, elemSize)
			}
		}

	case *ssa.Return:
		results := a.funcResults(cgn.obj)
		for _, r := range instr.Results {
			sz := a.sizeof(r.Type())
			a.copy(results, a.valueNode(r), sz)
			results += nodeid(sz)
		}

	case *ssa.Send:
		a.genStore(cgn, instr.Chan, a.valueNode(instr.X), 0, a.sizeof(instr.X.Type()))

	case *ssa.Store:
		a.genStore(cgn, instr.Addr, a.valueNode(instr.Val), 0, a.sizeof(instr.Val.Type()))

	case *ssa.Alloc, *ssa.MakeSlice, *ssa.MakeChan, *ssa.MakeMap, *ssa.MakeInterface:
		v := instr.(ssa.Value)
		a.addressOf(v.Type(), a.valueNode(v), a.objectNode(cgn, v))

	case *ssa.ChangeInterface:
		a.copy(a.valueNode(instr), a.valueNode(instr.X), 1)

	case *ssa.TypeAssert:
		a.typeAssert(instr.AssertedType, a.valueNode(instr), a.valueNode(instr.X), true)

	case *ssa.Slice:
		a.copy(a.valueNode(instr), a.valueNode(instr.X), 1)

	case *ssa.If, *ssa.Jump:
		// no-op.

	case *ssa.Phi:
		sz := a.sizeof(instr.Type())
		for _, e := range instr.Edges {
			a.copy(a.valueNode(instr), a.valueNode(e), sz)
		}

	case *ssa.MakeClosure:
		fn := instr.Fn.(*ssa.Function)
		a.copy(a.valueNode(instr), a.valueNode(fn), 1)
		// Free variables are treated like global variables.
		for i, b := range instr.Bindings {
			a.copy(a.valueNode(fn.FreeVars[i]), a.valueNode(b), a.sizeof(b.Type()))
		}

	case *ssa.RunDefers:
		// The analysis is flow insensitive, so we just "call"
		// defers as we encounter them.

	case *ssa.Range:
		// Do nothing.  Next{Iter: *ssa.Range} handles this case.

	case *ssa.Next:
		if !instr.IsString { // map
			// Assumes that Next is always directly applied to a Range result.
			theMap := instr.Iter.(*ssa.Range).X
			tMap := theMap.Type().Underlying().(*types.Map)
			ksize := a.sizeof(tMap.Key())
			vsize := a.sizeof(tMap.Elem())

			// Load from the map's (k,v) into the tuple's (ok, k, v).
			a.genLoad(cgn, a.valueNode(instr)+1, theMap, 0, ksize+vsize)
		}

	case *ssa.Lookup:
		if tMap, ok := instr.X.Type().Underlying().(*types.Map); ok {
			// CommaOk can be ignored: field 0 is a no-op.
			ksize := a.sizeof(tMap.Key())
			vsize := a.sizeof(tMap.Elem())
			a.genLoad(cgn, a.valueNode(instr), instr.X, ksize, vsize)
		}

	case *ssa.MapUpdate:
		tmap := instr.Map.Type().Underlying().(*types.Map)
		ksize := a.sizeof(tmap.Key())
		vsize := a.sizeof(tmap.Elem())
		a.genStore(cgn, instr.Map, a.valueNode(instr.Key), 0, ksize)
		a.genStore(cgn, instr.Map, a.valueNode(instr.Value), ksize, vsize)

	case *ssa.Panic:
		a.copy(a.panicNode, a.valueNode(instr.X), 1)

	default:
		panic(fmt.Sprintf("unimplemented: %T", instr))
	}
}
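Both visitInstr and genInstr are driven over SSA form built by golang.org/x/tools/go/ssa. The following standalone sketch (not part of either tool) shows that outer traversal using only the public go/packages, ssa, and ssautil APIs; the "fmt" load pattern is just an example.

package main

import (
	"fmt"

	"golang.org/x/tools/go/packages"
	"golang.org/x/tools/go/ssa"
	"golang.org/x/tools/go/ssa/ssautil"
)

func main() {
	// Load and type-check the packages, then build SSA for the whole program.
	cfg := &packages.Config{Mode: packages.LoadAllSyntax}
	initial, err := packages.Load(cfg, "fmt")
	if err != nil {
		panic(err)
	}
	prog, _ := ssautil.AllPackages(initial, ssa.SanityCheckFunctions)
	prog.Build()

	// Visit every instruction, as the visitors above do via their type switch.
	for fn := range ssautil.AllFunctions(prog) {
		for _, b := range fn.Blocks {
			for _, instr := range b.Instrs {
				if v, ok := instr.(ssa.Value); ok {
					fmt.Printf("%s = %T\n", v.Name(), instr)
				} else {
					fmt.Printf("%T\n", instr)
				}
			}
		}
	}
}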
func (fr *frame) instruction(instr ssa.Instruction) {
	fr.logf("[%T] %v @ %s\n", instr, instr, fr.pkg.Prog.Fset.Position(instr.Pos()))
	if fr.GenerateDebug {
		fr.debug.SetLocation(fr.builder, instr.Pos())
	}

	switch instr := instr.(type) {
	case *ssa.Alloc:
		typ := deref(instr.Type())
		llvmtyp := fr.llvmtypes.ToLLVM(typ)
		var value llvm.Value
		if !instr.Heap {
			value = fr.env[instr].value
			fr.memsetZero(value, llvm.SizeOf(llvmtyp))
		} else if fr.isInit && fr.shouldStaticallyAllocate(instr) {
			// If this is the init function and we think it may be beneficial,
			// allocate memory statically in the object file rather than on the
			// heap.  This allows us to optimize constant stores into such
			// variables as static initializations.
			global := llvm.AddGlobal(fr.module.Module, llvmtyp, "")
			global.SetLinkage(llvm.InternalLinkage)
			fr.addGlobal(global, typ)
			ptr := llvm.ConstBitCast(global, llvm.PointerType(llvm.Int8Type(), 0))
			fr.env[instr] = newValue(ptr, instr.Type())
		} else {
			value = fr.createTypeMalloc(typ)
			value.SetName(instr.Comment)
			value = fr.builder.CreateBitCast(value, llvm.PointerType(llvm.Int8Type(), 0), "")
			fr.env[instr] = newValue(value, instr.Type())
		}

	case *ssa.BinOp:
		lhs, rhs := fr.value(instr.X), fr.value(instr.Y)
		fr.env[instr] = fr.binaryOp(lhs, instr.Op, rhs)

	case *ssa.Call:
		tuple := fr.callInstruction(instr)
		if len(tuple) == 1 {
			fr.env[instr] = tuple[0]
		} else {
			fr.tuples[instr] = tuple
		}

	case *ssa.ChangeInterface:
		x := fr.value(instr.X)
		// The source type must be a non-empty interface,
		// as ChangeInterface cannot fail (E2I may fail).
		if instr.Type().Underlying().(*types.Interface).NumMethods() > 0 {
			x = fr.changeInterface(x, instr.Type(), false)
		} else {
			x = fr.convertI2E(x)
		}
		fr.env[instr] = x

	case *ssa.ChangeType:
		value := fr.llvmvalue(instr.X)
		if _, ok := instr.Type().Underlying().(*types.Pointer); ok {
			value = fr.builder.CreateBitCast(value, fr.llvmtypes.ToLLVM(instr.Type()), "")
		}
		fr.env[instr] = newValue(value, instr.Type())

	case *ssa.Convert:
		v := fr.value(instr.X)
		fr.env[instr] = fr.convert(v, instr.Type())

	case *ssa.Defer:
		fn, arg := fr.createThunk(instr)
		fr.runtime.Defer.call(fr, fr.frameptr, fn, arg)

	case *ssa.Extract:
		var elem llvm.Value
		if t, ok := fr.tuples[instr.Tuple]; ok {
			elem = t[instr.Index].value
		} else {
			tuple := fr.llvmvalue(instr.Tuple)
			elem = fr.builder.CreateExtractValue(tuple, instr.Index, instr.Name())
		}
		elemtyp := instr.Type()
		fr.env[instr] = newValue(elem, elemtyp)

	case *ssa.Field:
		fieldtyp := instr.Type()
		if p, ok := fr.ptr[instr.X]; ok {
			field := fr.builder.CreateStructGEP(p, instr.Field, instr.Name())
			if fr.canAvoidElementLoad(instr) {
				fr.ptr[instr] = field
			} else {
				fr.env[instr] = newValue(fr.builder.CreateLoad(field, ""), fieldtyp)
			}
		} else {
			value := fr.llvmvalue(instr.X)
			field := fr.builder.CreateExtractValue(value, instr.Field, instr.Name())
			fr.env[instr] = newValue(field, fieldtyp)
		}

	case *ssa.FieldAddr:
		ptr := fr.llvmvalue(instr.X)
		fr.nilCheck(instr.X, ptr)
		xtyp := instr.X.Type().Underlying().(*types.Pointer).Elem()
		ptrtyp := llvm.PointerType(fr.llvmtypes.ToLLVM(xtyp), 0)
		ptr = fr.builder.CreateBitCast(ptr, ptrtyp, "")
		fieldptr := fr.builder.CreateStructGEP(ptr, instr.Field, instr.Name())
		fieldptr = fr.builder.CreateBitCast(fieldptr, llvm.PointerType(llvm.Int8Type(), 0), "")
		fieldptrtyp := instr.Type()
		fr.env[instr] = newValue(fieldptr, fieldptrtyp)

	case *ssa.Go:
		fn, arg := fr.createThunk(instr)
		fr.runtime.Go.call(fr, fn, arg)

	case *ssa.If:
		cond := fr.llvmvalue(instr.Cond)
		block := instr.Block()
		trueBlock := fr.block(block.Succs[0])
		falseBlock := fr.block(block.Succs[1])
		cond = fr.builder.CreateTrunc(cond, llvm.Int1Type(), "")
		fr.builder.CreateCondBr(cond, trueBlock, falseBlock)

	case *ssa.Index:
		var arrayptr llvm.Value
		if ptr, ok := fr.ptr[instr.X]; ok {
			arrayptr = ptr
		} else {
			array := fr.llvmvalue(instr.X)
			arrayptr = fr.allocaBuilder.CreateAlloca(array.Type(), "")
			fr.builder.CreateStore(array, arrayptr)
		}
		index := fr.llvmvalue(instr.Index)
		arraytyp := instr.X.Type().Underlying().(*types.Array)
		arraylen := llvm.ConstInt(fr.llvmtypes.inttype, uint64(arraytyp.Len()), false)

		// The index may not have been promoted to int (for example, if it
		// came from a composite literal).
		index = fr.createZExtOrTrunc(index, fr.types.inttype, "")

		// Bounds checking: 0 <= index < len
		zero := llvm.ConstNull(fr.types.inttype)
		i0 := fr.builder.CreateICmp(llvm.IntSLT, index, zero, "")
		li := fr.builder.CreateICmp(llvm.IntSLE, arraylen, index, "")
		cond := fr.builder.CreateOr(i0, li, "")
		fr.condBrRuntimeError(cond, gccgoRuntimeErrorARRAY_INDEX_OUT_OF_BOUNDS)

		addr := fr.builder.CreateGEP(arrayptr, []llvm.Value{zero, index}, "")
		if fr.canAvoidElementLoad(instr) {
			fr.ptr[instr] = addr
		} else {
			fr.env[instr] = newValue(fr.builder.CreateLoad(addr, ""), instr.Type())
		}

	case *ssa.IndexAddr:
		x := fr.llvmvalue(instr.X)
		index := fr.llvmvalue(instr.Index)
		var arrayptr, arraylen llvm.Value
		var elemtyp types.Type
		var errcode uint64
		switch typ := instr.X.Type().Underlying().(type) {
		case *types.Slice:
			elemtyp = typ.Elem()
			arrayptr = fr.builder.CreateExtractValue(x, 0, "")
			arraylen = fr.builder.CreateExtractValue(x, 1, "")
			errcode = gccgoRuntimeErrorSLICE_INDEX_OUT_OF_BOUNDS
		case *types.Pointer: // *array
			arraytyp := typ.Elem().Underlying().(*types.Array)
			elemtyp = arraytyp.Elem()
			fr.nilCheck(instr.X, x)
			arrayptr = x
			arraylen = llvm.ConstInt(fr.llvmtypes.inttype, uint64(arraytyp.Len()), false)
			errcode = gccgoRuntimeErrorARRAY_INDEX_OUT_OF_BOUNDS
		}

		// The index may not have been promoted to int (for example, if it
		// came from a composite literal).
		index = fr.createZExtOrTrunc(index, fr.types.inttype, "")

		// Bounds checking: 0 <= index < len
		zero := llvm.ConstNull(fr.types.inttype)
		i0 := fr.builder.CreateICmp(llvm.IntSLT, index, zero, "")
		li := fr.builder.CreateICmp(llvm.IntSLE, arraylen, index, "")
		cond := fr.builder.CreateOr(i0, li, "")
		fr.condBrRuntimeError(cond, errcode)

		ptrtyp := llvm.PointerType(fr.llvmtypes.ToLLVM(elemtyp), 0)
		arrayptr = fr.builder.CreateBitCast(arrayptr, ptrtyp, "")
		addr := fr.builder.CreateGEP(arrayptr, []llvm.Value{index}, "")
		addr = fr.builder.CreateBitCast(addr, llvm.PointerType(llvm.Int8Type(), 0), "")
		fr.env[instr] = newValue(addr, types.NewPointer(elemtyp))

	case *ssa.Jump:
		succ := instr.Block().Succs[0]
		fr.builder.CreateBr(fr.block(succ))

	case *ssa.Lookup:
		x := fr.value(instr.X)
		index := fr.value(instr.Index)
		if isString(x.Type().Underlying()) {
			fr.env[instr] = fr.stringIndex(x, index)
		} else {
			v, ok := fr.mapLookup(x, index)
			if instr.CommaOk {
				fr.tuples[instr] = []*govalue{v, ok}
			} else {
				fr.env[instr] = v
			}
		}

	case *ssa.MakeChan:
		fr.env[instr] = fr.makeChan(instr.Type(), fr.value(instr.Size))

	case *ssa.MakeClosure:
		llfn := fr.resolveFunctionGlobal(instr.Fn.(*ssa.Function))
		llfn = llvm.ConstBitCast(llfn, llvm.PointerType(llvm.Int8Type(), 0))
		fn := newValue(llfn, instr.Fn.(*ssa.Function).Signature)
		bindings := make([]*govalue, len(instr.Bindings))
		for i, binding := range instr.Bindings {
			bindings[i] = fr.value(binding)
		}
		fr.env[instr] = fr.makeClosure(fn, bindings)

	case *ssa.MakeInterface:
		// fr.ptr[instr.X] will be set if a pointer load was elided by canAvoidLoad
		if ptr, ok := fr.ptr[instr.X]; ok {
			fr.env[instr] = fr.makeInterfaceFromPointer(ptr, instr.X.Type(), instr.Type())
		} else {
			receiver := fr.llvmvalue(instr.X)
			fr.env[instr] = fr.makeInterface(receiver, instr.X.Type(), instr.Type())
		}

	case *ssa.MakeMap:
		fr.env[instr] = fr.makeMap(instr.Type(), fr.value(instr.Reserve))

	case *ssa.MakeSlice:
		length := fr.value(instr.Len)
		capacity := fr.value(instr.Cap)
		fr.env[instr] = fr.makeSlice(instr.Type(), length, capacity)

	case *ssa.MapUpdate:
		m := fr.value(instr.Map)
		k := fr.value(instr.Key)
		v := fr.value(instr.Value)
		fr.mapUpdate(m, k, v)

	case *ssa.Next:
		iter := fr.tuples[instr.Iter]
		if instr.IsString {
			fr.tuples[instr] = fr.stringIterNext(iter)
		} else {
			fr.tuples[instr] = fr.mapIterNext(iter)
		}

	case *ssa.Panic:
		arg := fr.value(instr.X)
		fr.callPanic(arg, true)

	case *ssa.Phi:
		typ := instr.Type()
		phi := fr.builder.CreatePHI(fr.llvmtypes.ToLLVM(typ), instr.Comment)
		fr.env[instr] = newValue(phi, typ)
		fr.phis = append(fr.phis, pendingPhi{instr, phi})

	case *ssa.Range:
		x := fr.value(instr.X)
		switch x.Type().Underlying().(type) {
		case *types.Map:
			fr.tuples[instr] = fr.mapIterInit(x)
		case *types.Basic: // string
			fr.tuples[instr] = fr.stringIterInit(x)
		default:
			panic(fmt.Sprintf("unhandled range for type %T", x.Type()))
		}

	case *ssa.Return:
		vals := make([]llvm.Value, len(instr.Results))
		for i, res := range instr.Results {
			vals[i] = fr.llvmvalue(res)
		}
		fr.retInf.encode(llvm.GlobalContext(), fr.allocaBuilder, fr.builder, vals)

	case *ssa.RunDefers:
		fr.runDefers()

	case *ssa.Select:
		index, recvOk, recvElems := fr.chanSelect(instr)
		tuple := append([]*govalue{index, recvOk}, recvElems...)
		fr.tuples[instr] = tuple

	case *ssa.Send:
		fr.chanSend(fr.value(instr.Chan), fr.value(instr.X))

	case *ssa.Slice:
		x := fr.llvmvalue(instr.X)
		low := fr.llvmvalue(instr.Low)
		high := fr.llvmvalue(instr.High)
		max := fr.llvmvalue(instr.Max)
		slice := fr.slice(x, instr.X.Type(), low, high, max)
		fr.env[instr] = newValue(slice, instr.Type())

	case *ssa.Store:
		addr := fr.llvmvalue(instr.Addr)
		value := fr.llvmvalue(instr.Val)
		addr = fr.builder.CreateBitCast(addr, llvm.PointerType(value.Type(), 0), "")
		// If this is the init function, see if we can simulate the effect
		// of the store in a global's initializer, in which case we can avoid
		// generating code for it.
		if !fr.isInit || !fr.maybeStoreInInitializer(value, addr) {
			fr.nilCheck(instr.Addr, addr)
			fr.builder.CreateStore(value, addr)
		}

	case *switchInstr:
		fr.emitSwitch(instr)

	case *ssa.TypeAssert:
		x := fr.value(instr.X)
		if instr.CommaOk {
			v, ok := fr.interfaceTypeCheck(x, instr.AssertedType)
			fr.tuples[instr] = []*govalue{v, ok}
		} else {
			fr.env[instr] = fr.interfaceTypeAssert(x, instr.AssertedType)
		}

	case *ssa.UnOp:
		operand := fr.value(instr.X)
		switch instr.Op {
		case token.ARROW:
			x, ok := fr.chanRecv(operand, instr.CommaOk)
			if instr.CommaOk {
				fr.tuples[instr] = []*govalue{x, ok}
			} else {
				fr.env[instr] = x
			}
		case token.MUL:
			fr.nilCheck(instr.X, operand.value)
			if !fr.canAvoidLoad(instr, operand.value) {
				// The bitcast is necessary to handle recursive pointer loads.
				llptr := fr.builder.CreateBitCast(operand.value, llvm.PointerType(fr.llvmtypes.ToLLVM(instr.Type()), 0), "")
				fr.env[instr] = newValue(fr.builder.CreateLoad(llptr, ""), instr.Type())
			}
		default:
			fr.env[instr] = fr.unaryOp(operand, instr.Op)
		}

	default:
		panic(fmt.Sprintf("unhandled: %v", instr))
	}
}
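The *ssa.Phi case above only creates the LLVM phi node and records a pendingPhi; the incoming edges can only be filled in after every block has been emitted, once each predecessor's LLVM block is known. The following is a hypothetical sketch of that second pass: the pendingPhi field names and the fixupPhis helper are assumptions, and the real code may differ (for example, if one SSA block expands into several LLVM blocks).

type pendingPhi struct {
	ssa *ssa.Phi
	phi llvm.Value
}

// fixupPhis adds one incoming (value, block) pair per predecessor of each
// recorded phi's SSA block, using the frame's existing value and block maps.
func (fr *frame) fixupPhis() {
	for _, p := range fr.phis {
		preds := p.ssa.Block().Preds
		values := make([]llvm.Value, len(preds))
		blocks := make([]llvm.BasicBlock, len(preds))
		for i, pred := range preds {
			values[i] = fr.llvmvalue(p.ssa.Edges[i])
			blocks[i] = fr.block(pred)
		}
		p.phi.AddIncoming(values, blocks)
	}
}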