// set1usePtr records single-use pointer value v in l.hc.map1usePtr and
// returns any target-language variable declarations that must be emitted
// to hold the pointer's object and offset expressions.
//
// With the register-array backend the obj/off strings are stored verbatim
// and no code is emitted.  Otherwise the function first scans the existing
// entries for a temporary whose original obj/off expression matches (so it
// can be reused instead of re-declared); failing that it declares fresh
// temporaries named after v and returns their declarations as source text.
func (l langType) set1usePtr(v ssa.Value, oup oneUsePtr) string {
	if l.hc.useRegisterArray {
		l.hc.map1usePtr[v] = oneUsePtr{obj: oup.obj, off: oup.off}
		return ""
	}
	nam := v.Name()
	newObj := ""
	newOff := ""
	ret := ""
	madeVarObj := false
	madeVarOff := false
	for _, eoup := range l.hc.map1usePtr { // TODO speed this up with another map or two
		// Reuse an already-declared temporary whose original expression
		// is identical to the one we were handed.
		if oup.obj == eoup.objOrig && eoup.varObj {
			newObj = eoup.obj
		}
		if oup.off == eoup.offOrig && eoup.varOff {
			newOff = eoup.off
		}
	}
	if newObj == "" {
		// No reusable object temp: declare one and register it for freeing.
		ret += "var " + nam + "obj=" + oup.obj + ";\n"
		newObj = nam + "obj"
		madeVarObj = true
		l.hc.tempVarList = append(l.hc.tempVarList, regToFree{nam + "obj", "Dynamic"})
	}
	if newOff == "" {
		// No reusable offset temp: declare one.
		// NOTE(review): unlike obj temps, off temps are not appended to
		// tempVarList — presumably intentional; confirm.
		ret += "var " + nam + "off=" + oup.off + ";\n"
		newOff = nam + "off"
		madeVarOff = true
	}
	l.hc.map1usePtr[v] = oneUsePtr{newObj, newOff, oup.obj, oup.off, madeVarObj, madeVarOff}
	return ret
}
// valueOffsetNode ascertains the node for tuple/struct value v, // then returns the node for its subfield #index. // func (a *analysis) valueOffsetNode(v ssa.Value, index int) nodeid { id := a.valueNode(v) if id == 0 { panic(fmt.Sprintf("cannot offset within n0: %s = %s", v.Name(), v)) } return id + nodeid(a.offsetOf(v.Type(), index)) }
func reg(reg ssa.Value) string { if reg == nil { return "???.nil" } if reg.Parent() != nil { return fmt.Sprintf("%s.\033[4m%s\033[0m", reg.Parent().String(), reg.Name()) } return fmt.Sprintf("???.\033[4m%s\033[0m", reg.Name()) }
// prepareCallFn prepares a caller Function to visit performing necessary context switching and returns a new callee Function.
// rcvr is non-nil if invoke call
func (caller *Function) prepareCallFn(common *ssa.CallCommon, fn *ssa.Function, rcvr ssa.Value) *Function {
	callee := NewFunction(caller)
	callee.Fn = fn
	// This function was called before
	if _, ok := callee.Prog.FuncInstance[callee.Fn]; ok {
		callee.Prog.FuncInstance[callee.Fn]++
	} else {
		callee.Prog.FuncInstance[callee.Fn] = 0
	}
	callee.FuncDef.Name = fn.String()
	callee.id = callee.Prog.FuncInstance[callee.Fn]
	for i, param := range callee.Fn.Params {
		// Map each formal parameter back to the caller-side argument.
		// For invoke calls the receiver occupies slot 0, so common.Args
		// is shifted by one.
		var argCaller ssa.Value
		if rcvr != nil {
			if i == 0 {
				argCaller = rcvr
			} else {
				argCaller = common.Args[i-1]
			}
		} else {
			argCaller = common.Args[i]
		}
		// Channel-typed arguments become MiGo parameters of the callee.
		if _, ok := argCaller.Type().(*types.Chan); ok {
			callee.FuncDef.AddParams(&migo.Parameter{Caller: argCaller, Callee: param})
		}
		if inst, ok := caller.locals[argCaller]; ok {
			// Known caller local: alias it into the callee scope.
			callee.locals[param] = inst
			callee.revlookup[argCaller.Name()] = param.Name()
			// Copy array and struct from parent.
			if elems, ok := caller.arrays[inst]; ok {
				callee.arrays[inst] = elems
			}
			if fields, ok := caller.structs[inst]; ok {
				callee.structs[inst] = fields
			}
			if maps, ok := caller.maps[inst]; ok {
				callee.maps[inst] = maps
			}
		} else if c, ok := argCaller.(*ssa.Const); ok {
			// Constant argument: wrap it directly.
			callee.locals[param] = &Const{c}
		}
	}
	// If the called value is a known closure, bind its captured free
	// variables into the callee (channel-typed captures also become
	// MiGo parameters).
	if inst, ok := caller.locals[common.Value]; ok {
		if cap, ok := caller.Prog.closures[inst]; ok {
			for i, fv := range callee.Fn.FreeVars {
				callee.locals[fv] = cap[i]
				if _, ok := derefType(fv.Type()).(*types.Chan); ok {
					callee.FuncDef.AddParams(&migo.Parameter{Caller: fv, Callee: fv})
				}
			}
		}
	}
	return callee
}
func (f *Function) sizeof(val ssa.Value) uint { if _, ok := val.(*ssa.Const); ok { return f.sizeofConst(val.(*ssa.Const)) } info, ok := f.identifiers[val.Name()] if !ok { ice(fmt.Sprintf("unknown name (%v), value (%v)\n", val.Name(), val)) } _, _, size := info.Addr() return size }
func (f *Function) LoadValue(loc ssa.Instruction, val ssa.Value, offset uint, size uint) (string, *register, *Error) { ident := f.Ident(val) asm := fmt.Sprintf("// BEGIN LoadValue, val %v (= %v), offset %v, size %v\n", val.Name(), val, offset, size) a, reg, err := f.LoadIdent(loc, ident, offset, size) if err != nil { return "", nil, err } asm += a asm += fmt.Sprintf("// END LoadValue, val %v (= %v), offset %v, size %v\n", val.Name(), val, offset, size) return asm, reg, nil }
func (f *Function) Ident(v ssa.Value) *identifier { if ident, ok := f.identifiers[v.Name()]; ok { if ident == nil { ice("nil ident") } return ident } switch v := v.(type) { case *ssa.Const: ident := identifier{f: f, name: v.Name(), typ: v.Type(), local: nil, param: nil, cnst: v} ident.initStorage(true) f.identifiers[v.Name()] = &ident return &ident } local, err := f.newIdent(v) if err != nil { return nil } f.identifiers[v.Name()] = &local return &local }
func (fr *frame) get(key ssa.Value) value { switch key := key.(type) { case nil: // Hack; simplifies handling of optional attributes // such as ssa.Slice.{Low,High}. return nil case *ssa.Function, *ssa.Builtin: return key case *ssa.Const: return constValue(key) case *ssa.Global: if r, ok := fr.i.globals[key]; ok { return r } } if r, ok := fr.env[key]; ok { return r } panic(fmt.Sprintf("get: no value for %T: %v", key, key.Name())) }
// newIdent creates a new stack-local identifier for value v, placed
// below the locals allocated so far (offsets grow downwards/negatively).
// The slot is sized by sizeof(typ), rounded up to align(typ) when the
// alignment exceeds the size.
//
// NOTE(review): the identifier is registered in f.identifiers here as a
// pointer to the local variable, yet the function returns the identifier
// by value — a caller that re-registers the address of its copy replaces
// the pointer stored here.  Looks harmless but worth confirming.
func (f *Function) newIdent(v ssa.Value) (identifier, *Error) {
	name := v.Name()
	typ := v.Type()
	size := sizeof(typ)
	offset := int(size)
	if align(typ) > size {
		// Over-aligned types reserve a full alignment-sized slot.
		offset = int(align(typ))
	}
	ident := identifier{
		f:      f,
		name:   name,
		typ:    typ,
		param:  nil,
		local:  nil,
		value:  v,
		offset: -int(f.localIdentsSize()) - offset}
	ident.initStorage(false)
	f.identifiers[name] = &ident
	// zeroing the memory is done at the beginning of the function
	return ident, nil
}
// setValueNode associates node id with the value v. // cgn identifies the context iff v is a local variable. // func (a *analysis) setValueNode(v ssa.Value, id nodeid, cgn *cgnode) { if cgn != nil { a.localval[v] = id } else { a.globalval[v] = id } if a.log != nil { fmt.Fprintf(a.log, "\tval[%s] = n%d (%T)\n", v.Name(), id, v) } // Due to context-sensitivity, we may encounter the same Value // in many contexts. We merge them to a canonical node, since // that's what all clients want. // Record the (v, id) relation if the client has queried pts(v). if _, ok := a.config.Queries[v]; ok { t := v.Type() ptr, ok := a.result.Queries[v] if !ok { // First time? Create the canonical query node. ptr = Pointer{a, a.addNodes(t, "query")} a.result.Queries[v] = ptr } a.result.Queries[v] = ptr a.copy(ptr.n, id, a.sizeof(t)) } // Record the (*v, id) relation if the client has queried pts(*v). if _, ok := a.config.IndirectQueries[v]; ok { t := v.Type() ptr, ok := a.result.IndirectQueries[v] if !ok { // First time? Create the canonical indirect query node. ptr = Pointer{a, a.addNodes(v.Type(), "query.indirect")} a.result.IndirectQueries[v] = ptr } a.genLoad(cgn, ptr.n, v, 0, a.sizeof(t)) } }
// objectNode returns the object to which v points, if known.
// In other words, if the points-to set of v is a singleton, it
// returns the sole label, zero otherwise.
//
// We exploit this information to make the generated constraints less
// dynamic. For example, a complex load constraint can be replaced by
// a simple copy constraint when the sole destination is known a priori.
//
// Some SSA instructions always have singletons points-to sets:
// Alloc, Function, Global, MakeChan, MakeClosure, MakeInterface, MakeMap, MakeSlice.
// Others may be singletons depending on their operands:
// FreeVar, Const, Convert, FieldAddr, IndexAddr, Slice.
//
// Idempotent. Objects are created as needed, possibly via recursion
// down the SSA value graph, e.g IndexAddr(FieldAddr(Alloc))).
func (a *analysis) objectNode(cgn *cgnode, v ssa.Value) nodeid {
	switch v.(type) {
	case *ssa.Global, *ssa.Function, *ssa.Const, *ssa.FreeVar:
		// Global object: cached in globalobj, context-independent.
		obj, ok := a.globalobj[v]
		if !ok {
			switch v := v.(type) {
			case *ssa.Global:
				obj = a.nextNode()
				a.addNodes(mustDeref(v.Type()), "global")
				a.endObject(obj, nil, v)

			case *ssa.Function:
				obj = a.makeFunctionObject(v, nil)

			case *ssa.Const:
				// not addressable

			case *ssa.FreeVar:
				// not addressable
			}

			if a.log != nil {
				fmt.Fprintf(a.log, "\tglobalobj[%s] = n%d\n", v, obj)
			}
			a.globalobj[v] = obj
		}
		return obj
	}

	// Local object: cached in localobj, per-context.
	obj, ok := a.localobj[v]
	if !ok {
		switch v := v.(type) {
		case *ssa.Alloc:
			obj = a.nextNode()
			a.addNodes(mustDeref(v.Type()), "alloc")
			a.endObject(obj, cgn, v)

		case *ssa.MakeSlice:
			obj = a.nextNode()
			a.addNodes(sliceToArray(v.Type()), "makeslice")
			a.endObject(obj, cgn, v)

		case *ssa.MakeChan:
			obj = a.nextNode()
			a.addNodes(v.Type().Underlying().(*types.Chan).Elem(), "makechan")
			a.endObject(obj, cgn, v)

		case *ssa.MakeMap:
			obj = a.nextNode()
			tmap := v.Type().Underlying().(*types.Map)
			a.addNodes(tmap.Key(), "makemap.key")
			elem := a.addNodes(tmap.Elem(), "makemap.value")
			// To update the value field, MapUpdate
			// generates store-with-offset constraints which
			// the presolver can't model, so we must mark
			// those nodes indirect.
			for id, end := elem, elem+nodeid(a.sizeof(tmap.Elem())); id < end; id++ {
				a.mapValues = append(a.mapValues, id)
			}
			a.endObject(obj, cgn, v)

		case *ssa.MakeInterface:
			tConc := v.X.Type()
			obj = a.makeTagged(tConc, cgn, v)
			// Copy the value into it, if nontrivial.
			if x := a.valueNode(v.X); x != 0 {
				a.copy(obj+1, x, a.sizeof(tConc))
			}

		case *ssa.FieldAddr:
			// Singleton only if the base object is itself a singleton.
			if xobj := a.objectNode(cgn, v.X); xobj != 0 {
				obj = xobj + nodeid(a.offsetOf(mustDeref(v.X.Type()), v.Field))
			}

		case *ssa.IndexAddr:
			if xobj := a.objectNode(cgn, v.X); xobj != 0 {
				obj = xobj + 1
			}

		case *ssa.Slice:
			// A slice aliases the same object as its operand.
			obj = a.objectNode(cgn, v.X)

		case *ssa.Convert:
			// TODO(adonovan): opt: handle these cases too:
			// - unsafe.Pointer->*T conversion acts like Alloc
			// - string->[]byte/[]rune conversion acts like MakeSlice
		}

		if a.log != nil {
			fmt.Fprintf(a.log, "\tlocalobj[%s] = n%d\n", v.Name(), obj)
		}
		a.localobj[v] = obj
	}
	return obj
}
func (f *Function) StoreValAddr(loc ssa.Instruction, val ssa.Value, addr *identifier) (string, *Error) { if ident := f.Ident(val); ident == nil { ice("error in allocating local") } if addr.isConst() { ice(fmt.Sprintf("invalid addr \"%v\"", addr)) } asm := "" asm += fmt.Sprintf("// BEGIN StoreValAddr addr name:%v, val name:%v\n", addr.name, val.Name()) + asm if isComplex(val.Type()) { return ErrorMsg("complex32/64 unsupported") } else if isXmm(val.Type()) { a, valReg, err := f.LoadValue(loc, val, 0, f.sizeof(val)) if err != nil { return a, err } asm += a a, err = f.StoreValue(loc, addr, valReg) if err != nil { return a, err } asm += a f.freeReg(valReg) } else { size := f.sizeof(val) iterations := size datasize := 1 if size >= sizeBasic(types.Int64) { iterations = size / sizeBasic(types.Int64) datasize = 8 } else if size >= sizeBasic(types.Int32) { iterations = size / sizeBasic(types.Int32) datasize = 4 } else if size >= sizeBasic(types.Int16) { iterations = size / sizeBasic(types.Int16) datasize = 2 } if size > sizeInt() { if size%sizeInt() != 0 { ice(fmt.Sprintf("Size (%v) not multiple of sizeInt (%v)", size, sizeInt())) } } for i := 0; i < int(iterations); i++ { offset := uint(i * datasize) a, valReg, err := f.LoadValue(loc, val, offset, uint(datasize)) if err != nil { return a, err } asm += a a, err = f.AssignRegIdent(loc, valReg, addr, offset, uint(datasize)) if err != nil { return a, err } asm += a f.freeReg(valReg) } } asm += fmt.Sprintf("// END StoreValAddr addr name:%v, val name:%v\n", addr.name, val.Name()) return asm, nil }