// valueOffsetNode ascertains the node for tuple/struct value v,
// then returns the node for its subfield #index.
//
func (a *analysis) valueOffsetNode(v ssa.Value, index int) nodeid {
    id := a.valueNode(v)
    if id == 0 {
        panic(fmt.Sprintf("cannot offset within n0: %s = %s", v.Name(), v))
    }
    return id + nodeid(a.offsetOf(v.Type(), index))
}
func (f *Function) Ident(v ssa.Value) *identifier {
    if ident, ok := f.identifiers[v.Name()]; ok {
        if ident == nil {
            ice("nil ident")
        }
        return ident
    }
    switch v := v.(type) {
    case *ssa.Const:
        ident := identifier{f: f, name: v.Name(), typ: v.Type(), local: nil, param: nil, cnst: v}
        ident.initStorage(true)
        f.identifiers[v.Name()] = &ident
        return &ident
    }
    local, err := f.newIdent(v)
    if err != nil {
        return nil
    }
    f.identifiers[v.Name()] = &local
    return &local
}
func (fr *frame) value(v ssa.Value) (result *govalue) {
    switch v := v.(type) {
    case nil:
        return nil
    case *ssa.Function:
        return fr.resolveFunctionDescriptor(v)
    case *ssa.Const:
        return fr.newValueFromConst(v.Value, v.Type())
    case *ssa.Global:
        if g, ok := fr.globals[v]; ok {
            return newValue(g, v.Type())
        }
        // Create an external global. Globals for this package are defined
        // on entry to translatePackage, and have initialisers.
        llelemtyp := fr.llvmtypes.ToLLVM(deref(v.Type()))
        vname := fr.types.mc.mangleGlobalName(v)
        llglobal := llvm.AddGlobal(fr.module.Module, llelemtyp, vname)
        llglobal = llvm.ConstBitCast(llglobal, fr.llvmtypes.ToLLVM(v.Type()))
        fr.globals[v] = llglobal
        return newValue(llglobal, v.Type())
    }
    if value, ok := fr.env[v]; ok {
        return value
    }
    panic("Instruction not visited yet")
}
// AddIndirectQuery adds v to Config.IndirectQueries.
// Precondition: CanPoint(v.Type().Underlying().(*types.Pointer).Elem()).
func (c *Config) AddIndirectQuery(v ssa.Value) {
    if c.IndirectQueries == nil {
        c.IndirectQueries = make(map[ssa.Value]struct{})
    }
    if !CanPoint(mustDeref(v.Type())) {
        panic(fmt.Sprintf("%s is not the address of a pointer-like value: %s", v, v.Type()))
    }
    c.IndirectQueries[v] = struct{}{}
}
// AddQuery adds v to Config.Queries.
// Precondition: CanPoint(v.Type()).
// TODO(adonovan): consider returning a new Pointer for this query,
// which will be initialized during analysis. That avoids the need
// for the corresponding ssa.Value-keyed maps in Config and Result.
func (c *Config) AddQuery(v ssa.Value) {
    if !CanPoint(v.Type()) {
        panic(fmt.Sprintf("%s is not a pointer-like value: %s", v, v.Type()))
    }
    if c.Queries == nil {
        c.Queries = make(map[ssa.Value]struct{})
    }
    c.Queries[v] = struct{}{}
}
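// Hedged usage sketch, not part of the original source: how a client of
// golang.org/x/tools/go/pointer typically pairs AddQuery/AddIndirectQuery
// with Analyze. The helper name queryPointsTo and the parameters mainPkg and
// v are illustrative assumptions; imports of go/ssa and go/pointer are implied.
func queryPointsTo(mainPkg *ssa.Package, v ssa.Value) ([]*pointer.Label, error) {
    cfg := &pointer.Config{Mains: []*ssa.Package{mainPkg}}
    cfg.AddQuery(v) // panics unless CanPoint(v.Type()); use AddIndirectQuery for an address
    result, err := pointer.Analyze(cfg)
    if err != nil {
        return nil, err
    }
    // result.Queries[v] is the canonical Pointer recorded by setValueNode below.
    return result.Queries[v].PointsTo().Labels(), nil
}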
// genOffsetAddr generates constraints for a 'v=ptr.field' (FieldAddr)
// or 'v=ptr[*]' (IndexAddr) instruction v.
func (a *analysis) genOffsetAddr(cgn *cgnode, v ssa.Value, ptr nodeid, offset uint32) {
    dst := a.valueNode(v)
    if obj := a.objectNode(cgn, v); obj != 0 {
        // Pre-apply offsetAddrConstraint.solve().
        a.addressOf(v.Type(), dst, obj)
    } else {
        a.offsetAddr(v.Type(), dst, ptr, offset)
    }
}
// prepareCallFn prepares a caller Function for visiting the callee,
// performing the necessary context switching, and returns a new callee
// Function. rcvr is non-nil if this is an invoke-mode call.
func (caller *Function) prepareCallFn(common *ssa.CallCommon, fn *ssa.Function, rcvr ssa.Value) *Function {
    callee := NewFunction(caller)
    callee.Fn = fn
    // If this function was called before, bump its instance counter; otherwise start at 0.
    if _, ok := callee.Prog.FuncInstance[callee.Fn]; ok {
        callee.Prog.FuncInstance[callee.Fn]++
    } else {
        callee.Prog.FuncInstance[callee.Fn] = 0
    }
    callee.FuncDef.Name = fn.String()
    callee.id = callee.Prog.FuncInstance[callee.Fn]
    for i, param := range callee.Fn.Params {
        var argCaller ssa.Value
        if rcvr != nil {
            if i == 0 {
                argCaller = rcvr
            } else {
                argCaller = common.Args[i-1]
            }
        } else {
            argCaller = common.Args[i]
        }
        if _, ok := argCaller.Type().(*types.Chan); ok {
            callee.FuncDef.AddParams(&migo.Parameter{Caller: argCaller, Callee: param})
        }
        if inst, ok := caller.locals[argCaller]; ok {
            callee.locals[param] = inst
            callee.revlookup[argCaller.Name()] = param.Name()
            // Copy array and struct from parent.
            if elems, ok := caller.arrays[inst]; ok {
                callee.arrays[inst] = elems
            }
            if fields, ok := caller.structs[inst]; ok {
                callee.structs[inst] = fields
            }
            if maps, ok := caller.maps[inst]; ok {
                callee.maps[inst] = maps
            }
        } else if c, ok := argCaller.(*ssa.Const); ok {
            callee.locals[param] = &Const{c}
        }
    }
    if inst, ok := caller.locals[common.Value]; ok {
        if cap, ok := caller.Prog.closures[inst]; ok {
            for i, fv := range callee.Fn.FreeVars {
                callee.locals[fv] = cap[i]
                if _, ok := derefType(fv.Type()).(*types.Chan); ok {
                    callee.FuncDef.AddParams(&migo.Parameter{Caller: fv, Callee: fv})
                }
            }
        }
    }
    return callee
}
// purgeChanOps removes channel operations whose channel element type differs
// from that of ch, the channel we are looking for.
func purgeChanOps(ops []ChanOp, ch ssa.Value) []ChanOp {
    i := 0
    for _, op := range ops {
        if types.Identical(op.Value.Type().Underlying().(*types.Chan).Elem(), ch.Type().Underlying().(*types.Chan).Elem()) {
            ops[i] = op
            i++
        }
    }
    ops = ops[:i]
    return ops
}
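// Hedged aside, not part of the original source: purgeChanOps is an instance
// of Go's in-place filter idiom, which reuses the backing array instead of
// allocating a new slice. The general form (the helper name and predicate are
// illustrative) looks like this:
func filterChanOps(ops []ChanOp, keep func(ChanOp) bool) []ChanOp {
    i := 0
    for _, op := range ops {
        if keep(op) {
            ops[i] = op // compact kept elements to the front
            i++
        }
    }
    return ops[:i] // truncate; the tail still aliases the old elements
}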
// runPTA runs the pointer analysis of the selected SSA value or address.
func runPTA(o *Oracle, v ssa.Value, isAddr bool) (ptrs []pointerResult, err error) {
    buildSSA(o)
    T := v.Type()
    if isAddr {
        o.ptaConfig.AddIndirectQuery(v)
        T = deref(T)
    } else {
        o.ptaConfig.AddQuery(v)
    }

    ptares := ptrAnalysis(o)

    var ptr pointer.Pointer
    if isAddr {
        ptr = ptares.IndirectQueries[v]
    } else {
        ptr = ptares.Queries[v]
    }
    if ptr == (pointer.Pointer{}) {
        return nil, fmt.Errorf("pointer analysis did not find expression (dead code?)")
    }
    pts := ptr.PointsTo()

    if pointer.CanHaveDynamicTypes(T) {
        // Show concrete types for interface/reflect.Value expression.
        if concs := pts.DynamicTypes(); concs.Len() > 0 {
            concs.Iterate(func(conc types.Type, pta interface{}) {
                labels := pta.(pointer.PointsToSet).Labels()
                sort.Sort(byPosAndString(labels)) // to ensure determinism
                ptrs = append(ptrs, pointerResult{conc, labels})
            })
        }
    } else {
        // Show labels for other expressions.
        labels := pts.Labels()
        sort.Sort(byPosAndString(labels)) // to ensure determinism
        ptrs = append(ptrs, pointerResult{T, labels})
    }
    sort.Sort(byTypeString(ptrs)) // to ensure determinism
    return ptrs, nil
}
func (f *Function) newIdent(v ssa.Value) (identifier, *Error) {
    name := v.Name()
    typ := v.Type()
    size := sizeof(typ)
    offset := int(size)
    if align(typ) > size {
        offset = int(align(typ))
    }
    ident := identifier{
        f:      f,
        name:   name,
        typ:    typ,
        param:  nil,
        local:  nil,
        value:  v,
        offset: -int(f.localIdentsSize()) - offset,
    }
    ident.initStorage(false)
    f.identifiers[name] = &ident
    // Zeroing the memory is done at the beginning of the function.
    return ident, nil
}
// valueNode returns the id of the value node for v, creating it (and
// the association) as needed. It may return zero for uninteresting
// values containing no pointers.
//
func (a *analysis) valueNode(v ssa.Value) nodeid {
    // Value nodes for locals are created en masse by genFunc.
    if id, ok := a.localval[v]; ok {
        return id
    }

    // Value nodes for globals are created on demand.
    id, ok := a.globalval[v]
    if !ok {
        var comment string
        if a.log != nil {
            comment = v.String()
        }
        id = a.addNodes(v.Type(), comment)
        if obj := a.objectNode(nil, v); obj != 0 {
            a.addressOf(v.Type(), id, obj)
        }
        a.setValueNode(v, id, nil)
    }
    return id
}
// Declare creates an llvm.dbg.declare call for the specified function
// parameter or local variable.
func (d *DIBuilder) Declare(b llvm.Builder, v ssa.Value, llv llvm.Value, paramIndex int) {
    tag := tagAutoVariable
    if paramIndex >= 0 {
        tag = tagArgVariable
    }
    var diFile llvm.Value
    var line int
    if file := d.fset.File(v.Pos()); file != nil {
        line = file.Line(v.Pos())
        diFile = d.getFile(file)
    }
    localVar := d.builder.CreateLocalVariable(d.scope(), llvm.DILocalVariable{
        Tag:   tag,
        Name:  llv.Name(),
        File:  diFile,
        Line:  line,
        ArgNo: paramIndex + 1,
        Type:  d.DIType(v.Type()),
    })
    expr := d.builder.CreateExpression(nil)
    d.builder.InsertDeclareAtEnd(llv, localVar, expr, b.GetInsertBlock())
}
func (f *Function) SliceLen(loc ssa.Instruction, slice ssa.Value, ident *identifier) (string, *Error) {
    if _, ok := slice.Type().(*types.Slice); !ok {
        panic(ice(fmt.Sprintf("getting len of slice, type should be slice, not (%v)", slice.Type().String())))
    }
    asm := fmt.Sprintf("// BEGIN SliceLen: slice (%v), ident (%v)\n", slice, ident.String())
    a, reg, err := f.LoadValue(loc, slice, sliceLenOffset(), sliceLenSize())
    asm += a
    if err != nil {
        return asm, err
    }
    a, err = f.StoreValue(loc, ident, reg)
    asm += a
    if err != nil {
        return asm, err
    }
    f.freeReg(reg)
    asm += fmt.Sprintf("// END SliceLen: slice (%v), ident (%v)\n", slice, *ident)
    return asm, nil
}
// setValueNode associates node id with the value v.
// cgn identifies the context iff v is a local variable.
//
func (a *analysis) setValueNode(v ssa.Value, id nodeid, cgn *cgnode) {
    if cgn != nil {
        a.localval[v] = id
    } else {
        a.globalval[v] = id
    }
    if a.log != nil {
        fmt.Fprintf(a.log, "\tval[%s] = n%d (%T)\n", v.Name(), id, v)
    }

    // Due to context-sensitivity, we may encounter the same Value
    // in many contexts. We merge them to a canonical node, since
    // that's what all clients want.

    // Record the (v, id) relation if the client has queried pts(v).
    if _, ok := a.config.Queries[v]; ok {
        t := v.Type()
        ptr, ok := a.result.Queries[v]
        if !ok {
            // First time? Create the canonical query node.
            ptr = Pointer{a, a.addNodes(t, "query")}
            a.result.Queries[v] = ptr
        }
        a.copy(ptr.n, id, a.sizeof(t))
    }

    // Record the (*v, id) relation if the client has queried pts(*v).
    if _, ok := a.config.IndirectQueries[v]; ok {
        t := v.Type()
        ptr, ok := a.result.IndirectQueries[v]
        if !ok {
            // First time? Create the canonical indirect query node.
            ptr = Pointer{a, a.addNodes(v.Type(), "query.indirect")}
            a.result.IndirectQueries[v] = ptr
        }
        a.genLoad(cgn, ptr.n, v, 0, a.sizeof(t))
    }
}
// storeRetvals takes retval (the SSA value in which the caller stores the
// return value(s)) and records the return value(s) of the callee function.
func (caller *Function) storeRetvals(infer *TypeInfer, retval ssa.Value, callee *Function) {
    if !callee.HasBody() {
        switch callee.Fn.Signature.Results().Len() {
        case 0:
            // Nothing.
        case 1:
            // Create an external instance because the return value may be used.
            caller.locals[retval] = &External{caller.Fn, retval.Type().Underlying(), caller.InstanceID()}
            infer.Logger.Print(caller.Sprintf(ExitSymbol + "external"))
        default:
            caller.locals[retval] = &External{caller.Fn, retval.Type().Underlying(), caller.InstanceID()}
            caller.tuples[caller.locals[retval]] = make(Tuples, callee.Fn.Signature.Results().Len())
            infer.Logger.Print(caller.Sprintf(ExitSymbol+"external len=%d", callee.Fn.Signature.Results().Len()))
        }
        return
    }
    switch len(callee.retvals) {
    case 0:
        // Nothing.
    case 1:
        // XXX Pick the last return value from the exit paths.
        // This assumes idiomatic Go, where the error path returns early:
        // https://golang.org/doc/effective_go.html#if
        caller.locals[retval] = callee.retvals[len(callee.retvals)-1]
        if a, ok := callee.arrays[caller.locals[retval]]; ok {
            caller.arrays[caller.locals[retval]] = a
        }
        if s, ok := callee.structs[caller.locals[retval]]; ok {
            caller.structs[caller.locals[retval]] = s
        }
        if m, ok := callee.maps[caller.locals[retval]]; ok {
            caller.maps[caller.locals[retval]] = m
        }
        if a, ok := callee.Prog.arrays[caller.locals[retval]]; ok {
            caller.arrays[caller.locals[retval]] = a
        }
        if s, ok := callee.Prog.structs[caller.locals[retval]]; ok {
            caller.structs[caller.locals[retval]] = s
        }
        switch inst := caller.locals[retval].(type) {
        case *Value:
            infer.Logger.Print(caller.Sprintf(ExitSymbol+"[1] %s", inst))
            return
        case *External:
            infer.Logger.Print(caller.Sprintf(ExitSymbol+"[1] (ext) %s", inst))
            return
        case *Const:
            infer.Logger.Print(caller.Sprintf(ExitSymbol+"[1] constant %s", inst))
            return
        default:
            infer.Logger.Fatalf("return[1]: %s: not an instance %+v", ErrUnknownValue, retval)
        }
    default:
        caller.locals[retval] = &Value{retval, caller.InstanceID(), int64(0)}
        if callee.Fn.Signature.Results().Len() == 1 {
            caller.locals[retval] = callee.retvals[len(callee.retvals)-1]
            if a, ok := callee.arrays[caller.locals[retval]]; ok {
                caller.arrays[caller.locals[retval]] = a
            }
            if s, ok := callee.structs[caller.locals[retval]]; ok {
                caller.structs[caller.locals[retval]] = s
            }
            if m, ok := callee.maps[caller.locals[retval]]; ok {
                caller.maps[caller.locals[retval]] = m
            }
            if a, ok := callee.Prog.arrays[caller.locals[retval]]; ok {
                caller.arrays[caller.locals[retval]] = a
            }
            if s, ok := callee.Prog.structs[caller.locals[retval]]; ok {
                caller.structs[caller.locals[retval]] = s
            }
        } else {
            caller.tuples[caller.locals[retval]] = make(Tuples, callee.Fn.Signature.Results().Len())
            for i := range callee.retvals {
                tupleIdx := i % callee.Fn.Signature.Results().Len()
                if callee.retvals[i] != nil {
                    caller.tuples[caller.locals[retval]][tupleIdx] = callee.retvals[i]
                }
                if a, ok := callee.arrays[callee.retvals[i]]; ok {
                    caller.arrays[callee.retvals[i]] = a
                }
                if s, ok := callee.structs[callee.retvals[i]]; ok {
                    caller.structs[callee.retvals[i]] = s
                }
                if m, ok := callee.maps[callee.retvals[i]]; ok {
                    caller.maps[callee.retvals[i]] = m
                }
                if a, ok := callee.Prog.arrays[callee.retvals[i]]; ok {
                    caller.arrays[callee.retvals[i]] = a
                }
                if s, ok := callee.Prog.structs[callee.retvals[i]]; ok {
                    caller.structs[callee.retvals[i]] = s
                }
            }
        }
        // XXX Pick the return values from the last exit path.
        // This assumes idiomatic Go, where the error path returns early:
        // https://golang.org/doc/effective_go.html#if
        infer.Logger.Print(caller.Sprintf(ExitSymbol+"[%d/%d] %v", callee.Fn.Signature.Results().Len(), len(callee.retvals), caller.tuples[caller.locals[retval]]))
    }
}
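// Hedged illustration, not part of the original source: the XXX comments above
// assume the idiomatic early-return error pattern shown below (requires the os
// import; the helper name openOrNil is invented). The error path exits first,
// so the last return along the exit paths carries the value that storeRetvals
// propagates to the caller.
func openOrNil(name string) *os.File {
    f, err := os.Open(name)
    if err != nil {
        return nil // early exit on the error path
    }
    return f // last exit path: the value picked from callee.retvals
}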
// objectNode returns the object to which v points, if known.
// In other words, if the points-to set of v is a singleton, it
// returns the sole label, zero otherwise.
//
// We exploit this information to make the generated constraints less
// dynamic. For example, a complex load constraint can be replaced by
// a simple copy constraint when the sole destination is known a priori.
//
// Some SSA instructions always have singleton points-to sets:
//     Alloc, Function, Global, MakeChan, MakeClosure, MakeInterface, MakeMap, MakeSlice.
// Others may be singletons depending on their operands:
//     FreeVar, Const, Convert, FieldAddr, IndexAddr, Slice.
//
// Idempotent. Objects are created as needed, possibly via recursion
// down the SSA value graph, e.g. IndexAddr(FieldAddr(Alloc)).
//
func (a *analysis) objectNode(cgn *cgnode, v ssa.Value) nodeid {
    switch v.(type) {
    case *ssa.Global, *ssa.Function, *ssa.Const, *ssa.FreeVar:
        // Global object.
        obj, ok := a.globalobj[v]
        if !ok {
            switch v := v.(type) {
            case *ssa.Global:
                obj = a.nextNode()
                a.addNodes(mustDeref(v.Type()), "global")
                a.endObject(obj, nil, v)

            case *ssa.Function:
                obj = a.makeFunctionObject(v, nil)

            case *ssa.Const:
                // not addressable

            case *ssa.FreeVar:
                // not addressable
            }

            if a.log != nil {
                fmt.Fprintf(a.log, "\tglobalobj[%s] = n%d\n", v, obj)
            }
            a.globalobj[v] = obj
        }
        return obj
    }

    // Local object.
    obj, ok := a.localobj[v]
    if !ok {
        switch v := v.(type) {
        case *ssa.Alloc:
            obj = a.nextNode()
            a.addNodes(mustDeref(v.Type()), "alloc")
            a.endObject(obj, cgn, v)

        case *ssa.MakeSlice:
            obj = a.nextNode()
            a.addNodes(sliceToArray(v.Type()), "makeslice")
            a.endObject(obj, cgn, v)

        case *ssa.MakeChan:
            obj = a.nextNode()
            a.addNodes(v.Type().Underlying().(*types.Chan).Elem(), "makechan")
            a.endObject(obj, cgn, v)

        case *ssa.MakeMap:
            obj = a.nextNode()
            tmap := v.Type().Underlying().(*types.Map)
            a.addNodes(tmap.Key(), "makemap.key")
            elem := a.addNodes(tmap.Elem(), "makemap.value")

            // To update the value field, MapUpdate
            // generates store-with-offset constraints which
            // the presolver can't model, so we must mark
            // those nodes indirect.
            for id, end := elem, elem+nodeid(a.sizeof(tmap.Elem())); id < end; id++ {
                a.mapValues = append(a.mapValues, id)
            }
            a.endObject(obj, cgn, v)

        case *ssa.MakeInterface:
            tConc := v.X.Type()
            obj = a.makeTagged(tConc, cgn, v)

            // Copy the value into it, if nontrivial.
            if x := a.valueNode(v.X); x != 0 {
                a.copy(obj+1, x, a.sizeof(tConc))
            }

        case *ssa.FieldAddr:
            if xobj := a.objectNode(cgn, v.X); xobj != 0 {
                obj = xobj + nodeid(a.offsetOf(mustDeref(v.X.Type()), v.Field))
            }

        case *ssa.IndexAddr:
            if xobj := a.objectNode(cgn, v.X); xobj != 0 {
                obj = xobj + 1
            }

        case *ssa.Slice:
            obj = a.objectNode(cgn, v.X)

        case *ssa.Convert:
            // TODO(adonovan): opt: handle these cases too:
            // - unsafe.Pointer->*T conversion acts like Alloc
            // - string->[]byte/[]rune conversion acts like MakeSlice
        }

        if a.log != nil {
            fmt.Fprintf(a.log, "\tlocalobj[%s] = n%d\n", v.Name(), obj)
        }
        a.localobj[v] = obj
    }
    return obj
}
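// Hedged illustration, not part of the original source: the fragment below
// (the type pair and function fieldAddrSingleton are invented for the example)
// compiles to SSA in which the FieldAddr operand is an Alloc, so its points-to
// set is a singleton. objectNode then returns the Alloc object plus
// offsetOf(y), and genOffsetAddr can pre-apply an addressOf constraint instead
// of emitting a dynamic offsetAddr constraint.
type pair struct{ x, y int }

func fieldAddrSingleton() *int {
    p := new(pair) // ssa.Alloc: creates exactly one object
    return &p.y    // ssa.FieldAddr: sole target known a priori
}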
func (f *Function) StoreValAddr(loc ssa.Instruction, val ssa.Value, addr *identifier) (string, *Error) {
    if ident := f.Ident(val); ident == nil {
        ice("error in allocating local")
    }
    if addr.isConst() {
        ice(fmt.Sprintf("invalid addr \"%v\"", addr))
    }

    asm := ""
    asm += fmt.Sprintf("// BEGIN StoreValAddr addr name:%v, val name:%v\n", addr.name, val.Name()) + asm

    if isComplex(val.Type()) {
        return ErrorMsg("complex32/64 unsupported")
    } else if isXmm(val.Type()) {
        a, valReg, err := f.LoadValue(loc, val, 0, f.sizeof(val))
        if err != nil {
            return a, err
        }
        asm += a
        a, err = f.StoreValue(loc, addr, valReg)
        if err != nil {
            return a, err
        }
        asm += a
        f.freeReg(valReg)
    } else {
        size := f.sizeof(val)
        iterations := size
        datasize := 1
        if size >= sizeBasic(types.Int64) {
            iterations = size / sizeBasic(types.Int64)
            datasize = 8
        } else if size >= sizeBasic(types.Int32) {
            iterations = size / sizeBasic(types.Int32)
            datasize = 4
        } else if size >= sizeBasic(types.Int16) {
            iterations = size / sizeBasic(types.Int16)
            datasize = 2
        }
        if size > sizeInt() {
            if size%sizeInt() != 0 {
                ice(fmt.Sprintf("Size (%v) not multiple of sizeInt (%v)", size, sizeInt()))
            }
        }
        for i := 0; i < int(iterations); i++ {
            offset := uint(i * datasize)
            a, valReg, err := f.LoadValue(loc, val, offset, uint(datasize))
            if err != nil {
                return a, err
            }
            asm += a
            a, err = f.AssignRegIdent(loc, valReg, addr, offset, uint(datasize))
            if err != nil {
                return a, err
            }
            asm += a
            f.freeReg(valReg)
        }
    }
    asm += fmt.Sprintf("// END StoreValAddr addr name:%v, val name:%v\n", addr.name, val.Name())
    return asm, nil
}