// Analyze runs the pointer analysis with the scope and options
// specified by config, and returns the (synthetic) root of the callgraph.
//
func Analyze(config *Config) *Result {
	// Allocate the analysis state up front: value/object node maps, the
	// memo table for type flattening, the intrinsics table, the solver
	// worklist, and the Result that accumulates query answers.
	a := &analysis{
		config:      config,
		log:         config.Log,
		prog:        config.prog(),
		globalval:   make(map[ssa.Value]nodeid),
		globalobj:   make(map[ssa.Value]nodeid),
		flattenMemo: make(map[types.Type][]*fieldInfo),
		hasher:      typemap.MakeHasher(),
		intrinsics:  make(map[*ssa.Function]intrinsic),
		work:        makeMapWorklist(),
		result: &Result{
			Queries:         make(map[ssa.Value][]Pointer),
			IndirectQueries: make(map[ssa.Value][]Pointer),
			PrintCalls:      make(map[*ssa.CallCommon]Pointer),
		},
	}

	// Flip this to true when debugging: it overrides config.Log and
	// dumps everything to stderr.
	if false {
		a.log = os.Stderr // for debugging crashes; extremely verbose
	}

	if a.log != nil {
		fmt.Fprintln(a.log, "======== NEW ANALYSIS ========")
	}

	// If the program imports "reflect", cache the package members the
	// analysis refers to later: reflect.Value (and its Call method),
	// reflect.Type, and the unexported rtype.
	if reflect := a.prog.ImportedPackage("reflect"); reflect != nil {
		rV := reflect.Object.Scope().Lookup("Value")
		a.reflectValueObj = rV
		a.reflectValueCall = a.prog.Method(rV.Type().MethodSet().Lookup(nil, "Call"))
		a.reflectType = reflect.Object.Scope().Lookup("Type").Type().(*types.Named)
		a.reflectRtypeObj = reflect.Object.Scope().Lookup("rtype")
		a.reflectRtypePtr = types.NewPointer(a.reflectRtypeObj.Type())

		// Override flattening of reflect.Value, treating it like a basic type.
		tReflectValue := a.reflectValueObj.Type()
		a.flattenMemo[tReflectValue] = []*fieldInfo{{typ: tReflectValue}}

		// Both typemaps share the analysis-wide hasher.
		a.rtypes.SetHasher(a.hasher)
		a.reflectZeros.SetHasher(a.hasher)
	}
	// Cache runtime.SetFinalizer if the program imports "runtime"
	// (presumably for special-case treatment elsewhere — confirm in solver).
	if runtime := a.prog.ImportedPackage("runtime"); runtime != nil {
		a.runtimeSetFinalizer = runtime.Func("SetFinalizer")
	}

	// Generate the constraint system; root is used below as the root
	// call-graph node.
	root := a.generate()

	if a.log != nil {
		// Show size of constraint system.
		counts := make(map[reflect.Type]int)
		for _, c := range a.constraints {
			counts[reflect.TypeOf(c)]++
		}
		fmt.Fprintf(a.log, "# constraints:\t%d\n", len(a.constraints))
		for t, n := range counts {
			fmt.Fprintf(a.log, "\t%s:\t%d\n", t, n)
		}
		fmt.Fprintf(a.log, "# nodes:\t%d\n", len(a.nodes))
	}

	//a.optimize()

	// Run the solver to a fixed point.
	a.solve()

	if a.log != nil {
		// Dump solution: the points-to set of every node that has one.
		for i, n := range a.nodes {
			if n.pts != nil {
				fmt.Fprintf(a.log, "pts(n%d) = %s : %s\n", i, n.pts, n.typ)
			}
		}
	}

	// Add dynamic edges to call graph: for each call site, one edge per
	// callee discovered in the site's points-to set.
	for _, caller := range a.cgnodes {
		for _, site := range caller.sites {
			for callee := range a.nodes[site.targets].pts {
				a.callEdge(site, callee)
			}
		}
	}

	if a.config.BuildCallGraph {
		a.result.CallGraph = &cgraph{root, a.cgnodes}
	}

	return a.result
}
// Hash functions and equivalence relation: // hashString computes the FNV hash of s. func hashString(s string) int { var h uint32 for i := 0; i < len(s); i++ { h ^= uint32(s[i]) h *= 16777619 } return int(h) } var ( mu sync.Mutex hasher = typemap.MakeHasher() ) // hashType returns a hash for t such that // types.IsIdentical(x, y) => hashType(x) == hashType(y). func hashType(t types.Type) int { mu.Lock() h := int(hasher.Hash(t)) mu.Unlock() return h } // usesBuiltinMap returns true if the built-in hash function and // equivalence relation for type t are consistent with those of the // interpreter's representation of type t. Such types are: all basic // types (bool, numbers, string), pointers and channels.
// Analyze runs the pointer analysis with the scope and options
// specified by config, and returns the (synthetic) root of the callgraph.
//
func Analyze(config *Config) *Result {
	// Allocate the analysis state up front: the value-node and
	// function-object maps, the memo table for type flattening, the
	// intrinsics table, probe sites, the solver worklist, and the map
	// that accumulates query answers.
	a := &analysis{
		config:      config,
		log:         config.Log,
		prog:        config.prog(),
		valNode:     make(map[ssa.Value]nodeid),
		flattenMemo: make(map[types.Type][]*fieldInfo),
		hasher:      typemap.MakeHasher(),
		intrinsics:  make(map[*ssa.Function]intrinsic),
		funcObj:     make(map[*ssa.Function]nodeid),
		probes:      make(map[*ssa.CallCommon]nodeid),
		work:        makeMapWorklist(),
		queries:     make(map[ssa.Value][]Pointer),
	}

	// If the program imports "reflect", cache the package members the
	// analysis refers to later: reflect.Value, reflect.Type, and the
	// unexported rtype.
	if reflect := a.prog.ImportedPackage("reflect"); reflect != nil {
		a.reflectValueObj = reflect.Object.Scope().Lookup("Value")
		a.reflectType = reflect.Object.Scope().Lookup("Type").Type().(*types.Named)
		a.reflectRtypeObj = reflect.Object.Scope().Lookup("rtype")
		a.reflectRtypePtr = types.NewPointer(a.reflectRtypeObj.Type())

		// Override flattening of reflect.Value, treating it like a basic type.
		tReflectValue := a.reflectValueObj.Type()
		a.flattenMemo[tReflectValue] = []*fieldInfo{{typ: tReflectValue}}

		// Both typemaps share the analysis-wide hasher.
		a.rtypes.SetHasher(a.hasher)
		a.reflectZeros.SetHasher(a.hasher)
	}

	// Flip this to true when debugging: it overrides config.Log and
	// dumps everything to stderr.
	if false {
		a.log = os.Stderr // for debugging crashes; extremely verbose
	}

	if a.log != nil {
		fmt.Fprintln(a.log, "======== NEW ANALYSIS ========")
	}

	// Generate the constraint system, then run the solver to a fixed point.
	a.generate()

	//a.optimize()

	a.solve()

	if a.log != nil {
		// Dump solution: the points-to set of every node that has one.
		for i, n := range a.nodes {
			if n.pts != nil {
				fmt.Fprintf(a.log, "pts(n%d) = %s : %s\n", i, n.pts, n.typ)
			}
		}
	}

	// Visit discovered call graph: for each call site, record the callees
	// found in its points-to set (when BuildCallGraph is set) and warn
	// about targets with no body and no intrinsic model, which make the
	// analysis unsound.
	for _, caller := range a.cgnodes {
		for _, site := range caller.sites {
			for nid := range a.nodes[site.targets].pts {
				callee := a.nodes[nid].obj.cgn

				if a.config.BuildCallGraph {
					site.callees = append(site.callees, callee)
				}

				// TODO(adonovan): de-dup these messages.
				// Warn about calls to non-intrinsic external functions.
				if fn := callee.fn; fn.Blocks == nil && a.findIntrinsic(fn) == nil {
					a.warnf(site.pos(), "unsound call to unknown intrinsic: %s", fn)
					a.warnf(fn.Pos(), " (declared here)")
				}
			}
		}
	}

	// Only materialize the call graph if the caller asked for it.
	var callgraph *cgraph
	if a.config.BuildCallGraph {
		callgraph = &cgraph{a.cgnodes}
	}

	return &Result{
		CallGraph: callgraph,
		Queries:   a.queries,
	}
}