// typeOKForCgoCall returns true if the type of arg is OK to pass to a
// C function using cgo. This is not true for Go types with embedded
// pointers.
func typeOKForCgoCall(t types.Type) bool {
	if t == nil {
		return true
	}
	switch t := t.Underlying().(type) {
	case *types.Chan, *types.Map, *types.Signature, *types.Slice:
		return false
	case *types.Pointer:
		return typeOKForCgoCall(t.Elem())
	case *types.Array:
		return typeOKForCgoCall(t.Elem())
	case *types.Struct:
		for i := 0; i < t.NumFields(); i++ {
			if !typeOKForCgoCall(t.Field(i).Type()) {
				return false
			}
		}
	}
	return true
}
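// Illustrative sketch (not part of the checker; the helper name and the
// hand-built types are hypothetical): exercising typeOKForCgoCall with
// types constructed via the standard go/types and go/token packages.
func exampleTypeOKForCgoCall() {
	intT := types.Typ[types.Int]

	// struct{ n int } contains no Go pointers: OK to pass to C.
	plain := types.NewStruct([]*types.Var{
		types.NewField(token.NoPos, nil, "n", intT, false),
	}, nil)

	// struct{ xs []int } embeds a slice header (a Go pointer): not OK.
	withSlice := types.NewStruct([]*types.Var{
		types.NewField(token.NoPos, nil, "xs", types.NewSlice(intT), false),
	}, nil)

	fmt.Println(typeOKForCgoCall(plain))     // true
	fmt.Println(typeOKForCgoCall(withSlice)) // false
}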
// equalType reports whether types x and y are structurally identical,
// returning nil if so, or a descriptive error locating the first
// difference otherwise.
func equalType(x, y types.Type) error {
	if reflect.TypeOf(x) != reflect.TypeOf(y) {
		return fmt.Errorf("unequal kinds: %T vs %T", x, y)
	}
	switch x := x.(type) {
	case *types.Interface:
		y := y.(*types.Interface)
		// TODO(gri): enable separate emission of Embedded interfaces
		// and ExplicitMethods then use this logic.
		// if x.NumEmbeddeds() != y.NumEmbeddeds() {
		// 	return fmt.Errorf("unequal number of embedded interfaces: %d vs %d",
		// 		x.NumEmbeddeds(), y.NumEmbeddeds())
		// }
		// for i := 0; i < x.NumEmbeddeds(); i++ {
		// 	xi := x.Embedded(i)
		// 	yi := y.Embedded(i)
		// 	if xi.String() != yi.String() {
		// 		return fmt.Errorf("mismatched %dth embedded interface: %s vs %s",
		// 			i, xi, yi)
		// 	}
		// }
		// if x.NumExplicitMethods() != y.NumExplicitMethods() {
		// 	return fmt.Errorf("unequal methods: %d vs %d",
		// 		x.NumExplicitMethods(), y.NumExplicitMethods())
		// }
		// for i := 0; i < x.NumExplicitMethods(); i++ {
		// 	xm := x.ExplicitMethod(i)
		// 	ym := y.ExplicitMethod(i)
		// 	if xm.Name() != ym.Name() {
		// 		return fmt.Errorf("mismatched %dth method: %s vs %s", i, xm, ym)
		// 	}
		// 	if err := equalType(xm.Type(), ym.Type()); err != nil {
		// 		return fmt.Errorf("mismatched %s method: %s", xm.Name(), err)
		// 	}
		// }
		if x.NumMethods() != y.NumMethods() {
			return fmt.Errorf("unequal methods: %d vs %d",
				x.NumMethods(), y.NumMethods())
		}
		for i := 0; i < x.NumMethods(); i++ {
			xm := x.Method(i)
			ym := y.Method(i)
			if xm.Name() != ym.Name() {
				return fmt.Errorf("mismatched %dth method: %s vs %s", i, xm, ym)
			}
			if err := equalType(xm.Type(), ym.Type()); err != nil {
				return fmt.Errorf("mismatched %s method: %s", xm.Name(), err)
			}
		}
	case *types.Array:
		y := y.(*types.Array)
		if x.Len() != y.Len() {
			return fmt.Errorf("unequal array lengths: %d vs %d", x.Len(), y.Len())
		}
		if err := equalType(x.Elem(), y.Elem()); err != nil {
			return fmt.Errorf("array elements: %s", err)
		}
	case *types.Basic:
		y := y.(*types.Basic)
		if x.Kind() != y.Kind() {
			return fmt.Errorf("unequal basic types: %s vs %s", x, y)
		}
	case *types.Chan:
		y := y.(*types.Chan)
		if x.Dir() != y.Dir() {
			return fmt.Errorf("unequal channel directions: %d vs %d",
				x.Dir(), y.Dir())
		}
		if err := equalType(x.Elem(), y.Elem()); err != nil {
			return fmt.Errorf("channel elements: %s", err)
		}
	case *types.Map:
		y := y.(*types.Map)
		if err := equalType(x.Key(), y.Key()); err != nil {
			return fmt.Errorf("map keys: %s", err)
		}
		if err := equalType(x.Elem(), y.Elem()); err != nil {
			return fmt.Errorf("map values: %s", err)
		}
	case *types.Named:
		y := y.(*types.Named)
		if x.String() != y.String() {
			return fmt.Errorf("unequal named types: %s vs %s", x, y)
		}
	case *types.Pointer:
		y := y.(*types.Pointer)
		if err := equalType(x.Elem(), y.Elem()); err != nil {
			return fmt.Errorf("pointer elements: %s", err)
		}
	case *types.Signature:
		y := y.(*types.Signature)
		if err := equalType(x.Params(), y.Params()); err != nil {
			return fmt.Errorf("parameters: %s", err)
		}
		if err := equalType(x.Results(), y.Results()); err != nil {
			return fmt.Errorf("results: %s", err)
		}
		if x.Variadic() != y.Variadic() {
			return fmt.Errorf("unequal variadicity: %t vs %t",
				x.Variadic(), y.Variadic())
		}
		if (x.Recv() != nil) != (y.Recv() != nil) {
			return fmt.Errorf("unequal receivers: %s vs %s", x.Recv(), y.Recv())
		}
		if x.Recv() != nil {
			// TODO(adonovan): fix: this assertion fires for interface methods.
			// The type of the receiver of an interface method is a named type
			// if the Package was loaded from export data, or an unnamed (interface)
			// type if the Package was produced by type-checking ASTs.
			// if err := equalType(x.Recv().Type(), y.Recv().Type()); err != nil {
			// 	return fmt.Errorf("receiver: %s", err)
			// }
		}
	case *types.Slice:
		y := y.(*types.Slice)
		if err := equalType(x.Elem(), y.Elem()); err != nil {
			return fmt.Errorf("slice elements: %s", err)
		}
	case *types.Struct:
		y := y.(*types.Struct)
		if x.NumFields() != y.NumFields() {
			return fmt.Errorf("unequal struct fields: %d vs %d",
				x.NumFields(), y.NumFields())
		}
		for i := 0; i < x.NumFields(); i++ {
			xf := x.Field(i)
			yf := y.Field(i)
			if xf.Name() != yf.Name() {
				return fmt.Errorf("mismatched fields: %s vs %s", xf, yf)
			}
			if err := equalType(xf.Type(), yf.Type()); err != nil {
				return fmt.Errorf("struct field %s: %s", xf.Name(), err)
			}
			if x.Tag(i) != y.Tag(i) {
				return fmt.Errorf("struct field %s has unequal tags: %q vs %q",
					xf.Name(), x.Tag(i), y.Tag(i))
			}
		}
	case *types.Tuple:
		y := y.(*types.Tuple)
		if x.Len() != y.Len() {
			return fmt.Errorf("unequal tuple lengths: %d vs %d", x.Len(), y.Len())
		}
		for i := 0; i < x.Len(); i++ {
			if err := equalType(x.At(i).Type(), y.At(i).Type()); err != nil {
				return fmt.Errorf("tuple element %d: %s", i, err)
			}
		}
	}
	return nil
}
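// Illustrative sketch (hypothetical helper, not part of the test suite):
// equalType returns nil for structurally identical types and wraps a
// descriptive error at the first point of difference.
func exampleEqualType() {
	a := types.NewSlice(types.Typ[types.Int])
	b := types.NewSlice(types.Typ[types.Int])
	c := types.NewSlice(types.Typ[types.String])

	fmt.Println(equalType(a, b)) // <nil>
	fmt.Println(equalType(a, c)) // slice elements: unequal basic types: int vs string
}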
// zero returns a new "zero" value of the specified type.
func zero(t types.Type) value {
	switch t := t.(type) {
	case *types.Basic:
		if t.Kind() == types.UntypedNil {
			panic("untyped nil has no zero value")
		}
		if t.Info()&types.IsUntyped != 0 {
			// TODO(adonovan): make it an invariant that
			// this is unreachable. Currently some
			// constants have 'untyped' types when they
			// should be defaulted by the typechecker.
			t = ssa.DefaultType(t).(*types.Basic)
		}
		switch t.Kind() {
		case types.Bool:
			return false
		case types.Int:
			return int(0)
		case types.Int8:
			return int8(0)
		case types.Int16:
			return int16(0)
		case types.Int32:
			return int32(0)
		case types.Int64:
			return int64(0)
		case types.Uint:
			return uint(0)
		case types.Uint8:
			return uint8(0)
		case types.Uint16:
			return uint16(0)
		case types.Uint32:
			return uint32(0)
		case types.Uint64:
			return uint64(0)
		case types.Uintptr:
			return uintptr(0)
		case types.Float32:
			return float32(0)
		case types.Float64:
			return float64(0)
		case types.Complex64:
			return complex64(0)
		case types.Complex128:
			return complex128(0)
		case types.String:
			return ""
		case types.UnsafePointer:
			return unsafe.Pointer(nil)
		default:
			panic(fmt.Sprint("zero for unexpected type:", t))
		}
	case *types.Pointer:
		return (*value)(nil)
	case *types.Array:
		a := make(array, t.Len())
		for i := range a {
			a[i] = zero(t.Elem())
		}
		return a
	case *types.Named:
		return zero(t.Underlying())
	case *types.Interface:
		return iface{} // nil type, methodset and value
	case *types.Slice:
		return []value(nil)
	case *types.Struct:
		s := make(structure, t.NumFields())
		for i := range s {
			s[i] = zero(t.Field(i).Type())
		}
		return s
	case *types.Tuple:
		if t.Len() == 1 {
			return zero(t.At(0).Type()) // unwrap singleton tuple
		}
		s := make(tuple, t.Len())
		for i := range s {
			s[i] = zero(t.At(i).Type())
		}
		return s
	case *types.Chan:
		return chan value(nil)
	case *types.Map:
		if usesBuiltinMap(t.Key()) {
			return map[value]value(nil)
		}
		return (*hashmap)(nil)
	case *types.Signature:
		return (*ssa.Function)(nil)
	}
	panic(fmt.Sprint("zero: unexpected ", t))
}
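// Illustrative sketch (hypothetical helper; assumes the interpreter's
// unexported value, array and structure types defined in this package):
// zero recursively populates aggregates with element zero values.
func exampleZero() {
	arr := types.NewArray(types.Typ[types.Int64], 3)
	fmt.Println(zero(arr)) // an array of three int64(0) elements

	ptr := types.NewPointer(types.Typ[types.Bool])
	fmt.Println(zero(ptr) == (*value)(nil)) // true: a typed nil pointer
}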
// flatten returns a list of directly contained fields in the preorder
// traversal of the type tree of t. The resulting elements are all
// scalars (basic types or pointerlike types), except for struct/array
// "identity" nodes, whose type is that of the aggregate.
//
// reflect.Value is considered pointerlike, similar to interface{}.
//
// Callers must not mutate the result.
//
func (a *analysis) flatten(t types.Type) []*fieldInfo {
	fl, ok := a.flattenMemo[t]
	if !ok {
		switch t := t.(type) {
		case *types.Named:
			u := t.Underlying()
			if isInterface(u) {
				// Debuggability hack: don't remove
				// the named type from interfaces as
				// they're very verbose.
				fl = append(fl, &fieldInfo{typ: t})
			} else {
				fl = a.flatten(u)
			}

		case *types.Basic,
			*types.Signature,
			*types.Chan,
			*types.Map,
			*types.Interface,
			*types.Slice,
			*types.Pointer:
			fl = append(fl, &fieldInfo{typ: t})

		case *types.Array:
			fl = append(fl, &fieldInfo{typ: t}) // identity node
			for _, fi := range a.flatten(t.Elem()) {
				fl = append(fl, &fieldInfo{typ: fi.typ, op: true, tail: fi})
			}

		case *types.Struct:
			fl = append(fl, &fieldInfo{typ: t}) // identity node
			for i, n := 0, t.NumFields(); i < n; i++ {
				f := t.Field(i)
				for _, fi := range a.flatten(f.Type()) {
					fl = append(fl, &fieldInfo{typ: fi.typ, op: f, tail: fi})
				}
			}

		case *types.Tuple:
			// No identity node: tuples are never address-taken.
			n := t.Len()
			if n == 1 {
				// Don't add a fieldInfo link for singletons,
				// e.g. in params/results.
				fl = append(fl, a.flatten(t.At(0).Type())...)
			} else {
				for i := 0; i < n; i++ {
					f := t.At(i)
					for _, fi := range a.flatten(f.Type()) {
						fl = append(fl, &fieldInfo{typ: fi.typ, op: i, tail: fi})
					}
				}
			}

		default:
			panic(t)
		}

		a.flattenMemo[t] = fl
	}

	return fl
}
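// Worked example (informal sketch, not part of the analysis): for
//
//	type T struct {
//		x    int
//		next *T
//	}
//
// a.flatten(T) yields three entries in preorder: an identity node whose
// typ is the struct itself, then one leaf per scalar or pointerlike
// field (typ=int reached via field x, typ=*T via field next), each
// linked to its parent through op/tail. Pointers, slices, maps,
// channels, interfaces and signatures never recurse — they become
// single leaves — which is why the self-referential *T field here
// causes no infinite recursion.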
// hashFor computes the hash of t.
func (h Hasher) hashFor(t types.Type) uint32 {
	// See Identical for rationale.
	switch t := t.(type) {
	case *types.Basic:
		return uint32(t.Kind())

	case *types.Array:
		return 9043 + 2*uint32(t.Len()) + 3*h.Hash(t.Elem())

	case *types.Slice:
		return 9049 + 2*h.Hash(t.Elem())

	case *types.Struct:
		var hash uint32 = 9059
		for i, n := 0, t.NumFields(); i < n; i++ {
			f := t.Field(i)
			if f.Anonymous() {
				hash += 8861
			}
			hash += hashString(t.Tag(i))
			hash += hashString(f.Name()) // (ignore f.Pkg)
			hash += h.Hash(f.Type())
		}
		return hash

	case *types.Pointer:
		return 9067 + 2*h.Hash(t.Elem())

	case *types.Signature:
		var hash uint32 = 9091
		if t.Variadic() {
			hash *= 8863
		}
		return hash + 3*h.hashTuple(t.Params()) + 5*h.hashTuple(t.Results())

	case *types.Interface:
		var hash uint32 = 9103
		for i, n := 0, t.NumMethods(); i < n; i++ {
			// See go/types.identicalMethods for rationale.
			// Method order is not significant.
			// Ignore m.Pkg().
			m := t.Method(i)
			hash += 3*hashString(m.Name()) + 5*h.Hash(m.Type())
		}
		return hash

	case *types.Map:
		return 9109 + 2*h.Hash(t.Key()) + 3*h.Hash(t.Elem())

	case *types.Chan:
		return 9127 + 2*uint32(t.Dir()) + 3*h.Hash(t.Elem())

	case *types.Named:
		// Not safe with a copying GC; objects may move.
		return uint32(reflect.ValueOf(t.Obj()).Pointer())

	case *types.Tuple:
		return h.hashTuple(t)
	}
	panic(t)
}
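// Illustrative sketch (hypothetical helper; assumes this package's
// exported MakeHasher constructor and Hasher.Hash method, which
// dispatch to hashFor): structurally identical types hash equally,
// even when they are distinct type objects.
func exampleHashFor() {
	h := MakeHasher()
	a := types.NewPointer(types.Typ[types.Float64])
	b := types.NewPointer(types.Typ[types.Float64])
	fmt.Println(h.Hash(a) == h.Hash(b)) // true: the hash depends only on structure
}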