// Sizeof returns the size in bytes of a value of type typ for the target.
func (tm *LLVMTypeMap) Sizeof(typ types.Type) int64 {
	switch typ := typ.Underlying().(type) {
	case *types.Basic:
		switch typ.Kind() {
		case types.Int, types.Uint:
			return int64(tm.target.TypeAllocSize(tm.inttype))
		case types.Uintptr, types.UnsafePointer:
			return int64(tm.target.PointerSize())
		case types.String:
			return 2 * int64(tm.target.PointerSize())
		}
		return types.DefaultSizeof(typ)
	case *types.Array:
		eltsize := tm.Sizeof(typ.Elem())
		eltalign := tm.Alignof(typ.Elem())
		var eltpad int64
		if eltsize%eltalign != 0 {
			eltpad = eltalign - (eltsize % eltalign)
		}
		return (eltsize + eltpad) * typ.Len()
	case *types.Struct:
		if typ.NumFields() == 0 {
			return 0
		}
		fields := make([]*types.Field, int(typ.NumFields()))
		for i := range fields {
			fields[i] = typ.Field(i)
		}
		offsets := tm.Offsetsof(fields)
		n := len(fields)
		return offsets[n-1] + tm.Sizeof(fields[n-1].Type)
	}
	return int64(tm.target.PointerSize())
}
// flatten returns a list of directly contained fields in the preorder
// traversal of the type tree of t.  The resulting elements are all
// scalars (basic types or pointerlike types), except for struct/array
// "identity" nodes, whose type is that of the aggregate.
//
// reflect.Value is considered pointerlike, similar to interface{}.
//
// Callers must not mutate the result.
func (a *analysis) flatten(t types.Type) []*fieldInfo {
	fl, ok := a.flattenMemo[t]
	if !ok {
		switch t := t.(type) {
		case *types.Named:
			u := t.Underlying()
			if _, ok := u.(*types.Interface); ok {
				// Debuggability hack: don't remove
				// the named type from interfaces as
				// they're very verbose.
				fl = append(fl, &fieldInfo{typ: t})
			} else {
				fl = a.flatten(u)
			}

		case *types.Basic, *types.Signature, *types.Chan,
			*types.Map, *types.Interface, *types.Slice, *types.Pointer:
			fl = append(fl, &fieldInfo{typ: t})

		case *types.Array:
			fl = append(fl, &fieldInfo{typ: t}) // identity node
			for _, fi := range a.flatten(t.Elem()) {
				fl = append(fl, &fieldInfo{typ: fi.typ, op: true, tail: fi})
			}

		case *types.Struct:
			fl = append(fl, &fieldInfo{typ: t}) // identity node
			for i, n := 0, t.NumFields(); i < n; i++ {
				f := t.Field(i)
				for _, fi := range a.flatten(f.Type()) {
					fl = append(fl, &fieldInfo{typ: fi.typ, op: f, tail: fi})
				}
			}

		case *types.Tuple:
			// No identity node: tuples are never address-taken.
			for i, n := 0, t.Len(); i < n; i++ {
				f := t.At(i)
				for _, fi := range a.flatten(f.Type()) {
					fl = append(fl, &fieldInfo{typ: fi.typ, op: i, tail: fi})
				}
			}

		case *types.Builtin:
			panic("flatten(*types.Builtin)") // not the type of any value

		default:
			panic(t)
		}

		a.flattenMemo[t] = fl
	}
	return fl
}
// Alignof returns the alignment in bytes of a value of type typ for the target.
func (tm *llvmTypeMap) Alignof(typ types.Type) int64 {
	switch typ := typ.Underlying().(type) {
	case *types.Array:
		return tm.Alignof(typ.Elem())
	case *types.Basic:
		switch typ.Kind() {
		case types.Int, types.Uint, types.Int64, types.Uint64,
			types.Float64, types.Complex64, types.Complex128:
			return int64(tm.target.TypeAllocSize(tm.inttype))
		case types.Uintptr, types.UnsafePointer, types.String:
			return int64(tm.target.PointerSize())
		}
		return tm.StdSizes.Alignof(typ)
	case *types.Struct:
		max := int64(1)
		for i := 0; i < typ.NumFields(); i++ {
			f := typ.Field(i)
			a := tm.Alignof(f.Type())
			if a > max {
				max = a
			}
		}
		return max
	}
	return int64(tm.target.PointerSize())
}
// tollvmDebugDescriptor maps a Go type to an LLVM debug type descriptor.
func (c *compiler) tollvmDebugDescriptor(t types.Type) llvm.DebugDescriptor {
	switch t := t.(type) {
	case *types.Pointer:
		return llvm.NewPointerDerivedType(c.tollvmDebugDescriptor(t.Elem()))
	case nil:
		return void_debug_type
	}
	bt := &llvm.BasicTypeDescriptor{
		Name:      c.types.TypeString(t),
		Size:      uint64(c.types.Sizeof(t) * 8),
		Alignment: uint64(c.types.Alignof(t) * 8),
	}
	if basic, ok := t.(*types.Basic); ok {
		switch bi := basic.Info(); {
		case bi&types.IsBoolean != 0:
			bt.TypeEncoding = llvm.DW_ATE_boolean
		case bi&types.IsUnsigned != 0:
			bt.TypeEncoding = llvm.DW_ATE_unsigned
		case bi&types.IsInteger != 0:
			bt.TypeEncoding = llvm.DW_ATE_signed
		case bi&types.IsFloat != 0:
			bt.TypeEncoding = llvm.DW_ATE_float
		}
	}
	return bt
}
// hashFor computes the hash of t.
func (h Hasher) hashFor(t types.Type) uint32 {
	// See IsIdentical for rationale.
	switch t := t.(type) {
	case *types.Basic:
		return uint32(t.Kind())

	case *types.Array:
		return 9043 + 2*uint32(t.Len()) + 3*h.Hash(t.Elem())

	case *types.Slice:
		return 9049 + 2*h.Hash(t.Elem())

	case *types.Struct:
		var hash uint32 = 9059
		for i, n := 0, t.NumFields(); i < n; i++ {
			f := t.Field(i)
			if f.Anonymous() {
				hash += 8861
			}
			hash += hashString(t.Tag(i))
			hash += hashString(f.Name()) // (ignore f.Pkg)
			hash += h.Hash(f.Type())
		}
		return hash

	case *types.Pointer:
		return 9067 + 2*h.Hash(t.Elem())

	case *types.Signature:
		var hash uint32 = 9091
		if t.IsVariadic() {
			hash *= 8863
		}
		return hash + 3*h.hashTuple(t.Params()) + 5*h.hashTuple(t.Results())

	case *types.Interface:
		var hash uint32 = 9103
		for i, n := 0, t.NumMethods(); i < n; i++ {
			// See go/types.identicalMethods for rationale.
			// Method order is not significant.
			// Ignore m.Pkg().
			m := t.Method(i)
			hash += 3*hashString(m.Name()) + 5*h.Hash(m.Type())
		}
		return hash

	case *types.Map:
		return 9109 + 2*h.Hash(t.Key()) + 3*h.Hash(t.Elem())

	case *types.Chan:
		return 9127 + 2*uint32(t.Dir()) + 3*h.Hash(t.Elem())

	case *types.Named:
		// Not safe with a copying GC; objects may move.
		return uint32(uintptr(unsafe.Pointer(t.Obj())))
	}
	panic("unexpected type")
}
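// Example (added): a structural hash like hashFor is what lets a type-keyed
// map bucket types by identity rather than by pointer. A minimal sketch using
// golang.org/x/tools/go/types/typeutil.Map, which pairs such a Hasher with
// types.Identical; this is an illustration, not part of the snippet above.
package main

import (
	"fmt"
	"go/types"

	"golang.org/x/tools/go/types/typeutil"
)

func main() {
	// Two structurally identical slice types built independently.
	a := types.NewSlice(types.Typ[types.Int])
	b := types.NewSlice(types.Typ[types.Int])

	var m typeutil.Map
	m.Set(a, "element is int")

	// b hashes to the same bucket and is identical to a, so the lookup hits.
	fmt.Println(m.At(b)) // "element is int"
}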
// canHaveConcreteMethods returns true iff typ may have concrete
// methods associated with it.  Callers must supply allowPtr=true.
//
// TODO(gri): consider putting this in go/types.  It's surprisingly subtle.
func canHaveConcreteMethods(typ types.Type, allowPtr bool) bool {
	switch typ := typ.(type) {
	case *types.Pointer:
		return allowPtr && canHaveConcreteMethods(typ.Elem(), false)
	case *types.Named:
		switch typ.Underlying().(type) {
		case *types.Pointer, *types.Interface:
			return false
		}
		return true
	case *types.Struct:
		return true
	}
	return false
}
// Sizeof returns the size in bytes of a value of type typ for the target.
func (tm *llvmTypeMap) Sizeof(typ types.Type) int64 {
	switch typ := typ.Underlying().(type) {
	case *types.Basic:
		switch typ.Kind() {
		case types.Int, types.Uint:
			return int64(tm.target.TypeAllocSize(tm.inttype))
		case types.Uintptr, types.UnsafePointer:
			return int64(tm.target.PointerSize())
		}
		return tm.StdSizes.Sizeof(typ)
	case *types.Array:
		eltsize := tm.Sizeof(typ.Elem())
		eltalign := tm.Alignof(typ.Elem())
		var eltpad int64
		if eltsize%eltalign != 0 {
			eltpad = eltalign - (eltsize % eltalign)
		}
		return (eltsize + eltpad) * typ.Len()
	case *types.Slice:
		return 3 * int64(tm.target.PointerSize())
	case *types.Struct:
		n := typ.NumFields()
		if n == 0 {
			return 0
		}
		fields := make([]*types.Var, n)
		for i := range fields {
			fields[i] = typ.Field(i)
		}
		offsets := tm.Offsetsof(fields)
		return offsets[n-1] + tm.Sizeof(fields[n-1].Type())
	case *types.Interface:
		return int64((2 + typ.NumMethods()) * tm.target.PointerSize())
	}
	return int64(tm.target.PointerSize())
}
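// Example (added): for basic kinds not special-cased above, Sizeof falls back
// to go/types' StdSizes. A self-contained sketch of that default layout
// computation; WordSize=8/MaxAlign=8 are assumed stand-ins for the target.
package main

import (
	"fmt"
	"go/token"
	"go/types"
)

func main() {
	sizes := &types.StdSizes{WordSize: 8, MaxAlign: 8}

	// struct { a int8; b int64; c int8 }
	fields := []*types.Var{
		types.NewField(token.NoPos, nil, "a", types.Typ[types.Int8], false),
		types.NewField(token.NoPos, nil, "b", types.Typ[types.Int64], false),
		types.NewField(token.NoPos, nil, "c", types.Typ[types.Int8], false),
	}
	s := types.NewStruct(fields, nil)

	fmt.Println(sizes.Offsetsof(fields)) // [0 8 16]: b is aligned to 8 bytes
	fmt.Println(sizes.Alignof(s))        // 8: max alignment of any field
	fmt.Println(sizes.Sizeof(s))         // 24: 17 bytes of data padded to alignment
}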
// typeBaseType returns the base type for a types.Type.
func typeBaseType(t types.Type) types.Type {
	switch t := t.(type) {
	case *types.Array:
		return typeBaseType(t.Elem())
	case *types.Pointer:
		return typeBaseType(t.Deref())
	case *types.Map:
		// TODO(sqs): also return Key type; typeBaseType needs to return multiple results?
		return typeBaseType(t.Elem())
	case *types.Slice:
		return typeBaseType(t.Elem())
	}
	return t
}
// Convert converts the value v to the destination type dsttyp.
func (v *LLVMValue) Convert(dsttyp types.Type) Value {
	b := v.compiler.builder

	// If it's a stack allocated value, we'll want to compare the
	// value type, not the pointer type.
	srctyp := v.typ

	// Get the underlying types.
	origdsttyp := dsttyp
	dsttyp = dsttyp.Underlying()
	srctyp = srctyp.Underlying()

	// Identical (underlying) types? Just swap in the destination type.
	if types.IsIdentical(srctyp, dsttyp) {
		// A method converted to a function type without the receiver
		// is where we convert a "method value" into a function.
		if srctyp, ok := srctyp.(*types.Signature); ok && srctyp.Recv() != nil {
			if dsttyp, ok := dsttyp.(*types.Signature); ok && dsttyp.Recv() == nil {
				return v.convertMethodValue(origdsttyp)
			}
		}
		// TODO avoid load here by reusing pointer value, if exists.
		return v.compiler.NewValue(v.LLVMValue(), origdsttyp)
	}

	// Both pointer types with identical underlying types? Same as above.
	if srctyp, ok := srctyp.(*types.Pointer); ok {
		if dsttyp, ok := dsttyp.(*types.Pointer); ok {
			srctyp := srctyp.Elem().Underlying()
			dsttyp := dsttyp.Elem().Underlying()
			if types.IsIdentical(srctyp, dsttyp) {
				return v.compiler.NewValue(v.LLVMValue(), origdsttyp)
			}
		}
	}

	// Convert from an interface type.
	if _, isinterface := srctyp.(*types.Interface); isinterface {
		if interface_, isinterface := dsttyp.(*types.Interface); isinterface {
			return v.mustConvertI2I(interface_)
		} else {
			return v.mustConvertI2V(origdsttyp)
		}
	}

	// Converting to an interface type.
	if interface_, isinterface := dsttyp.(*types.Interface); isinterface {
		return v.convertV2I(interface_)
	}

	byteslice := types.NewSlice(types.Typ[types.Byte])
	runeslice := types.NewSlice(types.Typ[types.Rune])

	// string ->
	if isString(srctyp) {
		// (untyped) string -> string
		// XXX should untyped strings be able to escape go/types?
		if isString(dsttyp) {
			return v.compiler.NewValue(v.LLVMValue(), origdsttyp)
		}
		// string -> []byte
		if types.IsIdentical(dsttyp, byteslice) {
			c := v.compiler
			value := v.LLVMValue()
			strdata := c.builder.CreateExtractValue(value, 0, "")
			strlen := c.builder.CreateExtractValue(value, 1, "")
			// Data must be copied, to prevent changes in
			// the byte slice from mutating the string.
			newdata := c.builder.CreateArrayMalloc(strdata.Type().ElementType(), strlen, "")
			memcpy := c.NamedFunction("runtime.memcpy", "func(uintptr, uintptr, uintptr)")
			c.builder.CreateCall(memcpy, []llvm.Value{
				c.builder.CreatePtrToInt(newdata, c.target.IntPtrType(), ""),
				c.builder.CreatePtrToInt(strdata, c.target.IntPtrType(), ""),
				strlen,
			}, "")
			strdata = newdata
			struct_ := llvm.Undef(c.types.ToLLVM(byteslice))
			struct_ = c.builder.CreateInsertValue(struct_, strdata, 0, "")
			struct_ = c.builder.CreateInsertValue(struct_, strlen, 1, "")
			struct_ = c.builder.CreateInsertValue(struct_, strlen, 2, "")
			return c.NewValue(struct_, byteslice)
		}
		// string -> []rune
		if types.IsIdentical(dsttyp, runeslice) {
			return v.stringToRuneSlice()
		}
	}

	// []byte -> string
	if types.IsIdentical(srctyp, byteslice) && isString(dsttyp) {
		c := v.compiler
		value := v.LLVMValue()
		data := c.builder.CreateExtractValue(value, 0, "")
		len := c.builder.CreateExtractValue(value, 1, "")
		// Data must be copied, to prevent changes in
		// the byte slice from mutating the string.
		newdata := c.builder.CreateArrayMalloc(data.Type().ElementType(), len, "")
		memcpy := c.NamedFunction("runtime.memcpy", "func(uintptr, uintptr, uintptr)")
		c.builder.CreateCall(memcpy, []llvm.Value{
			c.builder.CreatePtrToInt(newdata, c.target.IntPtrType(), ""),
			c.builder.CreatePtrToInt(data, c.target.IntPtrType(), ""),
			len,
		}, "")
		data = newdata
		struct_ := llvm.Undef(c.types.ToLLVM(types.Typ[types.String]))
		struct_ = c.builder.CreateInsertValue(struct_, data, 0, "")
		struct_ = c.builder.CreateInsertValue(struct_, len, 1, "")
		return c.NewValue(struct_, types.Typ[types.String])
	}

	// []rune -> string
	if types.IsIdentical(srctyp, runeslice) && isString(dsttyp) {
		return v.runeSliceToString()
	}

	// rune -> string
	if isString(dsttyp) && isInteger(srctyp) {
		return v.runeToString()
	}

	// TODO other special conversions?
	llvm_type := v.compiler.types.ToLLVM(dsttyp)

	// Unsafe pointer conversions.
	if dsttyp == types.Typ[types.UnsafePointer] {
		// X -> unsafe.Pointer
		if _, isptr := srctyp.(*types.Pointer); isptr {
			value := b.CreatePtrToInt(v.LLVMValue(), llvm_type, "")
			return v.compiler.NewValue(value, origdsttyp)
		} else if srctyp == types.Typ[types.Uintptr] {
			return v.compiler.NewValue(v.LLVMValue(), origdsttyp)
		}
	} else if srctyp == types.Typ[types.UnsafePointer] {
		// unsafe.Pointer -> X
		if _, isptr := dsttyp.(*types.Pointer); isptr {
			value := b.CreateIntToPtr(v.LLVMValue(), llvm_type, "")
			return v.compiler.NewValue(value, origdsttyp)
		} else if dsttyp == types.Typ[types.Uintptr] {
			return v.compiler.NewValue(v.LLVMValue(), origdsttyp)
		}
	}

	lv := v.LLVMValue()
	srcType := lv.Type()
	switch srcType.TypeKind() {
	case llvm.IntegerTypeKind:
		switch llvm_type.TypeKind() {
		case llvm.IntegerTypeKind:
			srcBits := srcType.IntTypeWidth()
			dstBits := llvm_type.IntTypeWidth()
			delta := srcBits - dstBits
			switch {
			case delta < 0:
				// TODO check if (un)signed, use S/ZExt accordingly.
				lv = b.CreateZExt(lv, llvm_type, "")
			case delta > 0:
				lv = b.CreateTrunc(lv, llvm_type, "")
			}
			return v.compiler.NewValue(lv, origdsttyp)
		case llvm.FloatTypeKind, llvm.DoubleTypeKind:
			if !isUnsigned(v.Type()) {
				lv = b.CreateSIToFP(lv, llvm_type, "")
			} else {
				lv = b.CreateUIToFP(lv, llvm_type, "")
			}
			return v.compiler.NewValue(lv, origdsttyp)
		}
	case llvm.DoubleTypeKind:
		switch llvm_type.TypeKind() {
		case llvm.FloatTypeKind:
			lv = b.CreateFPTrunc(lv, llvm_type, "")
			return v.compiler.NewValue(lv, origdsttyp)
		case llvm.IntegerTypeKind:
			if !isUnsigned(dsttyp) {
				lv = b.CreateFPToSI(lv, llvm_type, "")
			} else {
				lv = b.CreateFPToUI(lv, llvm_type, "")
			}
			return v.compiler.NewValue(lv, origdsttyp)
		}
	case llvm.FloatTypeKind:
		switch llvm_type.TypeKind() {
		case llvm.DoubleTypeKind:
			lv = b.CreateFPExt(lv, llvm_type, "")
			return v.compiler.NewValue(lv, origdsttyp)
		case llvm.IntegerTypeKind:
			if !isUnsigned(dsttyp) {
				lv = b.CreateFPToSI(lv, llvm_type, "")
			} else {
				lv = b.CreateFPToUI(lv, llvm_type, "")
			}
			return v.compiler.NewValue(lv, origdsttyp)
		}
	}

	// Complex -> complex. Complexes are only convertible to other
	// complexes, constant conversions aside. So we can just check the
	// source type here; given that the types are not identical
	// (checked above), we can assume the destination type is the
	// alternate complex type.
	if isComplex(srctyp) {
		var fpcast func(*Builder, llvm.Value, llvm.Type, string) llvm.Value
		var fptype llvm.Type
		if srctyp == types.Typ[types.Complex64] {
			fpcast = (*Builder).CreateFPExt
			fptype = llvm.DoubleType()
		} else {
			fpcast = (*Builder).CreateFPTrunc
			fptype = llvm.FloatType()
		}
		if fpcast != nil {
			realv := b.CreateExtractValue(lv, 0, "")
			imagv := b.CreateExtractValue(lv, 1, "")
			realv = fpcast(b, realv, fptype, "")
			imagv = fpcast(b, imagv, fptype, "")
			lv = llvm.Undef(v.compiler.types.ToLLVM(dsttyp))
			lv = b.CreateInsertValue(lv, realv, 0, "")
			lv = b.CreateInsertValue(lv, imagv, 1, "")
			return v.compiler.NewValue(lv, origdsttyp)
		}
	}

	srcstr := v.compiler.types.TypeString(v.typ)
	dststr := v.compiler.types.TypeString(origdsttyp)
	panic(fmt.Sprintf("unimplemented conversion: %s -> %s", srcstr, dststr))
}
// matchArgTypeInternal is the internal version of matchArgType. It carries a map
// remembering what types are in progress so we don't recur when faced with recursive
// types or mutually recursive types.
func (f *File) matchArgTypeInternal(t printfArgType, typ types.Type, arg ast.Expr, inProgress map[types.Type]bool) bool {
	// %v, %T accept any argument type.
	if t == anyType {
		return true
	}
	if typ == nil {
		// external call
		typ = f.pkg.types[arg].Type
		if typ == nil {
			return true // probably a type check problem
		}
	}
	// If the type implements fmt.Formatter, we have nothing to check.
	// But (see issue 6259) that's not easy to verify, so instead we see
	// if its method set contains a Format function. We could do better,
	// even now, but we don't need to be 100% accurate. Wait for 6259 to
	// be fixed instead. TODO.
	if f.hasMethod(typ, "Format") {
		return true
	}
	// If we can use a string, might arg (dynamically) implement the Stringer or Error interface?
	if t&argString != 0 {
		if types.AssertableTo(errorType, typ) || types.AssertableTo(stringerType, typ) {
			return true
		}
	}

	typ = typ.Underlying()
	if inProgress[typ] {
		// We're already looking at this type. The call that started it will take care of it.
		return true
	}
	inProgress[typ] = true

	switch typ := typ.(type) {
	case *types.Signature:
		return t&argPointer != 0

	case *types.Map:
		// Recur: map[int]int matches %d.
		return t&argPointer != 0 ||
			(f.matchArgTypeInternal(t, typ.Key(), arg, inProgress) && f.matchArgTypeInternal(t, typ.Elem(), arg, inProgress))

	case *types.Chan:
		return t&argPointer != 0

	case *types.Array:
		// Same as slice.
		if types.Identical(typ.Elem().Underlying(), types.Typ[types.Byte]) && t&argString != 0 {
			return true // %s matches []byte
		}
		// Recur: []int matches %d.
		return t&argPointer != 0 || f.matchArgTypeInternal(t, typ.Elem().Underlying(), arg, inProgress)

	case *types.Slice:
		// Same as array.
		if types.Identical(typ.Elem().Underlying(), types.Typ[types.Byte]) && t&argString != 0 {
			return true // %s matches []byte
		}
		// Recur: []int matches %d. But watch out for
		//	type T []T
		// If the element is a pointer type (type T []*T), it's handled fine by the Pointer case below.
		return t&argPointer != 0 || f.matchArgTypeInternal(t, typ.Elem(), arg, inProgress)

	case *types.Pointer:
		// Ugly, but dealing with an edge case: a known pointer to an invalid type,
		// probably something from a failed import.
		if typ.Elem().String() == "invalid type" {
			if *verbose {
				f.Warnf(arg.Pos(), "printf argument %v is pointer to invalid or unknown type", f.gofmt(arg))
			}
			return true // special case
		}
		// If it's actually a pointer with %p, it prints as one.
		if t == argPointer {
			return true
		}
		// If it's pointer to struct, that's equivalent in our analysis to whether we can print the struct.
		if str, ok := typ.Elem().Underlying().(*types.Struct); ok {
			return f.matchStructArgType(t, str, arg, inProgress)
		}
		// The rest can print with %p as pointers, or as integers with %x etc.
		return t&(argInt|argPointer) != 0

	case *types.Struct:
		return f.matchStructArgType(t, typ, arg, inProgress)

	case *types.Interface:
		// If the static type of the argument is empty interface, there's little we can do.
		// Example:
		//	func f(x interface{}) { fmt.Printf("%s", x) }
		// Whether x is valid for %s depends on the type of the argument to f. One day
		// we will be able to do better. For now, we assume that empty interface is OK
		// but non-empty interfaces, with Stringer and Error handled above, are errors.
		return typ.NumMethods() == 0

	case *types.Basic:
		switch typ.Kind() {
		case types.UntypedBool, types.Bool:
			return t&argBool != 0
		case types.UntypedInt, types.Int, types.Int8, types.Int16, types.Int32, types.Int64,
			types.Uint, types.Uint8, types.Uint16, types.Uint32, types.Uint64, types.Uintptr:
			return t&argInt != 0
		case types.UntypedFloat, types.Float32, types.Float64:
			return t&argFloat != 0
		case types.UntypedComplex, types.Complex64, types.Complex128:
			return t&argComplex != 0
		case types.UntypedString, types.String:
			return t&argString != 0
		case types.UnsafePointer:
			return t&(argPointer|argInt) != 0
		case types.UntypedRune:
			return t&(argInt|argRune) != 0
		case types.UntypedNil:
			return t&argPointer != 0 // TODO?
		case types.Invalid:
			if *verbose {
				f.Warnf(arg.Pos(), "printf argument %v has invalid or unknown type", f.gofmt(arg))
			}
			return true // Probably a type check problem.
		}
		panic("unreachable")
	}

	return false
}
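// Example (added): the Stringer/error shortcut above rests on
// go/types.AssertableTo. A small, self-contained illustration using the
// universe-scope error interface; the results in the comments are what
// AssertableTo is expected to report, not output from the checker itself.
package main

import (
	"fmt"
	"go/types"
)

func main() {
	errorType := types.Universe.Lookup("error").Type()
	errorIface := errorType.Underlying().(*types.Interface)

	// An error value can always be asserted back to error.
	fmt.Println(types.AssertableTo(errorIface, errorType)) // true

	// int has no Error() string method, so the assertion is impossible,
	// and the error path is ruled out for it.
	fmt.Println(types.AssertableTo(errorIface, types.Typ[types.Int])) // false
}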
// zero returns a new "zero" value of the specified type.
func zero(t types.Type) value {
	switch t := t.(type) {
	case *types.Basic:
		if t.Kind() == types.UntypedNil {
			panic("untyped nil has no zero value")
		}
		if t.Info()&types.IsUntyped != 0 {
			// TODO(adonovan): make it an invariant that
			// this is unreachable.  Currently some
			// constants have 'untyped' types when they
			// should be defaulted by the typechecker.
			t = ssa.DefaultType(t).(*types.Basic)
		}
		switch t.Kind() {
		case types.Bool:
			return false
		case types.Int:
			return int(0)
		case types.Int8:
			return int8(0)
		case types.Int16:
			return int16(0)
		case types.Int32:
			return int32(0)
		case types.Int64:
			return int64(0)
		case types.Uint:
			return uint(0)
		case types.Uint8:
			return uint8(0)
		case types.Uint16:
			return uint16(0)
		case types.Uint32:
			return uint32(0)
		case types.Uint64:
			return uint64(0)
		case types.Uintptr:
			return uintptr(0)
		case types.Float32:
			return float32(0)
		case types.Float64:
			return float64(0)
		case types.Complex64:
			return complex64(0)
		case types.Complex128:
			return complex128(0)
		case types.String:
			return ""
		case types.UnsafePointer:
			return unsafe.Pointer(nil)
		default:
			panic(fmt.Sprint("zero for unexpected type:", t))
		}
	case *types.Pointer:
		return (*value)(nil)
	case *types.Array:
		a := make(array, t.Len())
		for i := range a {
			a[i] = zero(t.Elem())
		}
		return a
	case *types.Named:
		return zero(t.Underlying())
	case *types.Interface:
		return iface{} // nil type, methodset and value
	case *types.Slice:
		return []value(nil)
	case *types.Struct:
		s := make(structure, t.NumFields())
		for i := range s {
			s[i] = zero(t.Field(i).Type())
		}
		return s
	case *types.Tuple:
		if t.Len() == 1 {
			return zero(t.At(0).Type())
		}
		s := make(tuple, t.Len())
		for i := range s {
			s[i] = zero(t.At(i).Type())
		}
		return s
	case *types.Chan:
		return chan value(nil)
	case *types.Map:
		if usesBuiltinMap(t.Key()) {
			return map[value]value(nil)
		}
		return (*hashmap)(nil)
	case *types.Signature:
		return (*ssa.Function)(nil)
	}
	panic(fmt.Sprint("zero: unexpected ", t))
}
// writeType writes a string representation of typ to buf.
func (w *Walker) writeType(buf *bytes.Buffer, typ types.Type) {
	switch typ := typ.(type) {
	case *types.Basic:
		s := typ.Name()
		switch typ.Kind() {
		case types.UnsafePointer:
			s = "unsafe.Pointer"
		case types.UntypedBool:
			s = "ideal-bool"
		case types.UntypedInt:
			s = "ideal-int"
		case types.UntypedRune:
			// "ideal-char" for compatibility with old tool
			// TODO(gri) change to "ideal-rune"
			s = "ideal-char"
		case types.UntypedFloat:
			s = "ideal-float"
		case types.UntypedComplex:
			s = "ideal-complex"
		case types.UntypedString:
			s = "ideal-string"
		case types.UntypedNil:
			panic("should never see untyped nil type")
		default:
			switch s {
			case "byte":
				s = "uint8"
			case "rune":
				s = "int32"
			}
		}
		buf.WriteString(s)

	case *types.Array:
		fmt.Fprintf(buf, "[%d]", typ.Len())
		w.writeType(buf, typ.Elem())

	case *types.Slice:
		buf.WriteString("[]")
		w.writeType(buf, typ.Elem())

	case *types.Struct:
		buf.WriteString("struct")

	case *types.Pointer:
		buf.WriteByte('*')
		w.writeType(buf, typ.Elem())

	case *types.Tuple:
		panic("should never see a tuple type")

	case *types.Signature:
		buf.WriteString("func")
		w.writeSignature(buf, typ)

	case *types.Interface:
		buf.WriteString("interface{")
		if typ.NumMethods() > 0 {
			buf.WriteByte(' ')
			buf.WriteString(strings.Join(sortedMethodNames(typ), ", "))
			buf.WriteByte(' ')
		}
		buf.WriteString("}")

	case *types.Map:
		buf.WriteString("map[")
		w.writeType(buf, typ.Key())
		buf.WriteByte(']')
		w.writeType(buf, typ.Elem())

	case *types.Chan:
		var s string
		switch typ.Dir() {
		case ast.SEND:
			s = "chan<- "
		case ast.RECV:
			s = "<-chan "
		default:
			s = "chan "
		}
		buf.WriteString(s)
		w.writeType(buf, typ.Elem())

	case *types.Named:
		obj := typ.Obj()
		pkg := obj.Pkg()
		if pkg != nil && pkg != w.current {
			buf.WriteString(pkg.Name())
			buf.WriteByte('.')
		}
		buf.WriteString(typ.Obj().Name())

	default:
		panic(fmt.Sprintf("unknown type %T", typ))
	}
}
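// Example (added): go/types also ships a general-purpose type formatter,
// the usual alternative to a hand-rolled writer like writeType. A minimal
// sketch; the qualifier controls how package names are printed.
package main

import (
	"fmt"
	"go/types"
)

func main() {
	// map[string][]*int, built programmatically.
	typ := types.NewMap(
		types.Typ[types.String],
		types.NewSlice(types.NewPointer(types.Typ[types.Int])),
	)

	// No named types are involved here, so the qualifier never fires and
	// the output is just the structural syntax.
	fmt.Println(types.TypeString(typ, types.RelativeTo(nil))) // map[string][]*int
}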
// flatten returns a list of directly contained fields in the preorder
// traversal of the type tree of t.  The resulting elements are all
// scalars (basic types or pointerlike types), except for struct/array
// "identity" nodes, whose type is that of the aggregate.
//
// reflect.Value is considered pointerlike, similar to interface{}.
//
// Callers must not mutate the result.
func (a *analysis) flatten(t types.Type) []*fieldInfo {
	fl, ok := a.flattenMemo[t]
	if !ok {
		switch t := t.(type) {
		case *types.Named:
			u := t.Underlying()
			if isInterface(u) {
				// Debuggability hack: don't remove
				// the named type from interfaces as
				// they're very verbose.
				fl = append(fl, &fieldInfo{typ: t})
			} else {
				fl = a.flatten(u)
			}

		case *types.Basic, *types.Signature, *types.Chan,
			*types.Map, *types.Interface, *types.Slice, *types.Pointer:
			fl = append(fl, &fieldInfo{typ: t})

		case *types.Array:
			fl = append(fl, &fieldInfo{typ: t}) // identity node
			for _, fi := range a.flatten(t.Elem()) {
				fl = append(fl, &fieldInfo{typ: fi.typ, op: true, tail: fi})
			}

		case *types.Struct:
			fl = append(fl, &fieldInfo{typ: t}) // identity node
			for i, n := 0, t.NumFields(); i < n; i++ {
				f := t.Field(i)
				for _, fi := range a.flatten(f.Type()) {
					fl = append(fl, &fieldInfo{typ: fi.typ, op: f, tail: fi})
				}
			}

		case *types.Tuple:
			// No identity node: tuples are never address-taken.
			n := t.Len()
			if n == 1 {
				// Don't add a fieldInfo link for singletons,
				// e.g. in params/results.
				fl = append(fl, a.flatten(t.At(0).Type())...)
			} else {
				for i := 0; i < n; i++ {
					f := t.At(i)
					for _, fi := range a.flatten(f.Type()) {
						fl = append(fl, &fieldInfo{typ: fi.typ, op: i, tail: fi})
					}
				}
			}

		default:
			panic(t)
		}

		a.flattenMemo[t] = fl
	}
	return fl
}