func (am *algorithmMap) eqalg(t types.Type) llvm.Value {
	t = t.Underlying()
	if st, ok := t.(*types.Struct); ok && st.NumFields() == 1 {
		t = st.Field(0).Type().Underlying()
	}
	switch t := t.(type) {
	case *types.Basic:
		switch t.Kind() {
		case types.String:
			return am.runtime.streqalg.LLVMValue()
		case types.Float32:
			return am.runtime.f32eqalg.LLVMValue()
		case types.Float64:
			return am.runtime.f64eqalg.LLVMValue()
		case types.Complex64:
			return am.runtime.c64eqalg.LLVMValue()
		case types.Complex128:
			return am.runtime.c128eqalg.LLVMValue()
		}
	case *types.Struct:
		// TODO
	}
	// TODO(axw) size-specific memequal cases
	return am.runtime.memequal.LLVMValue()
}
// lockPath returns a typePath describing the location of a lock value
// contained in typ. If there is no contained lock, it returns nil.
func lockPath(tpkg *types.Package, typ types.Type) typePath {
	if typ == nil {
		return nil
	}

	// We're only interested in the case in which the underlying
	// type is a struct. (Interfaces and pointers are safe to copy.)
	styp, ok := typ.Underlying().(*types.Struct)
	if !ok {
		return nil
	}

	// We're looking for cases in which a reference to this type
	// can be locked, but a value cannot. This differentiates
	// embedded interfaces from embedded values.
	if plock := types.NewMethodSet(types.NewPointer(typ)).Lookup(tpkg, "Lock"); plock != nil {
		if lock := types.NewMethodSet(typ).Lookup(tpkg, "Lock"); lock == nil {
			return []types.Type{typ}
		}
	}

	nfields := styp.NumFields()
	for i := 0; i < nfields; i++ {
		ftyp := styp.Field(i).Type()
		subpath := lockPath(tpkg, ftyp)
		if subpath != nil {
			return append(subpath, typ)
		}
	}

	return nil
}
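// A minimal usage sketch for lockPath above, assuming the standard library
// go/parser and go/types packages and the surrounding tool's typePath
// ([]types.Type) definition; the package source below, with its mutex-like
// type, is illustrative only.
package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"go/types"
)

const src = `package p

type mutex struct{ state int }

func (m *mutex) Lock()   {}
func (m *mutex) Unlock() {}

type Guarded struct {
	mu mutex
	n  int
}
`

func main() {
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	pkg, err := new(types.Config).Check("p", fset, []*ast.File{file}, nil)
	if err != nil {
		panic(err)
	}
	typ := pkg.Scope().Lookup("Guarded").Type()
	if path := lockPath(pkg, typ); path != nil {
		// The innermost lock-bearing type comes first; the queried type is last.
		fmt.Println("contains a lock:", path)
	}
}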
// makeImethodThunk returns a synthetic thunk function permitting a
// method id of interface typ to be called like a standalone function,
// e.g.:
//
//   type I interface { f(x int) R }
//   m := I.f  // thunk
//   var i I
//   m(i, 0)
//
// The thunk is defined as if by:
//
//   func I.f(i I, x int, ...) R {
//     return i.f(x, ...)
//   }
//
// TODO(adonovan): opt: currently the stub is created even when used
// in call position: I.f(i, 0). Clearly this is suboptimal.
//
// TODO(adonovan): memoize creation of these functions in the Program.
//
func makeImethodThunk(prog *Program, typ types.Type, id Id) *Function {
	if prog.mode&LogSource != 0 {
		defer logStack("makeImethodThunk %s.%s", typ, id)()
	}
	itf := typ.Underlying().(*types.Interface)
	index, meth := methodIndex(itf, id)
	sig := *meth.Type().(*types.Signature) // copy; shared Values
	fn := &Function{
		Name_:     meth.Name(),
		Signature: &sig,
		Prog:      prog,
	}
	fn.startBody()
	fn.addParam("recv", typ)
	createParams(fn)
	var c Call
	c.Call.Method = index
	c.Call.Recv = fn.Params[0]
	for _, arg := range fn.Params[1:] {
		c.Call.Args = append(c.Call.Args, arg)
	}
	emitTailCall(fn, &c)
	fn.finishBody()
	return fn
}
func (tm *llvmTypeMap) makeLLVMType(t types.Type, name string) llvm.Type {
	switch t := t.(type) {
	case *types.Basic:
		return tm.basicLLVMType(t)
	case *types.Array:
		return tm.arrayLLVMType(t)
	case *types.Slice:
		return tm.sliceLLVMType(t, name)
	case *types.Struct:
		return tm.structLLVMType(t, name)
	case *types.Pointer:
		return tm.pointerLLVMType(t)
	case *types.Interface:
		return tm.interfaceLLVMType(t, name)
	case *types.Map:
		return tm.mapLLVMType(t)
	case *types.Chan:
		return tm.chanLLVMType(t)
	case *types.Named:
		// First we set ptrstandin, in case we've got a recursive pointer.
		if _, ok := t.Underlying().(*types.Pointer); ok {
			tm.types.Set(t, tm.ptrstandin)
		}
		return tm.nameLLVMType(t)
	}
	panic(fmt.Errorf("unhandled: %T", t))
}
func (c *compiler) makeInterface(v *LLVMValue, iface types.Type) *LLVMValue {
	llv := v.LLVMValue()
	lltyp := llv.Type()
	i8ptr := llvm.PointerType(llvm.Int8Type(), 0)
	if lltyp.TypeKind() == llvm.PointerTypeKind {
		llv = c.builder.CreateBitCast(llv, i8ptr, "")
	} else {
		// If the value fits exactly in a pointer, then we can just
		// bitcast it. Otherwise we need to malloc.
		if c.target.TypeStoreSize(lltyp) <= uint64(c.target.PointerSize()) {
			bits := c.target.TypeSizeInBits(lltyp)
			if bits > 0 {
				llv = coerce(c.builder, llv, llvm.IntType(int(bits)))
				llv = c.builder.CreateIntToPtr(llv, i8ptr, "")
			} else {
				llv = llvm.ConstNull(i8ptr)
			}
		} else {
			ptr := c.createTypeMalloc(lltyp)
			c.builder.CreateStore(llv, ptr)
			llv = c.builder.CreateBitCast(ptr, i8ptr, "")
		}
	}
	value := llvm.Undef(c.types.ToLLVM(iface))
	rtype := c.types.ToRuntime(v.Type())
	rtype = c.builder.CreateBitCast(rtype, llvm.PointerType(llvm.Int8Type(), 0), "")
	value = c.builder.CreateInsertValue(value, rtype, 0, "")
	value = c.builder.CreateInsertValue(value, llv, 1, "")
	if iface.Underlying().(*types.Interface).NumMethods() > 0 {
		result := c.NewValue(value, types.NewInterface(nil, nil))
		result, _ = result.convertE2I(iface)
		return result
	}
	return c.NewValue(value, iface)
}
// flatten returns a list of directly contained fields in the preorder
// traversal of the type tree of t. The resulting elements are all
// scalars (basic types or pointerlike types), except for struct/array
// "identity" nodes, whose type is that of the aggregate.
//
// reflect.Value is considered pointerlike, similar to interface{}.
//
// Callers must not mutate the result.
//
func (a *analysis) flatten(t types.Type) []*fieldInfo {
	fl, ok := a.flattenMemo[t]
	if !ok {
		switch t := t.(type) {
		case *types.Named:
			u := t.Underlying()
			if _, ok := u.(*types.Interface); ok {
				// Debuggability hack: don't remove
				// the named type from interfaces as
				// they're very verbose.
				fl = append(fl, &fieldInfo{typ: t})
			} else {
				fl = a.flatten(u)
			}

		case *types.Basic, *types.Signature, *types.Chan, *types.Map,
			*types.Interface, *types.Slice, *types.Pointer:
			fl = append(fl, &fieldInfo{typ: t})

		case *types.Array:
			fl = append(fl, &fieldInfo{typ: t}) // identity node
			for _, fi := range a.flatten(t.Elem()) {
				fl = append(fl, &fieldInfo{typ: fi.typ, op: true, tail: fi})
			}

		case *types.Struct:
			fl = append(fl, &fieldInfo{typ: t}) // identity node
			for i, n := 0, t.NumFields(); i < n; i++ {
				f := t.Field(i)
				for _, fi := range a.flatten(f.Type()) {
					fl = append(fl, &fieldInfo{typ: fi.typ, op: f, tail: fi})
				}
			}

		case *types.Tuple:
			// No identity node: tuples are never address-taken.
			for i, n := 0, t.Len(); i < n; i++ {
				f := t.At(i)
				for _, fi := range a.flatten(f.Type()) {
					fl = append(fl, &fieldInfo{typ: fi.typ, op: i, tail: fi})
				}
			}

		case *types.Builtin:
			panic("flatten(*types.Builtin)") // not the type of any value

		default:
			panic(t)
		}

		a.flattenMemo[t] = fl
	}
	return fl
}
// makeMapLiteral makes a map with the specified keys and values.
func (c *compiler) makeMapLiteral(typ types.Type, keys, values []Value) *LLVMValue {
	var count, keysptr, valuesptr llvm.Value
	dyntyp := c.types.ToRuntime(typ)
	dyntyp = c.builder.CreatePtrToInt(dyntyp, c.target.IntPtrType(), "")
	if len(keys) == 0 {
		count = llvm.ConstNull(c.types.inttype)
		keysptr = llvm.ConstNull(c.target.IntPtrType())
		valuesptr = keysptr
	} else {
		maptyp := typ.Underlying().(*types.Map)
		keytyp := maptyp.Key()
		valtyp := maptyp.Elem()
		count = llvm.ConstInt(c.types.inttype, uint64(len(keys)), false)
		keysptr = c.builder.CreateArrayAlloca(c.types.ToLLVM(keytyp), count, "")
		valuesptr = c.builder.CreateArrayAlloca(c.types.ToLLVM(valtyp), count, "")
		for i := range keys {
			gepindices := []llvm.Value{llvm.ConstInt(c.types.inttype, uint64(i), false)}
			key := keys[i].Convert(keytyp).LLVMValue()
			ptr := c.builder.CreateGEP(keysptr, gepindices, "")
			c.builder.CreateStore(key, ptr)
			value := values[i].Convert(valtyp).LLVMValue()
			ptr = c.builder.CreateGEP(valuesptr, gepindices, "")
			c.builder.CreateStore(value, ptr)
		}
		keysptr = c.builder.CreatePtrToInt(keysptr, c.target.IntPtrType(), "")
		valuesptr = c.builder.CreatePtrToInt(valuesptr, c.target.IntPtrType(), "")
	}
	f := c.NamedFunction("runtime.makemap", "func(t uintptr, n int, keys, values uintptr) uintptr")
	mapval := c.builder.CreateCall(f, []llvm.Value{dyntyp, count, keysptr, valuesptr}, "")
	return c.NewValue(mapval, typ)
}
// zeroConst returns a new "zero" constant of the specified type,
// which must not be an array or struct type: the zero values of
// aggregates are well-defined but cannot be represented by Const.
//
func zeroConst(t types.Type) *Const {
	switch t := t.(type) {
	case *types.Basic:
		switch {
		case t.Info()&types.IsBoolean != 0:
			return NewConst(exact.MakeBool(false), t)
		case t.Info()&types.IsNumeric != 0:
			return NewConst(exact.MakeInt64(0), t)
		case t.Info()&types.IsString != 0:
			return NewConst(exact.MakeString(""), t)
		case t.Kind() == types.UnsafePointer:
			fallthrough
		case t.Kind() == types.UntypedNil:
			return nilConst(t)
		default:
			panic(fmt.Sprint("zeroConst for unexpected type:", t))
		}
	case *types.Pointer, *types.Slice, *types.Interface, *types.Chan, *types.Map, *types.Signature:
		return nilConst(t)
	case *types.Named:
		return NewConst(zeroConst(t.Underlying()).Value, t)
	case *types.Array, *types.Struct, *types.Tuple:
		panic(fmt.Sprint("zeroConst applied to aggregate:", t))
	}
	panic(fmt.Sprint("zeroConst: unexpected ", t))
}
// emitConv emits to f code to convert Value val to exactly type typ,
// and returns the converted value. Implicit conversions are required
// by language assignability rules in assignments, parameter passing,
// etc.
//
func emitConv(f *Function, val Value, typ types.Type) Value {
	t_src := val.Type()

	// Identical types? Conversion is a no-op.
	if types.IsIdentical(t_src, typ) {
		return val
	}

	ut_dst := typ.Underlying()
	ut_src := t_src.Underlying()

	// Just a change of type, but not value or representation?
	if isValuePreserving(ut_src, ut_dst) {
		c := &ChangeType{X: val}
		c.setType(typ)
		return f.emit(c)
	}

	// Conversion to, or construction of a value of, an interface type?
	if _, ok := ut_dst.(*types.Interface); ok {
		// Assignment from one interface type to another?
		if _, ok := ut_src.(*types.Interface); ok {
			return emitTypeAssert(f, val, typ)
		}

		// Untyped nil literal? Return interface-typed nil literal.
		if ut_src == tUntypedNil {
			return nilLiteral(typ)
		}

		// Convert (non-nil) "untyped" literals to their default type.
		// TODO(gri): expose types.isUntyped().
		if t, ok := ut_src.(*types.Basic); ok && t.Info()&types.IsUntyped != 0 {
			val = emitConv(f, val, DefaultType(ut_src))
		}

		mi := &MakeInterface{
			X:       val,
			Methods: f.Prog.MethodSet(t_src),
		}
		mi.setType(typ)
		return f.emit(mi)
	}

	// Conversion of a literal to a non-interface type results in
	// a new literal of the destination type and (initially) the
	// same abstract value. We don't compute the representation
	// change yet; this defers the point at which the number of
	// possible representations explodes.
	if l, ok := val.(*Literal); ok {
		return newLiteral(l.Value, typ)
	}

	// A representation-changing conversion.
	c := &Convert{X: val}
	c.setType(typ)
	return f.emit(c)
}
func (x array) hash(t types.Type) int {
	h := 0
	tElt := t.Underlying().(*types.Array).Elem()
	for _, xi := range x {
		h += hash(tElt, xi)
	}
	return h
}
func (c *PkgContext) typeCheck(of string, to types.Type) string {
	if in, isInterface := to.Underlying().(*types.Interface); isInterface {
		if in.MethodSet().Len() == 0 {
			return "true"
		}
		return fmt.Sprintf("%s.Go$implementedBy.indexOf(%s) !== -1", c.typeName(to), of)
	}
	return of + " === " + c.typeName(to)
}
func (c *funcContext) typeCheck(of string, to types.Type) string {
	if in, isInterface := to.Underlying().(*types.Interface); isInterface {
		if in.Empty() {
			return "true"
		}
		return fmt.Sprintf("%s.implementedBy.indexOf(%s) !== -1", c.typeName(to), of)
	}
	return of + " === " + c.typeName(to)
}
// underlyingType returns the underlying type of typ. Copied from go/types.
func underlyingType(typ types.Type) types.Type {
	if typ, ok := typ.(*types.Named); ok {
		return typ.Underlying() // underlying types are never NamedTypes
	}
	if typ == nil {
		panic("underlying(nil)")
	}
	return typ
}
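// A small sketch of underlyingType above, using the standard library go/types
// to build a named type by hand; MyInt is an illustrative name.
package main

import (
	"fmt"
	"go/token"
	"go/types"
)

func main() {
	obj := types.NewTypeName(token.NoPos, nil, "MyInt", nil)
	myInt := types.NewNamed(obj, types.Typ[types.Int], nil) // type MyInt int

	fmt.Println(underlyingType(myInt))                // int: the name is stripped
	fmt.Println(underlyingType(types.Typ[types.Int])) // int: already an underlying type
}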
func (x array) eq(t types.Type, _y interface{}) bool {
	y := _y.(array)
	tElt := t.Underlying().(*types.Array).Elem()
	for i, xi := range x {
		if !equals(tElt, xi, y[i]) {
			return false
		}
	}
	return true
}
func (x structure) hash(t types.Type) int {
	tStruct := t.Underlying().(*types.Struct)
	h := 0
	for i, n := 0, tStruct.NumFields(); i < n; i++ {
		if f := tStruct.Field(i); !f.Anonymous() {
			h += hash(f.Type(), x[i])
		}
	}
	return h
}
// usesBuiltinMap returns true if the built-in hash function and
// equivalence relation for type t are consistent with those of the
// interpreter's representation of type t. Such types are: all basic
// types (bool, numbers, string), pointers and channels.
//
// usesBuiltinMap returns false for types that require a custom map
// implementation: interfaces, arrays and structs.
//
// Panic ensues if t is an invalid map key type: function, map or slice.
func usesBuiltinMap(t types.Type) bool {
	switch t := t.(type) {
	case *types.Basic, *types.Chan, *types.Pointer:
		return true
	case *types.Named:
		return usesBuiltinMap(t.Underlying())
	case *types.Interface, *types.Array, *types.Struct:
		return false
	}
	panic(fmt.Sprintf("invalid map key type: %T", t))
}
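// An illustrative check of usesBuiltinMap above: basic, pointer and channel
// key types hash natively, while array and struct keys need the interpreter's
// custom map implementation. Types are built with the standard go/types API.
package main

import (
	"fmt"
	"go/types"
)

func main() {
	str := types.Typ[types.String]
	fmt.Println(usesBuiltinMap(str))                                // true
	fmt.Println(usesBuiltinMap(types.NewPointer(str)))              // true
	fmt.Println(usesBuiltinMap(types.NewChan(types.SendRecv, str))) // true
	fmt.Println(usesBuiltinMap(types.NewArray(str, 4)))             // false
}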
// CanHaveDynamicTypes reports whether the type T can "hold" dynamic types,
// i.e. is an interface (incl. reflect.Type) or a reflect.Value.
//
func CanHaveDynamicTypes(T types.Type) bool {
	switch T := T.(type) {
	case *types.Named:
		if obj := T.Obj(); obj.Name() == "Value" && obj.Pkg().Path() == "reflect" {
			return true // reflect.Value
		}
		return CanHaveDynamicTypes(T.Underlying())
	case *types.Interface:
		return true
	}
	return false
}
func (x structure) eq(t types.Type, _y interface{}) bool {
	y := _y.(structure)
	tStruct := t.Underlying().(*types.Struct)
	for i, n := 0, tStruct.NumFields(); i < n; i++ {
		if f := tStruct.Field(i); !f.Anonymous() {
			if !equals(f.Type(), x[i], y[i]) {
				return false
			}
		}
	}
	return true
}
// CanPoint reports whether the type T is pointerlike,
// for the purposes of this analysis.
func CanPoint(T types.Type) bool {
	switch T := T.(type) {
	case *types.Named:
		if obj := T.Obj(); obj.Name() == "Value" && obj.Pkg().Path() == "reflect" {
			return true // treat reflect.Value like interface{}
		}
		return CanPoint(T.Underlying())

	case *types.Pointer, *types.Interface, *types.Map, *types.Chan, *types.Signature, *types.Slice:
		return true
	}

	return false // array struct tuple builtin basic
}
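// A brief sketch exercising CanHaveDynamicTypes and CanPoint above with types
// built via the standard go/types API (NewInterfaceType assumes a reasonably
// recent go/types); only the interface case can hold dynamic types, while
// several reference kinds are pointerlike.
package main

import (
	"fmt"
	"go/types"
)

func main() {
	intT := types.Typ[types.Int]
	empty := types.NewInterfaceType(nil, nil) // interface{}

	fmt.Println(CanPoint(types.NewSlice(intT))) // true: slices are pointerlike
	fmt.Println(CanPoint(intT))                 // false: basic types are not

	fmt.Println(CanHaveDynamicTypes(empty)) // true
	fmt.Println(CanHaveDynamicTypes(intT))  // false
}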
func (tm *LLVMTypeMap) Sizeof(typ types.Type) int64 {
	switch typ := typ.Underlying().(type) {
	case *types.Basic:
		switch typ.Kind() {
		case types.Int, types.Uint:
			return int64(tm.target.TypeAllocSize(tm.inttype))
		case types.Uintptr, types.UnsafePointer:
			return int64(tm.target.PointerSize())
		case types.String:
			return 2 * int64(tm.target.PointerSize())
		}
		return types.DefaultSizeof(typ)
	case *types.Array:
		eltsize := tm.Sizeof(typ.Elem())
		eltalign := tm.Alignof(typ.Elem())
		var eltpad int64
		if eltsize%eltalign != 0 {
			eltpad = eltalign - (eltsize % eltalign)
		}
		return (eltsize + eltpad) * typ.Len()
	case *types.Struct:
		if typ.NumFields() == 0 {
			return 0
		}
		fields := make([]*types.Field, int(typ.NumFields()))
		for i := range fields {
			fields[i] = typ.Field(i)
		}
		offsets := tm.Offsetsof(fields)
		n := len(fields)
		return offsets[n-1] + tm.Sizeof(fields[n-1].Type)
	}
	return int64(tm.target.PointerSize())
}
func (tm *llvmTypeMap) Alignof(typ types.Type) int64 {
	switch typ := typ.Underlying().(type) {
	case *types.Array:
		return tm.Alignof(typ.Elem())
	case *types.Basic:
		switch typ.Kind() {
		case types.Int, types.Uint, types.Int64, types.Uint64,
			types.Float64, types.Complex64, types.Complex128:
			return int64(tm.target.TypeAllocSize(tm.inttype))
		case types.Uintptr, types.UnsafePointer, types.String:
			return int64(tm.target.PointerSize())
		}
		return tm.StdSizes.Alignof(typ)
	case *types.Struct:
		max := int64(1)
		for i := 0; i < typ.NumFields(); i++ {
			f := typ.Field(i)
			a := tm.Alignof(f.Type())
			if a > max {
				max = a
			}
		}
		return max
	}
	return int64(tm.target.PointerSize())
}
func (c *funcContext) zeroValue(ty types.Type) string {
	switch t := ty.Underlying().(type) {
	case *types.Basic:
		switch {
		case is64Bit(t) || t.Info()&types.IsComplex != 0:
			return fmt.Sprintf("new %s(0, 0)", c.typeName(ty))
		case t.Info()&types.IsBoolean != 0:
			return "false"
		case t.Info()&types.IsNumeric != 0, t.Kind() == types.UnsafePointer:
			return "0"
		case t.Info()&types.IsString != 0:
			return `""`
		case t.Kind() == types.UntypedNil:
			panic("Zero value for untyped nil.")
		default:
			panic("Unhandled type")
		}
	case *types.Array:
		return fmt.Sprintf("%s.zero()", c.typeName(ty))
	case *types.Signature:
		return "$throwNilPointerError"
	case *types.Slice:
		return fmt.Sprintf("%s.nil", c.typeName(ty))
	case *types.Struct:
		return fmt.Sprintf("new %s.Ptr()", c.typeName(ty))
	case *types.Map:
		return "false"
	case *types.Interface:
		return "null"
	}
	return fmt.Sprintf("%s.nil", c.typeName(ty))
}
func (c *funcContext) translateImplicitConversion(expr ast.Expr, desiredType types.Type) *expression {
	if desiredType == nil {
		return c.translateExpr(expr)
	}
	if expr == nil {
		return c.formatExpr("%s", c.zeroValue(desiredType))
	}

	exprType := c.p.info.Types[expr].Type
	if types.Identical(exprType, desiredType) {
		return c.translateExpr(expr)
	}

	basicExprType, isBasicExpr := exprType.Underlying().(*types.Basic)
	if isBasicExpr && basicExprType.Kind() == types.UntypedNil {
		return c.formatExpr("%s", c.zeroValue(desiredType))
	}

	switch desiredType.Underlying().(type) {
	case *types.Slice:
		return c.formatExpr("$subslice(new %1s(%2e.$array), %2e.$offset, %2e.$offset + %2e.$length)", c.typeName(desiredType), expr)
	case *types.Interface:
		if isWrapped(exprType) {
			return c.formatExpr("new %s(%e)", c.typeName(exprType), expr)
		}
		if _, isStruct := exprType.Underlying().(*types.Struct); isStruct {
			return c.formatExpr("new %1e.constructor.Struct(%1e)", expr)
		}
	}

	return c.translateExpr(expr)
}
// eqnil returns the comparison x == y using the equivalence relation
// appropriate for type t.
// If t is a reference type, at most one of x or y may be a nil value
// of that type.
//
func eqnil(t types.Type, x, y value) bool {
	switch t.Underlying().(type) {
	case *types.Map, *types.Signature, *types.Slice:
		// Since these types don't support comparison,
		// one of the operands must be a literal nil.
		switch x := x.(type) {
		case *hashmap:
			return (x != nil) == (y.(*hashmap) != nil)
		case map[value]value:
			return (x != nil) == (y.(map[value]value) != nil)
		case *ssa.Function:
			switch y := y.(type) {
			case *ssa.Function:
				return (x != nil) == (y != nil)
			case *closure:
				return true
			}
		case *closure:
			return (x != nil) == (y.(*ssa.Function) != nil)
		case []value:
			return (x != nil) == (y.([]value) != nil)
		}
		panic(fmt.Sprintf("eqnil(%s): illegal dynamic type: %T", t, x))
	}

	return equals(t, x, y)
}
// zeroValue emits to f code to produce a zero value of type t,
// and returns it.
//
func zeroValue(f *Function, t types.Type) Value {
	switch t.Underlying().(type) {
	case *types.Struct, *types.Array:
		return emitLoad(f, f.addLocal(t, token.NoPos))
	default:
		return zeroConst(t)
	}
}
// emitTypeAssert emits to f a type assertion value := x.(t) and
// returns the value. x.Type() must be an interface.
//
func emitTypeAssert(f *Function, x Value, t types.Type) Value {
	// Simplify infallible assertions.
	txi := x.Type().Underlying().(*types.Interface)
	if ti, ok := t.Underlying().(*types.Interface); ok {
		if types.IsIdentical(ti, txi) {
			return x
		}
		if isSuperinterface(ti, txi) {
			c := &ChangeInterface{X: x}
			c.setType(t)
			return f.emit(c)
		}
	}

	a := &TypeAssert{X: x, AssertedType: t}
	a.setType(t)
	return f.emit(a)
}
func (c *funcContext) translateAssign(lhs ast.Expr, rhs string, typ types.Type, define bool) string {
	lhs = removeParens(lhs)
	if isBlank(lhs) {
		panic("translateAssign with blank lhs")
	}

	if l, ok := lhs.(*ast.IndexExpr); ok {
		if t, ok := c.p.info.Types[l.X].Type.Underlying().(*types.Map); ok {
			keyVar := c.newVariable("_key")
			return fmt.Sprintf(`%s = %s; (%s || $throwRuntimeError("assignment to entry in nil map"))[%s] = { k: %s, v: %s };`, keyVar, c.translateImplicitConversion(l.Index, t.Key()), c.translateExpr(l.X), c.makeKey(c.newIdent(keyVar, t.Key()), t.Key()), keyVar, rhs)
		}
	}

	switch typ.Underlying().(type) {
	case *types.Array, *types.Struct:
		if define {
			return fmt.Sprintf("%[1]s = %[2]s; $copy(%[1]s, %[3]s, %[4]s);", c.translateExpr(lhs), c.zeroValue(typ), rhs, c.typeName(typ))
		}
		return fmt.Sprintf("$copy(%s, %s, %s);", c.translateExpr(lhs), rhs, c.typeName(typ))
	}

	switch l := lhs.(type) {
	case *ast.Ident:
		o := c.p.info.Defs[l]
		if o == nil {
			o = c.p.info.Uses[l]
		}
		return fmt.Sprintf("%s = %s;", c.objectName(o), rhs)
	case *ast.SelectorExpr:
		sel, ok := c.p.info.Selections[l]
		if !ok {
			// qualified identifier
			return fmt.Sprintf("%s.%s = %s;", c.translateExpr(l.X), l.Sel.Name, rhs)
		}
		fields, jsTag := c.translateSelection(sel)
		if jsTag != "" {
			return fmt.Sprintf("%s.%s.%s = %s;", c.translateExpr(l.X), strings.Join(fields, "."), jsTag, c.externalize(rhs, sel.Type()))
		}
		return fmt.Sprintf("%s.%s = %s;", c.translateExpr(l.X), strings.Join(fields, "."), rhs)
	case *ast.StarExpr:
		return fmt.Sprintf("%s.$set(%s);", c.translateExpr(l.X), rhs)
	case *ast.IndexExpr:
		switch t := c.p.info.Types[l.X].Type.Underlying().(type) {
		case *types.Array, *types.Pointer:
			if c.p.info.Types[l.Index].Value != nil {
				return c.formatExpr("%e[%f] = %s;", l.X, l.Index, rhs).String()
			}
			return c.formatExpr(`(%2f < 0 || %2f >= %1e.length) ? $throwRuntimeError("index out of range") : %1e[%2f] = %3s`, l.X, l.Index, rhs).String() + ";"
		case *types.Slice:
			return c.formatExpr(`(%2f < 0 || %2f >= %1e.$length) ? $throwRuntimeError("index out of range") : %1e.$array[%1e.$offset + %2f] = %3s`, l.X, l.Index, rhs).String() + ";"
		default:
			panic(fmt.Sprintf("Unhandled lhs type: %T\n", t))
		}
	default:
		panic(fmt.Sprintf("Unhandled lhs type: %T\n", l))
	}
}
// loadI2V loads an interface value to a type, without checking
// that the interface type matches.
func (v *LLVMValue) loadI2V(typ types.Type) *LLVMValue {
	c := v.compiler
	if c.types.Sizeof(typ) > int64(c.target.PointerSize()) {
		ptr := c.builder.CreateExtractValue(v.LLVMValue(), 1, "")
		typ = types.NewPointer(typ)
		ptr = c.builder.CreateBitCast(ptr, c.types.ToLLVM(typ), "")
		return c.NewValue(ptr, typ).makePointee()
	}

	value := c.builder.CreateExtractValue(v.LLVMValue(), 1, "")
	if _, ok := typ.Underlying().(*types.Pointer); ok {
		value = c.builder.CreateBitCast(value, c.types.ToLLVM(typ), "")
		return c.NewValue(value, typ)
	}
	bits := c.target.TypeSizeInBits(c.types.ToLLVM(typ))
	value = c.builder.CreatePtrToInt(value, llvm.IntType(int(bits)), "")
	value = c.coerce(value, c.types.ToLLVM(typ))
	return c.NewValue(value, typ)
}
// IntuitiveMethodSet returns the intuitive method set of a type, T.
//
// The result contains MethodSet(T) and additionally, if T is a
// concrete type, methods belonging to *T if there is no identically
// named method on T itself. This corresponds to user intuition about
// method sets; this function is intended only for user interfaces.
//
// The order of the result is as for types.MethodSet(T).
//
func IntuitiveMethodSet(T types.Type, msets *types.MethodSetCache) []*types.Selection {
	var result []*types.Selection
	mset := msets.MethodSet(T)
	if _, ok := T.Underlying().(*types.Interface); ok {
		for i, n := 0, mset.Len(); i < n; i++ {
			result = append(result, mset.At(i))
		}
	} else {
		pmset := msets.MethodSet(types.NewPointer(T))
		for i, n := 0, pmset.Len(); i < n; i++ {
			meth := pmset.At(i)
			if m := mset.Lookup(meth.Obj().Pkg(), meth.Obj().Name()); m != nil {
				meth = m
			}
			result = append(result, meth)
		}
	}
	return result
}
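// A hedged usage sketch for IntuitiveMethodSet above. It assumes the same
// go/types API used by that function (in particular types.MethodSetCache)
// plus the standard go/parser; the package source and the type name T are
// illustrative only. For a concrete type, the pointer-receiver method
// Pointer is reported alongside the value-receiver method Value.
package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"go/types"
)

const src = `package p

type T struct{}

func (T) Value()    {}
func (*T) Pointer() {}
`

func main() {
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	pkg, err := new(types.Config).Check("p", fset, []*ast.File{file}, nil)
	if err != nil {
		panic(err)
	}
	T := pkg.Scope().Lookup("T").Type()

	var cache types.MethodSetCache
	for _, sel := range IntuitiveMethodSet(T, &cache) {
		fmt.Println(sel) // both Pointer (via *T) and Value are reported
	}
}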
func checkDeprecatedTags(t types.Type) {
	// give informative errors for use of deprecated custom methods
	switch x := t.Underlying().(type) {
	case *types.Struct:
		for i := 0; i < x.NumFields(); i++ {
			_, found, _ := parseTag("gen", x.Tag(i))
			if found {
				addError(fmt.Sprintf(`custom methods (%s on %s) have been deprecated, see %s`, x.Tag(i), x.Field(i).Name(), deprecationUrl))
			}
		}
	}
}
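// An illustrative sketch for checkDeprecatedTags above, assuming its
// parseTag/addError helpers and deprecationUrl are in scope; the field name
// Items and the tag value are made up for the example. A struct field tagged
// with a gen:"..." tag is what triggers the deprecation message.
package main

import (
	"go/token"
	"go/types"
)

func main() {
	field := types.NewField(token.NoPos, nil, "Items", types.NewSlice(types.Typ[types.Int]), false)
	st := types.NewStruct([]*types.Var{field}, []string{`gen:"Count"`})

	// Would report: custom methods (gen:"Count" on Items) have been deprecated, see <deprecationUrl>
	checkDeprecatedTags(st)
}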