// MethodSet returns the method set of type T.  It is thread-safe.
//
// If cache is nil, this function is equivalent to types.NewMethodSet(T).
// Utility functions can thus expose an optional *MethodSetCache
// parameter to clients that care about performance.
//
func (cache *MethodSetCache) MethodSet(T types.Type) *types.MethodSet {
	if cache == nil {
		return types.NewMethodSet(T)
	}
	cache.mu.Lock()
	defer cache.mu.Unlock()

	switch T := T.(type) {
	case *types.Named:
		return cache.lookupNamed(T).value

	case *types.Pointer:
		if N, ok := T.Elem().(*types.Named); ok {
			return cache.lookupNamed(N).pointer
		}
	}

	// all other types
	// (The map uses pointer equivalence, not type identity.)
	mset := cache.others[T]
	if mset == nil {
		mset = types.NewMethodSet(T)
		if cache.others == nil {
			cache.others = make(map[types.Type]*types.MethodSet)
		}
		cache.others[T] = mset
	}
	return mset
}
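The doc comment above recommends exposing an optional *MethodSetCache parameter from utility functions. A minimal usage sketch of that pattern (the helper name printMethods is hypothetical; it assumes the fmt and go/types imports and the MethodSetCache type above are in scope):

// printMethods prints T's method set, using the cache when one is supplied.
// A nil cache is fine: MethodSet falls back to types.NewMethodSet.
func printMethods(T types.Type, cache *MethodSetCache) {
	mset := cache.MethodSet(T)
	for i := 0; i < mset.Len(); i++ {
		fmt.Println(mset.At(i))
	}
}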
Example #2
func needWrapType(typ types.Type) bool {
	switch typ := typ.(type) {
	case *types.Basic:
		return false
	case *types.Struct:
		return true
	case *types.Named:
		switch ut := typ.Underlying().(type) {
		case *types.Basic:
			return false
		default:
			return needWrapType(ut)
		}
	case *types.Array:
		return true
	case *types.Map:
		return true
	case *types.Slice:
		return true
	case *types.Interface:
		wrap := true
		if typ.Underlying() == universe.syms["error"].GoType().Underlying() {
			wrap = false
		}
		return wrap
	case *types.Signature:
		return true
	case *types.Pointer:
		return needWrapType(typ.Elem())
	}
	return false
}
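A few illustrative calls, as a sketch: the wrapper function name exerciseNeedWrapType is hypothetical, it assumes needWrapType above plus the fmt and go/types imports are in scope, and the inputs below never reach the universe.syms lookup.

// exerciseNeedWrapType shows which kinds of types need wrapping.
func exerciseNeedWrapType() {
	fmt.Println(needWrapType(types.Typ[types.Int]))                   // false: basic type
	fmt.Println(needWrapType(types.NewSlice(types.Typ[types.Byte])))  // true: slice
	fmt.Println(needWrapType(types.NewPointer(types.Typ[types.Int]))) // false: pointer to a basic type
}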
Example #3
func (g *goGen) genWrite(valName, seqName string, T types.Type) {
	if isErrorType(T) {
		g.Printf("if %s == nil {\n", valName)
		g.Printf("    %s.WriteString(\"\");\n", seqName)
		g.Printf("} else {\n")
		g.Printf("    %s.WriteString(%s.Error());\n", seqName, valName)
		g.Printf("}\n")
		return
	}
	switch T := T.(type) {
	case *types.Pointer:
		// TODO(crawshaw): test *int
		// TODO(crawshaw): test **Generator
		switch T := T.Elem().(type) {
		case *types.Named:
			obj := T.Obj()
			if obj.Pkg() != g.pkg {
				g.errorf("type %s not defined in package %s", T, g.pkg)
				return
			}
			g.Printf("%s.WriteGoRef(%s)\n", seqName, valName)
		default:
			g.errorf("unsupported type %s", T)
		}
	case *types.Named:
		switch u := T.Underlying().(type) {
		case *types.Interface, *types.Pointer:
			g.Printf("%s.WriteGoRef(%s)\n", seqName, valName)
		default:
			g.errorf("unsupported, direct named type %s: %s", T, u)
		}
	default:
		g.Printf("%s.Write%s(%s);\n", seqName, seqType(T), valName)
	}
}
Example #4
func (g *javaGen) genRead(resName, seqName string, T types.Type) {
	switch T := T.(type) {
	case *types.Pointer:
		// TODO(crawshaw): test *int
		// TODO(crawshaw): test **Generator
		switch T := T.Elem().(type) {
		case *types.Named:
			o := T.Obj()
			if o.Pkg() != g.pkg {
				g.errorf("type %s not defined in package %s", T, g.pkg)
				return
			}
			g.Printf("%s = new %s(%s.readRef());\n", resName, o.Name(), seqName)
		default:
			g.errorf("unsupported type %s", T)
		}
	case *types.Named:
		switch T.Underlying().(type) {
		case *types.Interface, *types.Pointer:
			o := T.Obj()
			if o.Pkg() != g.pkg {
				g.errorf("type %s not defined in package %s", T, g.pkg)
				return
			}
			g.Printf("%s = new %s.Proxy(%s.readRef());\n", resName, o.Name(), seqName)
		default:
			g.errorf("unsupported, direct named type %s", T)
		}
	default:
		g.Printf("%s = %s.read%s();\n", resName, seqName, seqType(T))
	}
}
Example #5
// store stores value v of type T into *addr.
func store(T types.Type, addr *value, v value) {
	switch T := T.Underlying().(type) {
	case *types.Struct:
		lhs := (*addr).(structure)
		rhs := v.(structure)
		for i := range lhs {
			store(T.Field(i).Type(), &lhs[i], rhs[i])
		}
	case *types.Array:
		lhs := (*addr).(array)
		rhs := v.(array)
		for i := range lhs {
			store(T.Elem(), &lhs[i], rhs[i])
		}
	default:
		*addr = v
	}
}
Example #6
func sizeofElem(t types.Type) uint {
	var e types.Type
	switch t := t.(type) {
	default:
		panic(ice(fmt.Sprintf("type (%v) not an array or slice\n", t.String())))
	case *types.Slice:
		e = t.Elem()
	case *types.Array:
		e = t.Elem()
	case *types.Named:
		if typeinfo, ok := simdInfo(t); ok {
			return typeinfo.elemSize
		}
		panic(ice(
			fmt.Sprintf("t (%v), isSimd (%v)\n", t.String(), isSimd(t))))

	}
	return sizeof(e)
}
Example #7
// load returns the value of type T in *addr.
func load(T types.Type, addr *value) value {
	switch T := T.Underlying().(type) {
	case *types.Struct:
		v := (*addr).(structure)
		a := make(structure, len(v))
		for i := range a {
			a[i] = load(T.Field(i).Type(), &v[i])
		}
		return a
	case *types.Array:
		v := (*addr).(array)
		a := make(array, len(v))
		for i := range a {
			a[i] = load(T.Elem(), &v[i])
		}
		return a
	default:
		return *addr
	}
}
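load pairs with store from Example #5, and a small round trip exercises both. This is a sketch under assumptions: it presumes the interpreter's value representation (value as an interface{}, structure as a []value), which is not shown in this excerpt, plus the fmt, go/token, and go/types imports; the function name roundTrip is hypothetical.

// roundTrip stores a struct value through store and reads it back with load.
func roundTrip() {
	fields := []*types.Var{
		types.NewField(token.NoPos, nil, "x", types.Typ[types.Int], false),
		types.NewField(token.NoPos, nil, "s", types.Typ[types.String], false),
	}
	T := types.NewStruct(fields, nil)

	var addr value = structure{int(0), ""} // a zero value of T
	store(T, &addr, structure{int(42), "hi"})
	fmt.Println(load(T, &addr)) // [42 hi]
}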
Example #8
// javaType returns a string that can be used as a Java type.
func (g *javaGen) javaType(T types.Type) string {
	switch T := T.(type) {
	case *types.Basic:
		switch T.Kind() {
		case types.Bool:
			return "boolean"
		case types.Int:
			return "long"
		case types.Int8:
			return "byte"
		case types.Int16:
			return "short"
		case types.Int32:
			return "int"
		case types.Int64:
			return "long"
		case types.Uint8:
			// TODO(crawshaw): Java bytes are signed, so this is
			// questionable, but vital.
			return "byte"
		// TODO(crawshaw): case types.Uint, types.Uint16, types.Uint32, types.Uint64:
		case types.Float32:
			return "float"
		case types.Float64:
			return "double"
		case types.String:
			return "String"
		default:
			g.errorf("unsupported return type: %s", T)
			return "TODO"
		}
	case *types.Slice:
		elem := g.javaType(T.Elem())
		return elem + "[]"

	case *types.Pointer:
		if _, ok := T.Elem().(*types.Named); ok {
			return g.javaType(T.Elem())
		}
		panic(fmt.Sprintf("unsupporter pointer to type: %s", T))
	case *types.Named:
		n := T.Obj()
		if n.Pkg() != g.pkg {
			panic(fmt.Sprintf("type %s is in package %s, must be defined in package %s", n.Name(), n.Pkg().Name(), g.pkg.Name()))
		}
		// TODO(crawshaw): more checking here
		return n.Name()
	default:
		g.errorf("unsupported javaType: %#+v, %s\n", T, T)
		return "TODO"
	}
}
Example #9
func reflectType(t types.Type) reflect.Type {
	switch t := t.(type) {
	case *types.Tuple:
		// TODO
	case *types.Basic:
		return reflectBasic(t.Kind())
	case *types.Pointer:
		return reflect.PtrTo(reflectType(t.Elem()))
	case *types.Slice:
		return reflect.SliceOf(reflectType(t.Elem()))
	case *types.Array:
		return reflect.ArrayOf(int(t.Len()), reflectType(t.Elem()))
	case *types.Named:
		if st, ok := simdInfo(t); ok {
			return st.t
		}
		if sse2, ok := sse2Info(t); ok {
			return sse2.t
		}
	}
	ice(fmt.Sprintf("error unknown type:\"%v\"", t))
	panic("")
}
Example #10
// flatten returns a list of directly contained fields in the preorder
// traversal of the type tree of t.  The resulting elements are all
// scalars (basic types or pointerlike types), except for struct/array
// "identity" nodes, whose type is that of the aggregate.
//
// reflect.Value is considered pointerlike, similar to interface{}.
//
// Callers must not mutate the result.
//
func (a *analysis) flatten(t types.Type) []*fieldInfo {
	fl, ok := a.flattenMemo[t]
	if !ok {
		switch t := t.(type) {
		case *types.Named:
			u := t.Underlying()
			if isInterface(u) {
				// Debuggability hack: don't remove
				// the named type from interfaces as
				// they're very verbose.
				fl = append(fl, &fieldInfo{typ: t})
			} else {
				fl = a.flatten(u)
			}

		case *types.Basic,
			*types.Signature,
			*types.Chan,
			*types.Map,
			*types.Interface,
			*types.Slice,
			*types.Pointer:
			fl = append(fl, &fieldInfo{typ: t})

		case *types.Array:
			fl = append(fl, &fieldInfo{typ: t}) // identity node
			for _, fi := range a.flatten(t.Elem()) {
				fl = append(fl, &fieldInfo{typ: fi.typ, op: true, tail: fi})
			}

		case *types.Struct:
			fl = append(fl, &fieldInfo{typ: t}) // identity node
			for i, n := 0, t.NumFields(); i < n; i++ {
				f := t.Field(i)
				for _, fi := range a.flatten(f.Type()) {
					fl = append(fl, &fieldInfo{typ: fi.typ, op: f, tail: fi})
				}
			}

		case *types.Tuple:
			// No identity node: tuples are never address-taken.
			n := t.Len()
			if n == 1 {
				// Don't add a fieldInfo link for singletons,
				// e.g. in params/results.
				fl = append(fl, a.flatten(t.At(0).Type())...)
			} else {
				for i := 0; i < n; i++ {
					f := t.At(i)
					for _, fi := range a.flatten(f.Type()) {
						fl = append(fl, &fieldInfo{typ: fi.typ, op: i, tail: fi})
					}
				}
			}

		default:
			panic(t)
		}

		a.flattenMemo[t] = fl
	}

	return fl
}
Example #11
func (g *objcGen) objcType(typ types.Type) string {
	if isErrorType(typ) {
		return "NSError*"
	}

	switch typ := typ.(type) {
	case *types.Basic:
		switch typ.Kind() {
		case types.Bool:
			return "BOOL"
		case types.Int:
			return "int"
		case types.Int8:
			return "int8_t"
		case types.Int16:
			return "int16_t"
		case types.Int32:
			return "int32_t"
		case types.Int64:
			return "int64_t"
		case types.Uint8:
			// byte is an alias of uint8, and the alias is lost.
			return "byte"
		case types.Uint16:
			return "uint16_t"
		case types.Uint32:
			return "uint32_t"
		case types.Uint64:
			return "uint64_t"
		case types.Float32:
			return "float"
		case types.Float64:
			return "double"
		case types.String:
			return "NSString*"
		default:
			g.errorf("unsupported type: %s", typ)
			return "TODO"
		}
	case *types.Slice:
		elem := g.objcType(typ.Elem())
		// Special case: NSData seems to be a better option for byte slices.
		if elem == "byte" {
			return "NSData*"
		}
		// TODO(hyangah): support other slice types: NSArray or CFArrayRef.
		// Investigate the performance implication.
		g.errorf("unsupported type: %s", typ)
		return "TODO"
	case *types.Pointer:
		if _, ok := typ.Elem().(*types.Named); ok {
			return g.objcType(typ.Elem()) + "*"
		}
		g.errorf("unsupported pointer to type: %s", typ)
		return "TODO"
	case *types.Named:
		n := typ.Obj()
		if n.Pkg() != g.pkg {
			g.errorf("type %s is in package %s; only types defined in package %s is supported", n.Name(), n.Pkg().Name(), g.pkg.Name())
			return "TODO"
		}
		switch typ.Underlying().(type) {
		case *types.Interface:
			return g.namePrefix + n.Name() + "*"
		case *types.Struct:
			return g.namePrefix + n.Name()
		}
		g.errorf("unsupported, named type %s", typ)
		return "TODO"
	default:
		g.errorf("unsupported type: %#+v, %s", typ, typ)
		return "TODO"
	}
}
Example #12
// matchArgTypeInternal is the internal version of matchArgType. It carries a map
// remembering what types are in progress so we don't recur when faced with recursive
// types or mutually recursive types.
func (f *File) matchArgTypeInternal(t printfArgType, typ types.Type, arg ast.Expr, inProgress map[types.Type]bool) bool {
	// %v, %T accept any argument type.
	if t == anyType {
		return true
	}
	if typ == nil {
		// external call
		typ = f.pkg.types[arg].Type
		if typ == nil {
			return true // probably a type check problem
		}
	}
	// If the type implements fmt.Formatter, we have nothing to check.
	// But (see issue 6259) that's not easy to verify, so instead we see
	// if its method set contains a Format function. We could do better,
	// even now, but we don't need to be 100% accurate. Wait for 6259 to
	// be fixed instead. TODO.
	if f.hasMethod(typ, "Format") {
		return true
	}
	// If we can use a string, might arg (dynamically) implement the Stringer or Error interface?
	if t&argString != 0 {
		if types.AssertableTo(errorType, typ) || types.AssertableTo(stringerType, typ) {
			return true
		}
	}

	typ = typ.Underlying()
	if inProgress[typ] {
		// We're already looking at this type. The call that started it will take care of it.
		return true
	}
	inProgress[typ] = true

	switch typ := typ.(type) {
	case *types.Signature:
		return t&argPointer != 0

	case *types.Map:
		// Recur: map[int]int matches %d.
		return t&argPointer != 0 ||
			(f.matchArgTypeInternal(t, typ.Key(), arg, inProgress) && f.matchArgTypeInternal(t, typ.Elem(), arg, inProgress))

	case *types.Chan:
		return t&argPointer != 0

	case *types.Array:
		// Same as slice.
		if types.Identical(typ.Elem().Underlying(), types.Typ[types.Byte]) && t&argString != 0 {
			return true // %s matches []byte
		}
		// Recur: []int matches %d.
		return t&argPointer != 0 || f.matchArgTypeInternal(t, typ.Elem().Underlying(), arg, inProgress)

	case *types.Slice:
		// Same as array.
		if types.Identical(typ.Elem().Underlying(), types.Typ[types.Byte]) && t&argString != 0 {
			return true // %s matches []byte
		}
		// Recur: []int matches %d. But watch out for
		//	type T []T
		// If the element is a pointer type (type T []*T), it's handled fine by the Pointer case below.
		return t&argPointer != 0 || f.matchArgTypeInternal(t, typ.Elem(), arg, inProgress)

	case *types.Pointer:
		// Ugly, but dealing with an edge case: a known pointer to an invalid type,
		// probably something from a failed import.
		if typ.Elem().String() == "invalid type" {
			if *verbose {
				f.Warnf(arg.Pos(), "printf argument %v is pointer to invalid or unknown type", f.gofmt(arg))
			}
			return true // special case
		}
		// If it's actually a pointer with %p, it prints as one.
		if t == argPointer {
			return true
		}
		// If it's pointer to struct, that's equivalent in our analysis to whether we can print the struct.
		if str, ok := typ.Elem().Underlying().(*types.Struct); ok {
			return f.matchStructArgType(t, str, arg, inProgress)
		}
		// The rest can print with %p as pointers, or as integers with %x etc.
		return t&(argInt|argPointer) != 0

	case *types.Struct:
		return f.matchStructArgType(t, typ, arg, inProgress)

	case *types.Interface:
		// If the static type of the argument is empty interface, there's little we can do.
		// Example:
		//	func f(x interface{}) { fmt.Printf("%s", x) }
		// Whether x is valid for %s depends on the type of the argument to f. One day
		// we will be able to do better. For now, we assume that empty interface is OK
		// but non-empty interfaces, with Stringer and Error handled above, are errors.
		return typ.NumMethods() == 0

	case *types.Basic:
		switch typ.Kind() {
		case types.UntypedBool,
			types.Bool:
			return t&argBool != 0

		case types.UntypedInt,
			types.Int,
			types.Int8,
			types.Int16,
			types.Int32,
			types.Int64,
			types.Uint,
			types.Uint8,
			types.Uint16,
			types.Uint32,
			types.Uint64,
			types.Uintptr:
			return t&argInt != 0

		case types.UntypedFloat,
			types.Float32,
			types.Float64:
			return t&argFloat != 0

		case types.UntypedComplex,
			types.Complex64,
			types.Complex128:
			return t&argComplex != 0

		case types.UntypedString,
			types.String:
			return t&argString != 0

		case types.UnsafePointer:
			return t&(argPointer|argInt) != 0

		case types.UntypedRune:
			return t&(argInt|argRune) != 0

		case types.UntypedNil:
			return t&argPointer != 0 // TODO?

		case types.Invalid:
			if *verbose {
				f.Warnf(arg.Pos(), "printf argument %v has invalid or unknown type", f.gofmt(arg))
			}
			return true // Probably a type check problem.
		}
		panic("unreachable")
	}

	return false
}
Example #13
// zero returns a new "zero" value of the specified type.
func zero(t types.Type) value {
	switch t := t.(type) {
	case *types.Basic:
		if t.Kind() == types.UntypedNil {
			panic("untyped nil has no zero value")
		}
		if t.Info()&types.IsUntyped != 0 {
			// TODO(adonovan): make it an invariant that
			// this is unreachable.  Currently some
			// constants have 'untyped' types when they
			// should be defaulted by the typechecker.
			t = ssa.DefaultType(t).(*types.Basic)
		}
		switch t.Kind() {
		case types.Bool:
			return false
		case types.Int:
			return int(0)
		case types.Int8:
			return int8(0)
		case types.Int16:
			return int16(0)
		case types.Int32:
			return int32(0)
		case types.Int64:
			return int64(0)
		case types.Uint:
			return uint(0)
		case types.Uint8:
			return uint8(0)
		case types.Uint16:
			return uint16(0)
		case types.Uint32:
			return uint32(0)
		case types.Uint64:
			return uint64(0)
		case types.Uintptr:
			return uintptr(0)
		case types.Float32:
			return float32(0)
		case types.Float64:
			return float64(0)
		case types.Complex64:
			return complex64(0)
		case types.Complex128:
			return complex128(0)
		case types.String:
			return ""
		case types.UnsafePointer:
			return unsafe.Pointer(nil)
		default:
			panic(fmt.Sprint("zero for unexpected type:", t))
		}
	case *types.Pointer:
		return (*value)(nil)
	case *types.Array:
		a := make(array, t.Len())
		for i := range a {
			a[i] = zero(t.Elem())
		}
		return a
	case *types.Named:
		return zero(t.Underlying())
	case *types.Interface:
		return iface{} // nil type, methodset and value
	case *types.Slice:
		return []value(nil)
	case *types.Struct:
		s := make(structure, t.NumFields())
		for i := range s {
			s[i] = zero(t.Field(i).Type())
		}
		return s
	case *types.Tuple:
		if t.Len() == 1 {
			return zero(t.At(0).Type())
		}
		s := make(tuple, t.Len())
		for i := range s {
			s[i] = zero(t.At(i).Type())
		}
		return s
	case *types.Chan:
		return chan value(nil)
	case *types.Map:
		if usesBuiltinMap(t.Key()) {
			return map[value]value(nil)
		}
		return (*hashmap)(nil)
	case *types.Signature:
		return (*ssa.Function)(nil)
	}
	panic(fmt.Sprint("zero: unexpected ", t))
}
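A few illustrative calls, as a sketch: the wrapper name exerciseZero is hypothetical, and it assumes zero above, the interpreter's value types, and the fmt and go/types imports are in scope.

// exerciseZero prints zero values for a few representative types.
func exerciseZero() {
	fmt.Println(zero(types.Typ[types.Int]))                    // 0
	fmt.Println(zero(types.NewSlice(types.Typ[types.Byte])))   // [] (a nil []value)
	fmt.Println(zero(types.NewArray(types.Typ[types.Int], 3))) // [0 0 0]
}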
Example #14
// hashFor computes the hash of t.
func (h Hasher) hashFor(t types.Type) uint32 {
	// See Identical for rationale.
	switch t := t.(type) {
	case *types.Basic:
		return uint32(t.Kind())

	case *types.Array:
		return 9043 + 2*uint32(t.Len()) + 3*h.Hash(t.Elem())

	case *types.Slice:
		return 9049 + 2*h.Hash(t.Elem())

	case *types.Struct:
		var hash uint32 = 9059
		for i, n := 0, t.NumFields(); i < n; i++ {
			f := t.Field(i)
			if f.Anonymous() {
				hash += 8861
			}
			hash += hashString(t.Tag(i))
			hash += hashString(f.Name()) // (ignore f.Pkg)
			hash += h.Hash(f.Type())
		}
		return hash

	case *types.Pointer:
		return 9067 + 2*h.Hash(t.Elem())

	case *types.Signature:
		var hash uint32 = 9091
		if t.Variadic() {
			hash *= 8863
		}
		return hash + 3*h.hashTuple(t.Params()) + 5*h.hashTuple(t.Results())

	case *types.Interface:
		var hash uint32 = 9103
		for i, n := 0, t.NumMethods(); i < n; i++ {
			// See go/types.identicalMethods for rationale.
			// Method order is not significant.
			// Ignore m.Pkg().
			m := t.Method(i)
			hash += 3*hashString(m.Name()) + 5*h.Hash(m.Type())
		}
		return hash

	case *types.Map:
		return 9109 + 2*h.Hash(t.Key()) + 3*h.Hash(t.Elem())

	case *types.Chan:
		return 9127 + 2*uint32(t.Dir()) + 3*h.Hash(t.Elem())

	case *types.Named:
		// Not safe with a copying GC; objects may move.
		return uint32(reflect.ValueOf(t.Obj()).Pointer())

	case *types.Tuple:
		return h.hashTuple(t)
	}
	panic(t)
}
Example #15
// addRuntimeType is called for each concrete type that can be the
// dynamic type of some interface or reflect.Value.
// Adapted from needMethods in go/ssa/builder.go
//
func (r *rta) addRuntimeType(T types.Type, skip bool) {
	if prev, ok := r.result.RuntimeTypes.At(T).(bool); ok {
		if skip && !prev {
			r.result.RuntimeTypes.Set(T, skip)
		}
		return
	}
	r.result.RuntimeTypes.Set(T, skip)

	mset := r.prog.MethodSets.MethodSet(T)

	if _, ok := T.Underlying().(*types.Interface); !ok {
		// T is a new concrete type.
		for i, n := 0, mset.Len(); i < n; i++ {
			sel := mset.At(i)
			m := sel.Obj()

			if m.Exported() {
				// Exported methods are always potentially callable via reflection.
				r.addReachable(r.prog.MethodValue(sel), true)
			}
		}

		// Add callgraph edge for each existing dynamic
		// "invoke"-mode call via that interface.
		for _, I := range r.interfaces(T) {
			sites, _ := r.invokeSites.At(I).([]ssa.CallInstruction)
			for _, site := range sites {
				r.addInvokeEdge(site, T)
			}
		}
	}

	// Precondition: T is not a method signature (*Signature with Recv()!=nil).
	// Recursive case: skip => don't call makeMethods(T).
	// Each package maintains its own set of types it has visited.

	var n *types.Named
	switch T := T.(type) {
	case *types.Named:
		n = T
	case *types.Pointer:
		n, _ = T.Elem().(*types.Named)
	}
	if n != nil {
		owner := n.Obj().Pkg()
		if owner == nil {
			return // built-in error type
		}
	}

	// Recursion over signatures of each exported method.
	for i := 0; i < mset.Len(); i++ {
		if mset.At(i).Obj().Exported() {
			sig := mset.At(i).Type().(*types.Signature)
			r.addRuntimeType(sig.Params(), true)  // skip the Tuple itself
			r.addRuntimeType(sig.Results(), true) // skip the Tuple itself
		}
	}

	switch t := T.(type) {
	case *types.Basic:
		// nop

	case *types.Interface:
		// nop---handled by recursion over method set.

	case *types.Pointer:
		r.addRuntimeType(t.Elem(), false)

	case *types.Slice:
		r.addRuntimeType(t.Elem(), false)

	case *types.Chan:
		r.addRuntimeType(t.Elem(), false)

	case *types.Map:
		r.addRuntimeType(t.Key(), false)
		r.addRuntimeType(t.Elem(), false)

	case *types.Signature:
		if t.Recv() != nil {
			panic(fmt.Sprintf("Signature %s has Recv %s", t, t.Recv()))
		}
		r.addRuntimeType(t.Params(), true)  // skip the Tuple itself
		r.addRuntimeType(t.Results(), true) // skip the Tuple itself

	case *types.Named:
		// A pointer-to-named type can be derived from a named
		// type via reflection.  It may have methods too.
		r.addRuntimeType(types.NewPointer(T), false)

		// Consider 'type T struct{S}' where S has methods.
		// Reflection provides no way to get from T to struct{S},
		// only to S, so the method set of struct{S} is unwanted,
		// so set 'skip' flag during recursion.
		r.addRuntimeType(t.Underlying(), true)

	case *types.Array:
		r.addRuntimeType(t.Elem(), false)

	case *types.Struct:
		for i, n := 0, t.NumFields(); i < n; i++ {
			r.addRuntimeType(t.Field(i).Type(), false)
		}

	case *types.Tuple:
		for i, n := 0, t.Len(); i < n; i++ {
			r.addRuntimeType(t.At(i).Type(), false)
		}

	default:
		panic(T)
	}
}
Example #16
// seqType returns a string that can be used for reading and writing a
// type using the seq library.
// TODO(hyangah): avoid panic; gobind needs to output the problematic code location.
func seqType(t types.Type) string {
	if isErrorType(t) {
		return "UTF16"
	}
	switch t := t.(type) {
	case *types.Basic:
		switch t.Kind() {
		case types.Int:
			return "Int"
		case types.Int8:
			return "Int8"
		case types.Int16:
			return "Int16"
		case types.Int32:
			return "Int32"
		case types.Int64:
			return "Int64"
		case types.Uint8: // Byte.
			// TODO(crawshaw): questionable, but vital?
			return "Byte"
		// TODO(crawshaw): case types.Uint, types.Uint16, types.Uint32, types.Uint64:
		case types.Float32:
			return "Float32"
		case types.Float64:
			return "Float64"
		case types.String:
			return "UTF16"
		default:
			// Should be caught earlier in processing.
			panic(fmt.Sprintf("unsupported basic seqType: %s", t))
		}
	case *types.Named:
		switch u := t.Underlying().(type) {
		case *types.Interface:
			return "Ref"
		default:
			panic(fmt.Sprintf("unsupported named seqType: %s / %T", u, u))
		}
	case *types.Slice:
		switch e := t.Elem().(type) {
		case *types.Basic:
			switch e.Kind() {
			case types.Uint8: // Byte.
				return "ByteArray"
			default:
				panic(fmt.Sprintf("unsupported seqType: %s(%s) / %T(%T)", t, e, t, e))
			}
		default:
			panic(fmt.Sprintf("unsupported seqType: %s(%s) / %T(%T)", t, e, t, e))
		}
	// TODO: should the types.Array case be handled like types.Slice?
	case *types.Pointer:
		if _, ok := t.Elem().(*types.Named); ok {
			return "Ref"
		}
		panic(fmt.Sprintf("not supported yet, pointer type: %s / %T", t, t))

	default:
		panic(fmt.Sprintf("unsupported seqType: %s / %T", t, t))
	}
}
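A few illustrative calls, as a sketch: the wrapper name exerciseSeqType is hypothetical, and it assumes seqType and its isErrorType helper above, plus the fmt and go/types imports, are in scope.

// exerciseSeqType prints the seq read/write names for a few types.
func exerciseSeqType() {
	fmt.Println(seqType(types.Typ[types.Int]))                   // "Int"
	fmt.Println(seqType(types.NewSlice(types.Typ[types.Uint8]))) // "ByteArray"
	fmt.Println(seqType(types.Typ[types.String]))                // "UTF16"
}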
Example #17
func (tm *llvmTypeMap) getBackendType(t types.Type) backendType {
	switch t := t.(type) {
	case *types.Named:
		return tm.getBackendType(t.Underlying())

	case *types.Basic:
		switch t.Kind() {
		case types.Bool, types.Uint8:
			return &intBType{1, false}
		case types.Int8:
			return &intBType{1, true}
		case types.Uint16:
			return &intBType{2, false}
		case types.Int16:
			return &intBType{2, true}
		case types.Uint32:
			return &intBType{4, false}
		case types.Int32:
			return &intBType{4, true}
		case types.Uint64:
			return &intBType{8, false}
		case types.Int64:
			return &intBType{8, true}
		case types.Uint, types.Uintptr:
			return &intBType{tm.target.PointerSize(), false}
		case types.Int:
			return &intBType{tm.target.PointerSize(), true}
		case types.Float32:
			return &floatBType{false}
		case types.Float64:
			return &floatBType{true}
		case types.UnsafePointer:
			return &ptrBType{}
		case types.Complex64:
			f32 := &floatBType{false}
			return &structBType{[]backendType{f32, f32}}
		case types.Complex128:
			f64 := &floatBType{true}
			return &structBType{[]backendType{f64, f64}}
		case types.String:
			return &structBType{[]backendType{&ptrBType{}, &intBType{tm.target.PointerSize(), false}}}
		}

	case *types.Struct:
		var fields []backendType
		for i := 0; i != t.NumFields(); i++ {
			f := t.Field(i)
			fields = append(fields, tm.getBackendType(f.Type()))
		}
		return &structBType{fields}

	case *types.Pointer, *types.Signature, *types.Map, *types.Chan:
		return &ptrBType{}

	case *types.Interface:
		i8ptr := &ptrBType{}
		return &structBType{[]backendType{i8ptr, i8ptr}}

	case *types.Slice:
		return tm.sliceBackendType()

	case *types.Array:
		return &arrayBType{uint64(t.Len()), tm.getBackendType(t.Elem())}
	}

	panic("unhandled type: " + t.String())
}
Example #18
func (fr *frame) convert(v *govalue, dsttyp types.Type) *govalue {
	b := fr.builder

	// If it's a stack allocated value, we'll want to compare the
	// value type, not the pointer type.
	srctyp := v.typ

	// Get the underlying type, if any.
	origdsttyp := dsttyp
	dsttyp = dsttyp.Underlying()
	srctyp = srctyp.Underlying()

	// Identical (underlying) types? Just swap in the destination type.
	if types.Identical(srctyp, dsttyp) {
		return newValue(v.value, origdsttyp)
	}

	// Both pointer types with identical underlying types? Same as above.
	if srctyp, ok := srctyp.(*types.Pointer); ok {
		if dsttyp, ok := dsttyp.(*types.Pointer); ok {
			srctyp := srctyp.Elem().Underlying()
			dsttyp := dsttyp.Elem().Underlying()
			if types.Identical(srctyp, dsttyp) {
				return newValue(v.value, origdsttyp)
			}
		}
	}

	// string ->
	if isString(srctyp) {
		// (untyped) string -> string
		// XXX should untyped strings be able to escape go/types?
		if isString(dsttyp) {
			return newValue(v.value, origdsttyp)
		}

		// string -> []byte
		if isSlice(dsttyp, types.Byte) {
			value := v.value
			strdata := fr.builder.CreateExtractValue(value, 0, "")
			strlen := fr.builder.CreateExtractValue(value, 1, "")

			// Data must be copied, to prevent changes in
			// the byte slice from mutating the string.
			newdata := fr.createMalloc(strlen, false)
			fr.memcpy(newdata, strdata, strlen)

			struct_ := llvm.Undef(fr.types.ToLLVM(dsttyp))
			struct_ = fr.builder.CreateInsertValue(struct_, newdata, 0, "")
			struct_ = fr.builder.CreateInsertValue(struct_, strlen, 1, "")
			struct_ = fr.builder.CreateInsertValue(struct_, strlen, 2, "")
			return newValue(struct_, origdsttyp)
		}

		// string -> []rune
		if isSlice(dsttyp, types.Rune) {
			return fr.stringToRuneSlice(v)
		}
	}

	// []byte -> string
	if isSlice(srctyp, types.Byte) && isString(dsttyp) {
		value := v.value
		data := fr.builder.CreateExtractValue(value, 0, "")
		len := fr.builder.CreateExtractValue(value, 1, "")

		// Data must be copied, to prevent changes in
		// the byte slice from mutating the string.
		newdata := fr.createMalloc(len, false)
		fr.memcpy(newdata, data, len)

		struct_ := llvm.Undef(fr.types.ToLLVM(types.Typ[types.String]))
		struct_ = fr.builder.CreateInsertValue(struct_, newdata, 0, "")
		struct_ = fr.builder.CreateInsertValue(struct_, len, 1, "")
		return newValue(struct_, types.Typ[types.String])
	}

	// []rune -> string
	if isSlice(srctyp, types.Rune) && isString(dsttyp) {
		return fr.runeSliceToString(v)
	}

	// rune -> string
	if isString(dsttyp) && isInteger(srctyp) {
		return fr.runeToString(v)
	}

	// Unsafe pointer conversions.
	llvm_type := fr.types.ToLLVM(dsttyp)
	if dsttyp == types.Typ[types.UnsafePointer] { // X -> unsafe.Pointer
		if _, isptr := srctyp.(*types.Pointer); isptr {
			return newValue(v.value, origdsttyp)
		} else if srctyp == types.Typ[types.Uintptr] {
			value := b.CreateIntToPtr(v.value, llvm_type, "")
			return newValue(value, origdsttyp)
		}
	} else if srctyp == types.Typ[types.UnsafePointer] { // unsafe.Pointer -> X
		if _, isptr := dsttyp.(*types.Pointer); isptr {
			return newValue(v.value, origdsttyp)
		} else if dsttyp == types.Typ[types.Uintptr] {
			value := b.CreatePtrToInt(v.value, llvm_type, "")
			return newValue(value, origdsttyp)
		}
	}

	lv := v.value
	srcType := lv.Type()
	switch srcType.TypeKind() {
	case llvm.IntegerTypeKind:
		switch llvm_type.TypeKind() {
		case llvm.IntegerTypeKind:
			srcBits := srcType.IntTypeWidth()
			dstBits := llvm_type.IntTypeWidth()
			delta := srcBits - dstBits
			switch {
			case delta < 0:
				if !isUnsigned(srctyp) {
					lv = b.CreateSExt(lv, llvm_type, "")
				} else {
					lv = b.CreateZExt(lv, llvm_type, "")
				}
			case delta > 0:
				lv = b.CreateTrunc(lv, llvm_type, "")
			}
			return newValue(lv, origdsttyp)
		case llvm.FloatTypeKind, llvm.DoubleTypeKind:
			if !isUnsigned(v.Type()) {
				lv = b.CreateSIToFP(lv, llvm_type, "")
			} else {
				lv = b.CreateUIToFP(lv, llvm_type, "")
			}
			return newValue(lv, origdsttyp)
		}
	case llvm.DoubleTypeKind:
		switch llvm_type.TypeKind() {
		case llvm.FloatTypeKind:
			lv = b.CreateFPTrunc(lv, llvm_type, "")
			return newValue(lv, origdsttyp)
		case llvm.IntegerTypeKind:
			if !isUnsigned(dsttyp) {
				lv = b.CreateFPToSI(lv, llvm_type, "")
			} else {
				lv = b.CreateFPToUI(lv, llvm_type, "")
			}
			return newValue(lv, origdsttyp)
		}
	case llvm.FloatTypeKind:
		switch llvm_type.TypeKind() {
		case llvm.DoubleTypeKind:
			lv = b.CreateFPExt(lv, llvm_type, "")
			return newValue(lv, origdsttyp)
		case llvm.IntegerTypeKind:
			if !isUnsigned(dsttyp) {
				lv = b.CreateFPToSI(lv, llvm_type, "")
			} else {
				lv = b.CreateFPToUI(lv, llvm_type, "")
			}
			return newValue(lv, origdsttyp)
		}
	}

	// Complex -> complex. Complexes are only convertible to other
	// complexes, constant conversions aside. So we can just check the
	// source type here; given that the types are not identical
	// (checked above), we can assume the destination type is the alternate
	// complex type.
	if isComplex(srctyp) {
		var fpcast func(llvm.Builder, llvm.Value, llvm.Type, string) llvm.Value
		var fptype llvm.Type
		if srctyp == types.Typ[types.Complex64] {
			fpcast = (llvm.Builder).CreateFPExt
			fptype = llvm.DoubleType()
		} else {
			fpcast = (llvm.Builder).CreateFPTrunc
			fptype = llvm.FloatType()
		}
		if fpcast != nil {
			realv := b.CreateExtractValue(lv, 0, "")
			imagv := b.CreateExtractValue(lv, 1, "")
			realv = fpcast(b, realv, fptype, "")
			imagv = fpcast(b, imagv, fptype, "")
			lv = llvm.Undef(fr.types.ToLLVM(dsttyp))
			lv = b.CreateInsertValue(lv, realv, 0, "")
			lv = b.CreateInsertValue(lv, imagv, 1, "")
			return newValue(lv, origdsttyp)
		}
	}
	panic(fmt.Sprintf("unimplemented conversion: %s (%s) -> %s", v.typ, lv.Type(), origdsttyp))
}