func gdata(nam *Node, nr *Node, wid int) { if nam.Op != ONAME { Fatalf("gdata nam op %v", nam.Op) } if nam.Sym == nil { Fatalf("gdata nil nam sym") } switch nr.Op { case OLITERAL: switch u := nr.Val().U.(type) { case *Mpcplx: gdatacomplex(nam, u) case string: gdatastring(nam, u) case bool: i := int64(obj.Bool2int(u)) Linksym(nam.Sym).WriteInt(Ctxt, nam.Xoffset, wid, i) case *Mpint: Linksym(nam.Sym).WriteInt(Ctxt, nam.Xoffset, wid, u.Int64()) case *Mpflt: s := Linksym(nam.Sym) f := u.Float64() switch nam.Type.Etype { case TFLOAT32: s.WriteFloat32(Ctxt, nam.Xoffset, float32(f)) case TFLOAT64: s.WriteFloat64(Ctxt, nam.Xoffset, f) } default: Fatalf("gdata unhandled OLITERAL %v", nr) } case OADDR: if nr.Left.Op != ONAME { Fatalf("gdata ADDR left op %v", nr.Left.Op) } to := nr.Left Linksym(nam.Sym).WriteAddr(Ctxt, nam.Xoffset, wid, Linksym(to.Sym), to.Xoffset) case ONAME: if nr.Class != PFUNC { Fatalf("gdata NAME not PFUNC %d", nr.Class) } Linksym(nam.Sym).WriteAddr(Ctxt, nam.Xoffset, wid, Linksym(funcsym(nr.Sym)), nr.Xoffset) default: Fatalf("gdata unhandled op %v %v\n", nr, nr.Op) } }
// haspointers reports whether values of type t contain pointers that
// the garbage collector must scan.
// For arrays and structs the answer is memoized in the type's extra
// data using the encoding 0 = not yet computed, 1 = false, 2 = true.
func haspointers(t *Type) bool {
	switch t.Etype {
	case TINT, TUINT, TINT8, TUINT8, TINT16, TUINT16, TINT32, TUINT32, TINT64,
		TUINT64, TUINTPTR, TFLOAT32, TFLOAT64, TCOMPLEX64, TCOMPLEX128, TBOOL:
		// Scalar types never contain pointers.
		return false

	case TSLICE:
		// A slice header always carries a data pointer.
		return true

	case TARRAY:
		at := t.Extra.(*ArrayType)
		if at.Haspointers != 0 {
			// Cached answer: 1 encodes false, 2 encodes true.
			return at.Haspointers-1 != 0
		}

		ret := false
		if t.NumElem() != 0 { // non-empty array
			ret = haspointers(t.Elem())
		}

		at.Haspointers = 1 + uint8(obj.Bool2int(ret))
		return ret

	case TSTRUCT:
		st := t.StructType()
		if st.Haspointers != 0 {
			// Cached answer: 1 encodes false, 2 encodes true.
			return st.Haspointers-1 != 0
		}

		// A struct has pointers iff any field does.
		ret := false
		for _, t1 := range t.Fields().Slice() {
			if haspointers(t1.Type) {
				ret = true
				break
			}
		}
		st.Haspointers = 1 + uint8(obj.Bool2int(ret))
		return ret
	}

	// Everything else (strings, pointers, interfaces, channels, maps,
	// funcs, ...) contains pointers.
	return true
}
// Convconst converts constant node n to type t and
// places the result in con.
// tt below is the simplified machine representation of t; the three
// branches handle conversion to integer, float and complex targets.
func (n *Node) Convconst(con *Node, t *Type) {
	tt := Simsimtype(t)

	// copy the constant for conversion
	Nodconst(con, Types[TINT8], 0)

	con.Type = t
	con.Val = n.Val

	if Isint[tt] {
		// Integer target: extract an int64 from whatever constant kind
		// n holds, then truncate/extend it to tt's width via iconv.
		con.Val.Ctype = CTINT
		con.Val.U = new(Mpint)
		var i int64
		switch n.Val.Ctype {
		default:
			Fatal("convconst ctype=%d %v", n.Val.Ctype, Tconv(t, obj.FmtLong))

		case CTINT, CTRUNE:
			i = Mpgetfix(n.Val.U.(*Mpint))

		case CTBOOL:
			i = int64(obj.Bool2int(n.Val.U.(bool)))

		case CTNIL:
			i = 0
		}

		i = iconv(i, tt)
		Mpmovecfix(con.Val.U.(*Mpint), i)
		return
	}

	if Isfloat[tt] {
		con.Val = toflt(con.Val)
		if con.Val.Ctype != CTFLT {
			Fatal("convconst ctype=%d %v", con.Val.Ctype, t)
		}
		if tt == TFLOAT32 {
			// Round to float32 precision for float32 targets.
			con.Val.U = truncfltlit(con.Val.U.(*Mpflt), t)
		}
		return
	}

	if Iscomplex[tt] {
		con.Val = tocplx(con.Val)
		if tt == TCOMPLEX64 {
			// Round both parts to float32 precision.
			con.Val.U.(*Mpcplx).Real = *truncfltlit(&con.Val.U.(*Mpcplx).Real, Types[TFLOAT32])
			con.Val.U.(*Mpcplx).Imag = *truncfltlit(&con.Val.U.(*Mpcplx).Imag, Types[TFLOAT32])
		}
		return
	}

	Fatal("convconst %v constant", Tconv(t, obj.FmtLong))
}
// IntLiteral returns the Node's literal value as an interger. func (n *Node) IntLiteral() (x int64, ok bool) { switch { case n == nil: return case Isconst(n, CTINT): return n.Int(), true case Isconst(n, CTBOOL): return int64(obj.Bool2int(n.Bool())), true } return }
func intLiteral(n *gc.Node) (x int64, ok bool) { switch { case n == nil: return case gc.Isconst(n, gc.CTINT): return n.Int64(), true case gc.Isconst(n, gc.CTBOOL): return int64(obj.Bool2int(n.Bool())), true } return }
// Convconst converts constant node n to type t and
// places the result in con.
// tt below is the simplified machine representation of t; the three
// branches handle conversion to integer, float and complex targets.
func (n *Node) Convconst(con *Node, t *Type) {
	tt := Simsimtype(t)

	// copy the constant for conversion
	Nodconst(con, Types[TINT8], 0)

	con.Type = t
	con.SetVal(n.Val())

	if Isint[tt] {
		// Integer target: extract an int64 from whatever constant kind
		// n holds, then truncate/extend it to tt's width via iconv.
		con.SetVal(Val{new(Mpint)})
		var i int64
		switch n.Val().Ctype() {
		default:
			Fatalf("convconst ctype=%d %v", n.Val().Ctype(), Tconv(t, FmtLong))

		case CTINT, CTRUNE:
			i = n.Val().U.(*Mpint).Int64()

		case CTBOOL:
			i = int64(obj.Bool2int(n.Val().U.(bool)))

		case CTNIL:
			i = 0
		}

		i = iconv(i, tt)
		con.Val().U.(*Mpint).SetInt64(i)
		return
	}

	if Isfloat[tt] {
		con.SetVal(toflt(con.Val()))
		if con.Val().Ctype() != CTFLT {
			Fatalf("convconst ctype=%d %v", con.Val().Ctype(), t)
		}
		if tt == TFLOAT32 {
			// Round to float32 precision for float32 targets.
			con.SetVal(Val{truncfltlit(con.Val().U.(*Mpflt), t)})
		}
		return
	}

	if Iscomplex[tt] {
		con.SetVal(tocplx(con.Val()))
		if tt == TCOMPLEX64 {
			// Round both parts to float32 precision.
			con.Val().U.(*Mpcplx).Real = *truncfltlit(&con.Val().U.(*Mpcplx).Real, Types[TFLOAT32])
			con.Val().U.(*Mpcplx).Imag = *truncfltlit(&con.Val().U.(*Mpcplx).Imag, Types[TFLOAT32])
		}
		return
	}

	Fatalf("convconst %v constant", Tconv(t, FmtLong))
}
func Gbranch(as int, t *Type, likely int) *obj.Prog { p := Prog(as) p.To.Type = obj.TYPE_BRANCH p.To.Val = nil if as != obj.AJMP && likely != 0 && Thearch.Thechar != '9' && Thearch.Thechar != '7' { p.From.Type = obj.TYPE_CONST p.From.Offset = int64(obj.Bool2int(likely > 0)) } if Debug['g'] != 0 { fmt.Printf("%v\n", p) } return p }
// Sort the list of stack variables. Autos after anything else, // within autos, unused after used, within used, things with // pointers first, zeroed things first, and then decreasing size. // Because autos are laid out in decreasing addresses // on the stack, pointers first, zeroed things first and decreasing size // really means, in memory, things with pointers needing zeroing at // the top of the stack and increasing in size. // Non-autos sort on offset. func cmpstackvar(a *Node, b *Node) int { if a.Class != b.Class { if a.Class == PAUTO { return +1 } return -1 } if a.Class != PAUTO { if a.Xoffset < b.Xoffset { return -1 } if a.Xoffset > b.Xoffset { return +1 } return 0 } if a.Used != b.Used { return obj.Bool2int(b.Used) - obj.Bool2int(a.Used) } ap := obj.Bool2int(haspointers(a.Type)) bp := obj.Bool2int(haspointers(b.Type)) if ap != bp { return bp - ap } ap = obj.Bool2int(a.Name.Needzero) bp = obj.Bool2int(b.Name.Needzero) if ap != bp { return bp - ap } if a.Type.Width < b.Type.Width { return +1 } if a.Type.Width > b.Type.Width { return -1 } return stringsCompare(a.Sym.Name, b.Sym.Name) }
// Naddr rewrites a to refer to n.
// It assumes that a is zeroed on entry.
func Naddr(a *obj.Addr, n *Node) {
	if n == nil {
		return
	}

	if n.Type != nil && n.Type.Etype != TIDEAL {
		// TODO(rsc): This is undone by the selective clearing of width below,
		// to match architectures that were not as aggressive in setting width
		// during naddr. Those widths must be cleared to avoid triggering
		// failures in gins when it detects real but heretofore latent (and one
		// hopes innocuous) type mismatches.
		// The type mismatches should be fixed and the clearing below removed.
		dowidth(n.Type)

		a.Width = n.Type.Width
	}

	switch n.Op {
	default:
		a := a // copy to let escape into Ctxt.Dconv
		Debug['h'] = 1
		Dump("naddr", n)
		Fatalf("naddr: bad %v %v", Oconv(int(n.Op), 0), Ctxt.Dconv(a))

	case OREGISTER:
		a.Type = obj.TYPE_REG
		a.Reg = n.Reg
		a.Sym = nil
		// Thechar '8' appears to be the 386 back end (the newer Naddr
		// checks sys.I386 at this spot) — confirm before relying on it.
		if Thearch.Thechar == '8' { // TODO(rsc): Never clear a->width.
			a.Width = 0
		}

	case OINDREG:
		a.Type = obj.TYPE_MEM
		a.Reg = n.Reg
		a.Sym = Linksym(n.Sym)
		a.Offset = n.Xoffset
		if a.Offset != int64(int32(a.Offset)) {
			Yyerror("offset %d too large for OINDREG", a.Offset)
		}
		if Thearch.Thechar == '8' { // TODO(rsc): Never clear a->width.
			a.Width = 0
		}

	// n->left is PHEAP ONAME for stack parameter.
	// compute address of actual parameter on stack.
	case OPARAM:
		a.Etype = uint8(Simtype[n.Left.Type.Etype])
		a.Width = n.Left.Type.Width
		a.Offset = n.Xoffset
		a.Sym = Linksym(n.Left.Sym)
		a.Type = obj.TYPE_MEM
		a.Name = obj.NAME_PARAM
		a.Node = n.Left.Orig

	case OCLOSUREVAR:
		if !Curfn.Func.Needctxt {
			Fatalf("closurevar without needctxt")
		}
		a.Type = obj.TYPE_MEM
		a.Reg = int16(Thearch.REGCTXT)
		a.Sym = nil
		a.Offset = n.Xoffset

	case OCFUNC:
		Naddr(a, n.Left)
		a.Sym = Linksym(n.Left.Sym)

	case ONAME:
		a.Etype = 0
		if n.Type != nil {
			a.Etype = uint8(Simtype[n.Type.Etype])
		}
		a.Offset = n.Xoffset
		s := n.Sym
		a.Node = n.Orig

		//if(a->node >= (Node*)&n)
		//	fatal("stack node");
		if s == nil {
			s = Lookup(".noname")
		}
		// Method names are looked up in the receiver type's package.
		if n.Name.Method {
			if n.Type != nil {
				if n.Type.Sym != nil {
					if n.Type.Sym.Pkg != nil {
						s = Pkglookup(s.Name, n.Type.Sym.Pkg)
					}
				}
			}
		}

		a.Type = obj.TYPE_MEM
		switch n.Class {
		default:
			Fatalf("naddr: ONAME class %v %d\n", n.Sym, n.Class)

		case PEXTERN:
			a.Name = obj.NAME_EXTERN

		case PAUTO:
			a.Name = obj.NAME_AUTO

		case PPARAM, PPARAMOUT:
			a.Name = obj.NAME_PARAM

		case PFUNC:
			// Function values are addresses of the func symbol.
			a.Name = obj.NAME_EXTERN
			a.Type = obj.TYPE_ADDR
			a.Width = int64(Widthptr)
			s = funcsym(s)
		}

		a.Sym = Linksym(s)

	case OLITERAL:
		if Thearch.Thechar == '8' {
			a.Width = 0
		}

		switch n.Val().Ctype() {
		default:
			Fatalf("naddr: const %v", Tconv(n.Type, obj.FmtLong))

		case CTFLT:
			a.Type = obj.TYPE_FCONST
			a.Val = mpgetflt(n.Val().U.(*Mpflt))

		case CTINT, CTRUNE:
			a.Sym = nil
			a.Type = obj.TYPE_CONST
			a.Offset = Mpgetfix(n.Val().U.(*Mpint))

		case CTSTR:
			datagostring(n.Val().U.(string), a)

		case CTBOOL:
			a.Sym = nil
			a.Type = obj.TYPE_CONST
			a.Offset = int64(obj.Bool2int(n.Val().U.(bool)))

		case CTNIL:
			a.Sym = nil
			a.Type = obj.TYPE_CONST
			a.Offset = 0
		}

	case OADDR:
		Naddr(a, n.Left)
		a.Etype = uint8(Tptr)
		if Thearch.Thechar != '0' && Thearch.Thechar != '5' && Thearch.Thechar != '7' && Thearch.Thechar != '9' { // TODO(rsc): Do this even for arm, ppc64.
			a.Width = int64(Widthptr)
		}
		if a.Type != obj.TYPE_MEM {
			a := a // copy to let escape into Ctxt.Dconv
			Fatalf("naddr: OADDR %v (from %v)", Ctxt.Dconv(a), Oconv(int(n.Left.Op), 0))
		}
		a.Type = obj.TYPE_ADDR

	// itable of interface value
	case OITAB:
		Naddr(a, n.Left)
		if a.Type == obj.TYPE_CONST && a.Offset == 0 {
			break // itab(nil)
		}
		a.Etype = uint8(Tptr)
		a.Width = int64(Widthptr)

	// pointer in a string or slice
	case OSPTR:
		Naddr(a, n.Left)
		if a.Type == obj.TYPE_CONST && a.Offset == 0 {
			break // ptr(nil)
		}
		a.Etype = uint8(Simtype[Tptr])
		a.Offset += int64(Array_array)
		a.Width = int64(Widthptr)

	// len of string or slice
	case OLEN:
		Naddr(a, n.Left)
		if a.Type == obj.TYPE_CONST && a.Offset == 0 {
			break // len(nil)
		}
		a.Etype = uint8(Simtype[TUINT])
		a.Offset += int64(Array_nel)
		if Thearch.Thechar != '5' { // TODO(rsc): Do this even on arm.
			a.Width = int64(Widthint)
		}

	// cap of string or slice
	case OCAP:
		Naddr(a, n.Left)
		if a.Type == obj.TYPE_CONST && a.Offset == 0 {
			break // cap(nil)
		}
		a.Etype = uint8(Simtype[TUINT])
		a.Offset += int64(Array_cap)
		if Thearch.Thechar != '5' { // TODO(rsc): Do this even on arm.
			a.Width = int64(Widthint)
		}
	}

	return
}
// dtypesym returns the symbol for t's runtime type descriptor,
// writing out the descriptor data the first time the symbol is
// requested for this type.
func dtypesym(t *Type) *Sym {
	// Replace byte, rune aliases with real type.
	// They've been separate internally to make error messages
	// better, but we have to merge them in the reflect tables.
	if t == bytetype || t == runetype {
		t = Types[t.Etype]
	}

	if isideal(t) {
		Fatalf("dtypesym %v", t)
	}

	s := typesym(t)
	if s.Flags&SymSiggen != 0 {
		// Descriptor already generated.
		return s
	}
	s.Flags |= SymSiggen

	// special case (look for runtime below):
	// when compiling package runtime,
	// emit the type structures for int, float, etc.
	tbase := t

	if Isptr[t.Etype] && t.Sym == nil && t.Type.Sym != nil {
		tbase = t.Type
	}
	dupok := 0
	if tbase.Sym == nil {
		// Unnamed types may be emitted by multiple packages.
		dupok = obj.DUPOK
	}

	if compiling_runtime != 0 && (tbase == Types[tbase.Etype] || tbase == bytetype || tbase == runetype || tbase == errortype) { // int, float, etc
		goto ok
	}

	// named types from other files are defined only by those files
	if tbase.Sym != nil && !tbase.Local {
		return s
	}
	if isforw[tbase.Etype] {
		return s
	}

ok:
	// ot tracks the current write offset in s; xt records where the
	// uncommon ("extra") type data is linked in by dextratype below.
	ot := 0
	xt := 0
	switch t.Etype {
	default:
		ot = dcommontype(s, ot, t)
		xt = ot - 2*Widthptr

	case TARRAY:
		if t.Bound >= 0 {
			// ../../runtime/type.go:/ArrayType
			s1 := dtypesym(t.Type)
			// An array descriptor also references the corresponding
			// slice type's descriptor.
			t2 := typ(TARRAY)
			t2.Type = t.Type
			t2.Bound = -1 // slice
			s2 := dtypesym(t2)
			ot = dcommontype(s, ot, t)
			xt = ot - 2*Widthptr
			ot = dsymptr(s, ot, s1, 0)
			ot = dsymptr(s, ot, s2, 0)
			ot = duintptr(s, ot, uint64(t.Bound))
		} else {
			// ../../runtime/type.go:/SliceType
			s1 := dtypesym(t.Type)
			ot = dcommontype(s, ot, t)
			xt = ot - 2*Widthptr
			ot = dsymptr(s, ot, s1, 0)
		}

	// ../../runtime/type.go:/ChanType
	case TCHAN:
		s1 := dtypesym(t.Type)
		ot = dcommontype(s, ot, t)
		xt = ot - 2*Widthptr
		ot = dsymptr(s, ot, s1, 0)
		ot = duintptr(s, ot, uint64(t.Chan))

	case TFUNC:
		// Ensure descriptors for all parameter and result types exist.
		for t1 := getthisx(t).Type; t1 != nil; t1 = t1.Down {
			dtypesym(t1.Type)
		}
		isddd := false
		for t1 := getinargx(t).Type; t1 != nil; t1 = t1.Down {
			isddd = t1.Isddd
			dtypesym(t1.Type)
		}
		for t1 := getoutargx(t).Type; t1 != nil; t1 = t1.Down {
			dtypesym(t1.Type)
		}

		ot = dcommontype(s, ot, t)
		xt = ot - 2*Widthptr
		ot = duint8(s, ot, uint8(obj.Bool2int(isddd)))

		// two slice headers: in and out.
		ot = int(Rnd(int64(ot), int64(Widthptr)))

		ot = dsymptr(s, ot, s, ot+2*(Widthptr+2*Widthint))
		n := t.Thistuple + t.Intuple
		ot = duintxx(s, ot, uint64(n), Widthint)
		ot = duintxx(s, ot, uint64(n), Widthint)
		ot = dsymptr(s, ot, s, ot+1*(Widthptr+2*Widthint)+n*Widthptr)
		ot = duintxx(s, ot, uint64(t.Outtuple), Widthint)
		ot = duintxx(s, ot, uint64(t.Outtuple), Widthint)

		// slice data
		for t1 := getthisx(t).Type; t1 != nil; t1 = t1.Down {
			ot = dsymptr(s, ot, dtypesym(t1.Type), 0)
			n++
		}
		for t1 := getinargx(t).Type; t1 != nil; t1 = t1.Down {
			ot = dsymptr(s, ot, dtypesym(t1.Type), 0)
			n++
		}
		for t1 := getoutargx(t).Type; t1 != nil; t1 = t1.Down {
			ot = dsymptr(s, ot, dtypesym(t1.Type), 0)
			n++
		}

	case TINTER:
		m := imethods(t)
		n := len(m)
		for _, a := range m {
			dtypesym(a.type_)
		}

		// ../../../runtime/type.go:/InterfaceType
		ot = dcommontype(s, ot, t)

		xt = ot - 2*Widthptr
		ot = dsymptr(s, ot, s, ot+Widthptr+2*Widthint)
		ot = duintxx(s, ot, uint64(n), Widthint)
		ot = duintxx(s, ot, uint64(n), Widthint)
		for _, a := range m {
			// ../../../runtime/type.go:/imethod
			ot = dgostringptr(s, ot, a.name)

			ot = dgopkgpath(s, ot, a.pkg)
			ot = dsymptr(s, ot, dtypesym(a.type_), 0)
		}

	// ../../../runtime/type.go:/MapType
	case TMAP:
		s1 := dtypesym(t.Down)
		s2 := dtypesym(t.Type)
		s3 := dtypesym(mapbucket(t))
		s4 := dtypesym(hmap(t))
		ot = dcommontype(s, ot, t)
		xt = ot - 2*Widthptr
		ot = dsymptr(s, ot, s1, 0)
		ot = dsymptr(s, ot, s2, 0)
		ot = dsymptr(s, ot, s3, 0)
		ot = dsymptr(s, ot, s4, 0)
		// Oversized keys/values are stored indirectly in the buckets.
		if t.Down.Width > MAXKEYSIZE {
			ot = duint8(s, ot, uint8(Widthptr))
			ot = duint8(s, ot, 1) // indirect
		} else {
			ot = duint8(s, ot, uint8(t.Down.Width))
			ot = duint8(s, ot, 0) // not indirect
		}

		if t.Type.Width > MAXVALSIZE {
			ot = duint8(s, ot, uint8(Widthptr))
			ot = duint8(s, ot, 1) // indirect
		} else {
			ot = duint8(s, ot, uint8(t.Type.Width))
			ot = duint8(s, ot, 0) // not indirect
		}

		ot = duint16(s, ot, uint16(mapbucket(t).Width))
		ot = duint8(s, ot, uint8(obj.Bool2int(isreflexive(t.Down))))
		ot = duint8(s, ot, uint8(obj.Bool2int(needkeyupdate(t.Down))))

	case TPTR32, TPTR64:
		if t.Type.Etype == TANY {
			// ../../runtime/type.go:/UnsafePointerType
			ot = dcommontype(s, ot, t)

			break
		}

		// ../../runtime/type.go:/PtrType
		s1 := dtypesym(t.Type)

		ot = dcommontype(s, ot, t)
		xt = ot - 2*Widthptr
		ot = dsymptr(s, ot, s1, 0)

	// ../../runtime/type.go:/StructType
	// for security, only the exported fields.
	case TSTRUCT:
		n := 0

		for t1 := t.Type; t1 != nil; t1 = t1.Down {
			dtypesym(t1.Type)
			n++
		}

		ot = dcommontype(s, ot, t)
		xt = ot - 2*Widthptr
		ot = dsymptr(s, ot, s, ot+Widthptr+2*Widthint)
		ot = duintxx(s, ot, uint64(n), Widthint)
		ot = duintxx(s, ot, uint64(n), Widthint)
		for t1 := t.Type; t1 != nil; t1 = t1.Down {
			// ../../runtime/type.go:/structField
			if t1.Sym != nil && t1.Embedded == 0 {
				ot = dgostringptr(s, ot, t1.Sym.Name)
				if exportname(t1.Sym.Name) {
					ot = dgostringptr(s, ot, "")
				} else {
					ot = dgopkgpath(s, ot, t1.Sym.Pkg)
				}
			} else {
				ot = dgostringptr(s, ot, "")
				if t1.Type.Sym != nil && t1.Type.Sym.Pkg == builtinpkg {
					ot = dgopkgpath(s, ot, localpkg)
				} else {
					ot = dgostringptr(s, ot, "")
				}
			}

			ot = dsymptr(s, ot, dtypesym(t1.Type), 0)
			ot = dgostrlitptr(s, ot, t1.Note)
			ot = duintptr(s, ot, uint64(t1.Width)) // field offset
		}
	}

	ot = dextratype(s, ot, t, xt)
	ggloblsym(s, int32(ot), int16(dupok|obj.RODATA))

	// generate typelink.foo pointing at s = type.foo.
	// The linker will leave a table of all the typelinks for
	// types in the binary, so reflect can find them.
	// We only need the link for unnamed composites that
	// we want be able to find.
	if t.Sym == nil {
		switch t.Etype {
		case TPTR32, TPTR64:
			// The ptrto field of the type data cannot be relied on when
			// dynamic linking: a type T may be defined in a module that makes
			// no use of pointers to that type, but another module can contain
			// a package that imports the first one and does use *T pointers.
			// The second module will end up defining type data for *T and a
			// type.*T symbol pointing at it. It's important that calling
			// .PtrTo() on the reflect.Type for T returns this type data and
			// not some synthesized object, so we need reflect to be able to
			// find it!
			if !Ctxt.Flag_dynlink {
				break
			}
			fallthrough

		case TARRAY, TCHAN, TFUNC, TMAP:
			slink := typelinksym(t)
			dsymptr(slink, 0, s, 0)
			ggloblsym(slink, int32(Widthptr), int16(dupok|obj.RODATA))
		}
	}

	return s
}
func haspointers(t *Type) bool { if t.Haspointers != 0 { return t.Haspointers-1 != 0 } var ret bool switch t.Etype { case TINT, TUINT, TINT8, TUINT8, TINT16, TUINT16, TINT32, TUINT32, TINT64, TUINT64, TUINTPTR, TFLOAT32, TFLOAT64, TCOMPLEX64, TCOMPLEX128, TBOOL: ret = false case TARRAY: if t.Bound < 0 { // slice ret = true break } if t.Bound == 0 { // empty array ret = false break } ret = haspointers(t.Type) case TSTRUCT: ret = false for t1 := t.Type; t1 != nil; t1 = t1.Down { if haspointers(t1.Type) { ret = true break } } case TSTRING, TPTR32, TPTR64, TUNSAFEPTR, TINTER, TCHAN, TMAP, TFUNC: fallthrough default: ret = true case TFIELD: Fatalf("haspointers: unexpected type, %v", t) } t.Haspointers = 1 + uint8(obj.Bool2int(ret)) return ret }
// dtypesym returns the symbol for t's runtime type descriptor,
// emitting the descriptor data the first time it is requested
// for this type.
func dtypesym(t *Type) *Sym {
	// Replace byte, rune aliases with real type.
	// They've been separate internally to make error messages
	// better, but we have to merge them in the reflect tables.
	if t == bytetype || t == runetype {
		t = Types[t.Etype]
	}

	if t.IsUntyped() {
		Fatalf("dtypesym %v", t)
	}

	s := typesym(t)
	if s.Flags&SymSiggen != 0 {
		// Descriptor already generated.
		return s
	}
	s.Flags |= SymSiggen

	// special case (look for runtime below):
	// when compiling package runtime,
	// emit the type structures for int, float, etc.
	tbase := t

	if t.IsPtr() && t.Sym == nil && t.Elem().Sym != nil {
		tbase = t.Elem()
	}
	dupok := 0
	if tbase.Sym == nil {
		// Unnamed types may be emitted by multiple packages.
		dupok = obj.DUPOK
	}

	if myimportpath == "runtime" && (tbase == Types[tbase.Etype] || tbase == bytetype || tbase == runetype || tbase == errortype) { // int, float, etc
		goto ok
	}

	// named types from other files are defined only by those files
	if tbase.Sym != nil && !tbase.Local {
		return s
	}
	if isforw[tbase.Etype] {
		return s
	}

ok:
	// ot is the current write offset into s.
	ot := 0
	switch t.Etype {
	default:
		ot = dcommontype(s, ot, t)
		ot = dextratype(s, ot, t, 0)

	case TARRAY:
		// ../../../../runtime/type.go:/arrayType
		s1 := dtypesym(t.Elem())
		// An array descriptor also references the corresponding
		// slice type's descriptor.
		t2 := typSlice(t.Elem())
		s2 := dtypesym(t2)
		ot = dcommontype(s, ot, t)
		ot = dsymptr(s, ot, s1, 0)
		ot = dsymptr(s, ot, s2, 0)
		ot = duintptr(s, ot, uint64(t.NumElem()))
		ot = dextratype(s, ot, t, 0)

	case TSLICE:
		// ../../../../runtime/type.go:/sliceType
		s1 := dtypesym(t.Elem())
		ot = dcommontype(s, ot, t)
		ot = dsymptr(s, ot, s1, 0)
		ot = dextratype(s, ot, t, 0)

	case TCHAN:
		// ../../../../runtime/type.go:/chanType
		s1 := dtypesym(t.Elem())
		ot = dcommontype(s, ot, t)
		ot = dsymptr(s, ot, s1, 0)
		ot = duintptr(s, ot, uint64(t.ChanDir()))
		ot = dextratype(s, ot, t, 0)

	case TFUNC:
		// Ensure descriptors for all parameter and result types exist.
		for _, t1 := range t.Recvs().Fields().Slice() {
			dtypesym(t1.Type)
		}
		isddd := false
		for _, t1 := range t.Params().Fields().Slice() {
			isddd = t1.Isddd
			dtypesym(t1.Type)
		}
		for _, t1 := range t.Results().Fields().Slice() {
			dtypesym(t1.Type)
		}

		ot = dcommontype(s, ot, t)
		inCount := t.Recvs().NumFields() + t.Params().NumFields()
		outCount := t.Results().NumFields()
		if isddd {
			// The variadic flag is folded into the top bit of outCount.
			outCount |= 1 << 15
		}
		ot = duint16(s, ot, uint16(inCount))
		ot = duint16(s, ot, uint16(outCount))
		if Widthptr == 8 {
			ot += 4 // align for *rtype
		}

		dataAdd := (inCount + t.Results().NumFields()) * Widthptr
		ot = dextratype(s, ot, t, dataAdd)

		// Array of rtype pointers follows funcType.
		for _, t1 := range t.Recvs().Fields().Slice() {
			ot = dsymptr(s, ot, dtypesym(t1.Type), 0)
		}
		for _, t1 := range t.Params().Fields().Slice() {
			ot = dsymptr(s, ot, dtypesym(t1.Type), 0)
		}
		for _, t1 := range t.Results().Fields().Slice() {
			ot = dsymptr(s, ot, dtypesym(t1.Type), 0)
		}

	case TINTER:
		m := imethods(t)
		n := len(m)
		for _, a := range m {
			dtypesym(a.type_)
		}

		// ../../../../runtime/type.go:/interfaceType
		ot = dcommontype(s, ot, t)

		var tpkg *Pkg
		if t.Sym != nil && t != Types[t.Etype] && t != errortype {
			tpkg = t.Sym.Pkg
		}
		ot = dgopkgpath(s, ot, tpkg)

		ot = dsymptr(s, ot, s, ot+Widthptr+2*Widthint+uncommonSize(t))
		ot = duintxx(s, ot, uint64(n), Widthint)
		ot = duintxx(s, ot, uint64(n), Widthint)

		dataAdd := imethodSize() * n
		ot = dextratype(s, ot, t, dataAdd)

		lsym := Linksym(s)
		for _, a := range m {
			// ../../../../runtime/type.go:/imethod
			exported := exportname(a.name)
			var pkg *Pkg
			if !exported && a.pkg != tpkg {
				pkg = a.pkg
			}
			nsym := dname(a.name, "", pkg, exported)

			ot = dsymptrOffLSym(lsym, ot, nsym, 0)
			ot = dsymptrOffLSym(lsym, ot, Linksym(dtypesym(a.type_)), 0)
		}

	// ../../../../runtime/type.go:/mapType
	case TMAP:
		s1 := dtypesym(t.Key())
		s2 := dtypesym(t.Val())
		s3 := dtypesym(mapbucket(t))
		s4 := dtypesym(hmap(t))
		ot = dcommontype(s, ot, t)
		ot = dsymptr(s, ot, s1, 0)
		ot = dsymptr(s, ot, s2, 0)
		ot = dsymptr(s, ot, s3, 0)
		ot = dsymptr(s, ot, s4, 0)
		// Oversized keys/values are stored indirectly in the buckets.
		if t.Key().Width > MAXKEYSIZE {
			ot = duint8(s, ot, uint8(Widthptr))
			ot = duint8(s, ot, 1) // indirect
		} else {
			ot = duint8(s, ot, uint8(t.Key().Width))
			ot = duint8(s, ot, 0) // not indirect
		}

		if t.Val().Width > MAXVALSIZE {
			ot = duint8(s, ot, uint8(Widthptr))
			ot = duint8(s, ot, 1) // indirect
		} else {
			ot = duint8(s, ot, uint8(t.Val().Width))
			ot = duint8(s, ot, 0) // not indirect
		}

		ot = duint16(s, ot, uint16(mapbucket(t).Width))
		ot = duint8(s, ot, uint8(obj.Bool2int(isreflexive(t.Key()))))
		ot = duint8(s, ot, uint8(obj.Bool2int(needkeyupdate(t.Key()))))
		ot = dextratype(s, ot, t, 0)

	case TPTR32, TPTR64:
		if t.Elem().Etype == TANY {
			// ../../../../runtime/type.go:/UnsafePointerType
			ot = dcommontype(s, ot, t)
			ot = dextratype(s, ot, t, 0)

			break
		}

		// ../../../../runtime/type.go:/ptrType
		s1 := dtypesym(t.Elem())

		ot = dcommontype(s, ot, t)
		ot = dsymptr(s, ot, s1, 0)
		ot = dextratype(s, ot, t, 0)

	// ../../../../runtime/type.go:/structType
	// for security, only the exported fields.
	case TSTRUCT:
		n := 0

		for _, t1 := range t.Fields().Slice() {
			dtypesym(t1.Type)
			n++
		}

		ot = dcommontype(s, ot, t)
		pkg := localpkg
		if t.Sym != nil {
			pkg = t.Sym.Pkg
		}
		ot = dgopkgpath(s, ot, pkg)
		ot = dsymptr(s, ot, s, ot+Widthptr+2*Widthint+uncommonSize(t))
		ot = duintxx(s, ot, uint64(n), Widthint)
		ot = duintxx(s, ot, uint64(n), Widthint)

		dataAdd := n * structfieldSize()
		ot = dextratype(s, ot, t, dataAdd)

		for _, f := range t.Fields().Slice() {
			// ../../../../runtime/type.go:/structField
			ot = dnameField(s, ot, f)

			ot = dsymptr(s, ot, dtypesym(f.Type), 0)
			ot = duintptr(s, ot, uint64(f.Offset))
		}
	}

	ot = dextratypeData(s, ot, t)
	ggloblsym(s, int32(ot), int16(dupok|obj.RODATA))

	// generate typelink.foo pointing at s = type.foo.
	//
	// The linker will leave a table of all the typelinks for
	// types in the binary, so the runtime can find them.
	//
	// When buildmode=shared, all types are in typelinks so the
	// runtime can deduplicate type pointers.
	keep := Ctxt.Flag_dynlink
	if !keep && t.Sym == nil {
		// For an unnamed type, we only need the link if the type can
		// be created at run time by reflect.PtrTo and similar
		// functions. If the type exists in the program, those
		// functions must return the existing type structure rather
		// than creating a new one.
		switch t.Etype {
		case TPTR32, TPTR64, TARRAY, TCHAN, TFUNC, TMAP, TSLICE, TSTRUCT:
			keep = true
		}
	}
	if keep {
		slink := typelinkLSym(t)
		dsymptrOffLSym(slink, 0, Linksym(s), 0)
		ggloblLSym(slink, 4, int16(dupok|obj.RODATA))
	}

	return s
}
// Naddr rewrites a to refer to n.
// It assumes that a is zeroed on entry.
func Naddr(a *obj.Addr, n *Node) {
	if n == nil {
		return
	}

	if n.Type != nil && n.Type.Etype != TIDEAL {
		// TODO(rsc): This is undone by the selective clearing of width below,
		// to match architectures that were not as aggressive in setting width
		// during naddr. Those widths must be cleared to avoid triggering
		// failures in gins when it detects real but heretofore latent (and one
		// hopes innocuous) type mismatches.
		// The type mismatches should be fixed and the clearing below removed.
		dowidth(n.Type)

		a.Width = n.Type.Width
	}

	switch n.Op {
	default:
		a := a // copy to let escape into Ctxt.Dconv
		Debug['h'] = 1
		Dump("naddr", n)
		Fatalf("naddr: bad %v %v", n.Op, Ctxt.Dconv(a))

	case OREGISTER:
		a.Type = obj.TYPE_REG
		a.Reg = n.Reg
		a.Sym = nil
		if Thearch.LinkArch.Family == sys.I386 { // TODO(rsc): Never clear a->width.
			a.Width = 0
		}

	case OINDREG:
		a.Type = obj.TYPE_MEM
		a.Reg = n.Reg
		a.Sym = Linksym(n.Sym)
		a.Offset = n.Xoffset
		if a.Offset != int64(int32(a.Offset)) {
			Yyerror("offset %d too large for OINDREG", a.Offset)
		}
		if Thearch.LinkArch.Family == sys.I386 { // TODO(rsc): Never clear a->width.
			a.Width = 0
		}

	case OCLOSUREVAR:
		if !Curfn.Func.Needctxt {
			Fatalf("closurevar without needctxt")
		}
		a.Type = obj.TYPE_MEM
		a.Reg = int16(Thearch.REGCTXT)
		a.Sym = nil
		a.Offset = n.Xoffset

	case OCFUNC:
		Naddr(a, n.Left)
		a.Sym = Linksym(n.Left.Sym)

	case ONAME:
		a.Etype = 0
		if n.Type != nil {
			a.Etype = uint8(Simtype[n.Type.Etype])
		}
		a.Offset = n.Xoffset
		s := n.Sym
		a.Node = n.Orig

		//if(a->node >= (Node*)&n)
		//	fatal("stack node");
		if s == nil {
			s = Lookup(".noname")
		}
		// Method names are looked up in the receiver type's package.
		if n.Name.Method && n.Type != nil && n.Type.Sym != nil && n.Type.Sym.Pkg != nil {
			s = Pkglookup(s.Name, n.Type.Sym.Pkg)
		}

		a.Type = obj.TYPE_MEM
		switch n.Class {
		default:
			Fatalf("naddr: ONAME class %v %d\n", n.Sym, n.Class)

		case PEXTERN:
			a.Name = obj.NAME_EXTERN

		case PAUTO:
			a.Name = obj.NAME_AUTO

		case PPARAM, PPARAMOUT:
			a.Name = obj.NAME_PARAM

		case PFUNC:
			// Function values are addresses of the func symbol.
			a.Name = obj.NAME_EXTERN
			a.Type = obj.TYPE_ADDR
			a.Width = int64(Widthptr)
			s = funcsym(s)
		}

		a.Sym = Linksym(s)

	case ODOT:
		// A special case to make write barriers more efficient.
		// Taking the address of the first field of a named struct
		// is the same as taking the address of the struct.
		if !n.Left.Type.IsStruct() || n.Left.Type.Field(0).Sym != n.Sym {
			Debug['h'] = 1
			Dump("naddr", n)
			Fatalf("naddr: bad %v %v", n.Op, Ctxt.Dconv(a))
		}
		Naddr(a, n.Left)

	case OLITERAL:
		if Thearch.LinkArch.Family == sys.I386 {
			a.Width = 0
		}

		switch u := n.Val().U.(type) {
		default:
			Fatalf("naddr: const %v", Tconv(n.Type, FmtLong))

		case *Mpflt:
			a.Type = obj.TYPE_FCONST
			a.Val = u.Float64()

		case *Mpint:
			a.Sym = nil
			a.Type = obj.TYPE_CONST
			a.Offset = u.Int64()

		case string:
			datagostring(u, a)

		case bool:
			a.Sym = nil
			a.Type = obj.TYPE_CONST
			a.Offset = int64(obj.Bool2int(u))

		case *NilVal:
			a.Sym = nil
			a.Type = obj.TYPE_CONST
			a.Offset = 0
		}

	case OADDR:
		Naddr(a, n.Left)
		a.Etype = uint8(Tptr)
		if !Thearch.LinkArch.InFamily(sys.MIPS64, sys.ARM, sys.ARM64, sys.PPC64, sys.S390X) { // TODO(rsc): Do this even for these architectures.
			a.Width = int64(Widthptr)
		}
		if a.Type != obj.TYPE_MEM {
			a := a // copy to let escape into Ctxt.Dconv
			Fatalf("naddr: OADDR %v (from %v)", Ctxt.Dconv(a), n.Left.Op)
		}
		a.Type = obj.TYPE_ADDR

	case OITAB:
		// itable of interface value
		Naddr(a, n.Left)
		if a.Type == obj.TYPE_CONST && a.Offset == 0 {
			break // itab(nil)
		}
		a.Etype = uint8(Tptr)
		a.Width = int64(Widthptr)

	case OIDATA:
		// idata of interface value
		Naddr(a, n.Left)
		if a.Type == obj.TYPE_CONST && a.Offset == 0 {
			break // idata(nil)
		}
		if isdirectiface(n.Type) {
			a.Etype = uint8(Simtype[n.Type.Etype])
		} else {
			a.Etype = uint8(Tptr)
		}
		a.Offset += int64(Widthptr)
		a.Width = int64(Widthptr)

	// pointer in a string or slice
	case OSPTR:
		Naddr(a, n.Left)
		if a.Type == obj.TYPE_CONST && a.Offset == 0 {
			break // ptr(nil)
		}
		a.Etype = uint8(Simtype[Tptr])
		a.Offset += int64(Array_array)
		a.Width = int64(Widthptr)

	// len of string or slice
	case OLEN:
		Naddr(a, n.Left)
		if a.Type == obj.TYPE_CONST && a.Offset == 0 {
			break // len(nil)
		}
		a.Etype = uint8(Simtype[TUINT])
		a.Offset += int64(Array_nel)
		if Thearch.LinkArch.Family != sys.ARM { // TODO(rsc): Do this even on arm.
			a.Width = int64(Widthint)
		}

	// cap of string or slice
	case OCAP:
		Naddr(a, n.Left)
		if a.Type == obj.TYPE_CONST && a.Offset == 0 {
			break // cap(nil)
		}
		a.Etype = uint8(Simtype[TUINT])
		a.Offset += int64(Array_cap)
		if Thearch.LinkArch.Family != sys.ARM { // TODO(rsc): Do this even on arm.
			a.Width = int64(Widthint)
		}
	}
}
// dtypesym emits the reflect type descriptor for t into a data symbol
// and returns that symbol. The layout written here must stay in sync
// with the runtime's type structures (see the runtime/type.go
// references in the case comments below). Emission is idempotent:
// the SymSiggen flag guards against writing the same symbol twice.
func dtypesym(t *Type) *Sym {
	// Replace byte, rune aliases with real type.
	// They've been separate internally to make error messages
	// better, but we have to merge them in the reflect tables.
	if t == bytetype || t == runetype {
		t = Types[t.Etype]
	}

	// Ideal (untyped constant) types never reach the reflect tables.
	if isideal(t) {
		Fatalf("dtypesym %v", t)
	}

	s := typesym(t)
	if s.Flags&SymSiggen != 0 {
		// Already generated (or generation in progress for a
		// recursive type); return the symbol as-is.
		return s
	}
	s.Flags |= SymSiggen

	// special case (look for runtime below):
	// when compiling package runtime,
	// emit the type structures for int, float, etc.
	tbase := t

	// For an unnamed pointer to a named type, ownership of the
	// descriptor follows the pointed-to type.
	if Isptr[t.Etype] && t.Sym == nil && t.Type.Sym != nil {
		tbase = t.Type
	}
	dupok := 0
	if tbase.Sym == nil {
		// Unnamed composite types may be emitted by several
		// packages; mark the symbol dup-ok so the linker keeps one.
		dupok = obj.DUPOK
	}

	if myimportpath == "runtime" && (tbase == Types[tbase.Etype] || tbase == bytetype || tbase == runetype || tbase == errortype) { // int, float, etc
		goto ok
	}

	// named types from other files are defined only by those files
	if tbase.Sym != nil && !tbase.Local {
		return s
	}
	if isforw[tbase.Etype] {
		// Incomplete (forward-declared) type; nothing to emit yet.
		return s
	}

ok:
	// ot is the running offset into s as the descriptor is written.
	ot := 0
	switch t.Etype {
	default:
		ot = dcommontype(s, ot, t)
		ot = dextratype(s, ot, t, 0)

	case TARRAY:
		if t.Bound >= 0 {
			// ../../../../runtime/type.go:/arrayType
			s1 := dtypesym(t.Type)

			// An arrayType also records the corresponding slice
			// type, so synthesize []T and emit its descriptor.
			t2 := typ(TARRAY)
			t2.Type = t.Type
			t2.Bound = -1 // slice
			s2 := dtypesym(t2)
			ot = dcommontype(s, ot, t)
			ot = dsymptr(s, ot, s1, 0)
			ot = dsymptr(s, ot, s2, 0)
			ot = duintptr(s, ot, uint64(t.Bound))
		} else {
			// ../../../../runtime/type.go:/sliceType
			s1 := dtypesym(t.Type)
			ot = dcommontype(s, ot, t)
			ot = dsymptr(s, ot, s1, 0)
		}
		ot = dextratype(s, ot, t, 0)

	// ../../../../runtime/type.go:/chanType
	case TCHAN:
		s1 := dtypesym(t.Type)
		ot = dcommontype(s, ot, t)
		ot = dsymptr(s, ot, s1, 0)
		ot = duintptr(s, ot, uint64(t.Chan)) // channel direction
		ot = dextratype(s, ot, t, 0)

	case TFUNC:
		// Emit descriptors for all receiver/param/result types first.
		for t1, it := IterFields(t.Recvs()); t1 != nil; t1 = it.Next() {
			dtypesym(t1.Type)
		}
		isddd := false
		for t1, it := IterFields(t.Params()); t1 != nil; t1 = it.Next() {
			isddd = t1.Isddd // true iff the last parameter is variadic
			dtypesym(t1.Type)
		}
		for t1, it := IterFields(t.Results()); t1 != nil; t1 = it.Next() {
			dtypesym(t1.Type)
		}

		ot = dcommontype(s, ot, t)
		inCount := t.Thistuple + t.Intuple
		outCount := t.Outtuple
		if isddd {
			// The high bit of outCount flags a variadic function.
			outCount |= 1 << 15
		}
		ot = duint16(s, ot, uint16(inCount))
		ot = duint16(s, ot, uint16(outCount))
		if Widthptr == 8 {
			ot += 4 // align for *rtype
		}

		dataAdd := (inCount + t.Outtuple) * Widthptr
		ot = dextratype(s, ot, t, dataAdd)

		// Array of rtype pointers follows funcType.
		for t1, it := IterFields(t.Recvs()); t1 != nil; t1 = it.Next() {
			ot = dsymptr(s, ot, dtypesym(t1.Type), 0)
		}
		for t1, it := IterFields(t.Params()); t1 != nil; t1 = it.Next() {
			ot = dsymptr(s, ot, dtypesym(t1.Type), 0)
		}
		for t1, it := IterFields(t.Results()); t1 != nil; t1 = it.Next() {
			ot = dsymptr(s, ot, dtypesym(t1.Type), 0)
		}

	case TINTER:
		m := imethods(t)
		n := len(m)
		for _, a := range m {
			dtypesym(a.type_)
		}

		// ../../../../runtime/type.go:/interfaceType
		ot = dcommontype(s, ot, t)

		// Method slice header points just past the fixed part of
		// the descriptor (offset computed from the current layout).
		ot = dsymptr(s, ot, s, ot+Widthptr+2*Widthint+uncommonSize(t))
		ot = duintxx(s, ot, uint64(n), Widthint)
		ot = duintxx(s, ot, uint64(n), Widthint)
		dataAdd := imethodSize() * n
		ot = dextratype(s, ot, t, dataAdd)

		for _, a := range m {
			// ../../../../runtime/type.go:/imethod
			ot = dgostringptr(s, ot, a.name)
			ot = dgopkgpath(s, ot, a.pkg)
			ot = dsymptr(s, ot, dtypesym(a.type_), 0)
		}

	// ../../../../runtime/type.go:/mapType
	case TMAP:
		s1 := dtypesym(t.Key())
		s2 := dtypesym(t.Type)
		s3 := dtypesym(mapbucket(t))
		s4 := dtypesym(hmap(t))
		ot = dcommontype(s, ot, t)
		ot = dsymptr(s, ot, s1, 0)
		ot = dsymptr(s, ot, s2, 0)
		ot = dsymptr(s, ot, s3, 0)
		ot = dsymptr(s, ot, s4, 0)

		// Oversized keys/values are stored indirectly (as pointers)
		// in the map buckets; record size and indirection flag.
		if t.Key().Width > MAXKEYSIZE {
			ot = duint8(s, ot, uint8(Widthptr))
			ot = duint8(s, ot, 1) // indirect
		} else {
			ot = duint8(s, ot, uint8(t.Key().Width))
			ot = duint8(s, ot, 0) // not indirect
		}

		if t.Type.Width > MAXVALSIZE {
			ot = duint8(s, ot, uint8(Widthptr))
			ot = duint8(s, ot, 1) // indirect
		} else {
			ot = duint8(s, ot, uint8(t.Type.Width))
			ot = duint8(s, ot, 0) // not indirect
		}

		ot = duint16(s, ot, uint16(mapbucket(t).Width))
		ot = duint8(s, ot, uint8(obj.Bool2int(isreflexive(t.Key()))))
		ot = duint8(s, ot, uint8(obj.Bool2int(needkeyupdate(t.Key()))))
		ot = dextratype(s, ot, t, 0)

	case TPTR32, TPTR64:
		if t.Type.Etype == TANY {
			// ../../../../runtime/type.go:/UnsafePointerType
			ot = dcommontype(s, ot, t)
			ot = dextratype(s, ot, t, 0)

			break
		}

		// ../../../../runtime/type.go:/ptrType
		s1 := dtypesym(t.Type)

		ot = dcommontype(s, ot, t)
		ot = dsymptr(s, ot, s1, 0)
		ot = dextratype(s, ot, t, 0)

	// ../../../../runtime/type.go:/structType
	// for security, only the exported fields.
	case TSTRUCT:
		n := 0

		for t1, it := IterFields(t); t1 != nil; t1 = it.Next() {
			dtypesym(t1.Type)
			n++
		}

		ot = dcommontype(s, ot, t)
		// Field slice header points just past the fixed part of
		// the descriptor, same scheme as the TINTER case.
		ot = dsymptr(s, ot, s, ot+Widthptr+2*Widthint+uncommonSize(t))
		ot = duintxx(s, ot, uint64(n), Widthint)
		ot = duintxx(s, ot, uint64(n), Widthint)

		dataAdd := n * structfieldSize()
		ot = dextratype(s, ot, t, dataAdd)

		for t1, it := IterFields(t); t1 != nil; t1 = it.Next() {
			// ../../../../runtime/type.go:/structField
			if t1.Sym != nil && t1.Embedded == 0 {
				// Named, non-embedded field: record its name, and
				// its package path only when unexported.
				ot = dgostringptr(s, ot, t1.Sym.Name)
				if exportname(t1.Sym.Name) {
					ot = dgostringptr(s, ot, "")
				} else {
					ot = dgopkgpath(s, ot, t1.Sym.Pkg)
				}
			} else {
				// Embedded (anonymous) field: empty name; package
				// path is needed when the embedded type itself
				// would be inaccessible by name alone.
				ot = dgostringptr(s, ot, "")
				if t1.Type.Sym != nil && (t1.Type.Sym.Pkg == builtinpkg || !exportname(t1.Type.Sym.Name)) {
					ot = dgopkgpath(s, ot, localpkg)
				} else {
					ot = dgostringptr(s, ot, "")
				}
			}

			ot = dsymptr(s, ot, dtypesym(t1.Type), 0)
			ot = dgostrlitptr(s, ot, t1.Note) // struct tag, if any
			ot = duintptr(s, ot, uint64(t1.Width)) // field offset
		}
	}

	ot = dextratypeData(s, ot, t)
	ggloblsym(s, int32(ot), int16(dupok|obj.RODATA))

	// generate typelink.foo pointing at s = type.foo.
	// The linker will leave a table of all the typelinks for
	// types in the binary, so reflect can find them.
	// We only need the link for unnamed composites that
	// we want be able to find.
	if t.Sym == nil {
		switch t.Etype {
		case TPTR32, TPTR64, TARRAY, TCHAN, TFUNC, TMAP:
			slink := typelinksym(t)
			dsymptr(slink, 0, s, 0)
			ggloblsym(slink, int32(Widthptr), int16(dupok|obj.RODATA))
		}
	}

	return s
}
func haspointers(t *Type) bool { if t.Haspointers != 0 { return t.Haspointers-1 != 0 } var ret bool switch t.Etype { case TINT, TUINT, TINT8, TUINT8, TINT16, TUINT16, TINT32, TUINT32, TINT64, TUINT64, TUINTPTR, TFLOAT32, TFLOAT64, TCOMPLEX64, TCOMPLEX128, TBOOL: ret = false case TARRAY: if t.Bound < 0 { // slice ret = true break } if t.Bound == 0 { // empty array ret = false break } ret = haspointers(t.Type) case TSTRUCT: ret = false for t1, it := IterFields(t); t1 != nil; t1 = it.Next() { if haspointers(t1.Type) { ret = true break } } case TSTRING, TPTR32, TPTR64, TUNSAFEPTR, TINTER, TCHAN, TMAP, TFUNC: fallthrough default: ret = true } t.Haspointers = 1 + uint8(obj.Bool2int(ret)) return ret }
func gdata(nam *Node, nr *Node, wid int) { if nam.Op != ONAME { Fatalf("gdata nam op %v", nam.Op) } if nam.Sym == nil { Fatalf("gdata nil nam sym") } s := Linksym(nam.Sym) switch nr.Op { case OLITERAL: switch u := nr.Val().U.(type) { case bool: i := int64(obj.Bool2int(u)) s.WriteInt(Ctxt, nam.Xoffset, wid, i) case *Mpint: s.WriteInt(Ctxt, nam.Xoffset, wid, u.Int64()) case *Mpflt: f := u.Float64() switch nam.Type.Etype { case TFLOAT32: s.WriteFloat32(Ctxt, nam.Xoffset, float32(f)) case TFLOAT64: s.WriteFloat64(Ctxt, nam.Xoffset, f) } case *Mpcplx: r := u.Real.Float64() i := u.Imag.Float64() switch nam.Type.Etype { case TCOMPLEX64: s.WriteFloat32(Ctxt, nam.Xoffset, float32(r)) s.WriteFloat32(Ctxt, nam.Xoffset+4, float32(i)) case TCOMPLEX128: s.WriteFloat64(Ctxt, nam.Xoffset, r) s.WriteFloat64(Ctxt, nam.Xoffset+8, i) } case string: symdata := stringsym(u) s.WriteAddr(Ctxt, nam.Xoffset, Widthptr, symdata, 0) s.WriteInt(Ctxt, nam.Xoffset+int64(Widthptr), Widthint, int64(len(u))) default: Fatalf("gdata unhandled OLITERAL %v", nr) } case OADDR: if nr.Left.Op != ONAME { Fatalf("gdata ADDR left op %v", nr.Left.Op) } to := nr.Left s.WriteAddr(Ctxt, nam.Xoffset, wid, Linksym(to.Sym), to.Xoffset) case ONAME: if nr.Class != PFUNC { Fatalf("gdata NAME not PFUNC %d", nr.Class) } s.WriteAddr(Ctxt, nam.Xoffset, wid, Linksym(funcsym(nr.Sym)), nr.Xoffset) default: Fatalf("gdata unhandled op %v %v\n", nr, nr.Op) } }