// gobEncodeOpFor returns the op for a type that is known to implement
// GobEncoder.
func (enc *Encoder) gobEncodeOpFor(ut *userTypeInfo) (*encOp, int) {
	rt := ut.user
	if ut.encIndir == -1 {
		rt = reflect.PtrTo(rt)
	} else if ut.encIndir > 0 {
		for i := int8(0); i < ut.encIndir; i++ {
			rt = rt.Elem()
		}
	}
	var op encOp
	op = func(i *encInstr, state *encoderState, p unsafe.Pointer) {
		var v reflect.Value
		if ut.encIndir == -1 {
			// Need to climb up one level to turn value into pointer.
			v = reflect.NewAt(rt, unsafe.Pointer(&p)).Elem()
		} else {
			v = reflect.NewAt(rt, p).Elem()
		}
		if !state.sendZero && isZero(v) {
			return
		}
		state.update(i)
		state.enc.encodeGobEncoder(state.b, ut, v)
	}
	return &op, int(ut.encIndir) // encIndir: op will get called with p == address of receiver.
}
// gobDecodeOpFor returns the op for a type that is known to implement
// GobDecoder.
func (dec *Decoder) gobDecodeOpFor(ut *userTypeInfo) (*decOp, int) {
	rcvrType := ut.user
	if ut.decIndir == -1 {
		rcvrType = reflect.PtrTo(rcvrType)
	} else if ut.decIndir > 0 {
		for i := int8(0); i < ut.decIndir; i++ {
			rcvrType = rcvrType.Elem()
		}
	}
	var op decOp
	op = func(i *decInstr, state *decoderState, p unsafe.Pointer) {
		// Caller has gotten us to within one indirection of our value.
		if i.indir > 0 {
			if *(*unsafe.Pointer)(p) == nil {
				*(*unsafe.Pointer)(p) = unsafe.Pointer(reflect.New(ut.base).Pointer())
			}
		}
		// Now p is a pointer to the base type. Do we need to climb out to
		// get to the receiver type?
		var v reflect.Value
		if ut.decIndir == -1 {
			v = reflect.NewAt(rcvrType, unsafe.Pointer(&p)).Elem()
		} else {
			v = reflect.NewAt(rcvrType, p).Elem()
		}
		state.dec.decodeGobDecoder(ut, state, v)
	}
	return &op, int(ut.indir)
}
func (func_data *FuncData) Call(args unsafe.Pointer, ret unsafe.Pointer) {
	numIn := func_data.fn.Type().NumIn()
	var goArgs []*uint32
	get_c_slice(args, numIn, unsafe.Pointer(&goArgs))
	getArg := func(index int) reflect.Value {
		return reflect.NewAt(func_data.fn_type.In(index), unsafe.Pointer(goArgs[index])).Elem()
	}
	funArgs := make([]reflect.Value, numIn)
	for i := 0; i < numIn; i++ {
		funArgs[i] = getArg(i)
	}
	rets := func_data.fn.Call(funArgs)
	numOut := func_data.fn_type.NumOut()
	if numOut == 1 {
		reflect.NewAt(func_data.fn_type.Out(0), ret).Elem().Set(rets[0])
	}
}
func valRepr(s *gosym.Sym, typ reflect.Type, values []string, _html bool) (r string) {
	val, _ := strconv.ParseUint(values[0], 0, 64)
	var val2 uint64
	if len(values) > 1 {
		val2, _ = strconv.ParseUint(values[1], 0, 64)
	}
	// If there's a panic while prettifying the value, just
	// assume it's a pointer. It's better than
	// omitting the error page.
	defer func() {
		if recover() != nil {
			r = pointerRepr(nil, val, false)
		}
	}()
	switch types.Kind(typ.Kind()) {
	case types.Bool:
		if val == 0 {
			return "= false"
		}
		return "= true"
	case types.Int:
		return "= " + strconv.FormatInt(int64(val), 10)
	case types.Uint:
		return "= " + strconv.FormatUint(val, 10)
	case types.Float:
		if typ.Kind() == reflect.Float32 {
			return "= " + strconv.FormatFloat(float64(math.Float32frombits(uint32(val))), 'g', -1, 32)
		}
		return "= " + strconv.FormatFloat(math.Float64frombits(uint64(val)), 'g', -1, 64)
	case types.Slice:
		return sliceRepr(val, val2, s)
	case types.String:
		v := stringRepr(val, val2)
		if _html {
			v = html.Escape(v)
		}
		return v
	case types.Interface:
		if typ.NumMethod() == 0 {
			return emptyInterfaceRepr(val, val2)
		}
		idata := [2]uintptr{uintptr(val), uintptr(val2)}
		v := reflect.NewAt(typ, unsafe.Pointer(&idata[0])).Elem()
		return descRepr(val, &v, _html)
	case types.Func:
		fn := reflect.NewAt(typ, unsafe.Pointer(&val)).Elem()
		f := runtime.FuncForPC(fn.Pointer())
		if f != nil {
			return "= " + f.Name()
		}
	}
	return pointerRepr(typ, val, _html)
}
func (c *RedisCacher) deserialize(byt []byte) (ptr interface{}, err error) {
	b := bytes.NewBuffer(byt)
	decoder := gob.NewDecoder(b)
	var p interface{}
	err = decoder.Decode(&p)
	if err != nil {
		c.logErrf("decode failed: %v", err)
		return
	}
	v := reflect.ValueOf(p)
	c.logDebugf("deserialize type:%v", v.Type())
	if v.Kind() == reflect.Struct {
		var pp interface{} = &p
		datas := reflect.ValueOf(pp).Elem().InterfaceData()
		sp := reflect.NewAt(v.Type(), unsafe.Pointer(datas[1])).Interface()
		ptr = sp
		vv := reflect.ValueOf(ptr)
		c.logDebugf("deserialize convert ptr type:%v | CanAddr:%t", vv.Type(), vv.CanAddr())
	} else {
		ptr = p
	}
	return
}
//export goMInit
func goMInit(db, pClientData unsafe.Pointer, argc int, argv **C.char, pzErr **C.char, isCreate int) unsafe.Pointer {
	m := (*sqliteModule)(pClientData)
	if m.c.db != (*C.sqlite3)(db) {
		*pzErr = mPrintf("%s", "Inconsistent db handles")
		return nil
	}
	args := make([]string, argc)
	var A []*C.char
	slice := reflect.SliceHeader{Data: uintptr(unsafe.Pointer(argv)), Len: argc, Cap: argc}
	a := reflect.NewAt(reflect.TypeOf(A), unsafe.Pointer(&slice)).Elem().Interface()
	for i, s := range a.([]*C.char) {
		args[i] = C.GoString(s)
	}
	var vTab VTab
	var err error
	if isCreate == 1 {
		vTab, err = m.module.Create(m.c, args)
	} else {
		vTab, err = m.module.Connect(m.c, args)
	}
	if err != nil {
		*pzErr = mPrintf("%s", err.Error())
		return nil
	}
	vt := &sqliteVTab{m, vTab, nil}
	// prevents 'vt' from being gced
	if m.vts == nil {
		m.vts = make(map[*sqliteVTab]bool)
	}
	m.vts[vt] = true
	*pzErr = nil
	return unsafe.Pointer(vt)
}
func (m *GslMultiset) Slice_() interface{} {
	baseType := gogsl.GOGSL_SIZE_T_TYPE
	sliceType := reflect.SliceOf(baseType)
	size := K(m)
	hdr := &reflect.SliceHeader{
		Len:  size,
		Cap:  size,
		Data: uintptr(C.get_multiset_data((*C.gsl_multiset)(unsafe.Pointer(m.Ptr())))),
	}
	return reflect.NewAt(sliceType, unsafe.Pointer(hdr)).Elem().Interface()
}
//export closureMarshal
func closureMarshal(closure *C.GClosure, ret *C.GValue, nParams C.guint, params *C.GValue, hint, data C.gpointer) {
	// callback value
	f := *((*interface{})(unsafe.Pointer(data)))
	fValue := reflect.ValueOf(f)
	fType := fValue.Type()
	if int(nParams) != fType.NumIn() {
		log.Fatal("number of parameters and arguments mismatch")
	}

	// convert GValue to reflect.Value
	var paramSlice []C.GValue
	h := (*reflect.SliceHeader)(unsafe.Pointer(&paramSlice))
	h.Len = int(nParams)
	h.Cap = h.Len
	h.Data = uintptr(unsafe.Pointer(params))

	var arguments []reflect.Value
	for i, gv := range paramSlice {
		goValue := fromGValue(&gv)
		var arg reflect.Value
		switch fType.In(i).Kind() {
		case reflect.Ptr:
			p := goValue.(unsafe.Pointer)
			arg = reflect.NewAt(fType.In(i), unsafe.Pointer(&p)).Elem()
		default:
			panic("FIXME") // TODO
		}
		arguments = append(arguments, arg)
	}

	// call
	fValue.Call(arguments[:fType.NumIn()])

	// TODO set return value
}
func sliceEncoder(typ reflect.Type) (typeEncoder, error) {
	switch typ.Elem().Kind() {
	case reflect.Int8, reflect.Uint8, reflect.Int16, reflect.Uint16,
		reflect.Int32, reflect.Uint32, reflect.Int64, reflect.Uint64:
		// Take advantage of the fast path in Write
		return func(enc *encoder, p unsafe.Pointer) error {
			v := reflect.NewAt(typ, p).Elem()
			return Write(enc, enc.order, v.Interface())
		}, nil
	}
	etyp := typ.Elem()
	eenc, err := makeEncoder(etyp)
	if err != nil {
		return nil, err
	}
	s := etyp.Size()
	return func(enc *encoder, p unsafe.Pointer) error {
		h := (*reflect.SliceHeader)(p)
		ep := unsafe.Pointer(h.Data)
		for ii := 0; ii < h.Len; ii++ {
			if err := eenc(enc, ep); err != nil {
				return err
			}
			// Advance to the next element; this must happen inside the loop,
			// otherwise every iteration would encode the first element
			// (see the matching sliceDecoder below).
			ep = unsafe.Pointer(uintptr(ep) + s)
		}
		return nil
	}, nil
}
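// The fast path above hinges on the basic reflect.NewAt round trip: given a
// raw unsafe.Pointer and the value's type, it returns an addressable
// reflect.Value for the data already stored at that address. A minimal,
// self-contained sketch of that round trip (independent of the encoder
// package above; all names here are illustrative):
package main

import (
	"fmt"
	"reflect"
	"unsafe"
)

func main() {
	xs := []int32{1, 2, 3}

	// Pretend all we have is a raw pointer to the slice and its type.
	p := unsafe.Pointer(&xs)
	t := reflect.TypeOf(xs)

	// reflect.NewAt(t, p) yields a *[]int32 pointing at xs; Elem() is the slice itself.
	v := reflect.NewAt(t, p).Elem()
	fmt.Println(v.Interface()) // [1 2 3]

	// The value is addressable, so it can also be written through.
	v.Index(0).SetInt(42)
	fmt.Println(xs) // [42 2 3]
}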
func (g *Group) scanSubGroupHandler(realval reflect.Value, sfield *reflect.StructField) (bool, error) {
	mtag := newMultiTag(string(sfield.Tag))

	if err := mtag.Parse(); err != nil {
		return true, err
	}

	subgroup := mtag.Get("group")

	if len(subgroup) != 0 {
		ptrval := reflect.NewAt(realval.Type(), unsafe.Pointer(realval.UnsafeAddr()))
		description := mtag.Get("description")

		group, err := g.AddGroup(subgroup, description, ptrval.Interface())
		if err != nil {
			return true, err
		}

		group.Namespace = mtag.Get("namespace")
		group.Hidden = mtag.Get("hidden") != ""

		return true, nil
	}

	return false, nil
}
func (c *Command) scanSubCommandHandler(parentg *Group) scanHandler {
	f := func(realval reflect.Value, sfield *reflect.StructField) (bool, error) {
		mtag := newMultiTag(string(sfield.Tag))

		if err := mtag.Parse(); err != nil {
			return true, err
		}

		subcommand := mtag.Get("command")

		if len(subcommand) != 0 {
			ptrval := reflect.NewAt(realval.Type(), unsafe.Pointer(realval.UnsafeAddr()))

			shortDescription := mtag.Get("description")
			longDescription := mtag.Get("long-description")
			subcommandsOptional := mtag.Get("subcommands-optional")

			subc, err := c.AddCommand(subcommand, shortDescription, longDescription, ptrval.Interface())
			if err != nil {
				return true, err
			}

			if len(subcommandsOptional) > 0 {
				subc.SubcommandsOptional = true
			}

			return true, nil
		}

		return parentg.scanSubGroupHandler(realval, sfield)
	}

	return f
}
// Decode an embedded message.
func (o *Buffer) dec_struct_message(p *Properties, base uintptr) (err error) {
	raw, e := o.DecodeRawBytes(false)
	if e != nil {
		return e
	}

	ptr := (**struct{})(unsafe.Pointer(base + p.offset))
	typ := p.stype.Elem()

	bas := reflect.New(typ).Pointer()
	structp := unsafe.Pointer(bas)
	*ptr = (*struct{})(structp)

	// If the object can unmarshal itself, let it.
	if p.isMarshaler {
		iv := reflect.NewAt(p.stype.Elem(), structp).Interface()
		return iv.(Unmarshaler).Unmarshal(raw)
	}

	obuf := o.buf
	oi := o.index
	o.buf = raw
	o.index = 0

	err = o.unmarshalType(p.stype, p.sprop, false, bas)

	o.buf = obuf
	o.index = oi

	return err
}
//export gslNtupleValueFunctionCaller
func gslNtupleValueFunctionCaller(x uintptr, cFunParamPtr uintptr) float64 {
	ntsf := (*GslNtupleValueFunction)(unsafe.Pointer(cFunParamPtr))
	hdr := &reflect.SliceHeader{Data: x, Len: ntsf.ntuple.refLen, Cap: ntsf.ntuple.refLen}
	sliceType := reflect.SliceOf(ntsf.ntuple.baseType)
	slice := reflect.NewAt(sliceType, unsafe.Pointer(hdr)).Elem().Interface()
	return ntsf.Function(slice, ntsf.Params)
}
//export gslSimanCopyConstructorCaller
func gslSimanCopyConstructorCaller(xptr uintptr) uintptr {
	//fmt.Println("CC")
	xarg := (*GslSimanArgument)(unsafe.Pointer(xptr))
	if xarg.impl.ctorFn != nil {
		nptr := xarg.impl.ctorFn(xarg.x)
		rv := &GslSimanArgument{impl: xarg.impl, x: nptr}
		rvptr := uintptr(unsafe.Pointer(rv))
		xarg.impl.holdRefs[rvptr] = rv
		//fmt.Printf("CC. RV x is %s inval=%f, outval=%f\n", reflect.TypeOf(rv.x).String(), *xarg.x.(*float64), *rv.x.(*float64))
		return rvptr
	} else {
		var rv *GslSimanArgument
		x := reflect.ValueOf(xarg.x)
		xType := x.Type()
		baseType := xType.Elem()
		if xType.Kind() == reflect.Ptr {
			nxptr := reflect.New(baseType)
			//C.memcpy(unsafe.Pointer(nxptr), unsafe.Pointer(x.Pointer()), C.size_t(baseType.Size()))
			nxptr.Elem().Set(x.Elem())
			rv = &GslSimanArgument{impl: xarg.impl, x: reflect.NewAt(baseType, unsafe.Pointer(nxptr.Pointer())).Interface()}
		} else {
			panic("fail")
			rv = &GslSimanArgument{impl: xarg.impl, x: xarg.x}
		}
		//fmt.Printf("CC RV x is %s inval=%f, outval=%f\n", reflect.TypeOf(rv.x).String(), *xarg.x.(*float64), *rv.x.(*float64))
		rvptr := uintptr(unsafe.Pointer(rv))
		xarg.impl.holdRefs[rvptr] = rv
		return rvptr
	}
}
// UnsafeReflectValue converts the passed reflect.Value into one that bypasses
// the typical safety restrictions preventing access to unaddressable and
// unexported data. It works by digging the raw pointer to the underlying
// value out of the protected value and generating a new unprotected (unsafe)
// reflect.Value to it.
//
// This allows us to check for implementations of the Stringer and error
// interfaces to be used for pretty printing ordinarily unaddressable and
// inaccessible values such as unexported struct fields.
func UnsafeReflectValue(v reflect.Value) (rv reflect.Value) {
	indirects := 1
	vt := v.Type()
	upv := unsafe.Pointer(uintptr(unsafe.Pointer(&v)) + offsetPtr)
	rvf := *(*uintptr)(unsafe.Pointer(uintptr(unsafe.Pointer(&v)) + offsetFlag))
	if rvf&flagIndir != 0 {
		vt = reflect.PtrTo(v.Type())
		indirects++
	} else if offsetScalar != 0 {
		// The value is in the scalar field when it's not one of the
		// reference types.
		switch vt.Kind() {
		case reflect.Uintptr:
		case reflect.Chan:
		case reflect.Func:
		case reflect.Map:
		case reflect.Ptr:
		case reflect.UnsafePointer:
		default:
			upv = unsafe.Pointer(uintptr(unsafe.Pointer(&v)) + offsetScalar)
		}
	}

	pv := reflect.NewAt(vt, upv)
	rv = pv
	for i := 0; i < indirects; i++ {
		rv = rv.Elem()
	}
	return rv
}
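// A hedged usage sketch for UnsafeReflectValue. It assumes this code lives in
// the same package as the function above and that the offsetPtr/offsetFlag/
// offsetScalar and flagIndir constants match the reflect.Value layout of the
// running Go toolchain. The `secret` type and dumpHidden function are purely
// illustrative, not part of the original package.
type secret struct {
	hidden string // unexported: Interface() normally panics on such a field
}

func dumpHidden() {
	s := secret{hidden: "not so secret"}

	f := reflect.ValueOf(s).Field(0)
	// f.Interface() would panic with "cannot return value obtained from
	// unexported field or method"; the unsafe copy does not.
	uf := UnsafeReflectValue(f)
	fmt.Println(uf.Interface()) // "not so secret"
}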
func arrayEncoder(typ reflect.Type) (typeEncoder, error) {
	al := typ.Len()
	etyp := typ.Elem()
	switch etyp.Kind() {
	case reflect.Int8, reflect.Uint8, reflect.Int16, reflect.Uint16,
		reflect.Int32, reflect.Uint32, reflect.Int64, reflect.Uint64:
		// Take advantage of the fast path in Write
		return func(enc *encoder, p unsafe.Pointer) error {
			v := reflect.NewAt(typ, p).Elem().Slice(0, al)
			return Write(enc, enc.order, v.Interface())
		}, nil
	}
	eenc, err := makeEncoder(etyp)
	if err != nil {
		return nil, err
	}
	s := etyp.Size()
	return func(enc *encoder, p unsafe.Pointer) error {
		for ii := 0; ii < al; ii++ {
			if err := eenc(enc, p); err != nil {
				return err
			}
			p = unsafe.Pointer(uintptr(p) + s)
		}
		return nil
	}, nil
}
func (p *GslPermutation) Slice_() interface{} {
	baseType := gogsl.GOGSL_SIZE_T_TYPE
	sliceType := reflect.SliceOf(baseType)
	size := p.Len()
	hdr := &reflect.SliceHeader{
		Len:  size,
		Cap:  size,
		Data: uintptr(C.get_permutation_data((*C.gsl_permutation)(unsafe.Pointer(p.Ptr())))),
	}
	return reflect.NewAt(sliceType, unsafe.Pointer(hdr)).Elem().Interface()
}
func sliceDecoder(typ reflect.Type) (typeDecoder, error) {
	etyp := typ.Elem()
	switch etyp.Kind() {
	case reflect.Int8, reflect.Uint8, reflect.Int16, reflect.Uint16,
		reflect.Int32, reflect.Uint32, reflect.Int64, reflect.Uint64:
		// Take advantage of the fast path in Read
		return func(dec *decoder, p unsafe.Pointer) error {
			v := reflect.NewAt(typ, p).Elem()
			return Read(dec, dec.order, v.Interface())
		}, nil
	}
	edec, err := makeDecoder(typ.Elem())
	if err != nil {
		return nil, err
	}
	s := etyp.Size()
	return func(dec *decoder, p unsafe.Pointer) error {
		h := (*reflect.SliceHeader)(p)
		ep := unsafe.Pointer(h.Data)
		for ii := 0; ii < h.Len; ii++ {
			if err := edec(dec, ep); err != nil {
				return err
			}
			ep = unsafe.Pointer(uintptr(ep) + s)
		}
		return nil
	}, nil
}
func structPointer_InterfaceRef(p structPointer, f field, t reflect.Type) interface{} {
	point := unsafe.Pointer(uintptr(p) + uintptr(f))
	r := reflect.NewAt(t, point)
	if r.Elem().IsNil() {
		return nil
	}
	return r.Elem().Interface()
}
func dispose(v uintptr, t r.Type) {
	if m, ok := t.MethodByName("Dispose"); ok { // && m.Func.Type().NumIn() == 2 {
		f := m.Func
		tv := r.NewAt(f.Type().In(1).Elem(), unsafe.Pointer(v))
		f.Call([]r.Value{r.New(t).Elem(), tv})
	}
}
/****************************************************************************************\
*                              More operations on sequences                             *
\****************************************************************************************/

func (self Seq) ForEach(t reflect.Type, callback func(interface{})) {
	c_seq := (*C.CvSeq)(&self)
	seqLen := int(c_seq.total)
	for i := 0; i < seqLen; i++ {
		c_obj := C.cvGetSeqElem(c_seq, C.int(i))
		obj := reflect.NewAt(t, unsafe.Pointer(c_obj))
		callback(obj.Interface())
	}
}
// CopyTo copies a RawStat KStat into a struct that you supply a
// pointer to. The size of the struct must exactly match the size of
// the RawStat's data.
//
// CopyTo imposes conditions on the struct that you are copying to:
// it must be composed entirely of primitive integer types with defined
// sizes (intN and uintN), or arrays and structs that ultimately only
// contain them. All fields should be exported.
//
// If you give CopyTo a bad argument, it generally panics.
//
// This API is provisional and may be changed or deleted.
func (k *KStat) CopyTo(ptr interface{}) error {
	if err := k.prep(); err != nil {
		return err
	}
	if k.Type != RawStat {
		return errors.New("KStat is not a RawStat")
	}

	// Validity checks: not nil value, not nil pointer value,
	// is a pointer to struct.
	if ptr == nil {
		panic("CopyTo given nil pointer")
	}
	vp := reflect.ValueOf(ptr)
	if vp.Kind() != reflect.Ptr {
		panic("CopyTo not given a pointer")
	}
	if vp.IsNil() {
		panic("CopyTo given nil pointer")
	}
	dst := vp.Elem()
	if dst.Kind() != reflect.Struct {
		panic("CopyTo: not pointer to struct")
	}

	// Is the struct safe to copy into, which means primitive types
	// and structs/arrays of primitive types?
	if !safeThing(dst.Type()) {
		panic("CopyTo: not a safe structure, contains unsupported fields")
	}
	if !dst.CanSet() {
		panic("CopyTo: struct cannot be set for some reason")
	}

	// Verify that the size of the target struct matches the size
	// of the raw KStat.
	if uintptr(k.ksp.ks_data_size) != dst.Type().Size() {
		return errors.New("struct size does not match KStat size")
	}

	// The following is exactly the magic that we performed for
	// specific types earlier. We take k.ksp.ks_data and turn
	// it into a typed pointer to the target object's type:
	//
	//	src := ((*<type>)(k.ksp.ks_data))
	src := reflect.NewAt(dst.Type(), unsafe.Pointer(k.ksp.ks_data))

	// We now dereference that into the destination to copy the
	// data:
	//
	//	dst = *src
	dst.Set(reflect.Indirect(src))

	return nil
}
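// A hedged usage sketch for CopyTo, assuming it sits alongside the KStat type
// above. The struct layout here is hypothetical: a real caller must match the
// exact size and field layout of the particular RawStat being read.
type twoCounters struct {
	Reads  uint64
	Writes uint64
}

func readCounters(k *KStat) (twoCounters, error) {
	var c twoCounters
	// Succeeds only if k is a RawStat whose ks_data_size is exactly
	// unsafe.Sizeof(c) bytes (16 here); otherwise CopyTo returns an error
	// or panics on a malformed destination.
	err := k.CopyTo(&c)
	return c, err
}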
// Struct attempts to unmarshal the values in 'from' into the fields
// in the structure pointed to by 'to'. The field names are used as
// map keys. Optional format strings can be used to morph the field
// names into keys, e.g. "--%s" will map field "foo" to key "--foo".
// If more than one format is supplied, these will be tried in order
// until the first matching key is found.
// When coercing from string to any integer type, if the string ends
// with B|K|M|G|T (case-insensitive) then these will be interpreted
// as multipliers of 1, 1024, etc.
//
// Example:
//
//	type x struct {
//		intslice []int
//		boolval  bool
//		s        string
//	}
//
//	mymap := map[string]interface{}{
//		"--intslice": []string{"5", "12", "0.5k"},
//		"--boolval":  true,
//		"-s":         "hello",
//	}
//
//	var myx x
//
//	err := coerce.Struct(&myx, mymap, "--%s", "-%s")
//	fmt.Println(err, myx)
//	// <nil> {[5 12 512] true hello}
//
// Note: coercing unexported fields uses 'unsafe' pointers.
func Struct(to interface{}, from map[string]interface{}, formats ...string) error {
	// parse errors are accumulated into errstr
	errstr := ""

	// get target as reflect.Value and check kind:
	pt := reflect.ValueOf(to)
	vt := reflect.Indirect(pt)
	if vt.Kind() != reflect.Struct || pt.Kind() != reflect.Ptr {
		return fmt.Errorf("expected *struct for 'to', got %v", pt.Kind())
	}

	// iterate over struct fields
	for i := 0; i < vt.NumField(); i++ {
		// get field type and pointer to value
		f := vt.Type().Field(i)
		vf := vt.Field(i)
		if !vf.CanSet() {
			// use 'unsafe' workaround for unexported fields:
			if string(f.Name[0]) == strings.ToLower(string(f.Name[0])) {
				pu := unsafe.Pointer(vf.Addr().Pointer())
				vf = reflect.Indirect(reflect.NewAt(vf.Type(), pu))
			}
			if !vf.CanSet() {
				errstr += "field " + f.Name + " not settable\n"
				continue
			}
		}

		// look for field name in map keys
		v, err := findVal(f.Name, from, formats)
		if err != nil {
			continue
		}
		if v == nil {
			// nil value in map - leave the field alone
			continue
		}

		vv := reflect.ValueOf(v)
		err = unmarshall(vf, vv)
		if err != nil {
			errstr += err.Error() + "\n"
		}
	}

	if errstr != "" {
		return fmt.Errorf("%s", errstr[:len(errstr)-1]) // strips trailing newline
	}
	return nil
}
func appendBytes(intSize int, v reflect.Value, b []byte) reflect.Value {
	length := (len(b) * 8) / intSize
	sh := &reflect.SliceHeader{}
	sh.Cap = length
	sh.Len = length
	sh.Data = uintptr(unsafe.Pointer(&b[0]))
	nslice := reflect.NewAt(v.Type(), unsafe.Pointer(sh)).Elem()
	return reflect.AppendSlice(v, nslice)
}
func pointerRepr(typ reflect.Type, val uint64, _html bool) string {
	if val == 0 {
		return "= nil"
	}
	if typ != nil && _html {
		v := reflect.NewAt(typ, unsafe.Pointer(&val)).Elem()
		if typ.Kind() == reflect.Map || (typ.Kind() == reflect.Ptr && typ.Elem().Kind() == reflect.Struct) {
			return descRepr(val, &v, _html)
		}
	}
	return descRepr(val, nil, _html)
}
func sliceRepr(val1 uint64, val2 uint64, s *gosym.Sym) string {
	if val1 == 0 {
		return "= nil"
	}
	sh := &reflect.SliceHeader{
		Data: uintptr(val1),
		Len:  int(val2),
		Cap:  int(val2),
	}
	val := reflect.NewAt(reflectType(s.GoType), unsafe.Pointer(sh))
	return fmt.Sprintf("= %v", val.Elem().Interface())
}
func ByteSlice(i interface{}) []byte {
	/*
		A byte slice is by definition its own buffer. Any type which
		implements the Buffer interface will generate its result using
		that method.
	*/
	switch b := i.(type) {
	case []byte:
		return b
	case MemoryBlock:
		return b.ByteSlice()
	case nil:
		return []byte{}
	}

	/*
		For nil values we return a buffer to a zero-capacity byte slice.

		There are certain types which cannot be cast as a buffer and
		instead raise a panic.

		In the rare case of the interface itself containing another
		interface we recursively query.

		When given a pointer we use its target address and the size of
		the type it points to in order to construct a SliceHeader.

		For SliceValues we can do a simple conversion of the SliceHeader
		to a byte slice.

		For StringValues we treat them as a fixed-capacity byte slice.
	*/
	var header *reflect.SliceHeader
	switch value := reflect.ValueOf(i); value.Kind() {
	case reflect.Slice:
		h, s, _ := SliceHeader(i)
		header = Scale(h, s, 1)
	case reflect.String:
		s := value.String()
		stringheader := *(*reflect.StringHeader)(unsafe.Pointer(&s))
		header = &reflect.SliceHeader{stringheader.Data, stringheader.Len, stringheader.Len}
	case reflect.Interface, reflect.Ptr:
		header = valueHeader(value.Elem())
	default:
		// For every other type the value gives us an address for the data.
		// Given this and the size of the underlying allocated memory we can
		// then create a []byte slice header and return a valid slice.
		header = valueHeader(value)
	}
	bs := reflect.NewAt(_BYTE_SLICE, unsafe.Pointer(header))
	if !bs.Elem().IsValid() {
		return []byte{}
	}
	return bs.Elem().Interface().([]byte)
}
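// The core trick ByteSlice relies on can be shown in isolation: describe an
// existing allocation with a reflect.SliceHeader, then use reflect.NewAt to
// reinterpret that header as a []byte. A minimal, self-contained sketch
// (independent of the package above; variable names are illustrative):
package main

import (
	"fmt"
	"reflect"
	"unsafe"
)

func main() {
	x := uint32(0x11223344)

	// Describe x's memory as a 4-byte slice.
	hdr := reflect.SliceHeader{
		Data: uintptr(unsafe.Pointer(&x)),
		Len:  int(unsafe.Sizeof(x)),
		Cap:  int(unsafe.Sizeof(x)),
	}

	// reflect.NewAt treats &hdr as a *[]byte; Elem().Interface() yields the slice.
	bs := reflect.NewAt(reflect.TypeOf([]byte(nil)), unsafe.Pointer(&hdr)).Elem().Interface().([]byte)

	fmt.Printf("% x\n", bs) // byte view of x's memory; order depends on endianness
}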
func appendStructPointer(base structPointer, f field, typ reflect.Type) structPointer {
	size := typ.Elem().Size()

	oldHeader := structPointer_GetSliceHeader(base, f)
	oldSlice := reflect.NewAt(typ, unsafe.Pointer(oldHeader)).Elem()
	newLen := oldHeader.Len + 1
	newSlice := reflect.MakeSlice(typ, newLen, newLen)
	reflect.Copy(newSlice, oldSlice)
	bas := toStructPointer(newSlice)
	oldHeader.Data = uintptr(bas)
	oldHeader.Len = newLen
	oldHeader.Cap = newLen

	return structPointer(unsafe.Pointer(uintptr(unsafe.Pointer(bas)) + uintptr(uintptr(newLen-1)*size)))
}
//export goClassObjGet
func goClassObjGet(obj unsafe.Pointer, idx int) unsafe.Pointer {
	field := getField(idx)
	item := unsafe.Pointer(uintptr(obj) + field.Offset)

	var o Object
	if field.Type == otyp {
		o = *(*Object)(item)
	} else {
		o = reflect.NewAt(field.Type, item).Elem().Interface().(Object)
	}

	o.Incref()
	return unsafe.Pointer(c(o))
}
func (d sqlite3) setModelValue(value reflect.Value, field reflect.Value) error {
	switch field.Type().Kind() {
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		field.SetInt(value.Elem().Int())
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
		// reading uint from int value causes panic
		switch value.Elem().Kind() {
		case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
			field.SetUint(uint64(value.Elem().Int()))
		default:
			field.SetUint(value.Elem().Uint())
		}
	case reflect.Bool:
		if value.Elem().Int() == 0 {
			field.SetBool(false)
		} else {
			field.SetBool(true)
		}
	case reflect.Float32, reflect.Float64:
		field.SetFloat(value.Elem().Float())
	case reflect.String:
		field.SetString(value.Elem().String())
	case reflect.Slice:
		if reflect.TypeOf(value.Interface()).Elem().Kind() == reflect.Uint8 {
			field.SetBytes(value.Elem().Bytes())
		}
	case reflect.Struct:
		if _, ok := field.Interface().(time.Time); ok {
			var t time.Time
			var err error
			switch value.Elem().Kind() {
			case reflect.String:
				t, err = time.Parse("2006-01-02 15:04:05", value.Elem().String())
				if err != nil {
					return err
				}
			case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
				t = time.Unix(value.Elem().Int(), 0)
			case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
				t = time.Unix(int64(value.Elem().Uint()), 0)
			}
			v := reflect.NewAt(reflect.TypeOf(time.Time{}), unsafe.Pointer(&t))
			field.Set(v.Elem())
		}
	}
	return nil
}