func realloc(ptr unsafe.Pointer, size int) (unsafe.Pointer, error) {
	p := C.realloc(ptr, C.size_t(size))
	if p == nil {
		return nil, AllocationFailed
	}
	return p, nil
}
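A minimal usage sketch of the wrapper above, assuming the file's cgo preamble includes <stdlib.h> and that AllocationFailed is a package-level error value; the initial C.malloc and the final C.free are illustrative, not part of the original code.

// Hypothetical illustration of growing a C-allocated buffer with the
// realloc wrapper above; everything outside that wrapper is an assumption.
func growExample() error {
	buf := C.malloc(16)        // start with a small C buffer
	p, err := realloc(buf, 64) // request a larger block; the data may move
	if err != nil {
		C.free(buf) // on failure C.realloc leaves the original block valid
		return err
	}
	defer C.free(p) // release the (possibly moved) block when done
	// ... use the 64-byte buffer via p ...
	return nil
}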
//export allocator
func allocator(ud unsafe.Pointer, ptr unsafe.Pointer, osize uintptr, nsize uintptr) (ret unsafe.Pointer) {
	ret = nil
	if nsize == 0 {
		C.free(ptr)
	} else {
		ret = C.realloc(ptr, C.size_t(nsize))
	}
	return
}
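The exported function above follows a Lua-style allocator contract: nsize == 0 frees the block, anything else reallocates, and a nil ptr behaves like malloc because C.realloc(NULL, n) does. A hedged sketch of that contract, exercised directly from Go; only allocator itself comes from the snippet above.

// Hypothetical walk through the allocator contract; sizes are made up.
func allocatorExample() {
	// nil ptr with a nonzero nsize acts like malloc.
	p := allocator(nil, nil, 0, 16)
	// Growing an existing block: pass the old size and the new size.
	p = allocator(nil, p, 16, 64)
	// nsize == 0 frees the block; the return value is nil.
	p = allocator(nil, p, 64, 0)
	_ = p
}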
func (m Malloc) Realloc(p *unsafe.Pointer, size int) bool {
	if size == 0 {
		m.Dealloc(*p)
		*p = nil
		return true
	}
	*p = unsafe.Pointer(C.realloc(*p, C.size_t(size)))
	return *p != nil
}
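A short sketch of how the Realloc method above might be called. The Malloc value, its Dealloc method, and the initial C.malloc shown here are assumptions inferred from the snippet, not confirmed API.

// Hypothetical caller of Malloc.Realloc.
func reallocExample(m Malloc) bool {
	p := unsafe.Pointer(C.malloc(32)) // assumed initial allocation
	if !m.Realloc(&p, 128) {
		// On failure the method has already stored nil into p, so the
		// original block is no longer reachable through it.
		return false
	}
	m.Dealloc(p) // Dealloc is assumed from the size == 0 branch above
	return true
}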
func test6506() {
	// nothing to run, just make sure this compiles
	var x C.size_t

	C.calloc(x, x)
	C.malloc(x)
	C.realloc(nil, x)
	C.memcpy(nil, nil, x)
	C.memcmp(nil, nil, x)
	C.memmove(nil, nil, x)
	C.strncpy(nil, nil, x)
	C.strncmp(nil, nil, x)
	C.strncat(nil, nil, x)
	x = C.strxfrm(nil, nil, x)
	C.memchr(nil, 0, x)
	x = C.strcspn(nil, nil)
	x = C.strspn(nil, nil)
	C.memset(nil, 0, x)
	x = C.strlen(nil)
	_ = x
}
func doReadDocData(ddrC *C.lucy_DefaultDocReader, docID int32, doc interface{}) error {
	// Adapt for different types of "doc".
	var setField func(interface{}, string, interface{}) error
	var fields interface{}
	switch v := doc.(type) {
	case Doc:
		docC := (*C.lucy_Doc)(clownfish.Unwrap(v, "doc"))
		fieldsMap := fetchDocFields(docC)
		for field := range fieldsMap {
			delete(fieldsMap, field)
		}
		fields = fieldsMap
		setField = setMapField
	case map[string]interface{}:
		for field := range v {
			delete(v, field)
		}
		fields = v
		setField = setMapField
	default:
		// Get reflection value and type for the supplied struct.
		var hitValue reflect.Value
		if reflect.ValueOf(doc).Kind() == reflect.Ptr {
			temp := reflect.ValueOf(doc).Elem()
			if temp.Kind() == reflect.Struct {
				if temp.CanSet() {
					hitValue = temp
				}
			}
		}
		if hitValue == (reflect.Value{}) {
			mess := fmt.Sprintf("Arg not writeable struct pointer: %v",
				reflect.TypeOf(doc))
			return clownfish.NewErr(mess)
		}
		fields = hitValue
		setField = setStructField
	}

	ivars := C.lucy_DefDocReader_IVARS(ddrC)
	schema := ivars.schema
	datInstream := ivars.dat_in
	ixInstream := ivars.ix_in
	fieldNameCap := C.size_t(31)
	fieldName := (*C.char)(C.malloc(fieldNameCap + 1))
	// Free via a closure so the pointer value at return time is used; a plain
	// `defer C.free(unsafe.Pointer(fieldName))` would capture the original
	// allocation, which the realloc below may already have released.
	defer func() { C.free(unsafe.Pointer(fieldName)) }()

	// Get data file pointer from index, read number of fields.
	C.LUCY_InStream_Seek(ixInstream, C.int64_t(docID*8))
	start := C.LUCY_InStream_Read_U64(ixInstream)
	C.LUCY_InStream_Seek(datInstream, C.int64_t(start))
	numFields := uint32(C.LUCY_InStream_Read_C32(datInstream))

	// Decode stored data and build up the doc field by field.
	for i := uint32(0); i < numFields; i++ {
		// Read field name.
		fieldNameLen := C.size_t(C.LUCY_InStream_Read_C32(datInstream))
		if fieldNameLen > fieldNameCap {
			fieldNameCap = fieldNameLen
			fieldName = (*C.char)(C.realloc(unsafe.Pointer(fieldName), fieldNameCap+1))
		}
		C.LUCY_InStream_Read_Bytes(datInstream, fieldName, fieldNameLen)

		// Find the Field's FieldType.
		// TODO: Creating and destroying a new string each time is
		// inefficient.  The solution should be to add a private
		// Schema_Fetch_Type_Utf8 method which takes char* and size_t.
		fieldNameStr := C.cfish_Str_new_from_utf8(fieldName, fieldNameLen)
		fieldNameGo := C.GoStringN(fieldName, C.int(fieldNameLen))
		fieldType := C.LUCY_Schema_Fetch_Type(schema, fieldNameStr)
		C.cfish_dec_refcount(unsafe.Pointer(fieldNameStr))

		// Read the field value.
		switch C.LUCY_FType_Primitive_ID(fieldType) & C.lucy_FType_PRIMITIVE_ID_MASK {
		case C.lucy_FType_TEXT:
			valueLen := C.size_t(C.LUCY_InStream_Read_C32(datInstream))
			buf := (*C.char)(C.malloc(valueLen + 1))
			C.LUCY_InStream_Read_Bytes(datInstream, buf, valueLen)
			val := C.GoStringN(buf, C.int(valueLen))
			C.free(unsafe.Pointer(buf)) // GoStringN copies, so the C buffer can be released
			err := setField(fields, fieldNameGo, val)
			if err != nil {
				return err
			}
		case C.lucy_FType_BLOB:
			valueLen := C.size_t(C.LUCY_InStream_Read_C32(datInstream))
			buf := (*C.char)(C.malloc(valueLen))
			C.LUCY_InStream_Read_Bytes(datInstream, buf, valueLen)
			val := C.GoBytes(unsafe.Pointer(buf), C.int(valueLen))
			C.free(unsafe.Pointer(buf)) // GoBytes copies, so the C buffer can be released
			err := setField(fields, fieldNameGo, val)
			if err != nil {
				return err
			}
		case C.lucy_FType_FLOAT32:
			err := setField(fields, fieldNameGo,
				float32(C.LUCY_InStream_Read_F32(datInstream)))
			if err != nil {
				return err
			}
		case C.lucy_FType_FLOAT64:
			err := setField(fields, fieldNameGo,
				float64(C.LUCY_InStream_Read_F64(datInstream)))
			if err != nil {
				return err
			}
		case C.lucy_FType_INT32:
			err := setField(fields, fieldNameGo,
				int32(C.LUCY_InStream_Read_C32(datInstream)))
			if err != nil {
				return err
			}
		case C.lucy_FType_INT64:
			err := setField(fields, fieldNameGo,
				int64(C.LUCY_InStream_Read_C64(datInstream)))
			if err != nil {
				return err
			}
		default:
			return clownfish.NewErr(
				"Internal Lucy error: bad type id for field " + fieldNameGo)
		}
	}
	return nil
}
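doReadDocData reuses a single C buffer for every field name, growing it with realloc only when a longer name appears. Below is a standalone, hypothetical sketch of that grow-on-demand pattern with the Lucy InStream calls replaced by a caller-supplied read callback; none of these names exist in the original code.

// Simplified, hypothetical version of the buffer-reuse pattern above.
func readNames(lengths []C.size_t, read func(*C.char, C.size_t)) {
	bufCap := C.size_t(31)
	buf := (*C.char)(C.malloc(bufCap + 1))
	defer func() { C.free(unsafe.Pointer(buf)) }() // free whatever buf points to at return
	for _, n := range lengths {
		if n > bufCap {
			bufCap = n
			buf = (*C.char)(C.realloc(unsafe.Pointer(buf), bufCap+1))
		}
		read(buf, n) // fill the buffer, e.g. from an input stream
	}
}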
func (mb *memBuffer) resize(newSize C.size_t) {
	mb.ptr = C.realloc(mb.ptr, newSize)
	mb.size = newSize
}
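A brief sketch of how resize might be used, assuming memBuffer is a struct with ptr unsafe.Pointer and size C.size_t fields; that layout is inferred from the method above, not confirmed.

// Hypothetical caller of memBuffer.resize.
func memBufferExample() {
	mb := &memBuffer{ptr: C.malloc(64), size: 64}
	defer func() { C.free(mb.ptr) }() // C memory must be freed explicitly
	mb.resize(256)                    // grow in place or move, via C.realloc
	// ... write up to 256 bytes through mb.ptr ...
}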
//export GOLUCY_DefDocReader_Fetch_Doc
func GOLUCY_DefDocReader_Fetch_Doc(ddr *C.lucy_DefaultDocReader, docID C.int32_t) *C.lucy_HitDoc {
	ivars := C.lucy_DefDocReader_IVARS(ddr)
	schema := ivars.schema
	datInstream := ivars.dat_in
	ixInstream := ivars.ix_in
	fields := C.cfish_Hash_new(1)
	fieldNameCap := C.size_t(31)
	fieldName := (*C.char)(C.malloc(fieldNameCap + 1))

	// Get data file pointer from index, read number of fields.
	C.LUCY_InStream_Seek(ixInstream, C.int64_t(docID*8))
	start := C.LUCY_InStream_Read_U64(ixInstream)
	C.LUCY_InStream_Seek(datInstream, C.int64_t(start))
	numFields := uint32(C.LUCY_InStream_Read_C32(datInstream))

	// Decode stored data and build up the doc field by field.
	for i := uint32(0); i < numFields; i++ {
		// Read field name.
		fieldNameLen := C.size_t(C.LUCY_InStream_Read_C32(datInstream))
		if fieldNameLen > fieldNameCap {
			fieldNameCap = fieldNameLen
			fieldName = (*C.char)(C.realloc(unsafe.Pointer(fieldName), fieldNameCap+1))
		}
		C.LUCY_InStream_Read_Bytes(datInstream, fieldName, fieldNameLen)

		// Find the Field's FieldType.
		// TODO: Creating and destroying a new string each time is
		// inefficient.  The solution should be to add a private
		// Schema_Fetch_Type_Utf8 method which takes char* and size_t.
		fieldNameStr := C.cfish_Str_new_from_utf8(fieldName, fieldNameLen)
		fieldType := C.LUCY_Schema_Fetch_Type(schema, fieldNameStr)
		C.cfish_dec_refcount(unsafe.Pointer(fieldNameStr))

		// Read the field value.
		var value *C.cfish_Obj
		switch C.LUCY_FType_Primitive_ID(fieldType) & C.lucy_FType_PRIMITIVE_ID_MASK {
		case C.lucy_FType_TEXT:
			valueLen := C.size_t(C.LUCY_InStream_Read_C32(datInstream))
			buf := (*C.char)(C.malloc(valueLen + 1))
			C.LUCY_InStream_Read_Bytes(datInstream, buf, valueLen)
			C.null_terminate_string(buf, valueLen)
			value = (*C.cfish_Obj)(C.cfish_Str_new_steal_utf8(buf, valueLen))
		case C.lucy_FType_BLOB:
			valueLen := C.size_t(C.LUCY_InStream_Read_C32(datInstream))
			buf := (*C.char)(C.malloc(valueLen))
			C.LUCY_InStream_Read_Bytes(datInstream, buf, valueLen)
			value = (*C.cfish_Obj)(C.cfish_Blob_new_steal(buf, valueLen))
		case C.lucy_FType_FLOAT32:
			value = (*C.cfish_Obj)(C.cfish_Float_new(C.double(C.LUCY_InStream_Read_F32(datInstream))))
		case C.lucy_FType_FLOAT64:
			value = (*C.cfish_Obj)(C.cfish_Float_new(C.LUCY_InStream_Read_F64(datInstream)))
		case C.lucy_FType_INT32:
			value = (*C.cfish_Obj)(C.cfish_Int_new(C.int64_t(C.LUCY_InStream_Read_C32(datInstream))))
		case C.lucy_FType_INT64:
			value = (*C.cfish_Obj)(C.cfish_Int_new(C.int64_t(C.LUCY_InStream_Read_C64(datInstream))))
		default:
			value = nil
			panic(clownfish.NewErr("Internal Lucy error: bad type id for field " +
				C.GoStringN(fieldName, C.int(fieldNameLen))))
		}

		// Store the value.
		C.CFISH_Hash_Store_Utf8(fields, fieldName, fieldNameLen, value)
	}
	C.free(unsafe.Pointer(fieldName))

	retval := C.lucy_HitDoc_new(unsafe.Pointer(fields), docID, 0.0)
	C.cfish_dec_refcount(unsafe.Pointer(fields))
	return retval
}
func (cursor *Cursor) execute2(sql string, args ...interface{}) (rowsAffected int, err error) {
	const dialect = 1
	var isc_status [20]C.ISC_STATUS

	// prepare query
	sql2 := C.CString(sql)
	defer C.free(unsafe.Pointer(sql2))
	sql3 := (*C.ISC_SCHAR)(unsafe.Pointer(sql2))
	C.isc_dsql_prepare(&isc_status[0], &cursor.connection.transact, &cursor.stmt,
		0, sql3, cursor.connection.dialect, cursor.o_sqlda)
	if err = fbErrorCheck(&isc_status); err != nil {
		return
	}

	// get statement type
	isc_info_stmt := [...]C.ISC_SCHAR{C.isc_info_sql_stmt_type}
	var isc_info_buff [16]C.ISC_SCHAR
	C.isc_dsql_sql_info(&isc_status[0], &cursor.stmt,
		C.short(unsafe.Sizeof(isc_info_stmt[0])), &isc_info_stmt[0],
		C.short(unsafe.Sizeof(isc_info_buff[0])*16), &isc_info_buff[0])
	if err = fbErrorCheck(&isc_status); err != nil {
		return
	}

	var statement C.long
	if isc_info_buff[0] == C.isc_info_sql_stmt_type {
		length := C.isc_vax_integer(&isc_info_buff[1], 2)
		statement = C.long(C.isc_vax_integer(&isc_info_buff[3], C.short(length)))
	} else {
		statement = 0
	}

	// describe input parameters
	C.isc_dsql_describe_bind(&isc_status[0], &cursor.stmt, dialect, cursor.i_sqlda)
	if err = fbErrorCheck(&isc_status); err != nil {
		return
	}

	// describe output parameters
	C.isc_dsql_describe(&isc_status[0], &cursor.stmt, 1, cursor.o_sqlda)
	if err = fbErrorCheck(&isc_status); err != nil {
		return
	}

	// get number of parameters and reallocate SQLDA
	in_params := cursor.i_sqlda.sqld
	if cursor.i_sqlda.sqln < in_params {
		C.free(unsafe.Pointer(cursor.i_sqlda))
		cursor.i_sqlda = C.sqlda_alloc(C.long(in_params))
		// describe again
		C.isc_dsql_describe_bind(&isc_status[0], &cursor.stmt, dialect, cursor.i_sqlda)
		if err = fbErrorCheck(&isc_status); err != nil {
			return
		}
	}

	// get size of parameters buffer and reallocate it
	if in_params > 0 {
		length := C.calculate_buffsize(cursor.i_sqlda)
		if length > cursor.i_buffer_size {
			cursor.i_buffer = (*C.char)(C.realloc(unsafe.Pointer(cursor.i_buffer), C.size_t(length)))
			cursor.i_buffer_size = length
		}
	}

	if cursor.o_sqlda.sqld != 0 {
		// open cursor if statement is query
		// get number of columns and reallocate SQLDA
		cols := cursor.o_sqlda.sqld
		if cursor.o_sqlda.sqln < cols {
			C.free(unsafe.Pointer(cursor.o_sqlda))
			cursor.o_sqlda = C.sqlda_alloc(C.long(cols))
			// describe again
			C.isc_dsql_describe(&isc_status[0], &cursor.stmt, 1, cursor.o_sqlda)
			if err = fbErrorCheck(&isc_status); err != nil {
				return
			}
		}

		var i_sqlda *C.XSQLDA
		if in_params > 0 {
			cursor.setInputParams(args)
			i_sqlda = cursor.i_sqlda
		} else {
			i_sqlda = (*C.XSQLDA)(nil)
		}

		// open cursor
		C.isc_dsql_execute2(&isc_status[0], &cursor.connection.transact, &cursor.stmt,
			C.SQLDA_VERSION1, i_sqlda, (*C.XSQLDA)(nil))
		if err = fbErrorCheck(&isc_status); err != nil {
			return
		}
		cursor.open = true

		// get size of results buffer and reallocate it
		length := C.calculate_buffsize(cursor.o_sqlda)
		if length > cursor.o_buffer_size {
			cursor.o_buffer = (*C.char)(C.realloc(unsafe.Pointer(cursor.o_buffer), C.size_t(length)))
			cursor.o_buffer_size = length
		}

		// Set the description attributes
		cursor.Fields = fieldsFromSqlda(cursor.o_sqlda, cursor.connection.database.LowercaseNames)
		cursor.FieldsMap = fieldsMapFromSlice(cursor.Fields)
	} else {
		// execute statement if not query
		if statement == C.isc_info_sql_stmt_start_trans {
			panic("use fb.Connection.Transaction()")
		} else if statement == C.isc_info_sql_stmt_commit {
			panic("use fb.Connection.Commit()")
		} else if statement == C.isc_info_sql_stmt_rollback {
			panic("use fb.Connection.Rollback()")
		} else if in_params > 0 {
			cursor.executeWithParams(args)
		} else {
			C.isc_dsql_execute2(&isc_status[0], &cursor.connection.transact, &cursor.stmt,
				C.SQLDA_VERSION1, (*C.XSQLDA)(nil), (*C.XSQLDA)(nil))
			if err = fbErrorCheck(&isc_status); err != nil {
				return
			}
		}
		rowsAffected = cursor.rowsAffected(statement)
	}
	return
}