// mapzero ensures that zeroptr points to a buffer large enough to // serve as the zero value for t. func mapzero(t *_type) { // Is the type small enough for existing buffer? cursize := uintptr(atomic.Loadp(unsafe.Pointer(&zerosize))) if t.size <= cursize { return } // Allocate a new buffer. lock(&zerolock) cursize = uintptr(atomic.Loadp(unsafe.Pointer(&zerosize))) if cursize < t.size { for cursize < t.size { cursize *= 2 if cursize == 0 { // need >2GB zero on 32-bit machine throw("map element too large") } } atomic.Storep1(unsafe.Pointer(&zeroptr), persistentalloc(cursize, 64, &memstats.other_sys)) atomic.Storep1(unsafe.Pointer(&zerosize), unsafe.Pointer(zerosize)) } unlock(&zerolock) }
// sync_atomic_StorePointer is the runtime implementation of
// sync/atomic.StorePointer, wired up via go:linkname.
//
// It performs the raw atomic store twice — once through the uintptr
// path and once through atomic.Storep1 — and then notifies the garbage
// collector of the pointer write via writebarrierptr_nostore (the
// "_nostore" variant, because the store itself has already been done
// atomically above). The statement order is load-bearing: the write
// barrier must follow the store. NOTE(review): the double store
// (StoreUintptr then Storep1) mirrors this runtime version's split
// between the linkname'd uintptr helper and the internal atomic
// package — confirm against the matching runtime release before
// simplifying.
//
//go:linkname sync_atomic_StorePointer sync/atomic.StorePointer
//go:nosplit
func sync_atomic_StorePointer(ptr *unsafe.Pointer, new unsafe.Pointer) {
	sync_atomic_StoreUintptr((*uintptr)(unsafe.Pointer(ptr)), uintptr(new))
	// noescape hides ptr from escape analysis; the store is still real.
	atomic.Storep1(noescape(unsafe.Pointer(ptr)), new)
	// Tell the GC about the pointer write without redoing the store.
	writebarrierptr_nostore((*uintptr)(unsafe.Pointer(ptr)), uintptr(new))
}
// atomicstorep atomically stores new into *ptr and then executes the
// GC write barrier for the pointer write.
//
// The order is essential: the atomic store publishes the pointer, and
// writebarrierptr_nostore (which records the write for the garbage
// collector without performing it again) must run after it. nosplit
// because it is used in contexts where a stack split is not allowed.
//
//go:nosplit
func atomicstorep(ptr unsafe.Pointer, new unsafe.Pointer) {
	// noescape hides ptr from escape analysis; the store is still real.
	atomic.Storep1(noescape(ptr), new)
	writebarrierptr_nostore((*uintptr)(ptr), uintptr(new))
}