// Variant of sync/atomic's TestUnaligned64:
func TestUnaligned64(t *testing.T) {
	// Unaligned 64-bit atomics on 32-bit systems are
	// a continual source of pain. Test that on 32-bit systems they crash
	// instead of failing silently.

	switch runtime.GOARCH {
	default:
		if unsafe.Sizeof(int(0)) != 4 {
			t.Skip("test only runs on 32-bit systems")
		}
	case "amd64p32":
		// amd64p32 can handle unaligned atomics.
		t.Skipf("test not needed on %v", runtime.GOARCH)
	}

	x := make([]uint32, 4)
	up64 := (*uint64)(unsafe.Pointer(&x[1])) // misaligned
	p64 := (*int64)(unsafe.Pointer(&x[1]))   // misaligned

	shouldPanic(t, "Load64", func() { atomic.Load64(up64) })
	shouldPanic(t, "Loadint64", func() { atomic.Loadint64(p64) })
	shouldPanic(t, "Store64", func() { atomic.Store64(up64, 0) })
	shouldPanic(t, "Xadd64", func() { atomic.Xadd64(up64, 1) })
	shouldPanic(t, "Xchg64", func() { atomic.Xchg64(up64, 1) })
	shouldPanic(t, "Cas64", func() { atomic.Cas64(up64, 1, 2) })
}
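// TestUnaligned64 calls a shouldPanic helper defined elsewhere in the
// runtime's test files. A minimal sketch of such a helper follows; it is
// an assumption that checking only for a non-nil recover() is enough here.
// The real helper may also compare the panic value against the runtime's
// "unaligned 64-bit atomic operation" message.
func shouldPanic(t *testing.T, name string, f func()) {
	// Run f and fail the test if it returns without panicking.
	defer func() {
		if recover() == nil {
			t.Errorf("%s did not panic", name)
		}
	}()
	f()
}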
func testAtomic64() {
	test_z64 = 42
	test_x64 = 0
	prefetcht0(uintptr(unsafe.Pointer(&test_z64)))
	prefetcht1(uintptr(unsafe.Pointer(&test_z64)))
	prefetcht2(uintptr(unsafe.Pointer(&test_z64)))
	prefetchnta(uintptr(unsafe.Pointer(&test_z64)))
	// Cas64 must fail: test_z64 (42) does not equal the old value (0).
	if atomic.Cas64(&test_z64, test_x64, 1) {
		throw("cas64 failed")
	}
	if test_x64 != 0 {
		throw("cas64 failed")
	}
	// Cas64 must succeed now that the old value matches.
	test_x64 = 42
	if !atomic.Cas64(&test_z64, test_x64, 1) {
		throw("cas64 failed")
	}
	if test_x64 != 42 || test_z64 != 1 {
		throw("cas64 failed")
	}
	if atomic.Load64(&test_z64) != 1 {
		throw("load64 failed")
	}
	// Use values above 2^32 to catch implementations that only
	// handle the low 32 bits correctly.
	atomic.Store64(&test_z64, (1<<40)+1)
	if atomic.Load64(&test_z64) != (1<<40)+1 {
		throw("store64 failed")
	}
	// Xadd64 returns the new value; Xchg64 returns the old one.
	if atomic.Xadd64(&test_z64, (1<<40)+1) != (2<<40)+2 {
		throw("xadd64 failed")
	}
	if atomic.Load64(&test_z64) != (2<<40)+2 {
		throw("xadd64 failed")
	}
	if atomic.Xchg64(&test_z64, (3<<40)+3) != (2<<40)+2 {
		throw("xchg64 failed")
	}
	if atomic.Load64(&test_z64) != (3<<40)+3 {
		throw("xchg64 failed")
	}
}
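// For context, the same sequence of checks can be expressed against the
// user-facing sync/atomic package. The standalone program below is a
// sketch for illustration, not part of the runtime source; it assumes
// package-level variables, which the sync/atomic documentation guarantees
// to be 64-bit aligned even on 32-bit platforms.
package main

import (
	"fmt"
	"sync/atomic"
)

// Mirrors the runtime's test_z64/test_x64 globals.
var test_z64, test_x64 uint64

func main() {
	test_z64 = 42
	test_x64 = 0
	// CAS must fail while the old value does not match.
	if atomic.CompareAndSwapUint64(&test_z64, test_x64, 1) {
		panic("cas64 failed")
	}
	test_x64 = 42
	// CAS must succeed once the old value matches.
	if !atomic.CompareAndSwapUint64(&test_z64, test_x64, 1) {
		panic("cas64 failed")
	}
	if atomic.LoadUint64(&test_z64) != 1 {
		panic("load64 failed")
	}
	atomic.StoreUint64(&test_z64, (1<<40)+1)
	if atomic.LoadUint64(&test_z64) != (1<<40)+1 {
		panic("store64 failed")
	}
	// AddUint64 returns the new value; SwapUint64 returns the old one.
	if atomic.AddUint64(&test_z64, (1<<40)+1) != (2<<40)+2 {
		panic("xadd64 failed")
	}
	if atomic.SwapUint64(&test_z64, (3<<40)+3) != (2<<40)+2 {
		panic("xchg64 failed")
	}
	fmt.Println("atomic64 checks passed")
}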