author    | Kevin Krakauer <krakauer@google.com> | 2021-05-07 13:51:09 -0700
committer | Kevin Krakauer <krakauer@google.com> | 2021-05-07 13:51:09 -0700
commit    | 314a5f8d7e7402e66ba5d22f79fa13143a7b69d0
tree      | dd729cf35e33b126144edff5c5803788b8e2e282
parent    | 66de003e19642b5d6eb6d3f7660ad638271f3477
explicitly 0-index backing array
-rw-r--r-- | pkg/atomicbitops/aligned_32bit_unsafe.go | 4
1 file changed, 2 insertions, 2 deletions
diff --git a/pkg/atomicbitops/aligned_32bit_unsafe.go b/pkg/atomicbitops/aligned_32bit_unsafe.go
index 3ebbee7b1..776da53b0 100644
--- a/pkg/atomicbitops/aligned_32bit_unsafe.go
+++ b/pkg/atomicbitops/aligned_32bit_unsafe.go
@@ -40,7 +40,7 @@ func (aa *AlignedAtomicInt64) ptr() *int64 {
 	// In the 15-byte aa.value, there are guaranteed to be 8 contiguous
 	// bytes with 64-bit alignment. We find an address in this range by
 	// adding 7, then clear the 3 least significant bits to get its start.
-	return (*int64)(unsafe.Pointer((uintptr(unsafe.Pointer(&aa.value)) + 7) &^ 7))
+	return (*int64)(unsafe.Pointer((uintptr(unsafe.Pointer(&aa.value[0])) + 7) &^ 7))
 }
 
 // Load is analagous to atomic.LoadInt64.
@@ -77,7 +77,7 @@ func (aa *AlignedAtomicUint64) ptr() *uint64 {
 	// In the 15-byte aa.value, there are guaranteed to be 8 contiguous
 	// bytes with 64-bit alignment. We find an address in this range by
 	// adding 7, then clear the 3 least significant bits to get its start.
-	return (*uint64)(unsafe.Pointer((uintptr(unsafe.Pointer(&aa.value)) + 7) &^ 7))
+	return (*uint64)(unsafe.Pointer((uintptr(unsafe.Pointer(&aa.value[0])) + 7) &^ 7))
 }
 
 // Load is analagous to atomic.LoadUint64.
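The change is behavior-preserving: for a Go array, `&aa.value` and `&aa.value[0]` refer to the same address, so indexing element 0 only makes it explicit that the pointer arithmetic starts at the first byte of the backing array. For context, below is a minimal, self-contained sketch of the alignment trick these `ptr()` methods rely on: 64-bit atomic operations require 8-byte alignment, but on 32-bit platforms Go only guarantees 4-byte alignment for struct fields, so the value is backed by a 15-byte array that is guaranteed to contain an 8-byte, 64-bit-aligned window. The type and method names here are illustrative stand-ins, not the actual gVisor API.

```go
package main

import (
	"fmt"
	"sync/atomic"
	"unsafe"
)

// alignedUint64 is a hypothetical stand-in for the AlignedAtomicUint64 type
// touched by this commit. The 15-byte array is large enough that it must
// contain 8 contiguous bytes starting at a 64-bit-aligned address.
type alignedUint64 struct {
	value [15]byte
}

// ptr rounds the address of value[0] up to the next multiple of 8 by adding 7
// and clearing the 3 least significant bits. The result always lies inside
// the array, so the returned *uint64 is a valid, 64-bit-aligned view of 8 of
// its bytes.
func (a *alignedUint64) ptr() *uint64 {
	return (*uint64)(unsafe.Pointer((uintptr(unsafe.Pointer(&a.value[0])) + 7) &^ 7))
}

// Load and Store delegate to sync/atomic through the aligned pointer.
func (a *alignedUint64) Load() uint64   { return atomic.LoadUint64(a.ptr()) }
func (a *alignedUint64) Store(v uint64) { atomic.StoreUint64(a.ptr(), v) }

func main() {
	var a alignedUint64
	a.Store(42)
	// The second value is always 0: the pointer is 8-byte aligned even when
	// the struct itself is only 4-byte aligned.
	fmt.Println(a.Load(), uintptr(unsafe.Pointer(a.ptr()))%8)
}
```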