diff options
author | gVisor bot <gvisor-bot@google.com> | 2021-08-05 18:52:44 +0000 |
---|---|---|
committer | gVisor bot <gvisor-bot@google.com> | 2021-08-05 18:52:44 +0000 |
commit | de46d762900d0ccdbcaa66bc6d85d6b9f7712106 (patch) | |
tree | 8fc887255eaf0bdba642d5f48c20829ae8c3e480 /pkg/atomicbitops/aligned_32bit_unsafe.go | |
parent | 00c0987485a12c8746988e253cc079b63828cdaf (diff) | |
parent | 42301ae7db54cf4c835524fb733b6c0efe9e89aa (diff) |
Merge release-20210726.0-37-g42301ae7d (automated)
Diffstat (limited to 'pkg/atomicbitops/aligned_32bit_unsafe.go')
-rw-r--r-- | pkg/atomicbitops/aligned_32bit_unsafe.go | 22 |
1 files changed, 12 insertions, 10 deletions
diff --git a/pkg/atomicbitops/aligned_32bit_unsafe.go b/pkg/atomicbitops/aligned_32bit_unsafe.go index 383f81ff2..a17d317cc 100644 --- a/pkg/atomicbitops/aligned_32bit_unsafe.go +++ b/pkg/atomicbitops/aligned_32bit_unsafe.go @@ -34,14 +34,15 @@ import ( // // +stateify savable type AlignedAtomicInt64 struct { - value [15]byte + value int64 + value32 int32 } func (aa *AlignedAtomicInt64) ptr() *int64 { - // In the 15-byte aa.value, there are guaranteed to be 8 contiguous - // bytes with 64-bit alignment. We find an address in this range by - // adding 7, then clear the 3 least significant bits to get its start. - return (*int64)(unsafe.Pointer((uintptr(unsafe.Pointer(&aa.value[0])) + 7) &^ 7)) + // On 32-bit systems, aa.value is guaranteed to be 32-bit aligned. + // It means that in the 12-byte aa.value, there are guaranteed to be 8 + // contiguous bytes with 64-bit alignment. + return (*int64)(unsafe.Pointer((uintptr(unsafe.Pointer(&aa.value)) + 4) &^ 7)) } // Load is analogous to atomic.LoadInt64. @@ -71,14 +72,15 @@ func (aa *AlignedAtomicInt64) Add(v int64) int64 { // // +stateify savable type AlignedAtomicUint64 struct { - value [15]byte + value uint64 + value32 uint32 } func (aa *AlignedAtomicUint64) ptr() *uint64 { - // In the 15-byte aa.value, there are guaranteed to be 8 contiguous - // bytes with 64-bit alignment. We find an address in this range by - // adding 7, then clear the 3 least significant bits to get its start. - return (*uint64)(unsafe.Pointer((uintptr(unsafe.Pointer(&aa.value[0])) + 7) &^ 7)) + // On 32-bit systems, aa.value is guaranteed to be 32-bit aligned. + // It means that in the 12-byte aa.value, there are guaranteed to be 8 + // contiguous bytes with 64-bit alignment. + return (*uint64)(unsafe.Pointer((uintptr(unsafe.Pointer(&aa.value)) + 4) &^ 7)) } // Load is analogous to atomic.LoadUint64. |