summaryrefslogtreecommitdiffhomepage
path: root/pkg/atomicbitops/aligned_32bit_unsafe.go
diff options
context:
space:
mode:
Diffstat (limited to 'pkg/atomicbitops/aligned_32bit_unsafe.go')
-rw-r--r--pkg/atomicbitops/aligned_32bit_unsafe.go22
1 files changed, 12 insertions, 10 deletions
diff --git a/pkg/atomicbitops/aligned_32bit_unsafe.go b/pkg/atomicbitops/aligned_32bit_unsafe.go
index 383f81ff2..a17d317cc 100644
--- a/pkg/atomicbitops/aligned_32bit_unsafe.go
+++ b/pkg/atomicbitops/aligned_32bit_unsafe.go
@@ -34,14 +34,15 @@ import (
//
// +stateify savable
type AlignedAtomicInt64 struct {
- value [15]byte
+ value int64
+ value32 int32
}
func (aa *AlignedAtomicInt64) ptr() *int64 {
- // In the 15-byte aa.value, there are guaranteed to be 8 contiguous
- // bytes with 64-bit alignment. We find an address in this range by
- // adding 7, then clear the 3 least significant bits to get its start.
- return (*int64)(unsafe.Pointer((uintptr(unsafe.Pointer(&aa.value[0])) + 7) &^ 7))
+ // On 32-bit systems, aa.value is guaranteed to be 32-bit aligned.
+ // This means that in the 12-byte aa.value, there are guaranteed to be 8
+ // contiguous bytes with 64-bit alignment.
+ return (*int64)(unsafe.Pointer((uintptr(unsafe.Pointer(&aa.value)) + 4) &^ 7))
}
// Load is analogous to atomic.LoadInt64.
@@ -71,14 +72,15 @@ func (aa *AlignedAtomicInt64) Add(v int64) int64 {
//
// +stateify savable
type AlignedAtomicUint64 struct {
- value [15]byte
+ value uint64
+ value32 uint32
}
func (aa *AlignedAtomicUint64) ptr() *uint64 {
- // In the 15-byte aa.value, there are guaranteed to be 8 contiguous
- // bytes with 64-bit alignment. We find an address in this range by
- // adding 7, then clear the 3 least significant bits to get its start.
- return (*uint64)(unsafe.Pointer((uintptr(unsafe.Pointer(&aa.value[0])) + 7) &^ 7))
+ // On 32-bit systems, aa.value is guaranteed to be 32-bit aligned.
+ // This means that in the 12-byte aa.value, there are guaranteed to be 8
+ // contiguous bytes with 64-bit alignment.
+ return (*uint64)(unsafe.Pointer((uintptr(unsafe.Pointer(&aa.value)) + 4) &^ 7))
}
// Load is analogous to atomic.LoadUint64.