path: root/pkg/atomicbitops/aligned_32bit_unsafe.go
author    Andrei Vagin <avagin@gmail.com>  2021-07-26 15:41:11 -0700
committer Andrei Vagin <avagin@gmail.com>  2021-08-04 21:17:21 -0700
commit    376e8904320cff0b9594dc0388f51daded756cc4 (patch)
tree      ccb51422368570056e224da8cfec9698fa175673 /pkg/atomicbitops/aligned_32bit_unsafe.go
parent    a42d3fd0aeb6c67c3fd2fb851845a1f88a298972 (diff)
Reduce overhead of AlignedAtomic types
AlignedAtomicUint64 is 15 bytes, so with padding it takes 16 bytes in structures. On 32-bit systems, variables and structure fields are guaranteed to be 32-bit aligned, which means we need only 12 bytes to find 8 contiguous 64-bit-aligned bytes.
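For illustration, a standalone sketch (not part of the commit) of the rounding arithmetic the patch relies on: a 32-bit-aligned address is either 0 or 4 modulo 8, and in both cases (addr + 4) &^ 7 yields a 64-bit-aligned address at most 4 bytes in, so an 8-byte window always fits within the 12 bytes.

package main

import "fmt"

func main() {
	// The two cases a 32-bit-aligned address can fall into modulo 8.
	for _, addr := range []uintptr{0x1000, 0x1004} {
		// A 64-bit-aligned address within [addr, addr+4].
		aligned := (addr + 4) &^ 7
		fmt.Printf("addr=%#x aligned=%#x offset=%d\n", addr, aligned, aligned-addr)
	}
	// Output:
	// addr=0x1000 aligned=0x1000 offset=0
	// addr=0x1004 aligned=0x1008 offset=4
}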
Diffstat (limited to 'pkg/atomicbitops/aligned_32bit_unsafe.go')
-rw-r--r--  pkg/atomicbitops/aligned_32bit_unsafe.go  22
1 file changed, 12 insertions, 10 deletions
diff --git a/pkg/atomicbitops/aligned_32bit_unsafe.go b/pkg/atomicbitops/aligned_32bit_unsafe.go
index 383f81ff2..a17d317cc 100644
--- a/pkg/atomicbitops/aligned_32bit_unsafe.go
+++ b/pkg/atomicbitops/aligned_32bit_unsafe.go
@@ -34,14 +34,15 @@ import (
//
// +stateify savable
type AlignedAtomicInt64 struct {
- value [15]byte
+ value int64
+ value32 int32
}
func (aa *AlignedAtomicInt64) ptr() *int64 {
- // In the 15-byte aa.value, there are guaranteed to be 8 contiguous
- // bytes with 64-bit alignment. We find an address in this range by
- // adding 7, then clear the 3 least significant bits to get its start.
- return (*int64)(unsafe.Pointer((uintptr(unsafe.Pointer(&aa.value[0])) + 7) &^ 7))
+ // On 32-bit systems, aa.value is guaranteed to be 32-bit aligned.
+ // It means that within the 12 bytes occupied by value and value32,
+ // there are guaranteed to be 8 contiguous bytes with 64-bit alignment.
+ return (*int64)(unsafe.Pointer((uintptr(unsafe.Pointer(&aa.value)) + 4) &^ 7))
}
// Load is analogous to atomic.LoadInt64.
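A quick way to check the premise the new comment states (my own illustration, not from the patch): Go only guarantees 32-bit alignment for 64-bit fields on 32-bit targets, which unsafe.Alignof makes visible.

package main

import (
	"fmt"
	"unsafe"
)

type s struct {
	a int32
	b int64
}

func main() {
	var x s
	// On 32-bit targets (GOARCH=386, arm) this prints 4: int64 fields
	// are only guaranteed 32-bit alignment. On 64-bit targets it prints 8.
	fmt.Println(unsafe.Alignof(x.b))
}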
@@ -71,14 +72,15 @@ func (aa *AlignedAtomicInt64) Add(v int64) int64 {
//
// +stateify savable
type AlignedAtomicUint64 struct {
- value [15]byte
+ value uint64
+ value32 uint32
}
func (aa *AlignedAtomicUint64) ptr() *uint64 {
- // In the 15-byte aa.value, there are guaranteed to be 8 contiguous
- // bytes with 64-bit alignment. We find an address in this range by
- // adding 7, then clear the 3 least significant bits to get its start.
- return (*uint64)(unsafe.Pointer((uintptr(unsafe.Pointer(&aa.value[0])) + 7) &^ 7))
+ // On 32-bit systems, aa.value is guaranteed to be 32-bit aligned.
+ // It means that within the 12 bytes occupied by value and value32,
+ // there are guaranteed to be 8 contiguous bytes with 64-bit alignment.
+ return (*uint64)(unsafe.Pointer((uintptr(unsafe.Pointer(&aa.value)) + 4) &^ 7))
}
// Load is analogous to atomic.LoadUint64.
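To show how the accessors build on ptr(), here is a minimal self-contained sketch mirroring the patched layout; the Load/Add bodies are my assumption of the obvious wrappers, consistent with the "analogous to atomic.LoadUint64" comments above, not a copy of the file.

package main

import (
	"fmt"
	"sync/atomic"
	"unsafe"
)

// Same layout as the patched type: 12 bytes, at least 32-bit aligned.
type AlignedAtomicUint64 struct {
	value   uint64
	value32 uint32
}

func (aa *AlignedAtomicUint64) ptr() *uint64 {
	// A 64-bit-aligned address within the struct's 12 bytes.
	return (*uint64)(unsafe.Pointer((uintptr(unsafe.Pointer(&aa.value)) + 4) &^ 7))
}

// Assumed wrapper shape: every access goes through ptr() so the atomic
// operation always sees a 64-bit-aligned word.
func (aa *AlignedAtomicUint64) Load() uint64        { return atomic.LoadUint64(aa.ptr()) }
func (aa *AlignedAtomicUint64) Add(v uint64) uint64 { return atomic.AddUint64(aa.ptr(), v) }

func main() {
	var aa AlignedAtomicUint64
	aa.Add(41)
	aa.Add(1)
	fmt.Println(aa.Load()) // 42
}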