author | Andrei Vagin <avagin@gmail.com> | 2021-07-26 15:41:11 -0700
---|---|---
committer | Andrei Vagin <avagin@gmail.com> | 2021-08-04 21:17:21 -0700
commit | 376e8904320cff0b9594dc0388f51daded756cc4 |
tree | ccb51422368570056e224da8cfec9698fa175673 | /pkg/atomicbitops
parent | a42d3fd0aeb6c67c3fd2fb851845a1f88a298972 |
Reduce overhead of AlignedAtomic types
AlignedAtomicUint64 holds a 15-byte value, which padding rounds up to 16
bytes inside structures. On 32-bit systems, variables and structure fields
are guaranteed to be 32-bit aligned, so 12 bytes are enough to find 8
contiguous 64-bit-aligned bytes.
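
To make the arithmetic concrete, here is a small standalone sketch (the type and names are illustrative, not from the commit) showing how rounding a 32-bit-aligned address up to the next 64-bit boundary always lands inside the 12-byte window:

```go
package main

import (
	"fmt"
	"unsafe"
)

// aligned12 mirrors the 12-byte layout used by the patch: an 8-byte
// value plus 4 bytes of slack, so an aligned 8-byte window always fits.
type aligned12 struct {
	value   uint64
	value32 uint32
}

func main() {
	var a aligned12
	base := uintptr(unsafe.Pointer(&a.value))
	// Round base up to the next multiple of 8 by adding 4 and clearing
	// the low 3 bits. When base is already 8-byte aligned the result is
	// base itself; when it is only 4-byte aligned the result is base+4.
	// Either way the 8-byte window ends at or before base+12.
	aligned := (base + 4) &^ 7
	fmt.Printf("base=%#x aligned=%#x offset=%d\n", base, aligned, aligned-base)
}
```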
Diffstat (limited to 'pkg/atomicbitops')
-rw-r--r-- | pkg/atomicbitops/BUILD | 5
-rw-r--r-- | pkg/atomicbitops/aligned_32bit_unsafe.go | 22
-rw-r--r-- | pkg/atomicbitops/aligned_test.go | 35
3 files changed, 51 insertions, 11 deletions
diff --git a/pkg/atomicbitops/BUILD b/pkg/atomicbitops/BUILD
index 11072d4de..02c0e52b9 100644
--- a/pkg/atomicbitops/BUILD
+++ b/pkg/atomicbitops/BUILD
@@ -18,7 +18,10 @@ go_library(
 go_test(
     name = "atomicbitops_test",
     size = "small",
-    srcs = ["atomicbitops_test.go"],
+    srcs = [
+        "aligned_test.go",
+        "atomicbitops_test.go",
+    ],
     library = ":atomicbitops",
     deps = ["//pkg/sync"],
 )
diff --git a/pkg/atomicbitops/aligned_32bit_unsafe.go b/pkg/atomicbitops/aligned_32bit_unsafe.go
index 383f81ff2..a17d317cc 100644
--- a/pkg/atomicbitops/aligned_32bit_unsafe.go
+++ b/pkg/atomicbitops/aligned_32bit_unsafe.go
@@ -34,14 +34,15 @@ import (
 //
 // +stateify savable
 type AlignedAtomicInt64 struct {
-	value [15]byte
+	value   int64
+	value32 int32
 }
 
 func (aa *AlignedAtomicInt64) ptr() *int64 {
-	// In the 15-byte aa.value, there are guaranteed to be 8 contiguous
-	// bytes with 64-bit alignment. We find an address in this range by
-	// adding 7, then clear the 3 least significant bits to get its start.
-	return (*int64)(unsafe.Pointer((uintptr(unsafe.Pointer(&aa.value[0])) + 7) &^ 7))
+	// On 32-bit systems, aa.value is guaranteed to be 32-bit aligned.
+	// This means that, within the 12 bytes covered by value and value32,
+	// there are guaranteed to be 8 contiguous bytes with 64-bit alignment.
+	return (*int64)(unsafe.Pointer((uintptr(unsafe.Pointer(&aa.value)) + 4) &^ 7))
 }
 
 // Load is analogous to atomic.LoadInt64.
@@ -71,14 +72,15 @@ func (aa *AlignedAtomicInt64) Add(v int64) int64 {
 //
 // +stateify savable
 type AlignedAtomicUint64 struct {
-	value [15]byte
+	value   uint64
+	value32 uint32
 }
 
 func (aa *AlignedAtomicUint64) ptr() *uint64 {
-	// In the 15-byte aa.value, there are guaranteed to be 8 contiguous
-	// bytes with 64-bit alignment. We find an address in this range by
-	// adding 7, then clear the 3 least significant bits to get its start.
-	return (*uint64)(unsafe.Pointer((uintptr(unsafe.Pointer(&aa.value[0])) + 7) &^ 7))
+	// On 32-bit systems, aa.value is guaranteed to be 32-bit aligned.
+	// This means that, within the 12 bytes covered by value and value32,
+	// there are guaranteed to be 8 contiguous bytes with 64-bit alignment.
+	return (*uint64)(unsafe.Pointer((uintptr(unsafe.Pointer(&aa.value)) + 4) &^ 7))
 }
 
 // Load is analogous to atomic.LoadUint64.
diff --git a/pkg/atomicbitops/aligned_test.go b/pkg/atomicbitops/aligned_test.go
new file mode 100644
index 000000000..e7123d2b8
--- /dev/null
+++ b/pkg/atomicbitops/aligned_test.go
@@ -0,0 +1,35 @@
+// Copyright 2021 The gVisor Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package atomicbitops
+
+import (
+	"testing"
+)
+
+func TestAtomicInt64(t *testing.T) {
+	v := struct {
+		v8  int8
+		v64 AlignedAtomicInt64
+	}{}
+	v.v64.Add(1)
+}
+
+func TestAtomicUint64(t *testing.T) {
+	v := struct {
+		v8  uint8
+		v64 AlignedAtomicUint64
+	}{}
+	v.v64.Add(1)
+}
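
For context, a hedged usage sketch (assuming the gvisor.dev/gvisor module path; the surrounding struct and field names are illustrative): embedding the type where a bare uint64 could end up only 4-byte aligned on a 32-bit platform, which would make sync/atomic's 64-bit operations panic.

```go
package main

import (
	"fmt"

	"gvisor.dev/gvisor/pkg/atomicbitops"
)

// stats places a 1-byte field first. On a 32-bit platform a bare
// uint64 after it could land on an address that is only 4-byte
// aligned, and sync/atomic's 64-bit operations require 64-bit
// alignment there. AlignedAtomicUint64 carries its own slack bytes,
// so Add and Load stay safe regardless of the enclosing layout.
type stats struct {
	kind byte
	hits atomicbitops.AlignedAtomicUint64
}

func main() {
	var s stats
	s.hits.Add(1)
	fmt.Println(s.hits.Load()) // prints 1
}
```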