summaryrefslogtreecommitdiffhomepage
path: root/pkg/atomicbitops
diff options
context:
space:
mode:
authorgVisor bot <gvisor-bot@google.com>2021-05-12 01:11:17 +0000
committergVisor bot <gvisor-bot@google.com>2021-05-12 01:11:17 +0000
commita8920885e3c9a185f6e686451aec9be18b9d06e4 (patch)
tree6115ca3be9187ef39a8bebbe208f3660381eacfe /pkg/atomicbitops
parentccc377e3a485ddda506be9ff0149bb936f954816 (diff)
parent8f6bfe257e162c14faa25ee00dc249859994c2c8 (diff)
Merge release-20210503.0-45-g8f6bfe257 (automated)
Diffstat (limited to 'pkg/atomicbitops')
-rw-r--r--pkg/atomicbitops/aligned_32bit_unsafe.go96
-rw-r--r--pkg/atomicbitops/aligned_64bit.go71
-rw-r--r--pkg/atomicbitops/atomicbitops_32bit_unsafe_state_autogen.go64
-rw-r--r--pkg/atomicbitops/atomicbitops_64bit_state_autogen.go64
4 files changed, 295 insertions, 0 deletions
diff --git a/pkg/atomicbitops/aligned_32bit_unsafe.go b/pkg/atomicbitops/aligned_32bit_unsafe.go
new file mode 100644
index 000000000..776da53b0
--- /dev/null
+++ b/pkg/atomicbitops/aligned_32bit_unsafe.go
@@ -0,0 +1,96 @@
+// Copyright 2021 The gVisor Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// +build arm 386
+
+package atomicbitops
+
+import (
+ "sync/atomic"
+ "unsafe"
+)
+
+// AlignedAtomicInt64 is an atomic int64 that is guaranteed to be 64-bit
+// aligned, even on 32-bit systems.
+//
+// Per https://golang.org/pkg/sync/atomic/#pkg-note-BUG:
+//
+// "On ARM, 386, and 32-bit MIPS, it is the caller's responsibility to arrange
+// for 64-bit alignment of 64-bit words accessed atomically. The first word in
+// a variable or in an allocated struct, array, or slice can be relied upon to
+// be 64-bit aligned."
+//
+// +stateify savable
+type AlignedAtomicInt64 struct {
+ // value is 15 bytes (8 + 7) rather than int64 so that, wherever the
+ // struct itself lands in memory, it always contains 8 contiguous bytes
+ // at a 64-bit-aligned address; ptr() locates that window.
+ value [15]byte
+}
+
+// ptr returns a pointer to the 64-bit-aligned 8-byte window inside aa.value.
+// All atomic operations on AlignedAtomicInt64 go through this pointer.
+//
+// The uintptr arithmetic and the conversion back to unsafe.Pointer happen in
+// a single expression, as required by the unsafe.Pointer conversion rules.
+func (aa *AlignedAtomicInt64) ptr() *int64 {
+ // In the 15-byte aa.value, there are guaranteed to be 8 contiguous
+ // bytes with 64-bit alignment. We find an address in this range by
+ // adding 7, then clear the 3 least significant bits to get its start.
+ return (*int64)(unsafe.Pointer((uintptr(unsafe.Pointer(&aa.value[0])) + 7) &^ 7))
+}
+
+// Load is analogous to atomic.LoadInt64.
+func (aa *AlignedAtomicInt64) Load() int64 {
+ return atomic.LoadInt64(aa.ptr())
+}
+
+// Store is analogous to atomic.StoreInt64.
+func (aa *AlignedAtomicInt64) Store(v int64) {
+ atomic.StoreInt64(aa.ptr(), v)
+}
+
+// Add is analogous to atomic.AddInt64.
+func (aa *AlignedAtomicInt64) Add(v int64) int64 {
+ return atomic.AddInt64(aa.ptr(), v)
+}
+
+// AlignedAtomicUint64 is an atomic uint64 that is guaranteed to be 64-bit
+// aligned, even on 32-bit systems.
+//
+// Per https://golang.org/pkg/sync/atomic/#pkg-note-BUG:
+//
+// "On ARM, 386, and 32-bit MIPS, it is the caller's responsibility to arrange
+// for 64-bit alignment of 64-bit words accessed atomically. The first word in
+// a variable or in an allocated struct, array, or slice can be relied upon to
+// be 64-bit aligned."
+//
+// +stateify savable
+type AlignedAtomicUint64 struct {
+ // value is 15 bytes (8 + 7) rather than uint64 so that, wherever the
+ // struct itself lands in memory, it always contains 8 contiguous bytes
+ // at a 64-bit-aligned address; ptr() locates that window.
+ value [15]byte
+}
+
+// ptr returns a pointer to the 64-bit-aligned 8-byte window inside aa.value.
+// All atomic operations on AlignedAtomicUint64 go through this pointer.
+//
+// The uintptr arithmetic and the conversion back to unsafe.Pointer happen in
+// a single expression, as required by the unsafe.Pointer conversion rules.
+func (aa *AlignedAtomicUint64) ptr() *uint64 {
+ // In the 15-byte aa.value, there are guaranteed to be 8 contiguous
+ // bytes with 64-bit alignment. We find an address in this range by
+ // adding 7, then clear the 3 least significant bits to get its start.
+ return (*uint64)(unsafe.Pointer((uintptr(unsafe.Pointer(&aa.value[0])) + 7) &^ 7))
+}
+
+// Load is analogous to atomic.LoadUint64.
+func (aa *AlignedAtomicUint64) Load() uint64 {
+ return atomic.LoadUint64(aa.ptr())
+}
+
+// Store is analogous to atomic.StoreUint64.
+func (aa *AlignedAtomicUint64) Store(v uint64) {
+ atomic.StoreUint64(aa.ptr(), v)
+}
+
+// Add is analogous to atomic.AddUint64.
+func (aa *AlignedAtomicUint64) Add(v uint64) uint64 {
+ return atomic.AddUint64(aa.ptr(), v)
+}
diff --git a/pkg/atomicbitops/aligned_64bit.go b/pkg/atomicbitops/aligned_64bit.go
new file mode 100644
index 000000000..869ba40cd
--- /dev/null
+++ b/pkg/atomicbitops/aligned_64bit.go
@@ -0,0 +1,71 @@
+// Copyright 2021 The gVisor Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// +build amd64 arm64
+
+package atomicbitops
+
+import "sync/atomic"
+
+// AlignedAtomicInt64 is an atomic int64 that is guaranteed to be 64-bit
+// aligned, even on 32-bit systems. On 64-bit machines, it's just a regular
+// int64.
+//
+// See aligned_32bit_unsafe.go in this directory for justification.
+//
+// +stateify savable
+type AlignedAtomicInt64 struct {
+ value int64
+}
+
+// Load is analogous to atomic.LoadInt64.
+func (aa *AlignedAtomicInt64) Load() int64 {
+ return atomic.LoadInt64(&aa.value)
+}
+
+// Store is analogous to atomic.StoreInt64.
+func (aa *AlignedAtomicInt64) Store(v int64) {
+ atomic.StoreInt64(&aa.value, v)
+}
+
+// Add is analogous to atomic.AddInt64.
+func (aa *AlignedAtomicInt64) Add(v int64) int64 {
+ return atomic.AddInt64(&aa.value, v)
+}
+
+// AlignedAtomicUint64 is an atomic uint64 that is guaranteed to be 64-bit
+// aligned, even on 32-bit systems. On 64-bit machines, it's just a regular
+// uint64.
+//
+// See aligned_32bit_unsafe.go in this directory for justification.
+//
+// +stateify savable
+type AlignedAtomicUint64 struct {
+ value uint64
+}
+
+// Load is analogous to atomic.LoadUint64.
+func (aa *AlignedAtomicUint64) Load() uint64 {
+ return atomic.LoadUint64(&aa.value)
+}
+
+// Store is analogous to atomic.StoreUint64.
+func (aa *AlignedAtomicUint64) Store(v uint64) {
+ atomic.StoreUint64(&aa.value, v)
+}
+
+// Add is analogous to atomic.AddUint64.
+func (aa *AlignedAtomicUint64) Add(v uint64) uint64 {
+ return atomic.AddUint64(&aa.value, v)
+}
diff --git a/pkg/atomicbitops/atomicbitops_32bit_unsafe_state_autogen.go b/pkg/atomicbitops/atomicbitops_32bit_unsafe_state_autogen.go
new file mode 100644
index 000000000..61eca1a6d
--- /dev/null
+++ b/pkg/atomicbitops/atomicbitops_32bit_unsafe_state_autogen.go
@@ -0,0 +1,64 @@
+// automatically generated by stateify.
+
+// +build arm 386
+
+package atomicbitops
+
+import (
+ "gvisor.dev/gvisor/pkg/state"
+)
+
+// StateTypeName returns the name identifying AlignedAtomicInt64 in saved state.
+func (aa *AlignedAtomicInt64) StateTypeName() string {
+ return "pkg/atomicbitops.AlignedAtomicInt64"
+}
+
+// StateFields lists the fields serialized by StateSave/StateLoad.
+func (aa *AlignedAtomicInt64) StateFields() []string {
+ return []string{
+ "value",
+ }
+}
+
+// beforeSave is called by StateSave prior to serialization; no-op here.
+func (aa *AlignedAtomicInt64) beforeSave() {}
+
+// StateSave serializes aa into stateSinkObject.
+//
+// +checklocksignore
+func (aa *AlignedAtomicInt64) StateSave(stateSinkObject state.Sink) {
+ aa.beforeSave()
+ stateSinkObject.Save(0, &aa.value)
+}
+
+// afterLoad does nothing; provided for the state framework (generated code).
+func (aa *AlignedAtomicInt64) afterLoad() {}
+
+// StateLoad deserializes aa from stateSourceObject.
+//
+// +checklocksignore
+func (aa *AlignedAtomicInt64) StateLoad(stateSourceObject state.Source) {
+ stateSourceObject.Load(0, &aa.value)
+}
+
+// StateTypeName returns the name identifying AlignedAtomicUint64 in saved state.
+func (aa *AlignedAtomicUint64) StateTypeName() string {
+ return "pkg/atomicbitops.AlignedAtomicUint64"
+}
+
+// StateFields lists the fields serialized by StateSave/StateLoad.
+func (aa *AlignedAtomicUint64) StateFields() []string {
+ return []string{
+ "value",
+ }
+}
+
+// beforeSave is called by StateSave prior to serialization; no-op here.
+func (aa *AlignedAtomicUint64) beforeSave() {}
+
+// StateSave serializes aa into stateSinkObject.
+//
+// +checklocksignore
+func (aa *AlignedAtomicUint64) StateSave(stateSinkObject state.Sink) {
+ aa.beforeSave()
+ stateSinkObject.Save(0, &aa.value)
+}
+
+// afterLoad does nothing; provided for the state framework (generated code).
+func (aa *AlignedAtomicUint64) afterLoad() {}
+
+// StateLoad deserializes aa from stateSourceObject.
+//
+// +checklocksignore
+func (aa *AlignedAtomicUint64) StateLoad(stateSourceObject state.Source) {
+ stateSourceObject.Load(0, &aa.value)
+}
+
+// init registers both aligned atomic types with the state package.
+func init() {
+ state.Register((*AlignedAtomicInt64)(nil))
+ state.Register((*AlignedAtomicUint64)(nil))
+}
diff --git a/pkg/atomicbitops/atomicbitops_64bit_state_autogen.go b/pkg/atomicbitops/atomicbitops_64bit_state_autogen.go
new file mode 100644
index 000000000..cfec0ec7b
--- /dev/null
+++ b/pkg/atomicbitops/atomicbitops_64bit_state_autogen.go
@@ -0,0 +1,64 @@
+// automatically generated by stateify.
+
+// +build amd64 arm64
+
+package atomicbitops
+
+import (
+ "gvisor.dev/gvisor/pkg/state"
+)
+
+// StateTypeName returns the name identifying AlignedAtomicInt64 in saved state.
+func (aa *AlignedAtomicInt64) StateTypeName() string {
+ return "pkg/atomicbitops.AlignedAtomicInt64"
+}
+
+// StateFields lists the fields serialized by StateSave/StateLoad.
+func (aa *AlignedAtomicInt64) StateFields() []string {
+ return []string{
+ "value",
+ }
+}
+
+// beforeSave is called by StateSave prior to serialization; no-op here.
+func (aa *AlignedAtomicInt64) beforeSave() {}
+
+// StateSave serializes aa into stateSinkObject.
+//
+// +checklocksignore
+func (aa *AlignedAtomicInt64) StateSave(stateSinkObject state.Sink) {
+ aa.beforeSave()
+ stateSinkObject.Save(0, &aa.value)
+}
+
+// afterLoad does nothing; provided for the state framework (generated code).
+func (aa *AlignedAtomicInt64) afterLoad() {}
+
+// StateLoad deserializes aa from stateSourceObject.
+//
+// +checklocksignore
+func (aa *AlignedAtomicInt64) StateLoad(stateSourceObject state.Source) {
+ stateSourceObject.Load(0, &aa.value)
+}
+
+// StateTypeName returns the name identifying AlignedAtomicUint64 in saved state.
+func (aa *AlignedAtomicUint64) StateTypeName() string {
+ return "pkg/atomicbitops.AlignedAtomicUint64"
+}
+
+// StateFields lists the fields serialized by StateSave/StateLoad.
+func (aa *AlignedAtomicUint64) StateFields() []string {
+ return []string{
+ "value",
+ }
+}
+
+// beforeSave is called by StateSave prior to serialization; no-op here.
+func (aa *AlignedAtomicUint64) beforeSave() {}
+
+// StateSave serializes aa into stateSinkObject.
+//
+// +checklocksignore
+func (aa *AlignedAtomicUint64) StateSave(stateSinkObject state.Sink) {
+ aa.beforeSave()
+ stateSinkObject.Save(0, &aa.value)
+}
+
+// afterLoad does nothing; provided for the state framework (generated code).
+func (aa *AlignedAtomicUint64) afterLoad() {}
+
+// StateLoad deserializes aa from stateSourceObject.
+//
+// +checklocksignore
+func (aa *AlignedAtomicUint64) StateLoad(stateSourceObject state.Source) {
+ stateSourceObject.Load(0, &aa.value)
+}
+
+// init registers both aligned atomic types with the state package.
+func init() {
+ state.Register((*AlignedAtomicInt64)(nil))
+ state.Register((*AlignedAtomicUint64)(nil))
+}