path: root/pkg/atomicbitops/atomicbitops_noasm.go
author     nybidari <59618317+nybidari@users.noreply.github.com>    2020-02-25 15:33:59 -0800
committer  GitHub <noreply@github.com>                              2020-02-25 15:33:59 -0800
commit     818abc2bd5096bf7dc6f621cfd2923bee4e0fc7b (patch)
tree       0f7f90cecb6afc332a3229651eba4da391a46b41 /pkg/atomicbitops/atomicbitops_noasm.go
parent     acc405ba60834f5dce9ce04cd762d5cda02224cb (diff)
parent     72e3f3a3eef3a1dc02db0ff71f98a5d7fe89a6e3 (diff)
Merge branch 'master' into iptables
Diffstat (limited to 'pkg/atomicbitops/atomicbitops_noasm.go')
-rw-r--r--    pkg/atomicbitops/atomicbitops_noasm.go    105
1 file changed, 105 insertions, 0 deletions
diff --git a/pkg/atomicbitops/atomicbitops_noasm.go b/pkg/atomicbitops/atomicbitops_noasm.go
new file mode 100644
index 000000000..3b2898256
--- /dev/null
+++ b/pkg/atomicbitops/atomicbitops_noasm.go
@@ -0,0 +1,105 @@
+// Copyright 2018 The gVisor Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// +build !amd64,!arm64
+
+package atomicbitops
+
+import (
+ "sync/atomic"
+)
+
+func AndUint32(addr *uint32, val uint32) {
+ for {
+ o := atomic.LoadUint32(addr)
+ n := o & val
+ if atomic.CompareAndSwapUint32(addr, o, n) {
+ break
+ }
+ }
+}
+
+func OrUint32(addr *uint32, val uint32) {
+ for {
+ o := atomic.LoadUint32(addr)
+ n := o | val
+ if atomic.CompareAndSwapUint32(addr, o, n) {
+ break
+ }
+ }
+}
+
+func XorUint32(addr *uint32, val uint32) {
+ for {
+ o := atomic.LoadUint32(addr)
+ n := o ^ val
+ if atomic.CompareAndSwapUint32(addr, o, n) {
+ break
+ }
+ }
+}
+
+func CompareAndSwapUint32(addr *uint32, old, new uint32) (prev uint32) {
+ for {
+ prev = atomic.LoadUint32(addr)
+ if prev != old {
+ return
+ }
+ if atomic.CompareAndSwapUint32(addr, old, new) {
+ return
+ }
+ }
+}
+
+func AndUint64(addr *uint64, val uint64) {
+ for {
+ o := atomic.LoadUint64(addr)
+ n := o & val
+ if atomic.CompareAndSwapUint64(addr, o, n) {
+ break
+ }
+ }
+}
+
+func OrUint64(addr *uint64, val uint64) {
+ for {
+ o := atomic.LoadUint64(addr)
+ n := o | val
+ if atomic.CompareAndSwapUint64(addr, o, n) {
+ break
+ }
+ }
+}
+
+func XorUint64(addr *uint64, val uint64) {
+ for {
+ o := atomic.LoadUint64(addr)
+ n := o ^ val
+ if atomic.CompareAndSwapUint64(addr, o, n) {
+ break
+ }
+ }
+}
+
+func CompareAndSwapUint64(addr *uint64, old, new uint64) (prev uint64) {
+ for {
+ prev = atomic.LoadUint64(addr)
+ if prev != old {
+ return
+ }
+ if atomic.CompareAndSwapUint64(addr, old, new) {
+ return
+ }
+ }
+}
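
The file added above is the pure-Go fallback for architectures other than amd64 and arm64 (per the "+build !amd64,!arm64" tag): each helper retries a load / compare-and-swap loop until its bitwise update lands, instead of using dedicated assembly. A minimal usage sketch follows; the import path gvisor.dev/gvisor/pkg/atomicbitops is an assumption based on the file's location in the gVisor tree, not something stated in this diff.

package main

import (
	"fmt"

	// Assumed import path, inferred from pkg/atomicbitops in the gVisor repository.
	"gvisor.dev/gvisor/pkg/atomicbitops"
)

func main() {
	var flags uint32 = 0x0A

	// OrUint32 sets bits: 0x0A | 0x01 = 0x0B.
	atomicbitops.OrUint32(&flags, 0x01)

	// AndUint32 clears bits outside the mask: 0x0B & 0x03 = 0x03.
	atomicbitops.AndUint32(&flags, 0x03)

	// XorUint32 toggles bits: 0x03 ^ 0x02 = 0x01.
	atomicbitops.XorUint32(&flags, 0x02)

	// CompareAndSwapUint32 returns the previous value; the swap only
	// takes effect when that value equals old (here 0x01).
	prev := atomicbitops.CompareAndSwapUint32(&flags, 0x01, 0x2A)

	fmt.Printf("%#x %#x\n", prev, flags) // 0x1 0x2a
}

Each call may loop more than once under contention: if another goroutine changes the word between the atomic.Load and the atomic.CompareAndSwap, the CAS fails and the helper recomputes the new value from the fresh load before retrying.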