Diffstat (limited to 'pkg/atomicbitops/atomic_bitops_arm64.s')
-rw-r--r-- pkg/atomicbitops/atomic_bitops_arm64.s | 139
1 file changed, 139 insertions(+), 0 deletions(-)
diff --git a/pkg/atomicbitops/atomic_bitops_arm64.s b/pkg/atomicbitops/atomic_bitops_arm64.s
new file mode 100644
index 000000000..97f8808c1
--- /dev/null
+++ b/pkg/atomicbitops/atomic_bitops_arm64.s
@@ -0,0 +1,139 @@
+// Copyright 2019 The gVisor Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// +build arm64
+
+#include "textflag.h"
+
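+// AndUint32 atomically ANDs val into *ptr, using an LDAXRW/STLXRW
+// load-exclusive/store-exclusive loop that retries until the store succeeds.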
+TEXT ·AndUint32(SB),$0-12
+ MOVD ptr+0(FP), R0
+ MOVW val+8(FP), R1
+again:
+ LDAXRW (R0), R2
+ ANDW R1, R2
+ STLXRW R2, (R0), R3
+ CBNZ R3, again
+ RET
+
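+// OrUint32 atomically ORs val into *ptr.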
+TEXT ·OrUint32(SB),$0-12
+ MOVD ptr+0(FP), R0
+ MOVW val+8(FP), R1
+again:
+ LDAXRW (R0), R2
+ ORRW R1, R2
+ STLXRW R2, (R0), R3
+ CBNZ R3, again
+ RET
+
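+// XorUint32 atomically XORs val into *ptr.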
+TEXT ·XorUint32(SB),$0-12
+ MOVD ptr+0(FP), R0
+ MOVW val+8(FP), R1
+again:
+ LDAXRW (R0), R2
+ EORW R1, R2
+ STLXRW R2, (R0), R3
+ CBNZ R3, again
+ RET
+
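+// CompareAndSwapUint32 atomically stores new to *addr if *addr equals old,
+// and returns the value previously stored at *addr.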
+TEXT ·CompareAndSwapUint32(SB),$0-20
+ MOVD addr+0(FP), R0
+ MOVW old+8(FP), R1
+ MOVW new+12(FP), R2
+
+again:
+ LDAXRW (R0), R3
+ CMPW R1, R3
+ BNE done
+ STLXRW R2, (R0), R4
+ CBNZ R4, again
+done:
+ MOVW R3, prev+16(FP)
+ RET
+
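+// AndUint64 atomically ANDs val into *ptr (64-bit variant of AndUint32).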
+TEXT ·AndUint64(SB),$0-16
+ MOVD ptr+0(FP), R0
+ MOVD val+8(FP), R1
+again:
+ LDAXR (R0), R2
+ AND R1, R2
+ STLXR R2, (R0), R3
+ CBNZ R3, again
+ RET
+
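+// OrUint64 atomically ORs val into *ptr.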
+TEXT ·OrUint64(SB),$0-16
+ MOVD ptr+0(FP), R0
+ MOVD val+8(FP), R1
+again:
+ LDAXR (R0), R2
+ ORR R1, R2
+ STLXR R2, (R0), R3
+ CBNZ R3, again
+ RET
+
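+// XorUint64 atomically XORs val into *ptr.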
+TEXT ·XorUint64(SB),$0-16
+ MOVD ptr+0(FP), R0
+ MOVD val+8(FP), R1
+again:
+ LDAXR (R0), R2
+ EOR R1, R2
+ STLXR R2, (R0), R3
+ CBNZ R3, again
+ RET
+
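+// CompareAndSwapUint64 atomically stores new to *addr if *addr equals old,
+// and returns the value previously stored at *addr.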
+TEXT ·CompareAndSwapUint64(SB),$0-32
+ MOVD addr+0(FP), R0
+ MOVD old+8(FP), R1
+ MOVD new+16(FP), R2
+
+again:
+ LDAXR (R0), R3
+ CMP R1, R3
+ BNE done
+ STLXR R2, (R0), R4
+ CBNZ R4, again
+done:
+ MOVD R3, prev+24(FP)
+ RET
+
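+// IncUnlessZeroInt32 increments *addr and returns true, unless *addr holds
+// zero, in which case it is left unchanged and false is returned.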
+TEXT ·IncUnlessZeroInt32(SB),NOSPLIT,$0-9
+ MOVD addr+0(FP), R0
+
+again:
+ LDAXRW (R0), R1
+ CBZ R1, fail
+ ADDW $1, R1
+ STLXRW R1, (R0), R2
+ CBNZ R2, again
+ MOVW $1, R2
+ MOVB R2, ret+8(FP)
+ RET
+fail:
+ MOVB ZR, ret+8(FP)
+ RET
+
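+// DecUnlessOneInt32 decrements *addr and returns true, unless *addr holds
+// one, in which case it is left unchanged and false is returned.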
+TEXT ·DecUnlessOneInt32(SB),NOSPLIT,$0-9
+ MOVD addr+0(FP), R0
+
+again:
+ LDAXRW (R0), R1
+ SUBSW $1, R1, R1
+ BEQ fail
+ STLXRW R1, (R0), R2
+ CBNZ R2, again
+ MOVW $1, R2
+ MOVB R2, ret+8(FP)
+ RET
+fail:
+ MOVB ZR, ret+8(FP)
+ RET