pkg/atomicbitops/aligned_32bit_unsafe.go
// Copyright 2021 The gVisor Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

//go:build arm || mips || 386
// +build arm mips 386

package atomicbitops

import (
	"sync/atomic"
	"unsafe"
)

// AlignedAtomicInt64 is an atomic int64 that is guaranteed to be 64-bit
// aligned, even on 32-bit systems.
//
// Per https://golang.org/pkg/sync/atomic/#pkg-note-BUG:
//
// "On ARM, 386, and 32-bit MIPS, it is the caller's responsibility to arrange
// for 64-bit alignment of 64-bit words accessed atomically. The first word in
// a variable or in an allocated struct, array, or slice can be relied upon to
// be 64-bit aligned."
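//
// In practice, a plain 64-bit field at a 4-byte-aligned (but not
// 8-byte-aligned) offset panics under atomic access on those platforms.
// An illustrative (hypothetical) layout:
//
//	type counters struct {
//		ready int32
//		hits  int64 // offset 4 on 32-bit: atomic.AddInt64(&c.hits, 1) panics
//	}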
//
// +stateify savable
type AlignedAtomicInt64 struct {
	value [15]byte
}

func (aa *AlignedAtomicInt64) ptr() *int64 {
	// In the 15-byte aa.value, there are guaranteed to be 8 contiguous
	// bytes with 64-bit alignment. We find an address in this range by
	// adding 7, then clearing the 3 least significant bits to get its start.
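	//
	// For example (illustrative arithmetic, not from the original): if
	// &aa.value[0] is 0x1003, then 0x1003+7 = 0x100a, and 0x100a &^ 7 =
	// 0x1008, which is 8-byte aligned and selects aa.value[5:13]. In the
	// worst case (base address % 8 == 1) the result is aa.value[7:15],
	// so the 15-byte buffer always suffices.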
	return (*int64)(unsafe.Pointer((uintptr(unsafe.Pointer(&aa.value[0])) + 7) &^ 7))
}

// Load is analogous to atomic.LoadInt64.
func (aa *AlignedAtomicInt64) Load() int64 {
	return atomic.LoadInt64(aa.ptr())
}

// Store is analogous to atomic.StoreInt64.
func (aa *AlignedAtomicInt64) Store(v int64) {
	atomic.StoreInt64(aa.ptr(), v)
}

// Add is analogous to atomic.AddInt64.
func (aa *AlignedAtomicInt64) Add(v int64) int64 {
	return atomic.AddInt64(aa.ptr(), v)
}

// AlignedAtomicUint64 is an atomic uint64 that is guaranteed to be 64-bit
// aligned, even on 32-bit systems.
//
// Per https://golang.org/pkg/sync/atomic/#pkg-note-BUG:
//
// "On ARM, 386, and 32-bit MIPS, it is the caller's responsibility to arrange
// for 64-bit alignment of 64-bit words accessed atomically. The first word in
// a variable or in an allocated struct, array, or slice can be relied upon to
// be 64-bit aligned."
//
// +stateify savable
type AlignedAtomicUint64 struct {
	value [15]byte
}

func (aa *AlignedAtomicUint64) ptr() *uint64 {
	// In the 15-byte aa.value, there are guaranteed to be 8 contiguous
	// bytes with 64-bit alignment. We find an address in this range by
	// adding 7, then clearing the 3 least significant bits to get its start.
	return (*uint64)(unsafe.Pointer((uintptr(unsafe.Pointer(&aa.value[0])) + 7) &^ 7))
}

// Load is analogous to atomic.LoadUint64.
func (aa *AlignedAtomicUint64) Load() uint64 {
	return atomic.LoadUint64(aa.ptr())
}

// Store is analogous to atomic.StoreUint64.
func (aa *AlignedAtomicUint64) Store(v uint64) {
	atomic.StoreUint64(aa.ptr(), v)
}

// Add is analogous to atomic.AddUint64.
func (aa *AlignedAtomicUint64) Add(v uint64) uint64 {
	return atomic.AddUint64(aa.ptr(), v)
}
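
// A minimal usage sketch (illustrative; the struct and function names below
// are hypothetical, only Load/Store/Add come from this file): embed the type
// at any offset and use its methods instead of the sync/atomic functions.
//
//	type stats struct {
//		flag  int32               // would misalign a plain uint64 on 32-bit
//		count AlignedAtomicUint64 // still safe: alignment is fixed at runtime
//	}
//
//	func record(s *stats) uint64 {
//		s.count.Add(1)
//		return s.count.Load()
//	}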