author    gVisor bot <gvisor-bot@google.com> 2019-07-12 16:19:23 +0000
committer gVisor bot <gvisor-bot@google.com> 2019-07-12 16:19:23 +0000
commit    715afd5da8c2d5aacd7ab64535b92dac1abc3f37 (patch)
tree      432c7c1ada560a23f26de6e9b8beefdda97ccdcc /pkg/sentry/kernel/seqatomic_taskgoroutineschedinfo_unsafe.go
parent    e3abe5ebdf5e0c98429faee7d57d126d452f7293 (diff)
parent    69e0affaecda24b4d193e4592202b55b53afecc3 (diff)
Merge 69e0affa (automated)
Diffstat (limited to 'pkg/sentry/kernel/seqatomic_taskgoroutineschedinfo_unsafe.go')
-rwxr-xr-x  pkg/sentry/kernel/seqatomic_taskgoroutineschedinfo_unsafe.go  55
1 file changed, 55 insertions(+), 0 deletions(-)
diff --git a/pkg/sentry/kernel/seqatomic_taskgoroutineschedinfo_unsafe.go b/pkg/sentry/kernel/seqatomic_taskgoroutineschedinfo_unsafe.go
new file mode 100755
index 000000000..895abb129
--- /dev/null
+++ b/pkg/sentry/kernel/seqatomic_taskgoroutineschedinfo_unsafe.go
@@ -0,0 +1,55 @@
+package kernel
+
+import (
+	"fmt"
+	"reflect"
+	"strings"
+	"unsafe"
+
+	"gvisor.dev/gvisor/third_party/gvsync"
+)
+
+// SeqAtomicLoadTaskGoroutineSchedInfo returns a copy of *ptr, ensuring that
+// the read does not race with any writer critical sections in sc.
+func SeqAtomicLoadTaskGoroutineSchedInfo(sc *gvsync.SeqCount, ptr *TaskGoroutineSchedInfo) TaskGoroutineSchedInfo {
+	// This function doesn't use SeqAtomicTryLoad because doing so is
+	// measurably, significantly (~20%) slower; Go is awful at inlining.
+	var val TaskGoroutineSchedInfo
+	for {
+		epoch := sc.BeginRead()
+		if gvsync.RaceEnabled {
+			// The read intentionally races with writers; copy via
+			// Memmove, which the race detector does not instrument,
+			// so the race is not reported.
+			gvsync.Memmove(unsafe.Pointer(&val), unsafe.Pointer(ptr), unsafe.Sizeof(val))
+		} else {
+			// Without the race detector, a plain assignment is the
+			// fastest way to copy the value.
+			val = *ptr
+		}
+		// Retry if the copy overlapped a writer critical section.
+		if sc.ReadOk(epoch) {
+			break
+		}
+	}
+	return val
+}
+
+// SeqAtomicTryLoadTaskGoroutineSchedInfo returns a copy of *ptr while in a
+// reader critical section in sc initiated by a call to sc.BeginRead() that
+// returned epoch. If the read would race with a writer critical section,
+// SeqAtomicTryLoadTaskGoroutineSchedInfo returns (unspecified, false).
+func SeqAtomicTryLoadTaskGoroutineSchedInfo(sc *gvsync.SeqCount, epoch gvsync.SeqCountEpoch, ptr *TaskGoroutineSchedInfo) (TaskGoroutineSchedInfo, bool) {
+	var val TaskGoroutineSchedInfo
+	if gvsync.RaceEnabled {
+		// As above, use the uninstrumented Memmove so the race
+		// detector does not report the intentionally racy read.
+		gvsync.Memmove(unsafe.Pointer(&val), unsafe.Pointer(ptr), unsafe.Sizeof(val))
+	} else {
+		val = *ptr
+	}
+	return val, sc.ReadOk(epoch)
+}
+
+// initTaskGoroutineSchedInfo verifies that TaskGoroutineSchedInfo contains no
+// pointers, since the racy loads above could observe a torn pointer, which
+// would be unsafe.
+func initTaskGoroutineSchedInfo() {
+	var val TaskGoroutineSchedInfo
+	typ := reflect.TypeOf(val)
+	name := typ.Name()
+	if ptrs := gvsync.PointersInType(typ, name); len(ptrs) != 0 {
+		panic(fmt.Sprintf("SeqAtomicLoad<%s> is invalid since values %s of type %s contain pointers:\n%s", typ, name, typ, strings.Join(ptrs, "\n")))
+	}
+}
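
For context: these generated loaders pair with writer critical sections on the same gvsync.SeqCount. The sketch below is illustrative only and is not part of this commit; the schedInfoCell type and its methods are hypothetical, assuming the BeginWrite/EndWrite writer API that gvsync.SeqCount provides alongside BeginRead/ReadOk.

package kernel

import "gvisor.dev/gvisor/third_party/gvsync"

// schedInfoCell is a hypothetical example type: a TaskGoroutineSchedInfo
// guarded by a SeqCount rather than a mutex.
type schedInfoCell struct {
	seq  gvsync.SeqCount
	info TaskGoroutineSchedInfo
}

// set brackets the update in a writer critical section; concurrent readers
// observe the write in progress and retry.
func (c *schedInfoCell) set(info TaskGoroutineSchedInfo) {
	c.seq.BeginWrite()
	c.info = info
	c.seq.EndWrite()
}

// get returns a consistent snapshot of the value without taking a lock.
func (c *schedInfoCell) get() TaskGoroutineSchedInfo {
	return SeqAtomicLoadTaskGoroutineSchedInfo(&c.seq, &c.info)
}

The TryLoad variant exists so a caller can read several values under one reader epoch and keep them mutually consistent. Another hypothetical sketch, assuming two TaskGoroutineSchedInfo values guarded by the same SeqCount:

// loadBoth returns copies of *a and *b taken within a single uninterrupted
// reader critical section, retrying until no writer intervened.
func loadBoth(sc *gvsync.SeqCount, a, b *TaskGoroutineSchedInfo) (TaskGoroutineSchedInfo, TaskGoroutineSchedInfo) {
	for {
		epoch := sc.BeginRead()
		av, ok := SeqAtomicTryLoadTaskGoroutineSchedInfo(sc, epoch, a)
		if !ok {
			continue
		}
		bv, ok := SeqAtomicTryLoadTaskGoroutineSchedInfo(sc, epoch, b)
		if ok {
			return av, bv
		}
	}
}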