| author | gVisor bot <gvisor-bot@google.com> | 2019-11-21 19:43:52 +0000 |
|---|---|---|
| committer | gVisor bot <gvisor-bot@google.com> | 2019-11-21 19:43:52 +0000 |
| commit | b3f49ebd74ff30ad50b495ddd563ee5aa9cd2bca (patch) | |
| tree | ad383037abfddbfeb8a11229eb1637909b25002e /pkg/sentry/time/seqatomic_parameters_unsafe.go | |
| parent | 158f38053d931caf57b90bb0b496cf7d2ec4d42b (diff) | |
| parent | c0f89eba6ebdec08460bd796fc62d6aef674d141 (diff) | |
Merge release-20191114.0-18-gc0f89eb (automated)
Diffstat (limited to 'pkg/sentry/time/seqatomic_parameters_unsafe.go')
-rwxr-xr-x | pkg/sentry/time/seqatomic_parameters_unsafe.go | 20 |
1 file changed, 10 insertions, 10 deletions
```diff
diff --git a/pkg/sentry/time/seqatomic_parameters_unsafe.go b/pkg/sentry/time/seqatomic_parameters_unsafe.go
index 89792c56d..e3c134e49 100755
--- a/pkg/sentry/time/seqatomic_parameters_unsafe.go
+++ b/pkg/sentry/time/seqatomic_parameters_unsafe.go
@@ -1,25 +1,25 @@
 package time

 import (
-	"fmt"
-	"reflect"
 	"strings"
 	"unsafe"

-	"gvisor.dev/gvisor/third_party/gvsync"
+	"fmt"
+	"gvisor.dev/gvisor/pkg/syncutil"
+	"reflect"
 )

 // SeqAtomicLoad returns a copy of *ptr, ensuring that the read does not race
 // with any writer critical sections in sc.
-func SeqAtomicLoadParameters(sc *gvsync.SeqCount, ptr *Parameters) Parameters {
+func SeqAtomicLoadParameters(sc *syncutil.SeqCount, ptr *Parameters) Parameters {
 	// This function doesn't use SeqAtomicTryLoad because doing so is
 	// measurably, significantly (~20%) slower; Go is awful at inlining.
 	var val Parameters
 	for {
 		epoch := sc.BeginRead()
-		if gvsync.RaceEnabled {
+		if syncutil.RaceEnabled {
-			gvsync.Memmove(unsafe.Pointer(&val), unsafe.Pointer(ptr), unsafe.Sizeof(val))
+			syncutil.Memmove(unsafe.Pointer(&val), unsafe.Pointer(ptr), unsafe.Sizeof(val))
 		} else {
 			val = *ptr
@@ -35,10 +35,10 @@ func SeqAtomicLoadParameters(sc *gvsync.SeqCount, ptr *Parameters) Parameters {
 // in sc initiated by a call to sc.BeginRead() that returned epoch. If the read
 // would race with a writer critical section, SeqAtomicTryLoad returns
 // (unspecified, false).
-func SeqAtomicTryLoadParameters(sc *gvsync.SeqCount, epoch gvsync.SeqCountEpoch, ptr *Parameters) (Parameters, bool) {
+func SeqAtomicTryLoadParameters(sc *syncutil.SeqCount, epoch syncutil.SeqCountEpoch, ptr *Parameters) (Parameters, bool) {
 	var val Parameters
-	if gvsync.RaceEnabled {
-		gvsync.Memmove(unsafe.Pointer(&val), unsafe.Pointer(ptr), unsafe.Sizeof(val))
+	if syncutil.RaceEnabled {
+		syncutil.Memmove(unsafe.Pointer(&val), unsafe.Pointer(ptr), unsafe.Sizeof(val))
 	} else {
 		val = *ptr
 	}
@@ -49,7 +49,7 @@ func initParameters() {
 	var val Parameters
 	typ := reflect.TypeOf(val)
 	name := typ.Name()
-	if ptrs := gvsync.PointersInType(typ, name); len(ptrs) != 0 {
+	if ptrs := syncutil.PointersInType(typ, name); len(ptrs) != 0 {
 		panic(fmt.Sprintf("SeqAtomicLoad<%s> is invalid since values %s of type %s contain pointers:\n%s", typ, name, typ, strings.Join(ptrs, "\n")))
 	}
 }
```
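For context, the generated functions above implement the reader side of a sequence counter (seqlock): a reader snapshots the protected `Parameters` value and only keeps the copy if no writer ran concurrently. The sketch below is a minimal, self-contained illustration of that retry pattern, not the real `syncutil` API; the names `seqCount`, `parameters`, and `load` are hypothetical stand-ins.

```go
package main

import (
	"fmt"
	"sync/atomic"
)

// seqCount is a simplified stand-in for a sequence counter: writers bump the
// epoch to an odd value before mutating the protected data and to an even
// value afterwards; readers retry whenever the epoch is odd or has changed.
type seqCount struct {
	epoch uint32
}

func (sc *seqCount) beginWrite() { atomic.AddUint32(&sc.epoch, 1) } // epoch becomes odd
func (sc *seqCount) endWrite()   { atomic.AddUint32(&sc.epoch, 1) } // epoch becomes even

// beginRead spins until no writer is in its critical section (epoch is even).
func (sc *seqCount) beginRead() uint32 {
	for {
		if e := atomic.LoadUint32(&sc.epoch); e%2 == 0 {
			return e
		}
	}
}

// readOk reports whether the epoch is unchanged, i.e. no writer ran since
// beginRead returned epoch.
func (sc *seqCount) readOk(epoch uint32) bool {
	return atomic.LoadUint32(&sc.epoch) == epoch
}

// parameters is a hypothetical pointer-free struct standing in for the
// Parameters type guarded by the SeqCount in the diff.
type parameters struct {
	frequency       uint64
	baseNanoseconds int64
}

// load mirrors the retry loop in SeqAtomicLoadParameters: copy the value,
// then discard the copy and retry if a writer overlapped the read. The plain
// copy may observe a torn value, which is exactly why it is only returned
// after readOk confirms no concurrent write.
func load(sc *seqCount, ptr *parameters) parameters {
	for {
		epoch := sc.beginRead()
		val := *ptr
		if sc.readOk(epoch) {
			return val
		}
	}
}

func main() {
	var sc seqCount
	p := parameters{frequency: 1000000}

	// Writer critical section.
	sc.beginWrite()
	p.baseNanoseconds = 42
	sc.endWrite()

	fmt.Printf("%+v\n", load(&sc, &p))
}
```

The race-detector branch in the generated code (`syncutil.RaceEnabled`) swaps the plain copy for `syncutil.Memmove`, since the deliberate unsynchronized read in this pattern would otherwise be flagged by the race detector.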