-rw-r--r--  pkg/abi/linux/linux_abi_autogen_unsafe.go                    | 64
-rw-r--r--  pkg/abi/linux/linux_amd64_abi_autogen_unsafe.go              |  8
-rw-r--r--  pkg/abi/linux/linux_arm64_abi_autogen_unsafe.go              |  6
-rw-r--r--  pkg/sentry/kernel/seqatomic_taskgoroutineschedinfo_unsafe.go |  4
-rw-r--r--  pkg/sentry/platform/ring0/defs_impl_arm64.go                 |  6
-rw-r--r--  pkg/sentry/time/seqatomic_parameters_unsafe.go               |  4

6 files changed, 46 insertions, 46 deletions
diff --git a/pkg/abi/linux/linux_abi_autogen_unsafe.go b/pkg/abi/linux/linux_abi_autogen_unsafe.go
index 4fa0a9ffe..573062796 100644
--- a/pkg/abi/linux/linux_abi_autogen_unsafe.go
+++ b/pkg/abi/linux/linux_abi_autogen_unsafe.go
@@ -1104,7 +1104,7 @@ func (s *Statx) Packed() bool {
 
 // MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.
 func (s *Statx) MarshalUnsafe(dst []byte) {
-    if s.Btime.Packed() && s.Ctime.Packed() && s.Mtime.Packed() && s.Atime.Packed() {
+    if s.Atime.Packed() && s.Btime.Packed() && s.Ctime.Packed() && s.Mtime.Packed() {
         safecopy.CopyIn(dst, unsafe.Pointer(s))
     } else {
         // Type Statx doesn't have a packed layout in memory, fallback to MarshalBytes.
@@ -1114,7 +1114,7 @@ func (s *Statx) MarshalUnsafe(dst []byte) {
 
 // UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe.
 func (s *Statx) UnmarshalUnsafe(src []byte) {
-    if s.Ctime.Packed() && s.Mtime.Packed() && s.Atime.Packed() && s.Btime.Packed() {
+    if s.Atime.Packed() && s.Btime.Packed() && s.Ctime.Packed() && s.Mtime.Packed() {
         safecopy.CopyOut(unsafe.Pointer(s), src)
     } else {
         // Type Statx doesn't have a packed layout in memory, fallback to UnmarshalBytes.
@@ -1125,7 +1125,7 @@ func (s *Statx) UnmarshalUnsafe(src []byte) {
 // CopyOutN implements marshal.Marshallable.CopyOutN.
 //go:nosplit
 func (s *Statx) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) {
-    if !s.Ctime.Packed() && s.Mtime.Packed() && s.Atime.Packed() && s.Btime.Packed() {
+    if !s.Atime.Packed() && s.Btime.Packed() && s.Ctime.Packed() && s.Mtime.Packed() {
         // Type Statx doesn't have a packed layout in memory, fall back to MarshalBytes.
         buf := cc.CopyScratchBuffer(s.SizeBytes()) // escapes: okay.
         s.MarshalBytes(buf) // escapes: fallback.
@@ -1155,7 +1155,7 @@ func (s *Statx) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error)
 // CopyIn implements marshal.Marshallable.CopyIn.
 //go:nosplit
 func (s *Statx) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) {
-    if !s.Atime.Packed() && s.Btime.Packed() && s.Ctime.Packed() && s.Mtime.Packed() {
+    if !s.Btime.Packed() && s.Ctime.Packed() && s.Mtime.Packed() && s.Atime.Packed() {
         // Type Statx doesn't have a packed layout in memory, fall back to UnmarshalBytes.
         buf := cc.CopyScratchBuffer(s.SizeBytes()) // escapes: okay.
         length, err := cc.CopyInBytes(addr, buf) // escapes: okay.
@@ -4416,7 +4416,7 @@ func (i *IPTEntry) UnmarshalBytes(src []byte) {
 // Packed implements marshal.Marshallable.Packed.
 //go:nosplit
 func (i *IPTEntry) Packed() bool {
-    return i.IP.Packed() && i.Counters.Packed()
+    return i.Counters.Packed() && i.IP.Packed()
 }
 
 // MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.
@@ -4431,7 +4431,7 @@ func (i *IPTEntry) MarshalUnsafe(dst []byte) {
 
 // UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe.
 func (i *IPTEntry) UnmarshalUnsafe(src []byte) {
-    if i.Counters.Packed() && i.IP.Packed() {
+    if i.IP.Packed() && i.Counters.Packed() {
         safecopy.CopyOut(unsafe.Pointer(i), src)
     } else {
         // Type IPTEntry doesn't have a packed layout in memory, fallback to UnmarshalBytes.
@@ -4442,7 +4442,7 @@ func (i *IPTEntry) UnmarshalUnsafe(src []byte) {
 // CopyOutN implements marshal.Marshallable.CopyOutN.
 //go:nosplit
 func (i *IPTEntry) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) {
-    if !i.Counters.Packed() && i.IP.Packed() {
+    if !i.IP.Packed() && i.Counters.Packed() {
         // Type IPTEntry doesn't have a packed layout in memory, fall back to MarshalBytes.
         buf := cc.CopyScratchBuffer(i.SizeBytes()) // escapes: okay.
         i.MarshalBytes(buf) // escapes: fallback.
@@ -4609,7 +4609,7 @@ func (i *IPTIP) Packed() bool {
 
 // MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.
 func (i *IPTIP) MarshalUnsafe(dst []byte) {
-    if i.Dst.Packed() && i.SrcMask.Packed() && i.DstMask.Packed() && i.Src.Packed() {
+    if i.Src.Packed() && i.Dst.Packed() && i.SrcMask.Packed() && i.DstMask.Packed() {
         safecopy.CopyIn(dst, unsafe.Pointer(i))
     } else {
         // Type IPTIP doesn't have a packed layout in memory, fallback to MarshalBytes.
@@ -4630,7 +4630,7 @@ func (i *IPTIP) UnmarshalUnsafe(src []byte) {
 // CopyOutN implements marshal.Marshallable.CopyOutN.
 //go:nosplit
 func (i *IPTIP) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) {
-    if !i.Src.Packed() && i.Dst.Packed() && i.SrcMask.Packed() && i.DstMask.Packed() {
+    if !i.DstMask.Packed() && i.Src.Packed() && i.Dst.Packed() && i.SrcMask.Packed() {
         // Type IPTIP doesn't have a packed layout in memory, fall back to MarshalBytes.
         buf := cc.CopyScratchBuffer(i.SizeBytes()) // escapes: okay.
         i.MarshalBytes(buf) // escapes: fallback.
@@ -4660,7 +4660,7 @@ func (i *IPTIP) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error)
 // CopyIn implements marshal.Marshallable.CopyIn.
 //go:nosplit
 func (i *IPTIP) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) {
-    if !i.DstMask.Packed() && i.Src.Packed() && i.Dst.Packed() && i.SrcMask.Packed() {
+    if !i.Src.Packed() && i.Dst.Packed() && i.SrcMask.Packed() && i.DstMask.Packed() {
         // Type IPTIP doesn't have a packed layout in memory, fall back to UnmarshalBytes.
         buf := cc.CopyScratchBuffer(i.SizeBytes()) // escapes: okay.
         length, err := cc.CopyInBytes(addr, buf) // escapes: okay.
@@ -4686,7 +4686,7 @@ func (i *IPTIP) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) {
 
 // WriteTo implements io.WriterTo.WriteTo.
 func (i *IPTIP) WriteTo(writer io.Writer) (int64, error) {
-    if !i.Src.Packed() && i.Dst.Packed() && i.SrcMask.Packed() && i.DstMask.Packed() {
+    if !i.DstMask.Packed() && i.Src.Packed() && i.Dst.Packed() && i.SrcMask.Packed() {
         // Type IPTIP doesn't have a packed layout in memory, fall back to MarshalBytes.
         buf := make([]byte, i.SizeBytes())
         i.MarshalBytes(buf)
@@ -5400,7 +5400,7 @@ func (i *IP6TEntry) Packed() bool {
 
 // MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.
 func (i *IP6TEntry) MarshalUnsafe(dst []byte) {
-    if i.IPv6.Packed() && i.Counters.Packed() {
+    if i.Counters.Packed() && i.IPv6.Packed() {
         safecopy.CopyIn(dst, unsafe.Pointer(i))
     } else {
         // Type IP6TEntry doesn't have a packed layout in memory, fallback to MarshalBytes.
@@ -5477,7 +5477,7 @@ func (i *IP6TEntry) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, erro
 
 // WriteTo implements io.WriterTo.WriteTo.
 func (i *IP6TEntry) WriteTo(writer io.Writer) (int64, error) {
-    if !i.IPv6.Packed() && i.Counters.Packed() {
+    if !i.Counters.Packed() && i.IPv6.Packed() {
         // Type IP6TEntry doesn't have a packed layout in memory, fall back to MarshalBytes.
         buf := make([]byte, i.SizeBytes())
         i.MarshalBytes(buf)
@@ -5592,12 +5592,12 @@ func (i *IP6TIP) UnmarshalBytes(src []byte) {
 // Packed implements marshal.Marshallable.Packed.
 //go:nosplit
 func (i *IP6TIP) Packed() bool {
-    return i.Dst.Packed() && i.SrcMask.Packed() && i.DstMask.Packed() && i.Src.Packed()
+    return i.Src.Packed() && i.Dst.Packed() && i.SrcMask.Packed() && i.DstMask.Packed()
 }
 
 // MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.
 func (i *IP6TIP) MarshalUnsafe(dst []byte) {
-    if i.Src.Packed() && i.Dst.Packed() && i.SrcMask.Packed() && i.DstMask.Packed() {
+    if i.DstMask.Packed() && i.Src.Packed() && i.Dst.Packed() && i.SrcMask.Packed() {
         safecopy.CopyIn(dst, unsafe.Pointer(i))
     } else {
         // Type IP6TIP doesn't have a packed layout in memory, fallback to MarshalBytes.
@@ -5607,7 +5607,7 @@ func (i *IP6TIP) MarshalUnsafe(dst []byte) {
 
 // UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe.
 func (i *IP6TIP) UnmarshalUnsafe(src []byte) {
-    if i.DstMask.Packed() && i.Src.Packed() && i.Dst.Packed() && i.SrcMask.Packed() {
+    if i.Src.Packed() && i.Dst.Packed() && i.SrcMask.Packed() && i.DstMask.Packed() {
         safecopy.CopyOut(unsafe.Pointer(i), src)
     } else {
         // Type IP6TIP doesn't have a packed layout in memory, fallback to UnmarshalBytes.
@@ -5618,7 +5618,7 @@ func (i *IP6TIP) UnmarshalUnsafe(src []byte) {
 // CopyOutN implements marshal.Marshallable.CopyOutN.
 //go:nosplit
 func (i *IP6TIP) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) {
-    if !i.Src.Packed() && i.Dst.Packed() && i.SrcMask.Packed() && i.DstMask.Packed() {
+    if !i.SrcMask.Packed() && i.DstMask.Packed() && i.Src.Packed() && i.Dst.Packed() {
         // Type IP6TIP doesn't have a packed layout in memory, fall back to MarshalBytes.
         buf := cc.CopyScratchBuffer(i.SizeBytes()) // escapes: okay.
         i.MarshalBytes(buf) // escapes: fallback.
@@ -5674,7 +5674,7 @@ func (i *IP6TIP) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error)
 
 // WriteTo implements io.WriterTo.WriteTo.
 func (i *IP6TIP) WriteTo(writer io.Writer) (int64, error) {
-    if !i.DstMask.Packed() && i.Src.Packed() && i.Dst.Packed() && i.SrcMask.Packed() {
+    if !i.Src.Packed() && i.Dst.Packed() && i.SrcMask.Packed() && i.DstMask.Packed() {
         // Type IP6TIP doesn't have a packed layout in memory, fall back to MarshalBytes.
         buf := make([]byte, i.SizeBytes())
         i.MarshalBytes(buf)
@@ -6223,7 +6223,7 @@ func (r *Rusage) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error)
 // CopyIn implements marshal.Marshallable.CopyIn.
 //go:nosplit
 func (r *Rusage) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) {
-    if !r.UTime.Packed() && r.STime.Packed() {
+    if !r.STime.Packed() && r.UTime.Packed() {
         // Type Rusage doesn't have a packed layout in memory, fall back to UnmarshalBytes.
         buf := cc.CopyScratchBuffer(r.SizeBytes()) // escapes: okay.
         length, err := cc.CopyInBytes(addr, buf) // escapes: okay.
@@ -6370,7 +6370,7 @@ func (s *SemidDS) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error
 // CopyIn implements marshal.Marshallable.CopyIn.
 //go:nosplit
 func (s *SemidDS) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) {
-    if !s.SemPerm.Packed() && s.SemOTime.Packed() && s.SemCTime.Packed() {
+    if !s.SemCTime.Packed() && s.SemPerm.Packed() && s.SemOTime.Packed() {
         // Type SemidDS doesn't have a packed layout in memory, fall back to UnmarshalBytes.
         buf := cc.CopyScratchBuffer(s.SizeBytes()) // escapes: okay.
         length, err := cc.CopyInBytes(addr, buf) // escapes: okay.
@@ -6661,12 +6661,12 @@ func (s *ShmidDS) UnmarshalBytes(src []byte) {
 // Packed implements marshal.Marshallable.Packed.
 //go:nosplit
 func (s *ShmidDS) Packed() bool {
-    return s.ShmCtime.Packed() && s.ShmPerm.Packed() && s.ShmAtime.Packed() && s.ShmDtime.Packed()
+    return s.ShmAtime.Packed() && s.ShmDtime.Packed() && s.ShmCtime.Packed() && s.ShmPerm.Packed()
 }
 
 // MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.
 func (s *ShmidDS) MarshalUnsafe(dst []byte) {
-    if s.ShmPerm.Packed() && s.ShmAtime.Packed() && s.ShmDtime.Packed() && s.ShmCtime.Packed() {
+    if s.ShmAtime.Packed() && s.ShmDtime.Packed() && s.ShmCtime.Packed() && s.ShmPerm.Packed() {
         safecopy.CopyIn(dst, unsafe.Pointer(s))
     } else {
         // Type ShmidDS doesn't have a packed layout in memory, fallback to MarshalBytes.
@@ -6676,7 +6676,7 @@ func (s *ShmidDS) MarshalUnsafe(dst []byte) {
 
 // UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe.
 func (s *ShmidDS) UnmarshalUnsafe(src []byte) {
-    if s.ShmDtime.Packed() && s.ShmCtime.Packed() && s.ShmPerm.Packed() && s.ShmAtime.Packed() {
+    if s.ShmPerm.Packed() && s.ShmAtime.Packed() && s.ShmDtime.Packed() && s.ShmCtime.Packed() {
         safecopy.CopyOut(unsafe.Pointer(s), src)
     } else {
         // Type ShmidDS doesn't have a packed layout in memory, fallback to UnmarshalBytes.
@@ -6687,7 +6687,7 @@ func (s *ShmidDS) UnmarshalUnsafe(src []byte) {
 // CopyOutN implements marshal.Marshallable.CopyOutN.
 //go:nosplit
 func (s *ShmidDS) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) {
-    if !s.ShmPerm.Packed() && s.ShmAtime.Packed() && s.ShmDtime.Packed() && s.ShmCtime.Packed() {
+    if !s.ShmCtime.Packed() && s.ShmPerm.Packed() && s.ShmAtime.Packed() && s.ShmDtime.Packed() {
         // Type ShmidDS doesn't have a packed layout in memory, fall back to MarshalBytes.
         buf := cc.CopyScratchBuffer(s.SizeBytes()) // escapes: okay.
         s.MarshalBytes(buf) // escapes: fallback.
@@ -6717,7 +6717,7 @@ func (s *ShmidDS) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error
 // CopyIn implements marshal.Marshallable.CopyIn.
 //go:nosplit
 func (s *ShmidDS) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) {
-    if !s.ShmPerm.Packed() && s.ShmAtime.Packed() && s.ShmDtime.Packed() && s.ShmCtime.Packed() {
+    if !s.ShmAtime.Packed() && s.ShmDtime.Packed() && s.ShmCtime.Packed() && s.ShmPerm.Packed() {
         // Type ShmidDS doesn't have a packed layout in memory, fall back to UnmarshalBytes.
         buf := cc.CopyScratchBuffer(s.SizeBytes()) // escapes: okay.
         length, err := cc.CopyInBytes(addr, buf) // escapes: okay.
@@ -6743,7 +6743,7 @@ func (s *ShmidDS) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error)
 
 // WriteTo implements io.WriterTo.WriteTo.
 func (s *ShmidDS) WriteTo(writer io.Writer) (int64, error) {
-    if !s.ShmPerm.Packed() && s.ShmAtime.Packed() && s.ShmDtime.Packed() && s.ShmCtime.Packed() {
+    if !s.ShmCtime.Packed() && s.ShmPerm.Packed() && s.ShmAtime.Packed() && s.ShmDtime.Packed() {
         // Type ShmidDS doesn't have a packed layout in memory, fall back to MarshalBytes.
         buf := make([]byte, s.SizeBytes())
         s.MarshalBytes(buf)
@@ -8774,7 +8774,7 @@ func (i *Itimerspec) Packed() bool {
 
 // MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.
 func (i *Itimerspec) MarshalUnsafe(dst []byte) {
-    if i.Interval.Packed() && i.Value.Packed() {
+    if i.Value.Packed() && i.Interval.Packed() {
         safecopy.CopyIn(dst, unsafe.Pointer(i))
     } else {
         // Type Itimerspec doesn't have a packed layout in memory, fallback to MarshalBytes.
@@ -8851,7 +8851,7 @@ func (i *Itimerspec) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, err
 
 // WriteTo implements io.WriterTo.WriteTo.
 func (i *Itimerspec) WriteTo(writer io.Writer) (int64, error) {
-    if !i.Interval.Packed() && i.Value.Packed() {
+    if !i.Value.Packed() && i.Interval.Packed() {
         // Type Itimerspec doesn't have a packed layout in memory, fall back to MarshalBytes.
         buf := make([]byte, i.SizeBytes())
         i.MarshalBytes(buf)
@@ -8955,7 +8955,7 @@ func (i *ItimerVal) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, err
 // CopyIn implements marshal.Marshallable.CopyIn.
 //go:nosplit
 func (i *ItimerVal) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) {
-    if !i.Interval.Packed() && i.Value.Packed() {
+    if !i.Value.Packed() && i.Interval.Packed() {
         // Type ItimerVal doesn't have a packed layout in memory, fall back to UnmarshalBytes.
         buf := cc.CopyScratchBuffer(i.SizeBytes()) // escapes: okay.
         length, err := cc.CopyInBytes(addr, buf) // escapes: okay.
@@ -9128,7 +9128,7 @@ func (t *Tms) UnmarshalBytes(src []byte) {
 // Packed implements marshal.Marshallable.Packed.
 //go:nosplit
 func (t *Tms) Packed() bool {
-    return t.UTime.Packed() && t.STime.Packed() && t.CUTime.Packed() && t.CSTime.Packed()
+    return t.CUTime.Packed() && t.CSTime.Packed() && t.UTime.Packed() && t.STime.Packed()
 }
 
 // MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.
@@ -9143,7 +9143,7 @@ func (t *Tms) MarshalUnsafe(dst []byte) {
 
 // UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe.
 func (t *Tms) UnmarshalUnsafe(src []byte) {
-    if t.CSTime.Packed() && t.UTime.Packed() && t.STime.Packed() && t.CUTime.Packed() {
+    if t.UTime.Packed() && t.STime.Packed() && t.CUTime.Packed() && t.CSTime.Packed() {
         safecopy.CopyOut(unsafe.Pointer(t), src)
     } else {
         // Type Tms doesn't have a packed layout in memory, fallback to UnmarshalBytes.
@@ -9154,7 +9154,7 @@ func (t *Tms) UnmarshalUnsafe(src []byte) {
 // CopyOutN implements marshal.Marshallable.CopyOutN.
 //go:nosplit
 func (t *Tms) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) {
-    if !t.UTime.Packed() && t.STime.Packed() && t.CUTime.Packed() && t.CSTime.Packed() {
+    if !t.STime.Packed() && t.CUTime.Packed() && t.CSTime.Packed() && t.UTime.Packed() {
         // Type Tms doesn't have a packed layout in memory, fall back to MarshalBytes.
         buf := cc.CopyScratchBuffer(t.SizeBytes()) // escapes: okay.
         t.MarshalBytes(buf) // escapes: fallback.
diff --git a/pkg/abi/linux/linux_amd64_abi_autogen_unsafe.go b/pkg/abi/linux/linux_amd64_abi_autogen_unsafe.go
index 4b53f9566..2b7f34c09 100644
--- a/pkg/abi/linux/linux_amd64_abi_autogen_unsafe.go
+++ b/pkg/abi/linux/linux_amd64_abi_autogen_unsafe.go
@@ -288,7 +288,7 @@ func (s *Stat) UnmarshalBytes(src []byte) {
 // Packed implements marshal.Marshallable.Packed.
 //go:nosplit
 func (s *Stat) Packed() bool {
-    return s.CTime.Packed() && s.ATime.Packed() && s.MTime.Packed()
+    return s.ATime.Packed() && s.MTime.Packed() && s.CTime.Packed()
 }
 
 // MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.
@@ -303,7 +303,7 @@ func (s *Stat) MarshalUnsafe(dst []byte) {
 
 // UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe.
 func (s *Stat) UnmarshalUnsafe(src []byte) {
-    if s.ATime.Packed() && s.MTime.Packed() && s.CTime.Packed() {
+    if s.MTime.Packed() && s.CTime.Packed() && s.ATime.Packed() {
         safecopy.CopyOut(unsafe.Pointer(s), src)
     } else {
         // Type Stat doesn't have a packed layout in memory, fallback to UnmarshalBytes.
@@ -344,7 +344,7 @@ func (s *Stat) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) {
 // CopyIn implements marshal.Marshallable.CopyIn.
 //go:nosplit
 func (s *Stat) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) {
-    if !s.ATime.Packed() && s.MTime.Packed() && s.CTime.Packed() {
+    if !s.CTime.Packed() && s.ATime.Packed() && s.MTime.Packed() {
         // Type Stat doesn't have a packed layout in memory, fall back to UnmarshalBytes.
         buf := cc.CopyScratchBuffer(s.SizeBytes()) // escapes: okay.
         length, err := cc.CopyInBytes(addr, buf) // escapes: okay.
@@ -370,7 +370,7 @@ func (s *Stat) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) {
 
 // WriteTo implements io.WriterTo.WriteTo.
 func (s *Stat) WriteTo(writer io.Writer) (int64, error) {
-    if !s.CTime.Packed() && s.ATime.Packed() && s.MTime.Packed() {
+    if !s.ATime.Packed() && s.MTime.Packed() && s.CTime.Packed() {
         // Type Stat doesn't have a packed layout in memory, fall back to MarshalBytes.
         buf := make([]byte, s.SizeBytes())
         s.MarshalBytes(buf)
diff --git a/pkg/abi/linux/linux_arm64_abi_autogen_unsafe.go b/pkg/abi/linux/linux_arm64_abi_autogen_unsafe.go
index 66f38e7e6..57ae6b620 100644
--- a/pkg/abi/linux/linux_arm64_abi_autogen_unsafe.go
+++ b/pkg/abi/linux/linux_arm64_abi_autogen_unsafe.go
@@ -295,12 +295,12 @@ func (s *Stat) UnmarshalBytes(src []byte) {
 // Packed implements marshal.Marshallable.Packed.
 //go:nosplit
 func (s *Stat) Packed() bool {
-    return s.ATime.Packed() && s.MTime.Packed() && s.CTime.Packed()
+    return s.MTime.Packed() && s.CTime.Packed() && s.ATime.Packed()
 }
 
 // MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.
 func (s *Stat) MarshalUnsafe(dst []byte) {
-    if s.CTime.Packed() && s.ATime.Packed() && s.MTime.Packed() {
+    if s.MTime.Packed() && s.CTime.Packed() && s.ATime.Packed() {
         safecopy.CopyIn(dst, unsafe.Pointer(s))
     } else {
         // Type Stat doesn't have a packed layout in memory, fallback to MarshalBytes.
@@ -351,7 +351,7 @@ func (s *Stat) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) {
 // CopyIn implements marshal.Marshallable.CopyIn.
 //go:nosplit
 func (s *Stat) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) {
-    if !s.MTime.Packed() && s.CTime.Packed() && s.ATime.Packed() {
+    if !s.ATime.Packed() && s.MTime.Packed() && s.CTime.Packed() {
         // Type Stat doesn't have a packed layout in memory, fall back to UnmarshalBytes.
         buf := cc.CopyScratchBuffer(s.SizeBytes()) // escapes: okay.
         length, err := cc.CopyInBytes(addr, buf) // escapes: okay.
diff --git a/pkg/sentry/kernel/seqatomic_taskgoroutineschedinfo_unsafe.go b/pkg/sentry/kernel/seqatomic_taskgoroutineschedinfo_unsafe.go
index 90148bbb2..8a2418c41 100644
--- a/pkg/sentry/kernel/seqatomic_taskgoroutineschedinfo_unsafe.go
+++ b/pkg/sentry/kernel/seqatomic_taskgoroutineschedinfo_unsafe.go
@@ -1,12 +1,12 @@
 package kernel
 
 import (
-    "fmt"
-    "reflect"
     "strings"
     "unsafe"
 
+    "fmt"
     "gvisor.dev/gvisor/pkg/sync"
+    "reflect"
 )
 
 // SeqAtomicLoad returns a copy of *ptr, ensuring that the read does not race
diff --git a/pkg/sentry/platform/ring0/defs_impl_arm64.go b/pkg/sentry/platform/ring0/defs_impl_arm64.go
index dbf1ef5cf..78547069f 100644
--- a/pkg/sentry/platform/ring0/defs_impl_arm64.go
+++ b/pkg/sentry/platform/ring0/defs_impl_arm64.go
@@ -1,13 +1,13 @@
 package ring0
 
 import (
-    "fmt"
     "gvisor.dev/gvisor/pkg/sentry/arch"
     "gvisor.dev/gvisor/pkg/sentry/platform/ring0/pagetables"
-    "io"
-    "reflect"
 
+    "fmt"
     "gvisor.dev/gvisor/pkg/usermem"
+    "io"
+    "reflect"
 )
 
 // Useful bits.
diff --git a/pkg/sentry/time/seqatomic_parameters_unsafe.go b/pkg/sentry/time/seqatomic_parameters_unsafe.go
index 2cb001080..14978ed91 100644
--- a/pkg/sentry/time/seqatomic_parameters_unsafe.go
+++ b/pkg/sentry/time/seqatomic_parameters_unsafe.go
@@ -1,12 +1,12 @@
 package time
 
 import (
-    "fmt"
-    "reflect"
     "strings"
     "unsafe"
 
+    "fmt"
     "gvisor.dev/gvisor/pkg/sync"
+    "reflect"
 )
 
 // SeqAtomicLoad returns a copy of *ptr, ensuring that the read does not race
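Every hunk in the autogen ABI files above touches the same generated pattern: each Marshallable type gets a Packed() method that ANDs the Packed() results of its nested fields, and MarshalUnsafe/UnmarshalUnsafe/CopyIn/CopyOut/CopyOutN/WriteTo test that same conjunction to choose between a raw memory copy and a field-by-field fallback. Only the order of the conjuncts changes here, which cannot change the result. The sketch below is a minimal, self-contained Go illustration of that pattern, not gvisor's actual marshal/safecopy API: the Timespec/Itimerspec stand-in types, their SizeBytes values, and the unsafe.Slice copy are assumptions chosen purely to keep the example runnable.

package main

import (
	"encoding/binary"
	"fmt"
	"unsafe"
)

// Timespec is a hypothetical stand-in for a nested field type such as the
// generated linux.Timespec.
type Timespec struct {
	Sec  int64
	Nsec int64
}

// Packed reports whether the in-memory layout of Timespec has no padding.
func (t *Timespec) Packed() bool { return true }

// SizeBytes is the wire size of Timespec (two 8-byte fields).
func (t *Timespec) SizeBytes() int { return 16 }

// MarshalBytes serializes field by field: the layout-independent slow path.
func (t *Timespec) MarshalBytes(dst []byte) {
	binary.LittleEndian.PutUint64(dst[0:8], uint64(t.Sec))
	binary.LittleEndian.PutUint64(dst[8:16], uint64(t.Nsec))
}

// Itimerspec mirrors the two-field structs whose generated methods appear in
// the diff above.
type Itimerspec struct {
	Interval Timespec
	Value    Timespec
}

// SizeBytes is the sum of the nested field sizes.
func (i *Itimerspec) SizeBytes() int {
	return i.Interval.SizeBytes() + i.Value.SizeBytes()
}

// Packed is a pure conjunction over the nested fields, so the order of the
// checks (the only thing the diff changes) cannot affect the result.
func (i *Itimerspec) Packed() bool {
	return i.Interval.Packed() && i.Value.Packed()
}

// MarshalBytes is the field-by-field fallback.
func (i *Itimerspec) MarshalBytes(dst []byte) {
	i.Interval.MarshalBytes(dst[:i.Interval.SizeBytes()])
	i.Value.MarshalBytes(dst[i.Interval.SizeBytes():])
}

// MarshalUnsafe copies the raw struct memory when the layout is packed and
// falls back to MarshalBytes otherwise, mirroring the shape of the generated
// fast path (which uses safecopy.CopyIn rather than unsafe.Slice + copy).
func (i *Itimerspec) MarshalUnsafe(dst []byte) {
	if i.Interval.Packed() && i.Value.Packed() {
		src := unsafe.Slice((*byte)(unsafe.Pointer(i)), i.SizeBytes())
		copy(dst, src)
	} else {
		i.MarshalBytes(dst)
	}
}

func main() {
	it := Itimerspec{Interval: Timespec{Sec: 1}, Value: Timespec{Nsec: 500}}
	buf := make([]byte, it.SizeBytes())
	it.MarshalUnsafe(buf)
	fmt.Printf("marshalled: % x\n", buf)
}

Because Packed() is a pure conjunction of per-field checks, any permutation of the conjuncts is equivalent, which is why a commit that only reorders them (typically an artifact of non-deterministic code generation) is behavior-preserving.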