path: root/pkg/abi/linux
author    gVisor bot <gvisor-bot@google.com>  2020-02-25 21:40:09 +0000
committer gVisor bot <gvisor-bot@google.com>  2020-02-25 21:40:09 +0000
commit    ed9085d4e5d4944ab662a58ee3a33d3b5210bd3e (patch)
tree      ae6b893d1b673b9020cf1cf3d66c5071e492d7fb /pkg/abi/linux
parent    a59ee560978159013ade1bfb878d39cf9c551476 (diff)
parent    471b15b212831af31c2fe36cd42cea7ec7b7785b (diff)
Merge release-20200219.0-37-g471b15b (automated)
Diffstat (limited to 'pkg/abi/linux')
-rwxr-xr-x  pkg/abi/linux/epoll_amd64.go                        2
-rwxr-xr-x  pkg/abi/linux/epoll_arm64.go                        2
-rw-r--r--  pkg/abi/linux/file.go                               2
-rw-r--r--  pkg/abi/linux/fs.go                                 2
-rwxr-xr-x  pkg/abi/linux/linux_abi_autogen_unsafe.go         794
-rwxr-xr-x  pkg/abi/linux/linux_amd64_abi_autogen_unsafe.go   116
-rwxr-xr-x  pkg/abi/linux/linux_arm64_abi_autogen_unsafe.go   120
-rw-r--r--  pkg/abi/linux/signal.go                             2
-rw-r--r--  pkg/abi/linux/time.go                               6
-rwxr-xr-x  pkg/abi/linux/xattr.go                              1
10 files changed, 1041 insertions(+), 6 deletions(-)
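
Each `+marshal` directive added below asks the go_marshal generator to emit serialization methods for the annotated type. As a minimal sketch of the interfaces this implies — names are assumptions inferred from the generated methods in this diff, not copied from gVisor's marshal package:

// Sketch only: approximates the interfaces implied by the generated code below.
package sketch

import "io"

// Addr stands in for usermem.Addr, a user virtual address.
type Addr uintptr

// Task is the subset of task methods the generated CopyIn/CopyOut call
// (CopyScratchBuffer, CopyInBytes and CopyOutBytes all appear below).
type Task interface {
	CopyScratchBuffer(size int) []byte
	CopyInBytes(addr Addr, b []byte) (int, error)
	CopyOutBytes(addr Addr, b []byte) (int, error)
}

// Marshallable mirrors the method set this commit generates for Statx,
// Statfs, SignalSet, Timeval, StatxTimestamp, Utime and EpollEvent.
type Marshallable interface {
	io.WriterTo
	SizeBytes() int
	MarshalBytes(dst []byte)
	UnmarshalBytes(src []byte)
	Packed() bool
	MarshalUnsafe(dst []byte)
	UnmarshalUnsafe(src []byte)
	CopyIn(task Task, addr Addr) error
	CopyOut(task Task, addr Addr) error
}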
diff --git a/pkg/abi/linux/epoll_amd64.go b/pkg/abi/linux/epoll_amd64.go
index 57041491c..34ff18009 100755
--- a/pkg/abi/linux/epoll_amd64.go
+++ b/pkg/abi/linux/epoll_amd64.go
@@ -15,6 +15,8 @@
package linux
// EpollEvent is equivalent to struct epoll_event from epoll(2).
+//
+// +marshal
type EpollEvent struct {
Events uint32
// Linux makes struct epoll_event::data a __u64. We represent it as
diff --git a/pkg/abi/linux/epoll_arm64.go b/pkg/abi/linux/epoll_arm64.go
index 62ef5821e..f86c35329 100755
--- a/pkg/abi/linux/epoll_arm64.go
+++ b/pkg/abi/linux/epoll_arm64.go
@@ -15,6 +15,8 @@
package linux
// EpollEvent is equivalent to struct epoll_event from epoll(2).
+//
+// +marshal
type EpollEvent struct {
Events uint32
// Linux makes struct epoll_event::data a __u64, necessitating 4 bytes of padding
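
The two annotated types differ only in alignment padding; a minimal layout check, with struct shapes assumed from the generated SizeBytes/MarshalBytes later in this diff:

package main

import (
	"fmt"
	"unsafe"
)

// epollEventAMD64 mirrors the amd64 EpollEvent: Linux declares the
// struct packed on amd64, so Data immediately follows Events.
type epollEventAMD64 struct {
	Events uint32
	Data   [2]int32
}

// epollEventARM64 mirrors the arm64 EpollEvent: the 64-bit data field
// is 8-byte aligned, leaving 4 bytes of padding after Events, which
// the generated MarshalBytes serializes explicitly.
type epollEventARM64 struct {
	Events uint32
	_      uint32 // padding
	Data   [2]int32
}

func main() {
	fmt.Println(unsafe.Sizeof(epollEventAMD64{})) // 12, matches the amd64 SizeBytes below
	fmt.Println(unsafe.Sizeof(epollEventARM64{})) // 16, matches the arm64 SizeBytes below
}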
diff --git a/pkg/abi/linux/file.go b/pkg/abi/linux/file.go
index c3ab15a4f..e229ac21c 100644
--- a/pkg/abi/linux/file.go
+++ b/pkg/abi/linux/file.go
@@ -241,6 +241,8 @@ const (
)
// Statx represents struct statx.
+//
+// +marshal
type Statx struct {
Mask uint32
Blksize uint32
diff --git a/pkg/abi/linux/fs.go b/pkg/abi/linux/fs.go
index 2c652baa2..158d2db5b 100644
--- a/pkg/abi/linux/fs.go
+++ b/pkg/abi/linux/fs.go
@@ -38,6 +38,8 @@ const (
)
// Statfs is struct statfs, from uapi/asm-generic/statfs.h.
+//
+// +marshal
type Statfs struct {
// Type is one of the filesystem magic values, defined above.
Type uint64
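
Fixed-size array fields such as Statfs.FSID are marshalled with an index loop in the generated code. The same idea in standalone form — a sketch only, with little-endian assumed, matching usermem.ByteOrder on amd64/arm64:

package main

import (
	"encoding/binary"
	"fmt"
)

// marshalFSID serializes a [2]int32 field the way the generated
// Statfs.MarshalBytes below does: 4 bytes per element, in order.
func marshalFSID(dst []byte, fsid [2]int32) {
	for idx := 0; idx < 2; idx++ {
		binary.LittleEndian.PutUint32(dst[:4], uint32(fsid[idx]))
		dst = dst[4:]
	}
}

func main() {
	buf := make([]byte, 8)
	marshalFSID(buf, [2]int32{1, 2})
	fmt.Printf("% x\n", buf) // 01 00 00 00 02 00 00 00
}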
diff --git a/pkg/abi/linux/linux_abi_autogen_unsafe.go b/pkg/abi/linux/linux_abi_autogen_unsafe.go
index 59290f5a0..69bfe135f 100755
--- a/pkg/abi/linux/linux_abi_autogen_unsafe.go
+++ b/pkg/abi/linux/linux_abi_autogen_unsafe.go
@@ -14,7 +14,382 @@ import (
// Marshallable types used by this file.
var _ marshal.Marshallable = (*RSeqCriticalSection)(nil)
+var _ marshal.Marshallable = (*SignalSet)(nil)
+var _ marshal.Marshallable = (*Statfs)(nil)
+var _ marshal.Marshallable = (*Statx)(nil)
+var _ marshal.Marshallable = (*StatxTimestamp)(nil)
var _ marshal.Marshallable = (*Timespec)(nil)
+var _ marshal.Marshallable = (*Timeval)(nil)
+var _ marshal.Marshallable = (*Utime)(nil)
+
+// SizeBytes implements marshal.Marshallable.SizeBytes.
+func (s *Statx) SizeBytes() int {
+ return 80 +
+ (*StatxTimestamp)(nil).SizeBytes() +
+ (*StatxTimestamp)(nil).SizeBytes() +
+ (*StatxTimestamp)(nil).SizeBytes() +
+ (*StatxTimestamp)(nil).SizeBytes()
+}
+
+// MarshalBytes implements marshal.Marshallable.MarshalBytes.
+func (s *Statx) MarshalBytes(dst []byte) {
+ usermem.ByteOrder.PutUint32(dst[:4], uint32(s.Mask))
+ dst = dst[4:]
+ usermem.ByteOrder.PutUint32(dst[:4], uint32(s.Blksize))
+ dst = dst[4:]
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Attributes))
+ dst = dst[8:]
+ usermem.ByteOrder.PutUint32(dst[:4], uint32(s.Nlink))
+ dst = dst[4:]
+ usermem.ByteOrder.PutUint32(dst[:4], uint32(s.UID))
+ dst = dst[4:]
+ usermem.ByteOrder.PutUint32(dst[:4], uint32(s.GID))
+ dst = dst[4:]
+ usermem.ByteOrder.PutUint16(dst[:2], uint16(s.Mode))
+ dst = dst[2:]
+ // Padding: dst[:sizeof(uint16)] ~= uint16(0)
+ dst = dst[2:]
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Ino))
+ dst = dst[8:]
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Size))
+ dst = dst[8:]
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Blocks))
+ dst = dst[8:]
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(s.AttributesMask))
+ dst = dst[8:]
+ s.Atime.MarshalBytes(dst[:s.Atime.SizeBytes()])
+ dst = dst[s.Atime.SizeBytes():]
+ s.Btime.MarshalBytes(dst[:s.Btime.SizeBytes()])
+ dst = dst[s.Btime.SizeBytes():]
+ s.Ctime.MarshalBytes(dst[:s.Ctime.SizeBytes()])
+ dst = dst[s.Ctime.SizeBytes():]
+ s.Mtime.MarshalBytes(dst[:s.Mtime.SizeBytes()])
+ dst = dst[s.Mtime.SizeBytes():]
+ usermem.ByteOrder.PutUint32(dst[:4], uint32(s.RdevMajor))
+ dst = dst[4:]
+ usermem.ByteOrder.PutUint32(dst[:4], uint32(s.RdevMinor))
+ dst = dst[4:]
+ usermem.ByteOrder.PutUint32(dst[:4], uint32(s.DevMajor))
+ dst = dst[4:]
+ usermem.ByteOrder.PutUint32(dst[:4], uint32(s.DevMinor))
+ dst = dst[4:]
+}
+
+// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes.
+func (s *Statx) UnmarshalBytes(src []byte) {
+ s.Mask = uint32(usermem.ByteOrder.Uint32(src[:4]))
+ src = src[4:]
+ s.Blksize = uint32(usermem.ByteOrder.Uint32(src[:4]))
+ src = src[4:]
+ s.Attributes = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ s.Nlink = uint32(usermem.ByteOrder.Uint32(src[:4]))
+ src = src[4:]
+ s.UID = uint32(usermem.ByteOrder.Uint32(src[:4]))
+ src = src[4:]
+ s.GID = uint32(usermem.ByteOrder.Uint32(src[:4]))
+ src = src[4:]
+ s.Mode = uint16(usermem.ByteOrder.Uint16(src[:2]))
+ src = src[2:]
+ // Padding: var _ uint16 ~= src[:sizeof(uint16)]
+ src = src[2:]
+ s.Ino = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ s.Size = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ s.Blocks = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ s.AttributesMask = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ s.Atime.UnmarshalBytes(src[:s.Atime.SizeBytes()])
+ src = src[s.Atime.SizeBytes():]
+ s.Btime.UnmarshalBytes(src[:s.Btime.SizeBytes()])
+ src = src[s.Btime.SizeBytes():]
+ s.Ctime.UnmarshalBytes(src[:s.Ctime.SizeBytes()])
+ src = src[s.Ctime.SizeBytes():]
+ s.Mtime.UnmarshalBytes(src[:s.Mtime.SizeBytes()])
+ src = src[s.Mtime.SizeBytes():]
+ s.RdevMajor = uint32(usermem.ByteOrder.Uint32(src[:4]))
+ src = src[4:]
+ s.RdevMinor = uint32(usermem.ByteOrder.Uint32(src[:4]))
+ src = src[4:]
+ s.DevMajor = uint32(usermem.ByteOrder.Uint32(src[:4]))
+ src = src[4:]
+ s.DevMinor = uint32(usermem.ByteOrder.Uint32(src[:4]))
+ src = src[4:]
+}
+
+// Packed implements marshal.Marshallable.Packed.
+func (s *Statx) Packed() bool {
+ return s.Ctime.Packed() && s.Mtime.Packed() && s.Atime.Packed() && s.Btime.Packed()
+}
+
+// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.
+func (s *Statx) MarshalUnsafe(dst []byte) {
+ if s.Ctime.Packed() && s.Mtime.Packed() && s.Atime.Packed() && s.Btime.Packed() {
+ safecopy.CopyIn(dst, unsafe.Pointer(s))
+ } else {
+ s.MarshalBytes(dst)
+ }
+}
+
+// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe.
+func (s *Statx) UnmarshalUnsafe(src []byte) {
+ if s.Mtime.Packed() && s.Atime.Packed() && s.Btime.Packed() && s.Ctime.Packed() {
+ safecopy.CopyOut(unsafe.Pointer(s), src)
+ } else {
+ s.UnmarshalBytes(src)
+ }
+}
+
+// CopyOut implements marshal.Marshallable.CopyOut.
+func (s *Statx) CopyOut(task marshal.Task, addr usermem.Addr) error {
+ if !s.Mtime.Packed() || !s.Atime.Packed() || !s.Btime.Packed() || !s.Ctime.Packed() {
+ // Type Statx doesn't have a packed layout in memory, fall back to MarshalBytes.
+ buf := task.CopyScratchBuffer(s.SizeBytes())
+ s.MarshalBytes(buf)
+ _, err := task.CopyOutBytes(addr, buf)
+ return err
+ }
+
+ // Bypass escape analysis on s. The no-op arithmetic operation on the
+ // pointer makes the compiler think val doesn't depend on s.
+ // See src/runtime/stubs.go:noescape() in the golang toolchain.
+ ptr := unsafe.Pointer(s)
+ val := uintptr(ptr)
+ val = val^0
+
+ // Construct a slice backed by s's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = val
+ hdr.Len = s.SizeBytes()
+ hdr.Cap = s.SizeBytes()
+
+ _, err := task.CopyOutBytes(addr, buf)
+ // Since we bypassed the compiler's escape analysis, indicate that s
+ // must live until after the CopyOutBytes.
+ runtime.KeepAlive(s)
+ return err
+}
+
+// CopyIn implements marshal.Marshallable.CopyIn.
+func (s *Statx) CopyIn(task marshal.Task, addr usermem.Addr) error {
+ if !s.Atime.Packed() || !s.Btime.Packed() || !s.Ctime.Packed() || !s.Mtime.Packed() {
+ // Type Statx doesn't have a packed layout in memory, fall back to UnmarshalBytes.
+ buf := task.CopyScratchBuffer(s.SizeBytes())
+ _, err := task.CopyInBytes(addr, buf)
+ if err != nil {
+ return err
+ }
+ s.UnmarshalBytes(buf)
+ return nil
+ }
+
+ // Bypass escape analysis on s. The no-op arithmetic operation on the
+ // pointer makes the compiler think val doesn't depend on s.
+ // See src/runtime/stubs.go:noescape() in the golang toolchain.
+ ptr := unsafe.Pointer(s)
+ val := uintptr(ptr)
+ val = val^0
+
+ // Construct a slice backed by s's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = val
+ hdr.Len = s.SizeBytes()
+ hdr.Cap = s.SizeBytes()
+
+ _, err := task.CopyInBytes(addr, buf)
+ // Since we bypassed the compiler's escape analysis, indicate that s
+ // must live until after the CopyInBytes.
+ runtime.KeepAlive(s)
+ return err
+}
+
+// WriteTo implements io.WriterTo.WriteTo.
+func (s *Statx) WriteTo(w io.Writer) (int64, error) {
+ if !s.Ctime.Packed() || !s.Mtime.Packed() || !s.Atime.Packed() || !s.Btime.Packed() {
+ // Type Statx doesn't have a packed layout in memory, fall back to MarshalBytes.
+ buf := make([]byte, s.SizeBytes())
+ s.MarshalBytes(buf)
+ n, err := w.Write(buf)
+ return int64(n), err
+ }
+
+ // Bypass escape analysis on s. The no-op arithmetic operation on the
+ // pointer makes the compiler think val doesn't depend on s.
+ // See src/runtime/stubs.go:noescape() in the golang toolchain.
+ ptr := unsafe.Pointer(s)
+ val := uintptr(ptr)
+ val = val^0
+
+ // Construct a slice backed by s's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = val
+ hdr.Len = s.SizeBytes()
+ hdr.Cap = s.SizeBytes()
+
+ len, err := w.Write(buf)
+ // Since we bypassed the compiler's escape analysis, indicate that s
+ // must live until after the Write.
+ runtime.KeepAlive(s)
+ return int64(len), err
+}
+
+// SizeBytes implements marshal.Marshallable.SizeBytes.
+func (s *Statfs) SizeBytes() int {
+ return 120
+}
+
+// MarshalBytes implements marshal.Marshallable.MarshalBytes.
+func (s *Statfs) MarshalBytes(dst []byte) {
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Type))
+ dst = dst[8:]
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(s.BlockSize))
+ dst = dst[8:]
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Blocks))
+ dst = dst[8:]
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(s.BlocksFree))
+ dst = dst[8:]
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(s.BlocksAvailable))
+ dst = dst[8:]
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Files))
+ dst = dst[8:]
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(s.FilesFree))
+ dst = dst[8:]
+ for idx := 0; idx < 2; idx++ {
+ usermem.ByteOrder.PutUint32(dst[:4], uint32(s.FSID[idx]))
+ dst = dst[4:]
+ }
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(s.NameLength))
+ dst = dst[8:]
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(s.FragmentSize))
+ dst = dst[8:]
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Flags))
+ dst = dst[8:]
+ for idx := 0; idx < 4; idx++ {
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Spare[idx]))
+ dst = dst[8:]
+ }
+}
+
+// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes.
+func (s *Statfs) UnmarshalBytes(src []byte) {
+ s.Type = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ s.BlockSize = int64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ s.Blocks = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ s.BlocksFree = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ s.BlocksAvailable = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ s.Files = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ s.FilesFree = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ for idx := 0; idx < 2; idx++ {
+ s.FSID[idx] = int32(usermem.ByteOrder.Uint32(src[:4]))
+ src = src[4:]
+ }
+ s.NameLength = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ s.FragmentSize = int64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ s.Flags = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ for idx := 0; idx < 4; idx++ {
+ s.Spare[idx] = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ }
+}
+
+// Packed implements marshal.Marshallable.Packed.
+func (s *Statfs) Packed() bool {
+ return true
+}
+
+// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.
+func (s *Statfs) MarshalUnsafe(dst []byte) {
+ safecopy.CopyIn(dst, unsafe.Pointer(s))
+}
+
+// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe.
+func (s *Statfs) UnmarshalUnsafe(src []byte) {
+ safecopy.CopyOut(unsafe.Pointer(s), src)
+}
+
+// CopyOut implements marshal.Marshallable.CopyOut.
+func (s *Statfs) CopyOut(task marshal.Task, addr usermem.Addr) error {
+ // Bypass escape analysis on s. The no-op arithmetic operation on the
+ // pointer makes the compiler think val doesn't depend on s.
+ // See src/runtime/stubs.go:noescape() in the golang toolchain.
+ ptr := unsafe.Pointer(s)
+ val := uintptr(ptr)
+ val = val^0
+
+ // Construct a slice backed by s's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = val
+ hdr.Len = s.SizeBytes()
+ hdr.Cap = s.SizeBytes()
+
+ _, err := task.CopyOutBytes(addr, buf)
+ // Since we bypassed the compiler's escape analysis, indicate that s
+ // must live until after the CopyOutBytes.
+ runtime.KeepAlive(s)
+ return err
+}
+
+// CopyIn implements marshal.Marshallable.CopyIn.
+func (s *Statfs) CopyIn(task marshal.Task, addr usermem.Addr) error {
+ // Bypass escape analysis on s. The no-op arithmetic operation on the
+ // pointer makes the compiler think val doesn't depend on s.
+ // See src/runtime/stubs.go:noescape() in the golang toolchain.
+ ptr := unsafe.Pointer(s)
+ val := uintptr(ptr)
+ val = val^0
+
+ // Construct a slice backed by s's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = val
+ hdr.Len = s.SizeBytes()
+ hdr.Cap = s.SizeBytes()
+
+ _, err := task.CopyInBytes(addr, buf)
+ // Since we bypassed the compiler's escape analysis, indicate that s
+ // must live until after the CopyInBytes.
+ runtime.KeepAlive(s)
+ return err
+}
+
+// WriteTo implements io.WriterTo.WriteTo.
+func (s *Statfs) WriteTo(w io.Writer) (int64, error) {
+ // Bypass escape analysis on s. The no-op arithmetic operation on the
+ // pointer makes the compiler think val doesn't depend on s.
+ // See src/runtime/stubs.go:noescape() in the golang toolchain.
+ ptr := unsafe.Pointer(s)
+ val := uintptr(ptr)
+ val = val^0
+
+ // Construct a slice backed by s's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = val
+ hdr.Len = s.SizeBytes()
+ hdr.Cap = s.SizeBytes()
+
+ len, err := w.Write(buf)
+ // Since we bypassed the compiler's escape analysis, indicate that s
+ // must live until after the Write.
+ runtime.KeepAlive(s)
+ return int64(len), err
+}
// SizeBytes implements marshal.Marshallable.SizeBytes.
func (r *RSeqCriticalSection) SizeBytes() int {
@@ -134,6 +509,106 @@ func (r *RSeqCriticalSection) WriteTo(w io.Writer) (int64, error) {
}
// SizeBytes implements marshal.Marshallable.SizeBytes.
+func (s *SignalSet) SizeBytes() int {
+ return 8
+}
+
+// MarshalBytes implements marshal.Marshallable.MarshalBytes.
+func (s *SignalSet) MarshalBytes(dst []byte) {
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(*s))
+}
+
+// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes.
+func (s *SignalSet) UnmarshalBytes(src []byte) {
+ *s = SignalSet(uint64(usermem.ByteOrder.Uint64(src[:8])))
+}
+
+// Packed implements marshal.Marshallable.Packed.
+func (s *SignalSet) Packed() bool {
+ // Scalar newtypes are always packed.
+ return true
+}
+
+// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.
+func (s *SignalSet) MarshalUnsafe(dst []byte) {
+ safecopy.CopyIn(dst, unsafe.Pointer(s))
+}
+
+// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe.
+func (s *SignalSet) UnmarshalUnsafe(src []byte) {
+ safecopy.CopyOut(unsafe.Pointer(s), src)
+}
+
+// CopyOut implements marshal.Marshallable.CopyOut.
+func (s *SignalSet) CopyOut(task marshal.Task, addr usermem.Addr) error {
+ // Bypass escape analysis on s. The no-op arithmetic operation on the
+ // pointer makes the compiler think val doesn't depend on s.
+ // See src/runtime/stubs.go:noescape() in the golang toolchain.
+ ptr := unsafe.Pointer(s)
+ val := uintptr(ptr)
+ val = val^0
+
+ // Construct a slice backed by s's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = val
+ hdr.Len = s.SizeBytes()
+ hdr.Cap = s.SizeBytes()
+
+ _, err := task.CopyOutBytes(addr, buf)
+ // Since we bypassed the compiler's escape analysis, indicate that s
+ // must live until after the CopyOutBytes.
+ runtime.KeepAlive(s)
+ return err
+}
+
+// CopyIn implements marshal.Marshallable.CopyIn.
+func (s *SignalSet) CopyIn(task marshal.Task, addr usermem.Addr) error {
+ // Bypass escape analysis on s. The no-op arithmetic operation on the
+ // pointer makes the compiler think val doesn't depend on s.
+ // See src/runtime/stubs.go:noescape() in the golang toolchain.
+ ptr := unsafe.Pointer(s)
+ val := uintptr(ptr)
+ val = val^0
+
+ // Construct a slice backed by s's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = val
+ hdr.Len = s.SizeBytes()
+ hdr.Cap = s.SizeBytes()
+
+ _, err := task.CopyInBytes(addr, buf)
+ // Since we bypassed the compiler's escape analysis, indicate that s
+ // must live until after the CopyInBytes.
+ runtime.KeepAlive(s)
+ return err
+}
+
+// WriteTo implements io.WriterTo.WriteTo.
+func (s *SignalSet) WriteTo(w io.Writer) (int64, error) {
+ // Bypass escape analysis on s. The no-op arithmetic operation on the
+ // pointer makes the compiler think val doesn't depend on s.
+ // See src/runtime/stubs.go:noescape() in the golang toolchain.
+ ptr := unsafe.Pointer(s)
+ val := uintptr(ptr)
+ val = val^0
+
+ // Construct a slice backed by s's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = val
+ hdr.Len = s.SizeBytes()
+ hdr.Cap = s.SizeBytes()
+
+ len, err := w.Write(buf)
+ // Since we bypassed the compiler's escape analysis, indicate that s
+ // must live until after the Write.
+ runtime.KeepAlive(s)
+ return int64(len), err
+}
+
+// SizeBytes implements marshal.Marshallable.SizeBytes.
func (t *Timespec) SizeBytes() int {
return 16
}
@@ -238,3 +713,322 @@ func (t *Timespec) WriteTo(w io.Writer) (int64, error) {
return int64(len), err
}
+// SizeBytes implements marshal.Marshallable.SizeBytes.
+func (t *Timeval) SizeBytes() int {
+ return 16
+}
+
+// MarshalBytes implements marshal.Marshallable.MarshalBytes.
+func (t *Timeval) MarshalBytes(dst []byte) {
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(t.Sec))
+ dst = dst[8:]
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(t.Usec))
+ dst = dst[8:]
+}
+
+// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes.
+func (t *Timeval) UnmarshalBytes(src []byte) {
+ t.Sec = int64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ t.Usec = int64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+}
+
+// Packed implements marshal.Marshallable.Packed.
+func (t *Timeval) Packed() bool {
+ return true
+}
+
+// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.
+func (t *Timeval) MarshalUnsafe(dst []byte) {
+ safecopy.CopyIn(dst, unsafe.Pointer(t))
+}
+
+// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe.
+func (t *Timeval) UnmarshalUnsafe(src []byte) {
+ safecopy.CopyOut(unsafe.Pointer(t), src)
+}
+
+// CopyOut implements marshal.Marshallable.CopyOut.
+func (t *Timeval) CopyOut(task marshal.Task, addr usermem.Addr) error {
+ // Bypass escape analysis on t. The no-op arithmetic operation on the
+ // pointer makes the compiler think val doesn't depend on t.
+ // See src/runtime/stubs.go:noescape() in the golang toolchain.
+ ptr := unsafe.Pointer(t)
+ val := uintptr(ptr)
+ val = val^0
+
+ // Construct a slice backed by t's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = val
+ hdr.Len = t.SizeBytes()
+ hdr.Cap = t.SizeBytes()
+
+ _, err := task.CopyOutBytes(addr, buf)
+ // Since we bypassed the compiler's escape analysis, indicate that t
+ // must live until after the CopyOutBytes.
+ runtime.KeepAlive(t)
+ return err
+}
+
+// CopyIn implements marshal.Marshallable.CopyIn.
+func (t *Timeval) CopyIn(task marshal.Task, addr usermem.Addr) error {
+ // Bypass escape analysis on t. The no-op arithmetic operation on the
+ // pointer makes the compiler think val doesn't depend on t.
+ // See src/runtime/stubs.go:noescape() in the golang toolchain.
+ ptr := unsafe.Pointer(t)
+ val := uintptr(ptr)
+ val = val^0
+
+ // Construct a slice backed by t's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = val
+ hdr.Len = t.SizeBytes()
+ hdr.Cap = t.SizeBytes()
+
+ _, err := task.CopyInBytes(addr, buf)
+ // Since we bypassed the compiler's escape analysis, indicate that t
+ // must live until after the CopyInBytes.
+ runtime.KeepAlive(t)
+ return err
+}
+
+// WriteTo implements io.WriterTo.WriteTo.
+func (t *Timeval) WriteTo(w io.Writer) (int64, error) {
+ // Bypass escape analysis on t. The no-op arithmetic operation on the
+ // pointer makes the compiler think val doesn't depend on t.
+ // See src/runtime/stubs.go:noescape() in the golang toolchain.
+ ptr := unsafe.Pointer(t)
+ val := uintptr(ptr)
+ val = val^0
+
+ // Construct a slice backed by t's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = val
+ hdr.Len = t.SizeBytes()
+ hdr.Cap = t.SizeBytes()
+
+ len, err := w.Write(buf)
+ // Since we bypassed the compiler's escape analysis, indicate that t
+ // must live until after the Write.
+ runtime.KeepAlive(t)
+ return int64(len), err
+}
+
+// SizeBytes implements marshal.Marshallable.SizeBytes.
+func (s *StatxTimestamp) SizeBytes() int {
+ return 16
+}
+
+// MarshalBytes implements marshal.Marshallable.MarshalBytes.
+func (s *StatxTimestamp) MarshalBytes(dst []byte) {
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Sec))
+ dst = dst[8:]
+ usermem.ByteOrder.PutUint32(dst[:4], uint32(s.Nsec))
+ dst = dst[4:]
+ // Padding: dst[:sizeof(int32)] ~= int32(0)
+ dst = dst[4:]
+}
+
+// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes.
+func (s *StatxTimestamp) UnmarshalBytes(src []byte) {
+ s.Sec = int64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ s.Nsec = uint32(usermem.ByteOrder.Uint32(src[:4]))
+ src = src[4:]
+ // Padding: var _ int32 ~= src[:sizeof(int32)]
+ src = src[4:]
+}
+
+// Packed implements marshal.Marshallable.Packed.
+func (s *StatxTimestamp) Packed() bool {
+ return true
+}
+
+// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.
+func (s *StatxTimestamp) MarshalUnsafe(dst []byte) {
+ safecopy.CopyIn(dst, unsafe.Pointer(s))
+}
+
+// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe.
+func (s *StatxTimestamp) UnmarshalUnsafe(src []byte) {
+ safecopy.CopyOut(unsafe.Pointer(s), src)
+}
+
+// CopyOut implements marshal.Marshallable.CopyOut.
+func (s *StatxTimestamp) CopyOut(task marshal.Task, addr usermem.Addr) error {
+ // Bypass escape analysis on s. The no-op arithmetic operation on the
+ // pointer makes the compiler think val doesn't depend on s.
+ // See src/runtime/stubs.go:noescape() in the golang toolchain.
+ ptr := unsafe.Pointer(s)
+ val := uintptr(ptr)
+ val = val^0
+
+ // Construct a slice backed by s's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = val
+ hdr.Len = s.SizeBytes()
+ hdr.Cap = s.SizeBytes()
+
+ _, err := task.CopyOutBytes(addr, buf)
+ // Since we bypassed the compiler's escape analysis, indicate that s
+ // must live until after the CopyOutBytes.
+ runtime.KeepAlive(s)
+ return err
+}
+
+// CopyIn implements marshal.Marshallable.CopyIn.
+func (s *StatxTimestamp) CopyIn(task marshal.Task, addr usermem.Addr) error {
+ // Bypass escape analysis on s. The no-op arithmetic operation on the
+ // pointer makes the compiler think val doesn't depend on s.
+ // See src/runtime/stubs.go:noescape() in the golang toolchain.
+ ptr := unsafe.Pointer(s)
+ val := uintptr(ptr)
+ val = val^0
+
+ // Construct a slice backed by s's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = val
+ hdr.Len = s.SizeBytes()
+ hdr.Cap = s.SizeBytes()
+
+ _, err := task.CopyInBytes(addr, buf)
+ // Since we bypassed the compiler's escape analysis, indicate that s
+ // must live until after the CopyInBytes.
+ runtime.KeepAlive(s)
+ return err
+}
+
+// WriteTo implements io.WriterTo.WriteTo.
+func (s *StatxTimestamp) WriteTo(w io.Writer) (int64, error) {
+ // Bypass escape analysis on s. The no-op arithmetic operation on the
+ // pointer makes the compiler think val doesn't depend on s.
+ // See src/runtime/stubs.go:noescape() in the golang toolchain.
+ ptr := unsafe.Pointer(s)
+ val := uintptr(ptr)
+ val = val^0
+
+ // Construct a slice backed by s's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = val
+ hdr.Len = s.SizeBytes()
+ hdr.Cap = s.SizeBytes()
+
+ len, err := w.Write(buf)
+ // Since we bypassed the compiler's escape analysis, indicate that s
+ // must live until after the Write.
+ runtime.KeepAlive(s)
+ return int64(len), err
+}
+
+// SizeBytes implements marshal.Marshallable.SizeBytes.
+func (u *Utime) SizeBytes() int {
+ return 16
+}
+
+// MarshalBytes implements marshal.Marshallable.MarshalBytes.
+func (u *Utime) MarshalBytes(dst []byte) {
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(u.Actime))
+ dst = dst[8:]
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(u.Modtime))
+ dst = dst[8:]
+}
+
+// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes.
+func (u *Utime) UnmarshalBytes(src []byte) {
+ u.Actime = int64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ u.Modtime = int64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+}
+
+// Packed implements marshal.Marshallable.Packed.
+func (u *Utime) Packed() bool {
+ return true
+}
+
+// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.
+func (u *Utime) MarshalUnsafe(dst []byte) {
+ safecopy.CopyIn(dst, unsafe.Pointer(u))
+}
+
+// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe.
+func (u *Utime) UnmarshalUnsafe(src []byte) {
+ safecopy.CopyOut(unsafe.Pointer(u), src)
+}
+
+// CopyOut implements marshal.Marshallable.CopyOut.
+func (u *Utime) CopyOut(task marshal.Task, addr usermem.Addr) error {
+ // Bypass escape analysis on u. The no-op arithmetic operation on the
+ // pointer makes the compiler think val doesn't depend on u.
+ // See src/runtime/stubs.go:noescape() in the golang toolchain.
+ ptr := unsafe.Pointer(u)
+ val := uintptr(ptr)
+ val = val^0
+
+ // Construct a slice backed by u's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = val
+ hdr.Len = u.SizeBytes()
+ hdr.Cap = u.SizeBytes()
+
+ _, err := task.CopyOutBytes(addr, buf)
+ // Since we bypassed the compiler's escape analysis, indicate that u
+ // must live until after the CopyOutBytes.
+ runtime.KeepAlive(u)
+ return err
+}
+
+// CopyIn implements marshal.Marshallable.CopyIn.
+func (u *Utime) CopyIn(task marshal.Task, addr usermem.Addr) error {
+ // Bypass escape analysis on u. The no-op arithmetic operation on the
+ // pointer makes the compiler think val doesn't depend on u.
+ // See src/runtime/stubs.go:noescape() in the golang toolchain.
+ ptr := unsafe.Pointer(u)
+ val := uintptr(ptr)
+ val = val^0
+
+ // Construct a slice backed by u's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = val
+ hdr.Len = u.SizeBytes()
+ hdr.Cap = u.SizeBytes()
+
+ _, err := task.CopyInBytes(addr, buf)
+ // Since we bypassed the compiler's escape analysis, indicate that u
+ // must live until after the CopyInBytes.
+ runtime.KeepAlive(u)
+ return err
+}
+
+// WriteTo implements io.WriterTo.WriteTo.
+func (u *Utime) WriteTo(w io.Writer) (int64, error) {
+ // Bypass escape analysis on u. The no-op arithmetic operation on the
+ // pointer makes the compiler think val doesn't depend on u.
+ // See src/runtime/stubs.go:noescape() in the golang toolchain.
+ ptr := unsafe.Pointer(u)
+ val := uintptr(ptr)
+ val = val^0
+
+ // Construct a slice backed by u's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = val
+ hdr.Len = u.SizeBytes()
+ hdr.Cap = u.SizeBytes()
+
+ len, err := w.Write(buf)
+ // Since we bypassed the compiler's escape analysis, indicate that u
+ // must live until after the Write.
+ runtime.KeepAlive(u)
+ return int64(len), err
+}
+
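The CopyIn/CopyOut/WriteTo bodies above all repeat one pattern: alias the receiver's memory as a []byte without letting it escape to the heap. A condensed sketch of that pattern, hedged as an illustration rather than gVisor source:

package main

import (
	"fmt"
	"reflect"
	"runtime"
	"unsafe"
)

// aliasBytes returns a []byte backed by size bytes at ptr. The no-op
// xor hides the pointer's provenance from escape analysis, as in the
// generated code above; reflect.SliceHeader then supplies the backing
// array directly.
func aliasBytes(ptr unsafe.Pointer, size int) []byte {
	val := uintptr(ptr)
	val = val ^ 0

	var buf []byte
	hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
	hdr.Data = val
	hdr.Len = size
	hdr.Cap = size
	return buf
}

func main() {
	x := uint64(0x0807060504030201)
	buf := aliasBytes(unsafe.Pointer(&x), 8)
	fmt.Printf("% x\n", buf) // 01 02 03 ... on little-endian targets
	// Because escape analysis was bypassed, x must be explicitly kept
	// alive until the last use of buf.
	runtime.KeepAlive(&x)
}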
diff --git a/pkg/abi/linux/linux_amd64_abi_autogen_unsafe.go b/pkg/abi/linux/linux_amd64_abi_autogen_unsafe.go
index 6abfea58c..bbfe76aa0 100755
--- a/pkg/abi/linux/linux_amd64_abi_autogen_unsafe.go
+++ b/pkg/abi/linux/linux_amd64_abi_autogen_unsafe.go
@@ -15,10 +15,120 @@ import (
)
// Marshallable types used by this file.
+var _ marshal.Marshallable = (*EpollEvent)(nil)
var _ marshal.Marshallable = (*Stat)(nil)
var _ marshal.Marshallable = (*Timespec)(nil)
// SizeBytes implements marshal.Marshallable.SizeBytes.
+func (e *EpollEvent) SizeBytes() int {
+ return 12
+}
+
+// MarshalBytes implements marshal.Marshallable.MarshalBytes.
+func (e *EpollEvent) MarshalBytes(dst []byte) {
+ usermem.ByteOrder.PutUint32(dst[:4], uint32(e.Events))
+ dst = dst[4:]
+ for idx := 0; idx < 2; idx++ {
+ usermem.ByteOrder.PutUint32(dst[:4], uint32(e.Data[idx]))
+ dst = dst[4:]
+ }
+}
+
+// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes.
+func (e *EpollEvent) UnmarshalBytes(src []byte) {
+ e.Events = uint32(usermem.ByteOrder.Uint32(src[:4]))
+ src = src[4:]
+ for idx := 0; idx < 2; idx++ {
+ e.Data[idx] = int32(usermem.ByteOrder.Uint32(src[:4]))
+ src = src[4:]
+ }
+}
+
+// Packed implements marshal.Marshallable.Packed.
+func (e *EpollEvent) Packed() bool {
+ return true
+}
+
+// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.
+func (e *EpollEvent) MarshalUnsafe(dst []byte) {
+ safecopy.CopyIn(dst, unsafe.Pointer(e))
+}
+
+// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe.
+func (e *EpollEvent) UnmarshalUnsafe(src []byte) {
+ safecopy.CopyOut(unsafe.Pointer(e), src)
+}
+
+// CopyOut implements marshal.Marshallable.CopyOut.
+func (e *EpollEvent) CopyOut(task marshal.Task, addr usermem.Addr) error {
+ // Bypass escape analysis on e. The no-op arithmetic operation on the
+ // pointer makes the compiler think val doesn't depend on e.
+ // See src/runtime/stubs.go:noescape() in the golang toolchain.
+ ptr := unsafe.Pointer(e)
+ val := uintptr(ptr)
+ val = val^0
+
+ // Construct a slice backed by e's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = val
+ hdr.Len = e.SizeBytes()
+ hdr.Cap = e.SizeBytes()
+
+ _, err := task.CopyOutBytes(addr, buf)
+ // Since we bypassed the compiler's escape analysis, indicate that e
+ // must live until after the CopyOutBytes.
+ runtime.KeepAlive(e)
+ return err
+}
+
+// CopyIn implements marshal.Marshallable.CopyIn.
+func (e *EpollEvent) CopyIn(task marshal.Task, addr usermem.Addr) error {
+ // Bypass escape analysis on e. The no-op arithmetic operation on the
+ // pointer makes the compiler think val doesn't depend on e.
+ // See src/runtime/stubs.go:noescape() in the golang toolchain.
+ ptr := unsafe.Pointer(e)
+ val := uintptr(ptr)
+ val = val^0
+
+ // Construct a slice backed by e's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = val
+ hdr.Len = e.SizeBytes()
+ hdr.Cap = e.SizeBytes()
+
+ _, err := task.CopyInBytes(addr, buf)
+ // Since we bypassed the compiler's escape analysis, indicate that e
+ // must live until after the CopyInBytes.
+ runtime.KeepAlive(e)
+ return err
+}
+
+// WriteTo implements io.WriterTo.WriteTo.
+func (e *EpollEvent) WriteTo(w io.Writer) (int64, error) {
+ // Bypass escape analysis on e. The no-op arithmetic operation on the
+ // pointer makes the compiler think val doesn't depend on e.
+ // See src/runtime/stubs.go:noescape() in the golang toolchain.
+ ptr := unsafe.Pointer(e)
+ val := uintptr(ptr)
+ val = val^0
+
+ // Construct a slice backed by e's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = val
+ hdr.Len = e.SizeBytes()
+ hdr.Cap = e.SizeBytes()
+
+ len, err := w.Write(buf)
+ // Since we bypassed the compiler's escape analysis, indicate that e
+ // must live until after the Write.
+ runtime.KeepAlive(e)
+ return int64(len), err
+}
+
+// SizeBytes implements marshal.Marshallable.SizeBytes.
func (s *Stat) SizeBytes() int {
return 96 +
(*Timespec)(nil).SizeBytes() +
@@ -101,7 +211,7 @@ func (s *Stat) Packed() bool {
// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.
func (s *Stat) MarshalUnsafe(dst []byte) {
- if s.MTime.Packed() && s.CTime.Packed() && s.ATime.Packed() {
+ if s.ATime.Packed() && s.MTime.Packed() && s.CTime.Packed() {
safecopy.CopyIn(dst, unsafe.Pointer(s))
} else {
s.MarshalBytes(dst)
@@ -119,7 +229,7 @@ func (s *Stat) UnmarshalUnsafe(src []byte) {
// CopyOut implements marshal.Marshallable.CopyOut.
func (s *Stat) CopyOut(task marshal.Task, addr usermem.Addr) error {
- if !s.CTime.Packed() && s.ATime.Packed() && s.MTime.Packed() {
+ if !s.ATime.Packed() || !s.MTime.Packed() || !s.CTime.Packed() {
// Type Stat doesn't have a packed layout in memory, fall back to MarshalBytes.
buf := task.CopyScratchBuffer(s.SizeBytes())
s.MarshalBytes(buf)
@@ -150,7 +260,7 @@ func (s *Stat) CopyOut(task marshal.Task, addr usermem.Addr) error {
// CopyIn implements marshal.Marshallable.CopyIn.
func (s *Stat) CopyIn(task marshal.Task, addr usermem.Addr) error {
- if !s.ATime.Packed() && s.MTime.Packed() && s.CTime.Packed() {
+ if !s.MTime.Packed() || !s.CTime.Packed() || !s.ATime.Packed() {
// Type Stat doesn't have a packed layout in memory, fall back to UnmarshalBytes.
buf := task.CopyScratchBuffer(s.SizeBytes())
_, err := task.CopyInBytes(addr, buf)
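
A usage sketch of the newly generated EpollEvent methods; the import path and the EPOLLIN constant are assumptions based on this repository's layout:

package main

import (
	"fmt"

	"gvisor.dev/gvisor/pkg/abi/linux"
)

func main() {
	ev := linux.EpollEvent{Events: linux.EPOLLIN}
	buf := make([]byte, ev.SizeBytes()) // 12 on amd64, 16 on arm64

	// EpollEvent.Packed() returns true, so MarshalUnsafe is a single
	// memory copy rather than a field-by-field serialization.
	ev.MarshalUnsafe(buf)

	var out linux.EpollEvent
	out.UnmarshalBytes(buf)
	fmt.Println(out.Events == ev.Events) // true
}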
diff --git a/pkg/abi/linux/linux_arm64_abi_autogen_unsafe.go b/pkg/abi/linux/linux_arm64_abi_autogen_unsafe.go
index d57e8e48d..d80716abb 100755
--- a/pkg/abi/linux/linux_arm64_abi_autogen_unsafe.go
+++ b/pkg/abi/linux/linux_arm64_abi_autogen_unsafe.go
@@ -15,10 +15,124 @@ import (
)
// Marshallable types used by this file.
+var _ marshal.Marshallable = (*EpollEvent)(nil)
var _ marshal.Marshallable = (*Stat)(nil)
var _ marshal.Marshallable = (*Timespec)(nil)
// SizeBytes implements marshal.Marshallable.SizeBytes.
+func (e *EpollEvent) SizeBytes() int {
+ return 16
+}
+
+// MarshalBytes implements marshal.Marshallable.MarshalBytes.
+func (e *EpollEvent) MarshalBytes(dst []byte) {
+ usermem.ByteOrder.PutUint32(dst[:4], uint32(e.Events))
+ dst = dst[4:]
+ // Padding: dst[:sizeof(int32)] ~= int32(0)
+ dst = dst[4:]
+ for idx := 0; idx < 2; idx++ {
+ usermem.ByteOrder.PutUint32(dst[:4], uint32(e.Data[idx]))
+ dst = dst[4:]
+ }
+}
+
+// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes.
+func (e *EpollEvent) UnmarshalBytes(src []byte) {
+ e.Events = uint32(usermem.ByteOrder.Uint32(src[:4]))
+ src = src[4:]
+ // Padding: var _ int32 ~= src[:sizeof(int32)]
+ src = src[4:]
+ for idx := 0; idx < 2; idx++ {
+ e.Data[idx] = int32(usermem.ByteOrder.Uint32(src[:4]))
+ src = src[4:]
+ }
+}
+
+// Packed implements marshal.Marshallable.Packed.
+func (e *EpollEvent) Packed() bool {
+ return true
+}
+
+// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.
+func (e *EpollEvent) MarshalUnsafe(dst []byte) {
+ safecopy.CopyIn(dst, unsafe.Pointer(e))
+}
+
+// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe.
+func (e *EpollEvent) UnmarshalUnsafe(src []byte) {
+ safecopy.CopyOut(unsafe.Pointer(e), src)
+}
+
+// CopyOut implements marshal.Marshallable.CopyOut.
+func (e *EpollEvent) CopyOut(task marshal.Task, addr usermem.Addr) error {
+ // Bypass escape analysis on e. The no-op arithmetic operation on the
+ // pointer makes the compiler think val doesn't depend on e.
+ // See src/runtime/stubs.go:noescape() in the golang toolchain.
+ ptr := unsafe.Pointer(e)
+ val := uintptr(ptr)
+ val = val^0
+
+ // Construct a slice backed by e's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = val
+ hdr.Len = e.SizeBytes()
+ hdr.Cap = e.SizeBytes()
+
+ _, err := task.CopyOutBytes(addr, buf)
+ // Since we bypassed the compiler's escape analysis, indicate that e
+ // must live until after the CopyOutBytes.
+ runtime.KeepAlive(e)
+ return err
+}
+
+// CopyIn implements marshal.Marshallable.CopyIn.
+func (e *EpollEvent) CopyIn(task marshal.Task, addr usermem.Addr) error {
+ // Bypass escape analysis on e. The no-op arithmetic operation on the
+ // pointer makes the compiler think val doesn't depend on e.
+ // See src/runtime/stubs.go:noescape() in the golang toolchain.
+ ptr := unsafe.Pointer(e)
+ val := uintptr(ptr)
+ val = val^0
+
+ // Construct a slice backed by e's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = val
+ hdr.Len = e.SizeBytes()
+ hdr.Cap = e.SizeBytes()
+
+ _, err := task.CopyInBytes(addr, buf)
+ // Since we bypassed the compiler's escape analysis, indicate that e
+ // must live until after the CopyInBytes.
+ runtime.KeepAlive(e)
+ return err
+}
+
+// WriteTo implements io.WriterTo.WriteTo.
+func (e *EpollEvent) WriteTo(w io.Writer) (int64, error) {
+ // Bypass escape analysis on e. The no-op arithmetic operation on the
+ // pointer makes the compiler think val doesn't depend on e.
+ // See src/runtime/stubs.go:noescape() in the golang toolchain.
+ ptr := unsafe.Pointer(e)
+ val := uintptr(ptr)
+ val = val^0
+
+ // Construct a slice backed by e's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = val
+ hdr.Len = e.SizeBytes()
+ hdr.Cap = e.SizeBytes()
+
+ len, err := w.Write(buf)
+ // Since we bypassed the compiler's escape analysis, indicate that e
+ // must live until after the Write.
+ runtime.KeepAlive(e)
+ return int64(len), err
+}
+
+// SizeBytes implements marshal.Marshallable.SizeBytes.
func (s *Stat) SizeBytes() int {
return 80 +
(*Timespec)(nil).SizeBytes() +
@@ -105,7 +219,7 @@ func (s *Stat) Packed() bool {
// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.
func (s *Stat) MarshalUnsafe(dst []byte) {
- if s.MTime.Packed() && s.CTime.Packed() && s.ATime.Packed() {
+ if s.CTime.Packed() && s.ATime.Packed() && s.MTime.Packed() {
safecopy.CopyIn(dst, unsafe.Pointer(s))
} else {
s.MarshalBytes(dst)
@@ -114,7 +228,7 @@ func (s *Stat) MarshalUnsafe(dst []byte) {
// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe.
func (s *Stat) UnmarshalUnsafe(src []byte) {
- if s.ATime.Packed() && s.MTime.Packed() && s.CTime.Packed() {
+ if s.CTime.Packed() && s.ATime.Packed() && s.MTime.Packed() {
safecopy.CopyOut(unsafe.Pointer(s), src)
} else {
s.UnmarshalBytes(src)
@@ -154,7 +268,7 @@ func (s *Stat) CopyOut(task marshal.Task, addr usermem.Addr) error {
// CopyIn implements marshal.Marshallable.CopyIn.
func (s *Stat) CopyIn(task marshal.Task, addr usermem.Addr) error {
- if !s.MTime.Packed() && s.CTime.Packed() && s.ATime.Packed() {
+ if !s.ATime.Packed() || !s.MTime.Packed() || !s.CTime.Packed() {
// Type Stat doesn't have a packed layout in memory, fall back to UnmarshalBytes.
buf := task.CopyScratchBuffer(s.SizeBytes())
_, err := task.CopyInBytes(addr, buf)
diff --git a/pkg/abi/linux/signal.go b/pkg/abi/linux/signal.go
index c69b04ea9..1c330e763 100644
--- a/pkg/abi/linux/signal.go
+++ b/pkg/abi/linux/signal.go
@@ -115,6 +115,8 @@ const (
)
// SignalSet is a signal mask with a bit corresponding to each signal.
+//
+// +marshal
type SignalSet uint64
// SignalSetSize is the size in bytes of a SignalSet.
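
SignalSet is a scalar newtype, so its generated marshalling (earlier in this diff) reduces to a single 64-bit store and load. A standalone equivalent, as a sketch with little-endian assumed:

package main

import (
	"encoding/binary"
	"fmt"
)

// SignalSet mirrors the newtype from signal.go: one bit per signal.
type SignalSet uint64

// MarshalBytes/UnmarshalBytes follow the generated scalar-newtype code.
func (s *SignalSet) MarshalBytes(dst []byte) {
	binary.LittleEndian.PutUint64(dst[:8], uint64(*s))
}

func (s *SignalSet) UnmarshalBytes(src []byte) {
	*s = SignalSet(binary.LittleEndian.Uint64(src[:8]))
}

func main() {
	in := SignalSet(0x100) // example mask with one bit set
	buf := make([]byte, 8)
	in.MarshalBytes(buf)

	var out SignalSet
	out.UnmarshalBytes(buf)
	fmt.Println(out == in) // true
}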
diff --git a/pkg/abi/linux/time.go b/pkg/abi/linux/time.go
index e562b46d9..e6860ed49 100644
--- a/pkg/abi/linux/time.go
+++ b/pkg/abi/linux/time.go
@@ -157,6 +157,8 @@ func DurationToTimespec(dur time.Duration) Timespec {
const SizeOfTimeval = 16
// Timeval represents struct timeval in <time.h>.
+//
+// +marshal
type Timeval struct {
Sec int64
Usec int64
@@ -230,6 +232,8 @@ type Tms struct {
type TimerID int32
// StatxTimestamp represents struct statx_timestamp.
+//
+// +marshal
type StatxTimestamp struct {
Sec int64
Nsec uint32
@@ -258,6 +262,8 @@ func NsecToStatxTimestamp(nsec int64) (ts StatxTimestamp) {
}
// Utime represents struct utimbuf used by utime(2).
+//
+// +marshal
type Utime struct {
Actime int64
Modtime int64
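
The generated marshalling is, in effect, a reflection-free unrolling of what encoding/binary does for a packed struct. A comparison using the Utime type annotated above — a sketch, little-endian assumed:

package main

import (
	"bytes"
	"encoding/binary"
	"fmt"
)

// Utime mirrors the struct from time.go in this diff.
type Utime struct {
	Actime  int64
	Modtime int64
}

func main() {
	u := Utime{Actime: 1, Modtime: 2}

	// Reflection-based serialization; this per-call reflection cost is
	// what the generated MarshalBytes avoids.
	var viaReflect bytes.Buffer
	binary.Write(&viaReflect, binary.LittleEndian, &u)

	// The generated code's approach: explicit field-by-field stores.
	unrolled := make([]byte, 16)
	binary.LittleEndian.PutUint64(unrolled[0:8], uint64(u.Actime))
	binary.LittleEndian.PutUint64(unrolled[8:16], uint64(u.Modtime))

	fmt.Println(bytes.Equal(viaReflect.Bytes(), unrolled)) // true
}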
diff --git a/pkg/abi/linux/xattr.go b/pkg/abi/linux/xattr.go
index a3b6406fa..99180b208 100755
--- a/pkg/abi/linux/xattr.go
+++ b/pkg/abi/linux/xattr.go
@@ -18,6 +18,7 @@ package linux
const (
XATTR_NAME_MAX = 255
XATTR_SIZE_MAX = 65536
+ XATTR_LIST_MAX = 65536
XATTR_CREATE = 1
XATTR_REPLACE = 2
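
A sketch of how the new XATTR_LIST_MAX limit is typically applied, clamping a caller-supplied size for a listxattr-style operation; the helper name is illustrative, not gVisor code:

package main

import "fmt"

// Limit mirrored from xattr.go above.
const XATTR_LIST_MAX = 65536

// clampXattrListSize bounds a caller-provided buffer size by
// XATTR_LIST_MAX, the usual pattern in a listxattr handler.
func clampXattrListSize(size uint64) uint64 {
	if size > XATTR_LIST_MAX {
		return XATTR_LIST_MAX
	}
	return size
}

func main() {
	fmt.Println(clampXattrListSize(1 << 20)) // 65536
	fmt.Println(clampXattrListSize(128))     // 128
}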