Diffstat (limited to 'pkg/abi/linux')
-rw-r--r--             | pkg/abi/linux/BUILD                              |   82
-rwxr-xr-x[-rw-r--r--] | pkg/abi/linux/epoll_amd64.go                     |    0
-rwxr-xr-x[-rw-r--r--] | pkg/abi/linux/epoll_arm64.go                     |    0
-rwxr-xr-x[-rw-r--r--] | pkg/abi/linux/file_amd64.go                      |    0
-rwxr-xr-x[-rw-r--r--] | pkg/abi/linux/file_arm64.go                      |    0
-rwxr-xr-x[-rw-r--r--] | pkg/abi/linux/ioctl_tun.go                       |    0
-rwxr-xr-x             | pkg/abi/linux/linux_abi_autogen_unsafe.go        | 1034
-rwxr-xr-x             | pkg/abi/linux/linux_amd64_abi_autogen_unsafe.go  |  325
-rwxr-xr-x             | pkg/abi/linux/linux_amd64_state_autogen.go       |    5
-rwxr-xr-x             | pkg/abi/linux/linux_arm64_abi_autogen_unsafe.go  |  333
-rwxr-xr-x             | pkg/abi/linux/linux_arm64_state_autogen.go       |    5
-rwxr-xr-x             | pkg/abi/linux/linux_state_autogen.go             |   68
-rw-r--r--             | pkg/abi/linux/netfilter_test.go                  |   45
-rwxr-xr-x[-rw-r--r--] | pkg/abi/linux/rseq.go                            |    0
-rwxr-xr-x[-rw-r--r--] | pkg/abi/linux/signalfd.go                        |    0
-rwxr-xr-x[-rw-r--r--] | pkg/abi/linux/xattr.go                           |    0
16 files changed, 1770 insertions, 127 deletions
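
For readers skimming the generated sources below: every marshallable type in this commit gains the same go_marshal method set — SizeBytes, MarshalBytes, UnmarshalBytes, Packed, MarshalUnsafe, UnmarshalUnsafe, CopyIn, CopyOut, and WriteTo. What follows is a minimal, self-contained sketch of the byte-level half of that method set for Timespec, inferred from the generated methods in this diff. Two assumptions for the sketch's sake: binary.LittleEndian stands in for usermem.ByteOrder (which follows the target architecture), and the Marshallable interface below is local to the sketch rather than the actual tools/go_marshal/marshal definition.

package main

import (
	"encoding/binary"
	"fmt"
)

// Marshallable mirrors the byte-level half of the method set implemented by
// the generated code in this commit; CopyIn/CopyOut/WriteTo are omitted
// because they need a task context or an io.Writer.
type Marshallable interface {
	SizeBytes() int
	MarshalBytes(dst []byte)
	UnmarshalBytes(src []byte)
	Packed() bool
}

// Timespec matches the layout the generated Timespec methods marshal.
type Timespec struct {
	Sec  int64
	Nsec int64
}

// byteOrder stands in for usermem.ByteOrder; little-endian is assumed here.
var byteOrder = binary.LittleEndian

func (t *Timespec) SizeBytes() int { return 16 }

func (t *Timespec) MarshalBytes(dst []byte) {
	byteOrder.PutUint64(dst[0:8], uint64(t.Sec))
	byteOrder.PutUint64(dst[8:16], uint64(t.Nsec))
}

func (t *Timespec) UnmarshalBytes(src []byte) {
	t.Sec = int64(byteOrder.Uint64(src[0:8]))
	t.Nsec = int64(byteOrder.Uint64(src[8:16]))
}

// Packed reports that the wire layout and the in-memory layout coincide,
// which is what lets the generated MarshalUnsafe copy the struct with a
// single safecopy instead of encoding field by field.
func (t *Timespec) Packed() bool { return true }

func main() {
	var _ Marshallable = (*Timespec)(nil) // same compile-time check the generated files use

	in := Timespec{Sec: 1, Nsec: 500}
	buf := make([]byte, in.SizeBytes())
	in.MarshalBytes(buf)

	var out Timespec
	out.UnmarshalBytes(buf)
	fmt.Println(out.Sec, out.Nsec) // 1 500
}

The generated versions differ mainly in that they re-slice dst/src after each field, marshal struct padding explicitly, and add the unsafe fast paths shown in the diff below.
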
diff --git a/pkg/abi/linux/BUILD b/pkg/abi/linux/BUILD deleted file mode 100644 index 322d1ccc4..000000000 --- a/pkg/abi/linux/BUILD +++ /dev/null @@ -1,82 +0,0 @@ -load("//tools:defs.bzl", "go_library", "go_test") - -# Package linux contains the constants and types needed to interface with a -# Linux kernel. It should be used instead of syscall or golang.org/x/sys/unix -# when the host OS may not be Linux. - -package(licenses = ["notice"]) - -go_library( - name = "linux", - srcs = [ - "aio.go", - "audit.go", - "bpf.go", - "capability.go", - "clone.go", - "dev.go", - "elf.go", - "epoll.go", - "epoll_amd64.go", - "epoll_arm64.go", - "errors.go", - "eventfd.go", - "exec.go", - "fcntl.go", - "file.go", - "file_amd64.go", - "file_arm64.go", - "fs.go", - "futex.go", - "inotify.go", - "ioctl.go", - "ioctl_tun.go", - "ip.go", - "ipc.go", - "limits.go", - "linux.go", - "mm.go", - "netdevice.go", - "netfilter.go", - "netlink.go", - "netlink_route.go", - "poll.go", - "prctl.go", - "ptrace.go", - "rseq.go", - "rusage.go", - "sched.go", - "seccomp.go", - "sem.go", - "shm.go", - "signal.go", - "signalfd.go", - "socket.go", - "splice.go", - "tcp.go", - "time.go", - "timer.go", - "tty.go", - "uio.go", - "utsname.go", - "wait.go", - "xattr.go", - ], - marshal = True, - visibility = ["//visibility:public"], - deps = [ - "//pkg/abi", - "//pkg/binary", - "//pkg/bits", - ], -) - -go_test( - name = "linux_test", - size = "small", - srcs = ["netfilter_test.go"], - library = ":linux", - deps = [ - "//pkg/binary", - ], -) diff --git a/pkg/abi/linux/epoll_amd64.go b/pkg/abi/linux/epoll_amd64.go index 34ff18009..34ff18009 100644..100755 --- a/pkg/abi/linux/epoll_amd64.go +++ b/pkg/abi/linux/epoll_amd64.go diff --git a/pkg/abi/linux/epoll_arm64.go b/pkg/abi/linux/epoll_arm64.go index f86c35329..f86c35329 100644..100755 --- a/pkg/abi/linux/epoll_arm64.go +++ b/pkg/abi/linux/epoll_arm64.go diff --git a/pkg/abi/linux/file_amd64.go b/pkg/abi/linux/file_amd64.go index 6b72364ea..6b72364ea 100644..100755 --- a/pkg/abi/linux/file_amd64.go +++ b/pkg/abi/linux/file_amd64.go diff --git a/pkg/abi/linux/file_arm64.go b/pkg/abi/linux/file_arm64.go index 6492c9038..6492c9038 100644..100755 --- a/pkg/abi/linux/file_arm64.go +++ b/pkg/abi/linux/file_arm64.go diff --git a/pkg/abi/linux/ioctl_tun.go b/pkg/abi/linux/ioctl_tun.go index c59c9c136..c59c9c136 100644..100755 --- a/pkg/abi/linux/ioctl_tun.go +++ b/pkg/abi/linux/ioctl_tun.go diff --git a/pkg/abi/linux/linux_abi_autogen_unsafe.go b/pkg/abi/linux/linux_abi_autogen_unsafe.go new file mode 100755 index 000000000..46f7c1197 --- /dev/null +++ b/pkg/abi/linux/linux_abi_autogen_unsafe.go @@ -0,0 +1,1034 @@ +// Automatically generated marshal implementation. See tools/go_marshal. + +package linux + +import ( + "gvisor.dev/gvisor/pkg/safecopy" + "gvisor.dev/gvisor/pkg/usermem" + "gvisor.dev/gvisor/tools/go_marshal/marshal" + "io" + "reflect" + "runtime" + "unsafe" +) + +// Marshallable types used by this file. +var _ marshal.Marshallable = (*RSeqCriticalSection)(nil) +var _ marshal.Marshallable = (*SignalSet)(nil) +var _ marshal.Marshallable = (*Statfs)(nil) +var _ marshal.Marshallable = (*Statx)(nil) +var _ marshal.Marshallable = (*StatxTimestamp)(nil) +var _ marshal.Marshallable = (*Timespec)(nil) +var _ marshal.Marshallable = (*Timeval)(nil) +var _ marshal.Marshallable = (*Utime)(nil) + +// SizeBytes implements marshal.Marshallable.SizeBytes. 
+func (s *Statx) SizeBytes() int { + return 80 + + (*StatxTimestamp)(nil).SizeBytes() + + (*StatxTimestamp)(nil).SizeBytes() + + (*StatxTimestamp)(nil).SizeBytes() + + (*StatxTimestamp)(nil).SizeBytes() +} + +// MarshalBytes implements marshal.Marshallable.MarshalBytes. +func (s *Statx) MarshalBytes(dst []byte) { + usermem.ByteOrder.PutUint32(dst[:4], uint32(s.Mask)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(s.Blksize)) + dst = dst[4:] + usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Attributes)) + dst = dst[8:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(s.Nlink)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(s.UID)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(s.GID)) + dst = dst[4:] + usermem.ByteOrder.PutUint16(dst[:2], uint16(s.Mode)) + dst = dst[2:] + // Padding: dst[:sizeof(uint16)] ~= uint16(0) + dst = dst[2:] + usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Ino)) + dst = dst[8:] + usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Size)) + dst = dst[8:] + usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Blocks)) + dst = dst[8:] + usermem.ByteOrder.PutUint64(dst[:8], uint64(s.AttributesMask)) + dst = dst[8:] + s.Atime.MarshalBytes(dst[:s.Atime.SizeBytes()]) + dst = dst[s.Atime.SizeBytes():] + s.Btime.MarshalBytes(dst[:s.Btime.SizeBytes()]) + dst = dst[s.Btime.SizeBytes():] + s.Ctime.MarshalBytes(dst[:s.Ctime.SizeBytes()]) + dst = dst[s.Ctime.SizeBytes():] + s.Mtime.MarshalBytes(dst[:s.Mtime.SizeBytes()]) + dst = dst[s.Mtime.SizeBytes():] + usermem.ByteOrder.PutUint32(dst[:4], uint32(s.RdevMajor)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(s.RdevMinor)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(s.DevMajor)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(s.DevMinor)) + dst = dst[4:] +} + +// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. +func (s *Statx) UnmarshalBytes(src []byte) { + s.Mask = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + s.Blksize = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + s.Attributes = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + s.Nlink = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + s.UID = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + s.GID = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + s.Mode = uint16(usermem.ByteOrder.Uint16(src[:2])) + src = src[2:] + // Padding: var _ uint16 ~= src[:sizeof(uint16)] + src = src[2:] + s.Ino = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + s.Size = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + s.Blocks = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + s.AttributesMask = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + s.Atime.UnmarshalBytes(src[:s.Atime.SizeBytes()]) + src = src[s.Atime.SizeBytes():] + s.Btime.UnmarshalBytes(src[:s.Btime.SizeBytes()]) + src = src[s.Btime.SizeBytes():] + s.Ctime.UnmarshalBytes(src[:s.Ctime.SizeBytes()]) + src = src[s.Ctime.SizeBytes():] + s.Mtime.UnmarshalBytes(src[:s.Mtime.SizeBytes()]) + src = src[s.Mtime.SizeBytes():] + s.RdevMajor = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + s.RdevMinor = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + s.DevMajor = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + s.DevMinor = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] +} + +// Packed implements marshal.Marshallable.Packed. 
+func (s *Statx) Packed() bool { + return s.Atime.Packed() && s.Btime.Packed() && s.Ctime.Packed() && s.Mtime.Packed() +} + +// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. +func (s *Statx) MarshalUnsafe(dst []byte) { + if s.Ctime.Packed() && s.Mtime.Packed() && s.Atime.Packed() && s.Btime.Packed() { + safecopy.CopyIn(dst, unsafe.Pointer(s)) + } else { + s.MarshalBytes(dst) + } +} + +// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. +func (s *Statx) UnmarshalUnsafe(src []byte) { + if s.Atime.Packed() && s.Btime.Packed() && s.Ctime.Packed() && s.Mtime.Packed() { + safecopy.CopyOut(unsafe.Pointer(s), src) + } else { + s.UnmarshalBytes(src) + } +} + +// CopyOut implements marshal.Marshallable.CopyOut. +func (s *Statx) CopyOut(task marshal.Task, addr usermem.Addr) error { + if !s.Mtime.Packed() && s.Atime.Packed() && s.Btime.Packed() && s.Ctime.Packed() { + // Type Statx doesn't have a packed layout in memory, fall back to MarshalBytes. + buf := task.CopyScratchBuffer(s.SizeBytes()) + s.MarshalBytes(buf) + _, err := task.CopyOutBytes(addr, buf) + return err + } + + // Bypass escape analysis on s. The no-op arithmetic operation on the + // pointer makes the compiler think val doesn't depend on s. + // See src/runtime/stubs.go:noescape() in the golang toolchain. + ptr := unsafe.Pointer(s) + val := uintptr(ptr) + val = val^0 + + // Construct a slice backed by s's underlying memory. + var buf []byte + hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) + hdr.Data = val + hdr.Len = s.SizeBytes() + hdr.Cap = s.SizeBytes() + + _, err := task.CopyOutBytes(addr, buf) + // Since we bypassed the compiler's escape analysis, indicate that s + // must live until after the CopyOutBytes. + runtime.KeepAlive(s) + return err +} + +// CopyIn implements marshal.Marshallable.CopyIn. +func (s *Statx) CopyIn(task marshal.Task, addr usermem.Addr) error { + if !s.Atime.Packed() && s.Btime.Packed() && s.Ctime.Packed() && s.Mtime.Packed() { + // Type Statx doesn't have a packed layout in memory, fall back to UnmarshalBytes. + buf := task.CopyScratchBuffer(s.SizeBytes()) + _, err := task.CopyInBytes(addr, buf) + if err != nil { + return err + } + s.UnmarshalBytes(buf) + return nil + } + + // Bypass escape analysis on s. The no-op arithmetic operation on the + // pointer makes the compiler think val doesn't depend on s. + // See src/runtime/stubs.go:noescape() in the golang toolchain. + ptr := unsafe.Pointer(s) + val := uintptr(ptr) + val = val^0 + + // Construct a slice backed by s's underlying memory. + var buf []byte + hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) + hdr.Data = val + hdr.Len = s.SizeBytes() + hdr.Cap = s.SizeBytes() + + _, err := task.CopyInBytes(addr, buf) + // Since we bypassed the compiler's escape analysis, indicate that s + // must live until after the CopyInBytes. + runtime.KeepAlive(s) + return err +} + +// WriteTo implements io.WriterTo.WriteTo. +func (s *Statx) WriteTo(w io.Writer) (int64, error) { + if !s.Ctime.Packed() && s.Mtime.Packed() && s.Atime.Packed() && s.Btime.Packed() { + // Type Statx doesn't have a packed layout in memory, fall back to MarshalBytes. + buf := make([]byte, s.SizeBytes()) + s.MarshalBytes(buf) + n, err := w.Write(buf) + return int64(n), err + } + + // Bypass escape analysis on s. The no-op arithmetic operation on the + // pointer makes the compiler think val doesn't depend on s. + // See src/runtime/stubs.go:noescape() in the golang toolchain. 
+ ptr := unsafe.Pointer(s) + val := uintptr(ptr) + val = val^0 + + // Construct a slice backed by s's underlying memory. + var buf []byte + hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) + hdr.Data = val + hdr.Len = s.SizeBytes() + hdr.Cap = s.SizeBytes() + + len, err := w.Write(buf) + // Since we bypassed the compiler's escape analysis, indicate that s + // must live until after the Write. + runtime.KeepAlive(s) + return int64(len), err +} + +// SizeBytes implements marshal.Marshallable.SizeBytes. +func (s *Statfs) SizeBytes() int { + return 120 +} + +// MarshalBytes implements marshal.Marshallable.MarshalBytes. +func (s *Statfs) MarshalBytes(dst []byte) { + usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Type)) + dst = dst[8:] + usermem.ByteOrder.PutUint64(dst[:8], uint64(s.BlockSize)) + dst = dst[8:] + usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Blocks)) + dst = dst[8:] + usermem.ByteOrder.PutUint64(dst[:8], uint64(s.BlocksFree)) + dst = dst[8:] + usermem.ByteOrder.PutUint64(dst[:8], uint64(s.BlocksAvailable)) + dst = dst[8:] + usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Files)) + dst = dst[8:] + usermem.ByteOrder.PutUint64(dst[:8], uint64(s.FilesFree)) + dst = dst[8:] + for idx := 0; idx < 2; idx++ { + usermem.ByteOrder.PutUint32(dst[:4], uint32(s.FSID[idx])) + dst = dst[4:] + } + usermem.ByteOrder.PutUint64(dst[:8], uint64(s.NameLength)) + dst = dst[8:] + usermem.ByteOrder.PutUint64(dst[:8], uint64(s.FragmentSize)) + dst = dst[8:] + usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Flags)) + dst = dst[8:] + for idx := 0; idx < 4; idx++ { + usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Spare[idx])) + dst = dst[8:] + } +} + +// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. +func (s *Statfs) UnmarshalBytes(src []byte) { + s.Type = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + s.BlockSize = int64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + s.Blocks = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + s.BlocksFree = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + s.BlocksAvailable = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + s.Files = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + s.FilesFree = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + for idx := 0; idx < 2; idx++ { + s.FSID[idx] = int32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + } + s.NameLength = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + s.FragmentSize = int64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + s.Flags = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + for idx := 0; idx < 4; idx++ { + s.Spare[idx] = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + } +} + +// Packed implements marshal.Marshallable.Packed. +func (s *Statfs) Packed() bool { + return true +} + +// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. +func (s *Statfs) MarshalUnsafe(dst []byte) { + safecopy.CopyIn(dst, unsafe.Pointer(s)) +} + +// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. +func (s *Statfs) UnmarshalUnsafe(src []byte) { + safecopy.CopyOut(unsafe.Pointer(s), src) +} + +// CopyOut implements marshal.Marshallable.CopyOut. +func (s *Statfs) CopyOut(task marshal.Task, addr usermem.Addr) error { + // Bypass escape analysis on s. The no-op arithmetic operation on the + // pointer makes the compiler think val doesn't depend on s. + // See src/runtime/stubs.go:noescape() in the golang toolchain. 
+ ptr := unsafe.Pointer(s) + val := uintptr(ptr) + val = val^0 + + // Construct a slice backed by s's underlying memory. + var buf []byte + hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) + hdr.Data = val + hdr.Len = s.SizeBytes() + hdr.Cap = s.SizeBytes() + + _, err := task.CopyOutBytes(addr, buf) + // Since we bypassed the compiler's escape analysis, indicate that s + // must live until after the CopyOutBytes. + runtime.KeepAlive(s) + return err +} + +// CopyIn implements marshal.Marshallable.CopyIn. +func (s *Statfs) CopyIn(task marshal.Task, addr usermem.Addr) error { + // Bypass escape analysis on s. The no-op arithmetic operation on the + // pointer makes the compiler think val doesn't depend on s. + // See src/runtime/stubs.go:noescape() in the golang toolchain. + ptr := unsafe.Pointer(s) + val := uintptr(ptr) + val = val^0 + + // Construct a slice backed by s's underlying memory. + var buf []byte + hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) + hdr.Data = val + hdr.Len = s.SizeBytes() + hdr.Cap = s.SizeBytes() + + _, err := task.CopyInBytes(addr, buf) + // Since we bypassed the compiler's escape analysis, indicate that s + // must live until after the CopyInBytes. + runtime.KeepAlive(s) + return err +} + +// WriteTo implements io.WriterTo.WriteTo. +func (s *Statfs) WriteTo(w io.Writer) (int64, error) { + // Bypass escape analysis on s. The no-op arithmetic operation on the + // pointer makes the compiler think val doesn't depend on s. + // See src/runtime/stubs.go:noescape() in the golang toolchain. + ptr := unsafe.Pointer(s) + val := uintptr(ptr) + val = val^0 + + // Construct a slice backed by s's underlying memory. + var buf []byte + hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) + hdr.Data = val + hdr.Len = s.SizeBytes() + hdr.Cap = s.SizeBytes() + + len, err := w.Write(buf) + // Since we bypassed the compiler's escape analysis, indicate that s + // must live until after the Write. + runtime.KeepAlive(s) + return int64(len), err +} + +// SizeBytes implements marshal.Marshallable.SizeBytes. +func (r *RSeqCriticalSection) SizeBytes() int { + return 32 +} + +// MarshalBytes implements marshal.Marshallable.MarshalBytes. +func (r *RSeqCriticalSection) MarshalBytes(dst []byte) { + usermem.ByteOrder.PutUint32(dst[:4], uint32(r.Version)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(r.Flags)) + dst = dst[4:] + usermem.ByteOrder.PutUint64(dst[:8], uint64(r.Start)) + dst = dst[8:] + usermem.ByteOrder.PutUint64(dst[:8], uint64(r.PostCommitOffset)) + dst = dst[8:] + usermem.ByteOrder.PutUint64(dst[:8], uint64(r.Abort)) + dst = dst[8:] +} + +// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. +func (r *RSeqCriticalSection) UnmarshalBytes(src []byte) { + r.Version = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + r.Flags = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + r.Start = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + r.PostCommitOffset = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + r.Abort = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] +} + +// Packed implements marshal.Marshallable.Packed. +func (r *RSeqCriticalSection) Packed() bool { + return true +} + +// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. +func (r *RSeqCriticalSection) MarshalUnsafe(dst []byte) { + safecopy.CopyIn(dst, unsafe.Pointer(r)) +} + +// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. 
+func (r *RSeqCriticalSection) UnmarshalUnsafe(src []byte) { + safecopy.CopyOut(unsafe.Pointer(r), src) +} + +// CopyOut implements marshal.Marshallable.CopyOut. +func (r *RSeqCriticalSection) CopyOut(task marshal.Task, addr usermem.Addr) error { + // Bypass escape analysis on r. The no-op arithmetic operation on the + // pointer makes the compiler think val doesn't depend on r. + // See src/runtime/stubs.go:noescape() in the golang toolchain. + ptr := unsafe.Pointer(r) + val := uintptr(ptr) + val = val^0 + + // Construct a slice backed by r's underlying memory. + var buf []byte + hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) + hdr.Data = val + hdr.Len = r.SizeBytes() + hdr.Cap = r.SizeBytes() + + _, err := task.CopyOutBytes(addr, buf) + // Since we bypassed the compiler's escape analysis, indicate that r + // must live until after the CopyOutBytes. + runtime.KeepAlive(r) + return err +} + +// CopyIn implements marshal.Marshallable.CopyIn. +func (r *RSeqCriticalSection) CopyIn(task marshal.Task, addr usermem.Addr) error { + // Bypass escape analysis on r. The no-op arithmetic operation on the + // pointer makes the compiler think val doesn't depend on r. + // See src/runtime/stubs.go:noescape() in the golang toolchain. + ptr := unsafe.Pointer(r) + val := uintptr(ptr) + val = val^0 + + // Construct a slice backed by r's underlying memory. + var buf []byte + hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) + hdr.Data = val + hdr.Len = r.SizeBytes() + hdr.Cap = r.SizeBytes() + + _, err := task.CopyInBytes(addr, buf) + // Since we bypassed the compiler's escape analysis, indicate that r + // must live until after the CopyInBytes. + runtime.KeepAlive(r) + return err +} + +// WriteTo implements io.WriterTo.WriteTo. +func (r *RSeqCriticalSection) WriteTo(w io.Writer) (int64, error) { + // Bypass escape analysis on r. The no-op arithmetic operation on the + // pointer makes the compiler think val doesn't depend on r. + // See src/runtime/stubs.go:noescape() in the golang toolchain. + ptr := unsafe.Pointer(r) + val := uintptr(ptr) + val = val^0 + + // Construct a slice backed by r's underlying memory. + var buf []byte + hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) + hdr.Data = val + hdr.Len = r.SizeBytes() + hdr.Cap = r.SizeBytes() + + len, err := w.Write(buf) + // Since we bypassed the compiler's escape analysis, indicate that r + // must live until after the Write. + runtime.KeepAlive(r) + return int64(len), err +} + +// SizeBytes implements marshal.Marshallable.SizeBytes. +func (s *SignalSet) SizeBytes() int { + return 8 +} + +// MarshalBytes implements marshal.Marshallable.MarshalBytes. +func (s *SignalSet) MarshalBytes(dst []byte) { + usermem.ByteOrder.PutUint64(dst[:8], uint64(*s)) +} + +// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. +func (s *SignalSet) UnmarshalBytes(src []byte) { + *s = SignalSet(uint64(usermem.ByteOrder.Uint64(src[:8]))) +} + +// Packed implements marshal.Marshallable.Packed. +func (s *SignalSet) Packed() bool { + // Scalar newtypes are always packed. + return true +} + +// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. +func (s *SignalSet) MarshalUnsafe(dst []byte) { + safecopy.CopyIn(dst, unsafe.Pointer(s)) +} + +// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. +func (s *SignalSet) UnmarshalUnsafe(src []byte) { + safecopy.CopyOut(unsafe.Pointer(s), src) +} + +// CopyOut implements marshal.Marshallable.CopyOut. 
+func (s *SignalSet) CopyOut(task marshal.Task, addr usermem.Addr) error { + // Bypass escape analysis on s. The no-op arithmetic operation on the + // pointer makes the compiler think val doesn't depend on s. + // See src/runtime/stubs.go:noescape() in the golang toolchain. + ptr := unsafe.Pointer(s) + val := uintptr(ptr) + val = val^0 + + // Construct a slice backed by s's underlying memory. + var buf []byte + hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) + hdr.Data = val + hdr.Len = s.SizeBytes() + hdr.Cap = s.SizeBytes() + + _, err := task.CopyOutBytes(addr, buf) + // Since we bypassed the compiler's escape analysis, indicate that s + // must live until after the CopyOutBytes. + runtime.KeepAlive(s) + return err +} + +// CopyIn implements marshal.Marshallable.CopyIn. +func (s *SignalSet) CopyIn(task marshal.Task, addr usermem.Addr) error { + // Bypass escape analysis on s. The no-op arithmetic operation on the + // pointer makes the compiler think val doesn't depend on s. + // See src/runtime/stubs.go:noescape() in the golang toolchain. + ptr := unsafe.Pointer(s) + val := uintptr(ptr) + val = val^0 + + // Construct a slice backed by s's underlying memory. + var buf []byte + hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) + hdr.Data = val + hdr.Len = s.SizeBytes() + hdr.Cap = s.SizeBytes() + + _, err := task.CopyInBytes(addr, buf) + // Since we bypassed the compiler's escape analysis, indicate that s + // must live until after the CopyInBytes. + runtime.KeepAlive(s) + return err +} + +// WriteTo implements io.WriterTo.WriteTo. +func (s *SignalSet) WriteTo(w io.Writer) (int64, error) { + // Bypass escape analysis on s. The no-op arithmetic operation on the + // pointer makes the compiler think val doesn't depend on s. + // See src/runtime/stubs.go:noescape() in the golang toolchain. + ptr := unsafe.Pointer(s) + val := uintptr(ptr) + val = val^0 + + // Construct a slice backed by s's underlying memory. + var buf []byte + hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) + hdr.Data = val + hdr.Len = s.SizeBytes() + hdr.Cap = s.SizeBytes() + + len, err := w.Write(buf) + // Since we bypassed the compiler's escape analysis, indicate that s + // must live until after the Write. + runtime.KeepAlive(s) + return int64(len), err +} + +// SizeBytes implements marshal.Marshallable.SizeBytes. +func (t *Timespec) SizeBytes() int { + return 16 +} + +// MarshalBytes implements marshal.Marshallable.MarshalBytes. +func (t *Timespec) MarshalBytes(dst []byte) { + usermem.ByteOrder.PutUint64(dst[:8], uint64(t.Sec)) + dst = dst[8:] + usermem.ByteOrder.PutUint64(dst[:8], uint64(t.Nsec)) + dst = dst[8:] +} + +// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. +func (t *Timespec) UnmarshalBytes(src []byte) { + t.Sec = int64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + t.Nsec = int64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] +} + +// Packed implements marshal.Marshallable.Packed. +func (t *Timespec) Packed() bool { + return true +} + +// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. +func (t *Timespec) MarshalUnsafe(dst []byte) { + safecopy.CopyIn(dst, unsafe.Pointer(t)) +} + +// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. +func (t *Timespec) UnmarshalUnsafe(src []byte) { + safecopy.CopyOut(unsafe.Pointer(t), src) +} + +// CopyOut implements marshal.Marshallable.CopyOut. +func (t *Timespec) CopyOut(task marshal.Task, addr usermem.Addr) error { + // Bypass escape analysis on t. 
The no-op arithmetic operation on the + // pointer makes the compiler think val doesn't depend on t. + // See src/runtime/stubs.go:noescape() in the golang toolchain. + ptr := unsafe.Pointer(t) + val := uintptr(ptr) + val = val^0 + + // Construct a slice backed by t's underlying memory. + var buf []byte + hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) + hdr.Data = val + hdr.Len = t.SizeBytes() + hdr.Cap = t.SizeBytes() + + _, err := task.CopyOutBytes(addr, buf) + // Since we bypassed the compiler's escape analysis, indicate that t + // must live until after the CopyOutBytes. + runtime.KeepAlive(t) + return err +} + +// CopyIn implements marshal.Marshallable.CopyIn. +func (t *Timespec) CopyIn(task marshal.Task, addr usermem.Addr) error { + // Bypass escape analysis on t. The no-op arithmetic operation on the + // pointer makes the compiler think val doesn't depend on t. + // See src/runtime/stubs.go:noescape() in the golang toolchain. + ptr := unsafe.Pointer(t) + val := uintptr(ptr) + val = val^0 + + // Construct a slice backed by t's underlying memory. + var buf []byte + hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) + hdr.Data = val + hdr.Len = t.SizeBytes() + hdr.Cap = t.SizeBytes() + + _, err := task.CopyInBytes(addr, buf) + // Since we bypassed the compiler's escape analysis, indicate that t + // must live until after the CopyInBytes. + runtime.KeepAlive(t) + return err +} + +// WriteTo implements io.WriterTo.WriteTo. +func (t *Timespec) WriteTo(w io.Writer) (int64, error) { + // Bypass escape analysis on t. The no-op arithmetic operation on the + // pointer makes the compiler think val doesn't depend on t. + // See src/runtime/stubs.go:noescape() in the golang toolchain. + ptr := unsafe.Pointer(t) + val := uintptr(ptr) + val = val^0 + + // Construct a slice backed by t's underlying memory. + var buf []byte + hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) + hdr.Data = val + hdr.Len = t.SizeBytes() + hdr.Cap = t.SizeBytes() + + len, err := w.Write(buf) + // Since we bypassed the compiler's escape analysis, indicate that t + // must live until after the Write. + runtime.KeepAlive(t) + return int64(len), err +} + +// SizeBytes implements marshal.Marshallable.SizeBytes. +func (t *Timeval) SizeBytes() int { + return 16 +} + +// MarshalBytes implements marshal.Marshallable.MarshalBytes. +func (t *Timeval) MarshalBytes(dst []byte) { + usermem.ByteOrder.PutUint64(dst[:8], uint64(t.Sec)) + dst = dst[8:] + usermem.ByteOrder.PutUint64(dst[:8], uint64(t.Usec)) + dst = dst[8:] +} + +// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. +func (t *Timeval) UnmarshalBytes(src []byte) { + t.Sec = int64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + t.Usec = int64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] +} + +// Packed implements marshal.Marshallable.Packed. +func (t *Timeval) Packed() bool { + return true +} + +// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. +func (t *Timeval) MarshalUnsafe(dst []byte) { + safecopy.CopyIn(dst, unsafe.Pointer(t)) +} + +// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. +func (t *Timeval) UnmarshalUnsafe(src []byte) { + safecopy.CopyOut(unsafe.Pointer(t), src) +} + +// CopyOut implements marshal.Marshallable.CopyOut. +func (t *Timeval) CopyOut(task marshal.Task, addr usermem.Addr) error { + // Bypass escape analysis on t. The no-op arithmetic operation on the + // pointer makes the compiler think val doesn't depend on t. + // See src/runtime/stubs.go:noescape() in the golang toolchain. 
+ ptr := unsafe.Pointer(t) + val := uintptr(ptr) + val = val^0 + + // Construct a slice backed by t's underlying memory. + var buf []byte + hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) + hdr.Data = val + hdr.Len = t.SizeBytes() + hdr.Cap = t.SizeBytes() + + _, err := task.CopyOutBytes(addr, buf) + // Since we bypassed the compiler's escape analysis, indicate that t + // must live until after the CopyOutBytes. + runtime.KeepAlive(t) + return err +} + +// CopyIn implements marshal.Marshallable.CopyIn. +func (t *Timeval) CopyIn(task marshal.Task, addr usermem.Addr) error { + // Bypass escape analysis on t. The no-op arithmetic operation on the + // pointer makes the compiler think val doesn't depend on t. + // See src/runtime/stubs.go:noescape() in the golang toolchain. + ptr := unsafe.Pointer(t) + val := uintptr(ptr) + val = val^0 + + // Construct a slice backed by t's underlying memory. + var buf []byte + hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) + hdr.Data = val + hdr.Len = t.SizeBytes() + hdr.Cap = t.SizeBytes() + + _, err := task.CopyInBytes(addr, buf) + // Since we bypassed the compiler's escape analysis, indicate that t + // must live until after the CopyInBytes. + runtime.KeepAlive(t) + return err +} + +// WriteTo implements io.WriterTo.WriteTo. +func (t *Timeval) WriteTo(w io.Writer) (int64, error) { + // Bypass escape analysis on t. The no-op arithmetic operation on the + // pointer makes the compiler think val doesn't depend on t. + // See src/runtime/stubs.go:noescape() in the golang toolchain. + ptr := unsafe.Pointer(t) + val := uintptr(ptr) + val = val^0 + + // Construct a slice backed by t's underlying memory. + var buf []byte + hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) + hdr.Data = val + hdr.Len = t.SizeBytes() + hdr.Cap = t.SizeBytes() + + len, err := w.Write(buf) + // Since we bypassed the compiler's escape analysis, indicate that t + // must live until after the Write. + runtime.KeepAlive(t) + return int64(len), err +} + +// SizeBytes implements marshal.Marshallable.SizeBytes. +func (s *StatxTimestamp) SizeBytes() int { + return 16 +} + +// MarshalBytes implements marshal.Marshallable.MarshalBytes. +func (s *StatxTimestamp) MarshalBytes(dst []byte) { + usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Sec)) + dst = dst[8:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(s.Nsec)) + dst = dst[4:] + // Padding: dst[:sizeof(int32)] ~= int32(0) + dst = dst[4:] +} + +// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. +func (s *StatxTimestamp) UnmarshalBytes(src []byte) { + s.Sec = int64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + s.Nsec = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + // Padding: var _ int32 ~= src[:sizeof(int32)] + src = src[4:] +} + +// Packed implements marshal.Marshallable.Packed. +func (s *StatxTimestamp) Packed() bool { + return true +} + +// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. +func (s *StatxTimestamp) MarshalUnsafe(dst []byte) { + safecopy.CopyIn(dst, unsafe.Pointer(s)) +} + +// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. +func (s *StatxTimestamp) UnmarshalUnsafe(src []byte) { + safecopy.CopyOut(unsafe.Pointer(s), src) +} + +// CopyOut implements marshal.Marshallable.CopyOut. +func (s *StatxTimestamp) CopyOut(task marshal.Task, addr usermem.Addr) error { + // Bypass escape analysis on s. The no-op arithmetic operation on the + // pointer makes the compiler think val doesn't depend on s. + // See src/runtime/stubs.go:noescape() in the golang toolchain. 
+ ptr := unsafe.Pointer(s) + val := uintptr(ptr) + val = val^0 + + // Construct a slice backed by s's underlying memory. + var buf []byte + hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) + hdr.Data = val + hdr.Len = s.SizeBytes() + hdr.Cap = s.SizeBytes() + + _, err := task.CopyOutBytes(addr, buf) + // Since we bypassed the compiler's escape analysis, indicate that s + // must live until after the CopyOutBytes. + runtime.KeepAlive(s) + return err +} + +// CopyIn implements marshal.Marshallable.CopyIn. +func (s *StatxTimestamp) CopyIn(task marshal.Task, addr usermem.Addr) error { + // Bypass escape analysis on s. The no-op arithmetic operation on the + // pointer makes the compiler think val doesn't depend on s. + // See src/runtime/stubs.go:noescape() in the golang toolchain. + ptr := unsafe.Pointer(s) + val := uintptr(ptr) + val = val^0 + + // Construct a slice backed by s's underlying memory. + var buf []byte + hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) + hdr.Data = val + hdr.Len = s.SizeBytes() + hdr.Cap = s.SizeBytes() + + _, err := task.CopyInBytes(addr, buf) + // Since we bypassed the compiler's escape analysis, indicate that s + // must live until after the CopyInBytes. + runtime.KeepAlive(s) + return err +} + +// WriteTo implements io.WriterTo.WriteTo. +func (s *StatxTimestamp) WriteTo(w io.Writer) (int64, error) { + // Bypass escape analysis on s. The no-op arithmetic operation on the + // pointer makes the compiler think val doesn't depend on s. + // See src/runtime/stubs.go:noescape() in the golang toolchain. + ptr := unsafe.Pointer(s) + val := uintptr(ptr) + val = val^0 + + // Construct a slice backed by s's underlying memory. + var buf []byte + hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) + hdr.Data = val + hdr.Len = s.SizeBytes() + hdr.Cap = s.SizeBytes() + + len, err := w.Write(buf) + // Since we bypassed the compiler's escape analysis, indicate that s + // must live until after the Write. + runtime.KeepAlive(s) + return int64(len), err +} + +// SizeBytes implements marshal.Marshallable.SizeBytes. +func (u *Utime) SizeBytes() int { + return 16 +} + +// MarshalBytes implements marshal.Marshallable.MarshalBytes. +func (u *Utime) MarshalBytes(dst []byte) { + usermem.ByteOrder.PutUint64(dst[:8], uint64(u.Actime)) + dst = dst[8:] + usermem.ByteOrder.PutUint64(dst[:8], uint64(u.Modtime)) + dst = dst[8:] +} + +// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. +func (u *Utime) UnmarshalBytes(src []byte) { + u.Actime = int64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + u.Modtime = int64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] +} + +// Packed implements marshal.Marshallable.Packed. +func (u *Utime) Packed() bool { + return true +} + +// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. +func (u *Utime) MarshalUnsafe(dst []byte) { + safecopy.CopyIn(dst, unsafe.Pointer(u)) +} + +// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. +func (u *Utime) UnmarshalUnsafe(src []byte) { + safecopy.CopyOut(unsafe.Pointer(u), src) +} + +// CopyOut implements marshal.Marshallable.CopyOut. +func (u *Utime) CopyOut(task marshal.Task, addr usermem.Addr) error { + // Bypass escape analysis on u. The no-op arithmetic operation on the + // pointer makes the compiler think val doesn't depend on u. + // See src/runtime/stubs.go:noescape() in the golang toolchain. + ptr := unsafe.Pointer(u) + val := uintptr(ptr) + val = val^0 + + // Construct a slice backed by u's underlying memory. 
+ var buf []byte + hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) + hdr.Data = val + hdr.Len = u.SizeBytes() + hdr.Cap = u.SizeBytes() + + _, err := task.CopyOutBytes(addr, buf) + // Since we bypassed the compiler's escape analysis, indicate that u + // must live until after the CopyOutBytes. + runtime.KeepAlive(u) + return err +} + +// CopyIn implements marshal.Marshallable.CopyIn. +func (u *Utime) CopyIn(task marshal.Task, addr usermem.Addr) error { + // Bypass escape analysis on u. The no-op arithmetic operation on the + // pointer makes the compiler think val doesn't depend on u. + // See src/runtime/stubs.go:noescape() in the golang toolchain. + ptr := unsafe.Pointer(u) + val := uintptr(ptr) + val = val^0 + + // Construct a slice backed by u's underlying memory. + var buf []byte + hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) + hdr.Data = val + hdr.Len = u.SizeBytes() + hdr.Cap = u.SizeBytes() + + _, err := task.CopyInBytes(addr, buf) + // Since we bypassed the compiler's escape analysis, indicate that u + // must live until after the CopyInBytes. + runtime.KeepAlive(u) + return err +} + +// WriteTo implements io.WriterTo.WriteTo. +func (u *Utime) WriteTo(w io.Writer) (int64, error) { + // Bypass escape analysis on u. The no-op arithmetic operation on the + // pointer makes the compiler think val doesn't depend on u. + // See src/runtime/stubs.go:noescape() in the golang toolchain. + ptr := unsafe.Pointer(u) + val := uintptr(ptr) + val = val^0 + + // Construct a slice backed by u's underlying memory. + var buf []byte + hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) + hdr.Data = val + hdr.Len = u.SizeBytes() + hdr.Cap = u.SizeBytes() + + len, err := w.Write(buf) + // Since we bypassed the compiler's escape analysis, indicate that u + // must live until after the Write. + runtime.KeepAlive(u) + return int64(len), err +} + diff --git a/pkg/abi/linux/linux_amd64_abi_autogen_unsafe.go b/pkg/abi/linux/linux_amd64_abi_autogen_unsafe.go new file mode 100755 index 000000000..9b9faaa36 --- /dev/null +++ b/pkg/abi/linux/linux_amd64_abi_autogen_unsafe.go @@ -0,0 +1,325 @@ +// Automatically generated marshal implementation. See tools/go_marshal. + +// +build amd64 + +package linux + +import ( + "gvisor.dev/gvisor/pkg/safecopy" + "gvisor.dev/gvisor/pkg/usermem" + "gvisor.dev/gvisor/tools/go_marshal/marshal" + "io" + "reflect" + "runtime" + "unsafe" +) + +// Marshallable types used by this file. +var _ marshal.Marshallable = (*EpollEvent)(nil) +var _ marshal.Marshallable = (*Stat)(nil) +var _ marshal.Marshallable = (*Timespec)(nil) + +// SizeBytes implements marshal.Marshallable.SizeBytes. +func (e *EpollEvent) SizeBytes() int { + return 12 +} + +// MarshalBytes implements marshal.Marshallable.MarshalBytes. +func (e *EpollEvent) MarshalBytes(dst []byte) { + usermem.ByteOrder.PutUint32(dst[:4], uint32(e.Events)) + dst = dst[4:] + for idx := 0; idx < 2; idx++ { + usermem.ByteOrder.PutUint32(dst[:4], uint32(e.Data[idx])) + dst = dst[4:] + } +} + +// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. +func (e *EpollEvent) UnmarshalBytes(src []byte) { + e.Events = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + for idx := 0; idx < 2; idx++ { + e.Data[idx] = int32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + } +} + +// Packed implements marshal.Marshallable.Packed. +func (e *EpollEvent) Packed() bool { + return true +} + +// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. 
+func (e *EpollEvent) MarshalUnsafe(dst []byte) { + safecopy.CopyIn(dst, unsafe.Pointer(e)) +} + +// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. +func (e *EpollEvent) UnmarshalUnsafe(src []byte) { + safecopy.CopyOut(unsafe.Pointer(e), src) +} + +// CopyOut implements marshal.Marshallable.CopyOut. +func (e *EpollEvent) CopyOut(task marshal.Task, addr usermem.Addr) error { + // Bypass escape analysis on e. The no-op arithmetic operation on the + // pointer makes the compiler think val doesn't depend on e. + // See src/runtime/stubs.go:noescape() in the golang toolchain. + ptr := unsafe.Pointer(e) + val := uintptr(ptr) + val = val^0 + + // Construct a slice backed by e's underlying memory. + var buf []byte + hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) + hdr.Data = val + hdr.Len = e.SizeBytes() + hdr.Cap = e.SizeBytes() + + _, err := task.CopyOutBytes(addr, buf) + // Since we bypassed the compiler's escape analysis, indicate that e + // must live until after the CopyOutBytes. + runtime.KeepAlive(e) + return err +} + +// CopyIn implements marshal.Marshallable.CopyIn. +func (e *EpollEvent) CopyIn(task marshal.Task, addr usermem.Addr) error { + // Bypass escape analysis on e. The no-op arithmetic operation on the + // pointer makes the compiler think val doesn't depend on e. + // See src/runtime/stubs.go:noescape() in the golang toolchain. + ptr := unsafe.Pointer(e) + val := uintptr(ptr) + val = val^0 + + // Construct a slice backed by e's underlying memory. + var buf []byte + hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) + hdr.Data = val + hdr.Len = e.SizeBytes() + hdr.Cap = e.SizeBytes() + + _, err := task.CopyInBytes(addr, buf) + // Since we bypassed the compiler's escape analysis, indicate that e + // must live until after the CopyInBytes. + runtime.KeepAlive(e) + return err +} + +// WriteTo implements io.WriterTo.WriteTo. +func (e *EpollEvent) WriteTo(w io.Writer) (int64, error) { + // Bypass escape analysis on e. The no-op arithmetic operation on the + // pointer makes the compiler think val doesn't depend on e. + // See src/runtime/stubs.go:noescape() in the golang toolchain. + ptr := unsafe.Pointer(e) + val := uintptr(ptr) + val = val^0 + + // Construct a slice backed by e's underlying memory. + var buf []byte + hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) + hdr.Data = val + hdr.Len = e.SizeBytes() + hdr.Cap = e.SizeBytes() + + len, err := w.Write(buf) + // Since we bypassed the compiler's escape analysis, indicate that e + // must live until after the Write. + runtime.KeepAlive(e) + return int64(len), err +} + +// SizeBytes implements marshal.Marshallable.SizeBytes. +func (s *Stat) SizeBytes() int { + return 96 + + (*Timespec)(nil).SizeBytes() + + (*Timespec)(nil).SizeBytes() + + (*Timespec)(nil).SizeBytes() +} + +// MarshalBytes implements marshal.Marshallable.MarshalBytes. 
+func (s *Stat) MarshalBytes(dst []byte) { + usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Dev)) + dst = dst[8:] + usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Ino)) + dst = dst[8:] + usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Nlink)) + dst = dst[8:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(s.Mode)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(s.UID)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(s.GID)) + dst = dst[4:] + // Padding: dst[:sizeof(int32)] ~= int32(0) + dst = dst[4:] + usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Rdev)) + dst = dst[8:] + usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Size)) + dst = dst[8:] + usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Blksize)) + dst = dst[8:] + usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Blocks)) + dst = dst[8:] + s.ATime.MarshalBytes(dst[:s.ATime.SizeBytes()]) + dst = dst[s.ATime.SizeBytes():] + s.MTime.MarshalBytes(dst[:s.MTime.SizeBytes()]) + dst = dst[s.MTime.SizeBytes():] + s.CTime.MarshalBytes(dst[:s.CTime.SizeBytes()]) + dst = dst[s.CTime.SizeBytes():] + // Padding: dst[:sizeof(int64)*3] ~= [3]int64{0} + dst = dst[24:] +} + +// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. +func (s *Stat) UnmarshalBytes(src []byte) { + s.Dev = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + s.Ino = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + s.Nlink = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + s.Mode = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + s.UID = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + s.GID = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + // Padding: var _ int32 ~= src[:sizeof(int32)] + src = src[4:] + s.Rdev = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + s.Size = int64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + s.Blksize = int64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + s.Blocks = int64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + s.ATime.UnmarshalBytes(src[:s.ATime.SizeBytes()]) + src = src[s.ATime.SizeBytes():] + s.MTime.UnmarshalBytes(src[:s.MTime.SizeBytes()]) + src = src[s.MTime.SizeBytes():] + s.CTime.UnmarshalBytes(src[:s.CTime.SizeBytes()]) + src = src[s.CTime.SizeBytes():] + // Padding: ~ copy([3]int64(s._), src[:sizeof(int64)*3]) + src = src[24:] +} + +// Packed implements marshal.Marshallable.Packed. +func (s *Stat) Packed() bool { + return s.ATime.Packed() && s.MTime.Packed() && s.CTime.Packed() +} + +// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. +func (s *Stat) MarshalUnsafe(dst []byte) { + if s.ATime.Packed() && s.MTime.Packed() && s.CTime.Packed() { + safecopy.CopyIn(dst, unsafe.Pointer(s)) + } else { + s.MarshalBytes(dst) + } +} + +// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. +func (s *Stat) UnmarshalUnsafe(src []byte) { + if s.ATime.Packed() && s.MTime.Packed() && s.CTime.Packed() { + safecopy.CopyOut(unsafe.Pointer(s), src) + } else { + s.UnmarshalBytes(src) + } +} + +// CopyOut implements marshal.Marshallable.CopyOut. +func (s *Stat) CopyOut(task marshal.Task, addr usermem.Addr) error { + if !s.ATime.Packed() && s.MTime.Packed() && s.CTime.Packed() { + // Type Stat doesn't have a packed layout in memory, fall back to MarshalBytes. + buf := task.CopyScratchBuffer(s.SizeBytes()) + s.MarshalBytes(buf) + _, err := task.CopyOutBytes(addr, buf) + return err + } + + // Bypass escape analysis on s. 
The no-op arithmetic operation on the + // pointer makes the compiler think val doesn't depend on s. + // See src/runtime/stubs.go:noescape() in the golang toolchain. + ptr := unsafe.Pointer(s) + val := uintptr(ptr) + val = val^0 + + // Construct a slice backed by s's underlying memory. + var buf []byte + hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) + hdr.Data = val + hdr.Len = s.SizeBytes() + hdr.Cap = s.SizeBytes() + + _, err := task.CopyOutBytes(addr, buf) + // Since we bypassed the compiler's escape analysis, indicate that s + // must live until after the CopyOutBytes. + runtime.KeepAlive(s) + return err +} + +// CopyIn implements marshal.Marshallable.CopyIn. +func (s *Stat) CopyIn(task marshal.Task, addr usermem.Addr) error { + if !s.ATime.Packed() && s.MTime.Packed() && s.CTime.Packed() { + // Type Stat doesn't have a packed layout in memory, fall back to UnmarshalBytes. + buf := task.CopyScratchBuffer(s.SizeBytes()) + _, err := task.CopyInBytes(addr, buf) + if err != nil { + return err + } + s.UnmarshalBytes(buf) + return nil + } + + // Bypass escape analysis on s. The no-op arithmetic operation on the + // pointer makes the compiler think val doesn't depend on s. + // See src/runtime/stubs.go:noescape() in the golang toolchain. + ptr := unsafe.Pointer(s) + val := uintptr(ptr) + val = val^0 + + // Construct a slice backed by s's underlying memory. + var buf []byte + hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) + hdr.Data = val + hdr.Len = s.SizeBytes() + hdr.Cap = s.SizeBytes() + + _, err := task.CopyInBytes(addr, buf) + // Since we bypassed the compiler's escape analysis, indicate that s + // must live until after the CopyInBytes. + runtime.KeepAlive(s) + return err +} + +// WriteTo implements io.WriterTo.WriteTo. +func (s *Stat) WriteTo(w io.Writer) (int64, error) { + if !s.ATime.Packed() && s.MTime.Packed() && s.CTime.Packed() { + // Type Stat doesn't have a packed layout in memory, fall back to MarshalBytes. + buf := make([]byte, s.SizeBytes()) + s.MarshalBytes(buf) + n, err := w.Write(buf) + return int64(n), err + } + + // Bypass escape analysis on s. The no-op arithmetic operation on the + // pointer makes the compiler think val doesn't depend on s. + // See src/runtime/stubs.go:noescape() in the golang toolchain. + ptr := unsafe.Pointer(s) + val := uintptr(ptr) + val = val^0 + + // Construct a slice backed by s's underlying memory. + var buf []byte + hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) + hdr.Data = val + hdr.Len = s.SizeBytes() + hdr.Cap = s.SizeBytes() + + len, err := w.Write(buf) + // Since we bypassed the compiler's escape analysis, indicate that s + // must live until after the Write. + runtime.KeepAlive(s) + return int64(len), err +} + diff --git a/pkg/abi/linux/linux_amd64_state_autogen.go b/pkg/abi/linux/linux_amd64_state_autogen.go new file mode 100755 index 000000000..a5f55a80b --- /dev/null +++ b/pkg/abi/linux/linux_amd64_state_autogen.go @@ -0,0 +1,5 @@ +// automatically generated by stateify. + +// +build amd64 + +package linux diff --git a/pkg/abi/linux/linux_arm64_abi_autogen_unsafe.go b/pkg/abi/linux/linux_arm64_abi_autogen_unsafe.go new file mode 100755 index 000000000..63dd339c2 --- /dev/null +++ b/pkg/abi/linux/linux_arm64_abi_autogen_unsafe.go @@ -0,0 +1,333 @@ +// Automatically generated marshal implementation. See tools/go_marshal. 
+ +// +build arm64 + +package linux + +import ( + "gvisor.dev/gvisor/pkg/safecopy" + "gvisor.dev/gvisor/pkg/usermem" + "gvisor.dev/gvisor/tools/go_marshal/marshal" + "io" + "reflect" + "runtime" + "unsafe" +) + +// Marshallable types used by this file. +var _ marshal.Marshallable = (*EpollEvent)(nil) +var _ marshal.Marshallable = (*Stat)(nil) +var _ marshal.Marshallable = (*Timespec)(nil) + +// SizeBytes implements marshal.Marshallable.SizeBytes. +func (e *EpollEvent) SizeBytes() int { + return 16 +} + +// MarshalBytes implements marshal.Marshallable.MarshalBytes. +func (e *EpollEvent) MarshalBytes(dst []byte) { + usermem.ByteOrder.PutUint32(dst[:4], uint32(e.Events)) + dst = dst[4:] + // Padding: dst[:sizeof(int32)] ~= int32(0) + dst = dst[4:] + for idx := 0; idx < 2; idx++ { + usermem.ByteOrder.PutUint32(dst[:4], uint32(e.Data[idx])) + dst = dst[4:] + } +} + +// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. +func (e *EpollEvent) UnmarshalBytes(src []byte) { + e.Events = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + // Padding: var _ int32 ~= src[:sizeof(int32)] + src = src[4:] + for idx := 0; idx < 2; idx++ { + e.Data[idx] = int32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + } +} + +// Packed implements marshal.Marshallable.Packed. +func (e *EpollEvent) Packed() bool { + return true +} + +// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. +func (e *EpollEvent) MarshalUnsafe(dst []byte) { + safecopy.CopyIn(dst, unsafe.Pointer(e)) +} + +// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. +func (e *EpollEvent) UnmarshalUnsafe(src []byte) { + safecopy.CopyOut(unsafe.Pointer(e), src) +} + +// CopyOut implements marshal.Marshallable.CopyOut. +func (e *EpollEvent) CopyOut(task marshal.Task, addr usermem.Addr) error { + // Bypass escape analysis on e. The no-op arithmetic operation on the + // pointer makes the compiler think val doesn't depend on e. + // See src/runtime/stubs.go:noescape() in the golang toolchain. + ptr := unsafe.Pointer(e) + val := uintptr(ptr) + val = val^0 + + // Construct a slice backed by e's underlying memory. + var buf []byte + hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) + hdr.Data = val + hdr.Len = e.SizeBytes() + hdr.Cap = e.SizeBytes() + + _, err := task.CopyOutBytes(addr, buf) + // Since we bypassed the compiler's escape analysis, indicate that e + // must live until after the CopyOutBytes. + runtime.KeepAlive(e) + return err +} + +// CopyIn implements marshal.Marshallable.CopyIn. +func (e *EpollEvent) CopyIn(task marshal.Task, addr usermem.Addr) error { + // Bypass escape analysis on e. The no-op arithmetic operation on the + // pointer makes the compiler think val doesn't depend on e. + // See src/runtime/stubs.go:noescape() in the golang toolchain. + ptr := unsafe.Pointer(e) + val := uintptr(ptr) + val = val^0 + + // Construct a slice backed by e's underlying memory. + var buf []byte + hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) + hdr.Data = val + hdr.Len = e.SizeBytes() + hdr.Cap = e.SizeBytes() + + _, err := task.CopyInBytes(addr, buf) + // Since we bypassed the compiler's escape analysis, indicate that e + // must live until after the CopyInBytes. + runtime.KeepAlive(e) + return err +} + +// WriteTo implements io.WriterTo.WriteTo. +func (e *EpollEvent) WriteTo(w io.Writer) (int64, error) { + // Bypass escape analysis on e. The no-op arithmetic operation on the + // pointer makes the compiler think val doesn't depend on e. 
+ // See src/runtime/stubs.go:noescape() in the golang toolchain. + ptr := unsafe.Pointer(e) + val := uintptr(ptr) + val = val^0 + + // Construct a slice backed by e's underlying memory. + var buf []byte + hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) + hdr.Data = val + hdr.Len = e.SizeBytes() + hdr.Cap = e.SizeBytes() + + len, err := w.Write(buf) + // Since we bypassed the compiler's escape analysis, indicate that e + // must live until after the Write. + runtime.KeepAlive(e) + return int64(len), err +} + +// SizeBytes implements marshal.Marshallable.SizeBytes. +func (s *Stat) SizeBytes() int { + return 80 + + (*Timespec)(nil).SizeBytes() + + (*Timespec)(nil).SizeBytes() + + (*Timespec)(nil).SizeBytes() +} + +// MarshalBytes implements marshal.Marshallable.MarshalBytes. +func (s *Stat) MarshalBytes(dst []byte) { + usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Dev)) + dst = dst[8:] + usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Ino)) + dst = dst[8:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(s.Mode)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(s.Nlink)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(s.UID)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(s.GID)) + dst = dst[4:] + usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Rdev)) + dst = dst[8:] + // Padding: dst[:sizeof(uint64)] ~= uint64(0) + dst = dst[8:] + usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Size)) + dst = dst[8:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(s.Blksize)) + dst = dst[4:] + // Padding: dst[:sizeof(int32)] ~= int32(0) + dst = dst[4:] + usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Blocks)) + dst = dst[8:] + s.ATime.MarshalBytes(dst[:s.ATime.SizeBytes()]) + dst = dst[s.ATime.SizeBytes():] + s.MTime.MarshalBytes(dst[:s.MTime.SizeBytes()]) + dst = dst[s.MTime.SizeBytes():] + s.CTime.MarshalBytes(dst[:s.CTime.SizeBytes()]) + dst = dst[s.CTime.SizeBytes():] + // Padding: dst[:sizeof(int32)*2] ~= [2]int32{0} + dst = dst[8:] +} + +// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. +func (s *Stat) UnmarshalBytes(src []byte) { + s.Dev = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + s.Ino = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + s.Mode = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + s.Nlink = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + s.UID = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + s.GID = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + s.Rdev = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + // Padding: var _ uint64 ~= src[:sizeof(uint64)] + src = src[8:] + s.Size = int64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + s.Blksize = int32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + // Padding: var _ int32 ~= src[:sizeof(int32)] + src = src[4:] + s.Blocks = int64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + s.ATime.UnmarshalBytes(src[:s.ATime.SizeBytes()]) + src = src[s.ATime.SizeBytes():] + s.MTime.UnmarshalBytes(src[:s.MTime.SizeBytes()]) + src = src[s.MTime.SizeBytes():] + s.CTime.UnmarshalBytes(src[:s.CTime.SizeBytes()]) + src = src[s.CTime.SizeBytes():] + // Padding: ~ copy([2]int32(s._), src[:sizeof(int32)*2]) + src = src[8:] +} + +// Packed implements marshal.Marshallable.Packed. +func (s *Stat) Packed() bool { + return s.ATime.Packed() && s.MTime.Packed() && s.CTime.Packed() +} + +// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. 
+func (s *Stat) MarshalUnsafe(dst []byte) { + if s.ATime.Packed() && s.MTime.Packed() && s.CTime.Packed() { + safecopy.CopyIn(dst, unsafe.Pointer(s)) + } else { + s.MarshalBytes(dst) + } +} + +// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. +func (s *Stat) UnmarshalUnsafe(src []byte) { + if s.ATime.Packed() && s.MTime.Packed() && s.CTime.Packed() { + safecopy.CopyOut(unsafe.Pointer(s), src) + } else { + s.UnmarshalBytes(src) + } +} + +// CopyOut implements marshal.Marshallable.CopyOut. +func (s *Stat) CopyOut(task marshal.Task, addr usermem.Addr) error { + if !s.ATime.Packed() && s.MTime.Packed() && s.CTime.Packed() { + // Type Stat doesn't have a packed layout in memory, fall back to MarshalBytes. + buf := task.CopyScratchBuffer(s.SizeBytes()) + s.MarshalBytes(buf) + _, err := task.CopyOutBytes(addr, buf) + return err + } + + // Bypass escape analysis on s. The no-op arithmetic operation on the + // pointer makes the compiler think val doesn't depend on s. + // See src/runtime/stubs.go:noescape() in the golang toolchain. + ptr := unsafe.Pointer(s) + val := uintptr(ptr) + val = val^0 + + // Construct a slice backed by s's underlying memory. + var buf []byte + hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) + hdr.Data = val + hdr.Len = s.SizeBytes() + hdr.Cap = s.SizeBytes() + + _, err := task.CopyOutBytes(addr, buf) + // Since we bypassed the compiler's escape analysis, indicate that s + // must live until after the CopyOutBytes. + runtime.KeepAlive(s) + return err +} + +// CopyIn implements marshal.Marshallable.CopyIn. +func (s *Stat) CopyIn(task marshal.Task, addr usermem.Addr) error { + if !s.ATime.Packed() && s.MTime.Packed() && s.CTime.Packed() { + // Type Stat doesn't have a packed layout in memory, fall back to UnmarshalBytes. + buf := task.CopyScratchBuffer(s.SizeBytes()) + _, err := task.CopyInBytes(addr, buf) + if err != nil { + return err + } + s.UnmarshalBytes(buf) + return nil + } + + // Bypass escape analysis on s. The no-op arithmetic operation on the + // pointer makes the compiler think val doesn't depend on s. + // See src/runtime/stubs.go:noescape() in the golang toolchain. + ptr := unsafe.Pointer(s) + val := uintptr(ptr) + val = val^0 + + // Construct a slice backed by s's underlying memory. + var buf []byte + hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) + hdr.Data = val + hdr.Len = s.SizeBytes() + hdr.Cap = s.SizeBytes() + + _, err := task.CopyInBytes(addr, buf) + // Since we bypassed the compiler's escape analysis, indicate that s + // must live until after the CopyInBytes. + runtime.KeepAlive(s) + return err +} + +// WriteTo implements io.WriterTo.WriteTo. +func (s *Stat) WriteTo(w io.Writer) (int64, error) { + if !s.ATime.Packed() && s.MTime.Packed() && s.CTime.Packed() { + // Type Stat doesn't have a packed layout in memory, fall back to MarshalBytes. + buf := make([]byte, s.SizeBytes()) + s.MarshalBytes(buf) + n, err := w.Write(buf) + return int64(n), err + } + + // Bypass escape analysis on s. The no-op arithmetic operation on the + // pointer makes the compiler think val doesn't depend on s. + // See src/runtime/stubs.go:noescape() in the golang toolchain. + ptr := unsafe.Pointer(s) + val := uintptr(ptr) + val = val^0 + + // Construct a slice backed by s's underlying memory. 
+// WriteTo implements io.WriterTo.WriteTo.
+func (s *Stat) WriteTo(w io.Writer) (int64, error) {
+    if !s.ATime.Packed() || !s.MTime.Packed() || !s.CTime.Packed() {
+        // Type Stat doesn't have a packed layout in memory, fall back to MarshalBytes.
+        buf := make([]byte, s.SizeBytes())
+        s.MarshalBytes(buf)
+        n, err := w.Write(buf)
+        return int64(n), err
+    }
+
+    // Bypass escape analysis on s. The no-op arithmetic operation on the
+    // pointer makes the compiler think val doesn't depend on s.
+    // See src/runtime/stubs.go:noescape() in the golang toolchain.
+    ptr := unsafe.Pointer(s)
+    val := uintptr(ptr)
+    val = val ^ 0
+
+    // Construct a slice backed by s's underlying memory.
+    var buf []byte
+    hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+    hdr.Data = val
+    hdr.Len = s.SizeBytes()
+    hdr.Cap = s.SizeBytes()
+
+    n, err := w.Write(buf)
+    // Since we bypassed the compiler's escape analysis, indicate that s
+    // must live until after the Write.
+    runtime.KeepAlive(s)
+    return int64(n), err
+}
+
diff --git a/pkg/abi/linux/linux_arm64_state_autogen.go b/pkg/abi/linux/linux_arm64_state_autogen.go
new file mode 100755
index 000000000..7b31374fe
--- /dev/null
+++ b/pkg/abi/linux/linux_arm64_state_autogen.go
@@ -0,0 +1,5 @@
+// automatically generated by stateify.
+
+// +build arm64
+
+package linux
diff --git a/pkg/abi/linux/linux_state_autogen.go b/pkg/abi/linux/linux_state_autogen.go
new file mode 100755
index 000000000..b8e488a11
--- /dev/null
+++ b/pkg/abi/linux/linux_state_autogen.go
@@ -0,0 +1,68 @@
+// automatically generated by stateify.
+
+package linux
+
+import (
+    "gvisor.dev/gvisor/pkg/state"
+)
+
+func (x *BPFInstruction) beforeSave() {}
+func (x *BPFInstruction) save(m state.Map) {
+    x.beforeSave()
+    m.Save("OpCode", &x.OpCode)
+    m.Save("JumpIfTrue", &x.JumpIfTrue)
+    m.Save("JumpIfFalse", &x.JumpIfFalse)
+    m.Save("K", &x.K)
+}
+
+func (x *BPFInstruction) afterLoad() {}
+func (x *BPFInstruction) load(m state.Map) {
+    m.Load("OpCode", &x.OpCode)
+    m.Load("JumpIfTrue", &x.JumpIfTrue)
+    m.Load("JumpIfFalse", &x.JumpIfFalse)
+    m.Load("K", &x.K)
+}
+
+func (x *KernelTermios) beforeSave() {}
+func (x *KernelTermios) save(m state.Map) {
+    x.beforeSave()
+    m.Save("InputFlags", &x.InputFlags)
+    m.Save("OutputFlags", &x.OutputFlags)
+    m.Save("ControlFlags", &x.ControlFlags)
+    m.Save("LocalFlags", &x.LocalFlags)
+    m.Save("LineDiscipline", &x.LineDiscipline)
+    m.Save("ControlCharacters", &x.ControlCharacters)
+    m.Save("InputSpeed", &x.InputSpeed)
+    m.Save("OutputSpeed", &x.OutputSpeed)
+}
+
+func (x *KernelTermios) afterLoad() {}
+func (x *KernelTermios) load(m state.Map) {
+    m.Load("InputFlags", &x.InputFlags)
+    m.Load("OutputFlags", &x.OutputFlags)
+    m.Load("ControlFlags", &x.ControlFlags)
+    m.Load("LocalFlags", &x.LocalFlags)
+    m.Load("LineDiscipline", &x.LineDiscipline)
+    m.Load("ControlCharacters", &x.ControlCharacters)
+    m.Load("InputSpeed", &x.InputSpeed)
+    m.Load("OutputSpeed", &x.OutputSpeed)
+}
+
+func (x *WindowSize) beforeSave() {}
+func (x *WindowSize) save(m state.Map) {
+    x.beforeSave()
+    m.Save("Rows", &x.Rows)
+    m.Save("Cols", &x.Cols)
+}
+
+func (x *WindowSize) afterLoad() {}
+func (x *WindowSize) load(m state.Map) {
+    m.Load("Rows", &x.Rows)
+    m.Load("Cols", &x.Cols)
+}
+
+func init() {
+    state.Register("pkg/abi/linux.BPFInstruction", (*BPFInstruction)(nil), state.Fns{Save: (*BPFInstruction).save, Load: (*BPFInstruction).load})
+    state.Register("pkg/abi/linux.KernelTermios", (*KernelTermios)(nil), state.Fns{Save: (*KernelTermios).save, Load: (*KernelTermios).load})
+    state.Register("pkg/abi/linux.WindowSize", (*WindowSize)(nil), state.Fns{Save: (*WindowSize).save, Load: (*WindowSize).load})
+}
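The stateify output above follows a fixed shape: beforeSave/save write every field into a state.Map, afterLoad/load read them back by the same keys, and an init function registers the type with the state package. A hedged sketch of what stateify would emit for a hypothetical two-field type (Cursor is invented here for illustration; the state.Map and state.Register calls mirror the generated file exactly):

// Hand-written in the style of stateify output; Cursor is hypothetical.

package linux

import (
	"gvisor.dev/gvisor/pkg/state"
)

type Cursor struct {
	Row uint16
	Col uint16
}

func (x *Cursor) beforeSave() {}
func (x *Cursor) save(m state.Map) {
	x.beforeSave()
	m.Save("Row", &x.Row)
	m.Save("Col", &x.Col)
}

func (x *Cursor) afterLoad() {}
func (x *Cursor) load(m state.Map) {
	m.Load("Row", &x.Row)
	m.Load("Col", &x.Col)
}

func init() {
	state.Register("pkg/abi/linux.Cursor", (*Cursor)(nil), state.Fns{Save: (*Cursor).save, Load: (*Cursor).load})
}

Saving and loading by field name rather than by offset is what lets checkpointed state survive layout changes between versions.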
diff --git a/pkg/abi/linux/netfilter_test.go b/pkg/abi/linux/netfilter_test.go
deleted file mode 100644
index 21e237f92..000000000
--- a/pkg/abi/linux/netfilter_test.go
+++ /dev/null
@@ -1,45 +0,0 @@
-// Copyright 2019 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package linux
-
-import (
-    "testing"
-
-    "gvisor.dev/gvisor/pkg/binary"
-)
-
-func TestSizes(t *testing.T) {
-    testCases := []struct {
-        typ     interface{}
-        defined uintptr
-    }{
-        {IPTEntry{}, SizeOfIPTEntry},
-        {IPTGetEntries{}, SizeOfIPTGetEntries},
-        {IPTGetinfo{}, SizeOfIPTGetinfo},
-        {IPTIP{}, SizeOfIPTIP},
-        {IPTReplace{}, SizeOfIPTReplace},
-        {XTCounters{}, SizeOfXTCounters},
-        {XTEntryMatch{}, SizeOfXTEntryMatch},
-        {XTEntryTarget{}, SizeOfXTEntryTarget},
-        {XTErrorTarget{}, SizeOfXTErrorTarget},
-        {XTStandardTarget{}, SizeOfXTStandardTarget},
-    }
-
-    for _, tc := range testCases {
-        if calculated := binary.Size(tc.typ); calculated != tc.defined {
-            t.Errorf("%T has a defined size of %d and calculated size of %d", tc.typ, tc.defined, calculated)
-        }
-    }
-}
diff --git a/pkg/abi/linux/rseq.go b/pkg/abi/linux/rseq.go
index 76253ba30..76253ba30 100644..100755
--- a/pkg/abi/linux/rseq.go
+++ b/pkg/abi/linux/rseq.go
diff --git a/pkg/abi/linux/signalfd.go b/pkg/abi/linux/signalfd.go
index 85fad9956..85fad9956 100644..100755
--- a/pkg/abi/linux/signalfd.go
+++ b/pkg/abi/linux/signalfd.go
diff --git a/pkg/abi/linux/xattr.go b/pkg/abi/linux/xattr.go
index 99180b208..99180b208 100644..100755
--- a/pkg/abi/linux/xattr.go
+++ b/pkg/abi/linux/xattr.go
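Taken together, the generated methods give every marshalable type one uniform API: SizeBytes, MarshalBytes/UnmarshalBytes, the safecopy fast paths, and io.WriterTo. A minimal consumer might round-trip a Stat like this (a sketch, assuming gvisor.dev/gvisor resolves via go.mod at a commit containing this change; the field value is arbitrary):

package main

import (
	"bytes"
	"fmt"

	"gvisor.dev/gvisor/pkg/abi/linux"
)

func main() {
	var in linux.Stat
	in.Ino = 42

	// WriteTo takes the safecopy fast path when the layout is packed
	// and falls back to field-by-field MarshalBytes otherwise.
	var buf bytes.Buffer
	n, err := in.WriteTo(&buf)
	if err != nil {
		panic(err)
	}
	fmt.Println(n == int64(in.SizeBytes())) // true

	var out linux.Stat
	out.UnmarshalBytes(buf.Bytes())
	fmt.Println(out.Ino) // 42
}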