Diffstat (limited to 'pkg/abi/linux')
-rwxr-xr-x  pkg/abi/linux/linux_abi_autogen_unsafe.go        47
-rwxr-xr-x  pkg/abi/linux/linux_amd64_abi_autogen_unsafe.go  36
-rwxr-xr-x  pkg/abi/linux/linux_arm64_abi_autogen_unsafe.go  34
3 files changed, 114 insertions, 3 deletions
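
For reference, the WriteTo methods added in the hunks below make these autogenerated types satisfy the standard library's io.WriterTo interface, quoted here from package io:

// Quoted from the Go standard library's io package; the generated
// WriteTo methods in the diff below implement this contract.
type WriterTo interface {
	WriteTo(w Writer) (n int64, err error)
}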
diff --git a/pkg/abi/linux/linux_abi_autogen_unsafe.go b/pkg/abi/linux/linux_abi_autogen_unsafe.go
index d8d4cb088..73853a078 100755
--- a/pkg/abi/linux/linux_abi_autogen_unsafe.go
+++ b/pkg/abi/linux/linux_abi_autogen_unsafe.go
@@ -6,6 +6,7 @@ import (
"gvisor.dev/gvisor/pkg/safecopy"
"gvisor.dev/gvisor/pkg/usermem"
"gvisor.dev/gvisor/tools/go_marshal/marshal"
+ "io"
"reflect"
"runtime"
"unsafe"
@@ -109,6 +110,29 @@ func (r *RSeqCriticalSection) CopyIn(task marshal.Task, addr usermem.Addr) (int,
return len, err
}
+// WriteTo implements io.WriterTo.WriteTo.
+func (r *RSeqCriticalSection) WriteTo(w io.Writer) (int64, error) {
+ // Bypass escape analysis on r. The no-op arithmetic operation on the
+ // pointer makes the compiler think val doesn't depend on r.
+ // See src/runtime/stubs.go:noescape() in the golang toolchain.
+ ptr := unsafe.Pointer(r)
+ val := uintptr(ptr)
+ val = val^0
+
+ // Construct a slice backed by r's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = val
+ hdr.Len = r.SizeBytes()
+ hdr.Cap = r.SizeBytes()
+
+ len, err := w.Write(buf)
+ // Since we bypassed the compiler's escape analysis, indicate that r
+ // must live until after the Write.
+ runtime.KeepAlive(r)
+ return int64(len), err
+}
+
// SizeBytes implements marshal.Marshallable.SizeBytes.
func (t *Timespec) SizeBytes() int {
return 16
@@ -191,3 +215,26 @@ func (t *Timespec) CopyIn(task marshal.Task, addr usermem.Addr) (int, error) {
return len, err
}
+// WriteTo implements io.WriterTo.WriteTo.
+func (t *Timespec) WriteTo(w io.Writer) (int64, error) {
+ // Bypass escape analysis on t. The no-op arithmetic operation on the
+ // pointer makes the compiler think val doesn't depend on t.
+ // See src/runtime/stubs.go:noescape() in the golang toolchain.
+ ptr := unsafe.Pointer(t)
+ val := uintptr(ptr)
+ val = val^0
+
+ // Construct a slice backed by t's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = val
+ hdr.Len = t.SizeBytes()
+ hdr.Cap = t.SizeBytes()
+
+ len, err := w.Write(buf)
+ // Since we bypassed the compiler's escape analysis, indicate that t
+ // must live until after the Write.
+ runtime.KeepAlive(t)
+ return int64(len), err
+}
+
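
The generated WriteTo above serializes RSeqCriticalSection and Timespec without an intermediate copy: it hides the receiver pointer from escape analysis with a no-op XOR, aliases the receiver's memory as a []byte through a reflect.SliceHeader, writes that slice, and pins the receiver with runtime.KeepAlive until the write returns. The following standalone sketch shows the same pattern on a hypothetical padding-free header type; the type, its writeTo method, and the field names are illustrative and not part of this diff.

package main

import (
	"bytes"
	"fmt"
	"io"
	"reflect"
	"runtime"
	"unsafe"
)

// header is a hypothetical fixed-layout struct with no padding, so a raw
// byte view of its memory is a valid wire encoding on this host.
type header struct {
	Version uint32
	Flags   uint32
}

// writeTo mirrors the generated pattern above: alias the struct's memory
// as a []byte without copying, write it, and keep the struct alive until
// the write completes.
func (h *header) writeTo(w io.Writer) (int64, error) {
	// The XOR-with-zero hides the pointer's origin from escape analysis,
	// so taking h's address here does not force it onto the heap.
	ptr := unsafe.Pointer(h)
	val := uintptr(ptr)
	val = val ^ 0

	// Point a slice header at h's memory.
	var buf []byte
	hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
	hdr.Data = val
	hdr.Len = int(unsafe.Sizeof(*h))
	hdr.Cap = int(unsafe.Sizeof(*h))

	n, err := w.Write(buf)
	// buf no longer visibly references h, so pin h explicitly until the
	// Write has finished reading from its memory.
	runtime.KeepAlive(h)
	return int64(n), err
}

func main() {
	h := header{Version: 1, Flags: 2}
	var out bytes.Buffer
	n, err := h.writeTo(&out)
	fmt.Println(n, err, out.Bytes()) // 8 <nil> and h's raw in-memory bytes
}

The byte order of the output is whatever the host uses, which is why this zero-copy path is only valid for packed, ABI-matching layouts.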
diff --git a/pkg/abi/linux/linux_amd64_abi_autogen_unsafe.go b/pkg/abi/linux/linux_amd64_abi_autogen_unsafe.go
index 1fdf61fa0..226b5db1c 100755
--- a/pkg/abi/linux/linux_amd64_abi_autogen_unsafe.go
+++ b/pkg/abi/linux/linux_amd64_abi_autogen_unsafe.go
@@ -8,6 +8,7 @@ import (
"gvisor.dev/gvisor/pkg/safecopy"
"gvisor.dev/gvisor/pkg/usermem"
"gvisor.dev/gvisor/tools/go_marshal/marshal"
+ "io"
"reflect"
"runtime"
"unsafe"
@@ -95,7 +96,7 @@ func (s *Stat) UnmarshalBytes(src []byte) {
// Packed implements marshal.Marshallable.Packed.
func (s *Stat) Packed() bool {
- return s.ATime.Packed() && s.MTime.Packed() && s.CTime.Packed()
+ return s.MTime.Packed() && s.CTime.Packed() && s.ATime.Packed()
}
// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.
@@ -118,7 +119,7 @@ func (s *Stat) UnmarshalUnsafe(src []byte) {
// CopyOut implements marshal.Marshallable.CopyOut.
func (s *Stat) CopyOut(task marshal.Task, addr usermem.Addr) (int, error) {
- if !s.ATime.Packed() && s.MTime.Packed() && s.CTime.Packed() {
+ if !s.CTime.Packed() && s.ATime.Packed() && s.MTime.Packed() {
// Type Stat doesn't have a packed layout in memory, fall back to MarshalBytes.
buf := task.CopyScratchBuffer(s.SizeBytes())
s.MarshalBytes(buf)
@@ -180,3 +181,34 @@ func (s *Stat) CopyIn(task marshal.Task, addr usermem.Addr) (int, error) {
return len, err
}
+// WriteTo implements io.WriterTo.WriteTo.
+func (s *Stat) WriteTo(w io.Writer) (int64, error) {
+ if !s.ATime.Packed() && s.MTime.Packed() && s.CTime.Packed() {
+ // Type Stat doesn't have a packed layout in memory, fall back to MarshalBytes.
+ buf := make([]byte, s.SizeBytes())
+ s.MarshalBytes(buf)
+ n, err := w.Write(buf)
+ return int64(n), err
+ }
+
+ // Bypass escape analysis on s. The no-op arithmetic operation on the
+ // pointer makes the compiler think val doesn't depend on s.
+ // See src/runtime/stubs.go:noescape() in the golang toolchain.
+ ptr := unsafe.Pointer(s)
+ val := uintptr(ptr)
+ val = val^0
+
+ // Construct a slice backed by s's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = val
+ hdr.Len = s.SizeBytes()
+ hdr.Cap = s.SizeBytes()
+
+ len, err := w.Write(buf)
+ // Since we bypassed the compiler's escape analysis, indicate that s
+ // must live until after the Write.
+ runtime.KeepAlive(s)
+ return int64(len), err
+}
+
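
Unlike the types in the first file, Stat is only conditionally packed, so the generated WriteTo above guards the zero-copy path and otherwise falls back to MarshalBytes into a freshly allocated buffer. Below is a minimal sketch of that fallback path, using a hypothetical record type with its own sizeBytes and marshalBytes helpers; none of these names are part of this diff.

package main

import (
	"bytes"
	"encoding/binary"
	"fmt"
	"io"
)

// record is a hypothetical struct whose in-memory layout contains padding
// (7 bytes between Kind and Value), so writing its raw memory would leak
// padding bytes and produce the wrong encoding size.
type record struct {
	Kind  uint8
	Value uint64
}

// sizeBytes is the packed encoding size, analogous to SizeBytes above.
func (r *record) sizeBytes() int { return 1 + 8 }

// marshalBytes encodes field by field into dst, analogous to MarshalBytes.
func (r *record) marshalBytes(dst []byte) {
	dst[0] = r.Kind
	binary.LittleEndian.PutUint64(dst[1:], r.Value)
}

// writeTo always takes the fallback path: allocate a scratch buffer of the
// packed size, marshal into it, and write that buffer out.
func (r *record) writeTo(w io.Writer) (int64, error) {
	buf := make([]byte, r.sizeBytes())
	r.marshalBytes(buf)
	n, err := w.Write(buf)
	return int64(n), err
}

func main() {
	r := record{Kind: 3, Value: 42}
	var out bytes.Buffer
	n, err := r.writeTo(&out)
	fmt.Printf("wrote %d bytes, err=%v, encoding=%x\n", n, err, out.Bytes())
}

The fallback costs one allocation and one copy per call, which is why the generated code reserves it for the case where, per its own comment, the type doesn't have a packed layout in memory.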
diff --git a/pkg/abi/linux/linux_arm64_abi_autogen_unsafe.go b/pkg/abi/linux/linux_arm64_abi_autogen_unsafe.go
index 7d1a9e913..bfc740e92 100755
--- a/pkg/abi/linux/linux_arm64_abi_autogen_unsafe.go
+++ b/pkg/abi/linux/linux_arm64_abi_autogen_unsafe.go
@@ -8,6 +8,7 @@ import (
"gvisor.dev/gvisor/pkg/safecopy"
"gvisor.dev/gvisor/pkg/usermem"
"gvisor.dev/gvisor/tools/go_marshal/marshal"
+ "io"
"reflect"
"runtime"
"unsafe"
@@ -104,7 +105,7 @@ func (s *Stat) Packed() bool {
// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.
func (s *Stat) MarshalUnsafe(dst []byte) {
- if s.CTime.Packed() && s.ATime.Packed() && s.MTime.Packed() {
+ if s.ATime.Packed() && s.MTime.Packed() && s.CTime.Packed() {
safecopy.CopyIn(dst, unsafe.Pointer(s))
} else {
s.MarshalBytes(dst)
@@ -184,3 +185,34 @@ func (s *Stat) CopyIn(task marshal.Task, addr usermem.Addr) (int, error) {
return len, err
}
+// WriteTo implements io.WriterTo.WriteTo.
+func (s *Stat) WriteTo(w io.Writer) (int64, error) {
+ if !s.ATime.Packed() && s.MTime.Packed() && s.CTime.Packed() {
+ // Type Stat doesn't have a packed layout in memory, fall back to MarshalBytes.
+ buf := make([]byte, s.SizeBytes())
+ s.MarshalBytes(buf)
+ n, err := w.Write(buf)
+ return int64(n), err
+ }
+
+ // Bypass escape analysis on s. The no-op arithmetic operation on the
+ // pointer makes the compiler think val doesn't depend on s.
+ // See src/runtime/stubs.go:noescape() in the golang toolchain.
+ ptr := unsafe.Pointer(s)
+ val := uintptr(ptr)
+ val = val^0
+
+ // Construct a slice backed by s's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = val
+ hdr.Len = s.SizeBytes()
+ hdr.Cap = s.SizeBytes()
+
+ len, err := w.Write(buf)
+ // Since we bypassed the compiler's escape analysis, indicate that s
+ // must live until after the Write.
+ runtime.KeepAlive(s)
+ return int64(len), err
+}
+
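
A hypothetical usage sketch for the new methods, written as an external test; it assumes Timespec's exported Sec and Nsec fields and is not part of this change.

package linux_test

import (
	"bytes"
	"testing"

	"gvisor.dev/gvisor/pkg/abi/linux"
)

// TestTimespecWriteTo exercises the generated WriteTo by serializing a
// Timespec into a bytes.Buffer and checking the reported byte count
// against SizeBytes.
func TestTimespecWriteTo(t *testing.T) {
	ts := linux.Timespec{Sec: 1, Nsec: 2}
	var buf bytes.Buffer
	n, err := ts.WriteTo(&buf)
	if err != nil {
		t.Fatalf("WriteTo failed: %v", err)
	}
	if want := int64(ts.SizeBytes()); n != want {
		t.Errorf("WriteTo reported %d bytes, want %d", n, want)
	}
	if buf.Len() != ts.SizeBytes() {
		t.Errorf("buffer holds %d bytes, want %d", buf.Len(), ts.SizeBytes())
	}
}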