| | | |
|---|---|---|
| author | gVisor bot <gvisor-bot@google.com> | 2020-02-20 22:31:43 +0000 |
| committer | gVisor bot <gvisor-bot@google.com> | 2020-02-20 22:31:43 +0000 |
| commit | 1ef036583b04a3a3ac625899d124e4b732281ab3 (patch) | |
| tree | 6aae8adee759b0a3fb015a997391c9d0733559b8 /pkg/abi/linux | |
| parent | d3b40e45b3bdc3e444cda9816c4e30924d7c9122 (diff) | |
| parent | d90d71474f4c82f742140fdf026821709845cece (diff) | |
Merge release-20200211.0-55-gd90d714 (automated)
Diffstat (limited to 'pkg/abi/linux')

| mode | file | changes |
|---|---|---|
| -rwxr-xr-x | pkg/abi/linux/linux_abi_autogen_unsafe.go | 24 |
| -rwxr-xr-x | pkg/abi/linux/linux_amd64_abi_autogen_unsafe.go | 25 |
| -rwxr-xr-x | pkg/abi/linux/linux_arm64_abi_autogen_unsafe.go | 25 |

3 files changed, 38 insertions, 36 deletions
diff --git a/pkg/abi/linux/linux_abi_autogen_unsafe.go b/pkg/abi/linux/linux_abi_autogen_unsafe.go
index 73853a078..82f1100cc 100755
--- a/pkg/abi/linux/linux_abi_autogen_unsafe.go
+++ b/pkg/abi/linux/linux_abi_autogen_unsafe.go
@@ -65,7 +65,7 @@ func (r *RSeqCriticalSection) UnmarshalUnsafe(src []byte) {
 }

 // CopyOut implements marshal.Marshallable.CopyOut.
-func (r *RSeqCriticalSection) CopyOut(task marshal.Task, addr usermem.Addr) (int, error) {
+func (r *RSeqCriticalSection) CopyOut(task marshal.Task, addr usermem.Addr) error {
     // Bypass escape analysis on r. The no-op arithmetic operation on the
     // pointer makes the compiler think val doesn't depend on r.
     // See src/runtime/stubs.go:noescape() in the golang toolchain.
@@ -80,15 +80,15 @@ func (r *RSeqCriticalSection) CopyOut(task marshal.Task, addr usermem.Addr) (int
     hdr.Len = r.SizeBytes()
     hdr.Cap = r.SizeBytes()

-    len, err := task.CopyOutBytes(addr, buf)
+    _, err := task.CopyOutBytes(addr, buf)
     // Since we bypassed the compiler's escape analysis, indicate that r
     // must live until after the CopyOutBytes.
     runtime.KeepAlive(r)
-    return len, err
+    return err
 }

 // CopyIn implements marshal.Marshallable.CopyIn.
-func (r *RSeqCriticalSection) CopyIn(task marshal.Task, addr usermem.Addr) (int, error) {
+func (r *RSeqCriticalSection) CopyIn(task marshal.Task, addr usermem.Addr) error {
     // Bypass escape analysis on r. The no-op arithmetic operation on the
     // pointer makes the compiler think val doesn't depend on r.
     // See src/runtime/stubs.go:noescape() in the golang toolchain.
@@ -103,11 +103,11 @@ func (r *RSeqCriticalSection) CopyIn(task marshal.Task, addr usermem.Addr) (int,
     hdr.Len = r.SizeBytes()
     hdr.Cap = r.SizeBytes()

-    len, err := task.CopyInBytes(addr, buf)
+    _, err := task.CopyInBytes(addr, buf)
     // Since we bypassed the compiler's escape analysis, indicate that r
     // must live until after the CopyInBytes.
     runtime.KeepAlive(r)
-    return len, err
+    return err
 }

 // WriteTo implements io.WriterTo.WriteTo.
@@ -170,7 +170,7 @@ func (t *Timespec) UnmarshalUnsafe(src []byte) {
 }

 // CopyOut implements marshal.Marshallable.CopyOut.
-func (t *Timespec) CopyOut(task marshal.Task, addr usermem.Addr) (int, error) {
+func (t *Timespec) CopyOut(task marshal.Task, addr usermem.Addr) error {
     // Bypass escape analysis on t. The no-op arithmetic operation on the
     // pointer makes the compiler think val doesn't depend on t.
     // See src/runtime/stubs.go:noescape() in the golang toolchain.
@@ -185,15 +185,15 @@ func (t *Timespec) CopyOut(task marshal.Task, addr usermem.Addr) (int, error) {
     hdr.Len = t.SizeBytes()
     hdr.Cap = t.SizeBytes()

-    len, err := task.CopyOutBytes(addr, buf)
+    _, err := task.CopyOutBytes(addr, buf)
     // Since we bypassed the compiler's escape analysis, indicate that t
     // must live until after the CopyOutBytes.
     runtime.KeepAlive(t)
-    return len, err
+    return err
 }

 // CopyIn implements marshal.Marshallable.CopyIn.
-func (t *Timespec) CopyIn(task marshal.Task, addr usermem.Addr) (int, error) {
+func (t *Timespec) CopyIn(task marshal.Task, addr usermem.Addr) error {
     // Bypass escape analysis on t. The no-op arithmetic operation on the
     // pointer makes the compiler think val doesn't depend on t.
     // See src/runtime/stubs.go:noescape() in the golang toolchain.
@@ -208,11 +208,11 @@ func (t *Timespec) CopyIn(task marshal.Task, addr usermem.Addr) (int, error) {
     hdr.Len = t.SizeBytes()
     hdr.Cap = t.SizeBytes()

-    len, err := task.CopyInBytes(addr, buf)
+    _, err := task.CopyInBytes(addr, buf)
     // Since we bypassed the compiler's escape analysis, indicate that t
     // must live until after the CopyInBytes.
     runtime.KeepAlive(t)
-    return len, err
+    return err
 }

 // WriteTo implements io.WriterTo.WriteTo.
diff --git a/pkg/abi/linux/linux_amd64_abi_autogen_unsafe.go b/pkg/abi/linux/linux_amd64_abi_autogen_unsafe.go
index bf002d73f..cefe4941a 100755
--- a/pkg/abi/linux/linux_amd64_abi_autogen_unsafe.go
+++ b/pkg/abi/linux/linux_amd64_abi_autogen_unsafe.go
@@ -110,7 +110,7 @@ func (s *Stat) MarshalUnsafe(dst []byte) {

 // UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe.
 func (s *Stat) UnmarshalUnsafe(src []byte) {
-    if s.CTime.Packed() && s.ATime.Packed() && s.MTime.Packed() {
+    if s.ATime.Packed() && s.MTime.Packed() && s.CTime.Packed() {
         safecopy.CopyOut(unsafe.Pointer(s), src)
     } else {
         s.UnmarshalBytes(src)
@@ -118,12 +118,13 @@ func (s *Stat) UnmarshalUnsafe(src []byte) {
 }

 // CopyOut implements marshal.Marshallable.CopyOut.
-func (s *Stat) CopyOut(task marshal.Task, addr usermem.Addr) (int, error) {
+func (s *Stat) CopyOut(task marshal.Task, addr usermem.Addr) error {
     if !s.ATime.Packed() && s.MTime.Packed() && s.CTime.Packed() {
         // Type Stat doesn't have a packed layout in memory, fall back to MarshalBytes.
         buf := task.CopyScratchBuffer(s.SizeBytes())
         s.MarshalBytes(buf)
-        return task.CopyOutBytes(addr, buf)
+        _, err := task.CopyOutBytes(addr, buf)
+        return err
     }

     // Bypass escape analysis on s. The no-op arithmetic operation on the
@@ -140,24 +141,24 @@ func (s *Stat) CopyOut(task marshal.Task, addr usermem.Addr) (int, error) {
     hdr.Len = s.SizeBytes()
     hdr.Cap = s.SizeBytes()

-    len, err := task.CopyOutBytes(addr, buf)
+    _, err := task.CopyOutBytes(addr, buf)
     // Since we bypassed the compiler's escape analysis, indicate that s
     // must live until after the CopyOutBytes.
     runtime.KeepAlive(s)
-    return len, err
+    return err
 }

 // CopyIn implements marshal.Marshallable.CopyIn.
-func (s *Stat) CopyIn(task marshal.Task, addr usermem.Addr) (int, error) {
+func (s *Stat) CopyIn(task marshal.Task, addr usermem.Addr) error {
     if !s.ATime.Packed() && s.MTime.Packed() && s.CTime.Packed() {
         // Type Stat doesn't have a packed layout in memory, fall back to UnmarshalBytes.
         buf := task.CopyScratchBuffer(s.SizeBytes())
-        n, err := task.CopyInBytes(addr, buf)
+        _, err := task.CopyInBytes(addr, buf)
         if err != nil {
-            return n, err
+            return err
         }
         s.UnmarshalBytes(buf)
-        return n, nil
+        return nil
     }

     // Bypass escape analysis on s. The no-op arithmetic operation on the
@@ -174,16 +175,16 @@ func (s *Stat) CopyIn(task marshal.Task, addr usermem.Addr) (int, error) {
     hdr.Len = s.SizeBytes()
     hdr.Cap = s.SizeBytes()

-    len, err := task.CopyInBytes(addr, buf)
+    _, err := task.CopyInBytes(addr, buf)
     // Since we bypassed the compiler's escape analysis, indicate that s
     // must live until after the CopyInBytes.
     runtime.KeepAlive(s)
-    return len, err
+    return err
 }

 // WriteTo implements io.WriterTo.WriteTo.
 func (s *Stat) WriteTo(w io.Writer) (int64, error) {
-    if !s.ATime.Packed() && s.MTime.Packed() && s.CTime.Packed() {
+    if !s.CTime.Packed() && s.ATime.Packed() && s.MTime.Packed() {
         // Type Stat doesn't have a packed layout in memory, fall back to MarshalBytes.
         buf := make([]byte, s.SizeBytes())
         s.MarshalBytes(buf)
diff --git a/pkg/abi/linux/linux_arm64_abi_autogen_unsafe.go b/pkg/abi/linux/linux_arm64_abi_autogen_unsafe.go
index daee765f3..83d655d30 100755
--- a/pkg/abi/linux/linux_arm64_abi_autogen_unsafe.go
+++ b/pkg/abi/linux/linux_arm64_abi_autogen_unsafe.go
@@ -122,12 +122,13 @@ func (s *Stat) UnmarshalUnsafe(src []byte) {
 }

 // CopyOut implements marshal.Marshallable.CopyOut.
-func (s *Stat) CopyOut(task marshal.Task, addr usermem.Addr) (int, error) {
-    if !s.ATime.Packed() && s.MTime.Packed() && s.CTime.Packed() {
+func (s *Stat) CopyOut(task marshal.Task, addr usermem.Addr) error {
+    if !s.CTime.Packed() && s.ATime.Packed() && s.MTime.Packed() {
         // Type Stat doesn't have a packed layout in memory, fall back to MarshalBytes.
         buf := task.CopyScratchBuffer(s.SizeBytes())
         s.MarshalBytes(buf)
-        return task.CopyOutBytes(addr, buf)
+        _, err := task.CopyOutBytes(addr, buf)
+        return err
     }

     // Bypass escape analysis on s. The no-op arithmetic operation on the
@@ -144,24 +145,24 @@ func (s *Stat) CopyOut(task marshal.Task, addr usermem.Addr) (int, error) {
     hdr.Len = s.SizeBytes()
     hdr.Cap = s.SizeBytes()

-    len, err := task.CopyOutBytes(addr, buf)
+    _, err := task.CopyOutBytes(addr, buf)
     // Since we bypassed the compiler's escape analysis, indicate that s
     // must live until after the CopyOutBytes.
     runtime.KeepAlive(s)
-    return len, err
+    return err
 }

 // CopyIn implements marshal.Marshallable.CopyIn.
-func (s *Stat) CopyIn(task marshal.Task, addr usermem.Addr) (int, error) {
-    if !s.ATime.Packed() && s.MTime.Packed() && s.CTime.Packed() {
+func (s *Stat) CopyIn(task marshal.Task, addr usermem.Addr) error {
+    if !s.MTime.Packed() && s.CTime.Packed() && s.ATime.Packed() {
         // Type Stat doesn't have a packed layout in memory, fall back to UnmarshalBytes.
         buf := task.CopyScratchBuffer(s.SizeBytes())
-        n, err := task.CopyInBytes(addr, buf)
+        _, err := task.CopyInBytes(addr, buf)
         if err != nil {
-            return n, err
+            return err
         }
         s.UnmarshalBytes(buf)
-        return n, nil
+        return nil
     }

     // Bypass escape analysis on s. The no-op arithmetic operation on the
@@ -178,11 +179,11 @@ func (s *Stat) CopyIn(task marshal.Task, addr usermem.Addr) (int, error) {
     hdr.Len = s.SizeBytes()
     hdr.Cap = s.SizeBytes()

-    len, err := task.CopyInBytes(addr, buf)
+    _, err := task.CopyInBytes(addr, buf)
     // Since we bypassed the compiler's escape analysis, indicate that s
     // must live until after the CopyInBytes.
     runtime.KeepAlive(s)
-    return len, err
+    return err
 }

 // WriteTo implements io.WriterTo.WriteTo.