author    gVisor bot <gvisor-bot@google.com>    2020-02-20 22:31:43 +0000
committer gVisor bot <gvisor-bot@google.com>    2020-02-20 22:31:43 +0000
commit    1ef036583b04a3a3ac625899d124e4b732281ab3 (patch)
tree      6aae8adee759b0a3fb015a997391c9d0733559b8 /pkg/abi/linux/linux_amd64_abi_autogen_unsafe.go
parent    d3b40e45b3bdc3e444cda9816c4e30924d7c9122 (diff)
parent    d90d71474f4c82f742140fdf026821709845cece (diff)
Merge release-20200211.0-55-gd90d714 (automated)
Diffstat (limited to 'pkg/abi/linux/linux_amd64_abi_autogen_unsafe.go')
-rwxr-xr-x  pkg/abi/linux/linux_amd64_abi_autogen_unsafe.go  25
1 file changed, 13 insertions(+), 12 deletions(-)
diff --git a/pkg/abi/linux/linux_amd64_abi_autogen_unsafe.go b/pkg/abi/linux/linux_amd64_abi_autogen_unsafe.go
index bf002d73f..cefe4941a 100755
--- a/pkg/abi/linux/linux_amd64_abi_autogen_unsafe.go
+++ b/pkg/abi/linux/linux_amd64_abi_autogen_unsafe.go
@@ -110,7 +110,7 @@ func (s *Stat) MarshalUnsafe(dst []byte) {
// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe.
func (s *Stat) UnmarshalUnsafe(src []byte) {
- if s.CTime.Packed() && s.ATime.Packed() && s.MTime.Packed() {
+ if s.ATime.Packed() && s.MTime.Packed() && s.CTime.Packed() {
safecopy.CopyOut(unsafe.Pointer(s), src)
} else {
s.UnmarshalBytes(src)
@@ -118,12 +118,13 @@ func (s *Stat) UnmarshalUnsafe(src []byte) {
}
// CopyOut implements marshal.Marshallable.CopyOut.
-func (s *Stat) CopyOut(task marshal.Task, addr usermem.Addr) (int, error) {
+func (s *Stat) CopyOut(task marshal.Task, addr usermem.Addr) error {
if !s.ATime.Packed() && s.MTime.Packed() && s.CTime.Packed() {
// Type Stat doesn't have a packed layout in memory, fall back to MarshalBytes.
buf := task.CopyScratchBuffer(s.SizeBytes())
s.MarshalBytes(buf)
- return task.CopyOutBytes(addr, buf)
+ _, err := task.CopyOutBytes(addr, buf)
+ return err
}
// Bypass escape analysis on s. The no-op arithmetic operation on the
@@ -140,24 +141,24 @@ func (s *Stat) CopyOut(task marshal.Task, addr usermem.Addr) (int, error) {
hdr.Len = s.SizeBytes()
hdr.Cap = s.SizeBytes()
- len, err := task.CopyOutBytes(addr, buf)
+ _, err := task.CopyOutBytes(addr, buf)
// Since we bypassed the compiler's escape analysis, indicate that s
// must live until after the CopyOutBytes.
runtime.KeepAlive(s)
- return len, err
+ return err
}
// CopyIn implements marshal.Marshallable.CopyIn.
-func (s *Stat) CopyIn(task marshal.Task, addr usermem.Addr) (int, error) {
+func (s *Stat) CopyIn(task marshal.Task, addr usermem.Addr) error {
if !s.ATime.Packed() && s.MTime.Packed() && s.CTime.Packed() {
// Type Stat doesn't have a packed layout in memory, fall back to UnmarshalBytes.
buf := task.CopyScratchBuffer(s.SizeBytes())
- n, err := task.CopyInBytes(addr, buf)
+ _, err := task.CopyInBytes(addr, buf)
if err != nil {
- return n, err
+ return err
}
s.UnmarshalBytes(buf)
- return n, nil
+ return nil
}
// Bypass escape analysis on s. The no-op arithmetic operation on the
@@ -174,16 +175,16 @@ func (s *Stat) CopyIn(task marshal.Task, addr usermem.Addr) (int, error) {
hdr.Len = s.SizeBytes()
hdr.Cap = s.SizeBytes()
- len, err := task.CopyInBytes(addr, buf)
+ _, err := task.CopyInBytes(addr, buf)
// Since we bypassed the compiler's escape analysis, indicate that s
// must live until after the CopyInBytes.
runtime.KeepAlive(s)
- return len, err
+ return err
}
// WriteTo implements io.WriterTo.WriteTo.
func (s *Stat) WriteTo(w io.Writer) (int64, error) {
- if !s.ATime.Packed() && s.MTime.Packed() && s.CTime.Packed() {
+ if !s.CTime.Packed() && s.ATime.Packed() && s.MTime.Packed() {
// Type Stat doesn't have a packed layout in memory, fall back to MarshalBytes.
buf := make([]byte, s.SizeBytes())
s.MarshalBytes(buf)
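
The generated CopyOut above combines two ideas that the diff's comments reference: a fast path that is only safe when the type's in-memory layout is packed (no implicit padding), and an escape-analysis bypass that aliases the struct's memory as a []byte and then pins the struct with runtime.KeepAlive until the copy finishes. The following is a minimal, self-contained sketch of that shape; the timespec type, its Packed method, and copyOutBytes are illustrative stand-ins, not the real linux.Timespec or marshal.Task APIs.

// A minimal sketch, assuming hypothetical timespec/copyOutBytes stand-ins,
// of the packed fast path plus the escape-analysis bypass used by the
// generated CopyOut.
package main

import (
	"encoding/binary"
	"fmt"
	"reflect"
	"runtime"
	"unsafe"
)

type timespec struct {
	Sec  int64
	Nsec int64
}

// Packed reports whether the struct has no implicit padding, so its
// in-memory bytes equal its marshalled bytes.
func (t *timespec) Packed() bool { return true }

// marshalBytes is the slow, padding-safe fallback encoder.
func (t *timespec) marshalBytes(dst []byte) {
	binary.LittleEndian.PutUint64(dst[0:8], uint64(t.Sec))
	binary.LittleEndian.PutUint64(dst[8:16], uint64(t.Nsec))
}

// copyOutBytes stands in for task.CopyOutBytes; here it just copies into dst.
func copyOutBytes(dst, src []byte) (int, error) {
	return copy(dst, src), nil
}

// copyOut mirrors the shape of the generated Stat.CopyOut.
func (t *timespec) copyOut(dst []byte) error {
	size := int(unsafe.Sizeof(*t))
	if !t.Packed() {
		// Not packed: marshal field by field into a scratch buffer.
		buf := make([]byte, size)
		t.marshalBytes(buf)
		_, err := copyOutBytes(dst, buf)
		return err
	}

	// Packed: build a []byte header over t's memory without letting t
	// escape to the heap.
	var buf []byte
	hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
	hdr.Data = uintptr(unsafe.Pointer(t))
	hdr.Len = size
	hdr.Cap = size

	_, err := copyOutBytes(dst, buf)
	// The slice header holds a bare uintptr, which the garbage collector
	// does not treat as a reference, so tell the runtime that t must stay
	// live until after the copy completes.
	runtime.KeepAlive(t)
	return err
}

func main() {
	out := make([]byte, 16)
	if err := (&timespec{Sec: 1, Nsec: 500}).copyOut(out); err != nil {
		fmt.Println("copy failed:", err)
		return
	}
	fmt.Printf("% x\n", out)
}

This also illustrates why the diff's new signatures can drop the byte count: when the fallback path marshals into a scratch buffer, the caller-visible result is just whether the copy succeeded, so returning only an error is sufficient.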