author    | gVisor bot <gvisor-bot@google.com> | 2020-12-11 21:38:36 +0000
committer | gVisor bot <gvisor-bot@google.com> | 2020-12-11 21:38:36 +0000
commit    | 3c8ccf6f4509a53312e48b102e4de993c7c31374 (patch)
tree      | a00e2224ad5189cb6cfaf71af7640135145a7860 /pkg/abi/linux
parent    | 119a97e008cdef89ce5bb4d67bf7738c6840ac48 (diff)
parent    | d45420b1528b8ad23e8f12fe81fb9cc148b83012 (diff)
Merge release-20201208.0-34-gd45420b15 (automated)
Diffstat (limited to 'pkg/abi/linux')
-rw-r--r-- | pkg/abi/linux/linux_abi_autogen_unsafe.go | 2882
1 file changed, 1441 insertions(+), 1441 deletions(-)
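
Note: the insertion and deletion counts match, and the hunks below show identical generated blocks changing position (the CapUserHeader implementation now precedes CapUserData, and the FUSE request/response types are reordered), so this automated merge reshuffles go_marshal output rather than changing its behavior. For orientation, here is a minimal hand-written sketch of the MarshalBytes/UnmarshalBytes pattern the generated code follows for a simple packed struct; the standalone capUserHeader type and encoding/binary.LittleEndian are stand-ins for the real linux.CapUserHeader and gVisor's usermem.ByteOrder.

```go
package main

import (
	"encoding/binary"
	"fmt"
)

// capUserHeader mirrors the 8-byte layout the generated code marshals:
// a uint32 Version followed by an int32 Pid.
type capUserHeader struct {
	Version uint32
	Pid     int32
}

// SizeBytes reports the wire size, as the generated SizeBytes does.
func (c *capUserHeader) SizeBytes() int { return 8 }

// MarshalBytes writes each field in order, slicing dst forward exactly
// the way the generated method does.
func (c *capUserHeader) MarshalBytes(dst []byte) {
	binary.LittleEndian.PutUint32(dst[:4], c.Version)
	dst = dst[4:]
	binary.LittleEndian.PutUint32(dst[:4], uint32(c.Pid))
}

// UnmarshalBytes is the inverse: read each field and advance src.
func (c *capUserHeader) UnmarshalBytes(src []byte) {
	c.Version = binary.LittleEndian.Uint32(src[:4])
	src = src[4:]
	c.Pid = int32(binary.LittleEndian.Uint32(src[:4]))
}

func main() {
	in := capUserHeader{Version: 0x20080522, Pid: 1234}
	buf := make([]byte, in.SizeBytes())
	in.MarshalBytes(buf)

	var out capUserHeader
	out.UnmarshalBytes(buf)
	fmt.Printf("version=%#x pid=%d\n", out.Version, out.Pid)
}
```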
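The CopyIn/CopyOut/WriteTo methods throughout this file share one zero-copy trick: they build a []byte whose slice header points directly at the struct's memory, pass that to the copy routine, and then pin the struct with runtime.KeepAlive, because the raw uintptr in the header hides the pointer from the garbage collector. Below is a self-contained sketch of that aliasing pattern under stated assumptions: fuseWriteOut is a local stand-in for the real linux.FUSEWriteOut, and the generated code additionally routes the pointer through gohacks.Noescape and falls back to MarshalBytes/UnmarshalBytes via a scratch buffer when Packed() is false.

```go
package main

import (
	"fmt"
	"reflect"
	"runtime"
	"unsafe"
)

// fuseWriteOut is a local stand-in with the same 8-byte layout the
// generated FUSEWriteOut marshalling assumes: a uint32 Size plus
// 4 bytes of padding.
type fuseWriteOut struct {
	Size uint32
	_    uint32
}

// asBytes builds a []byte that aliases v's memory, the same trick the
// generated CopyOutN/CopyIn/WriteTo methods use to avoid an extra copy.
func asBytes(v *fuseWriteOut) []byte {
	size := int(unsafe.Sizeof(*v))
	var buf []byte
	hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
	hdr.Data = uintptr(unsafe.Pointer(v))
	hdr.Len = size
	hdr.Cap = size
	return buf
}

func main() {
	v := &fuseWriteOut{Size: 4096}
	buf := asBytes(v)
	fmt.Println(len(buf), buf[:4]) // 8 bytes total; Size occupies the first 4.

	// The slice header stores a bare uintptr, so the GC cannot see v
	// through buf; keep v alive explicitly until the last use of buf.
	runtime.KeepAlive(v)
}
```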
diff --git a/pkg/abi/linux/linux_abi_autogen_unsafe.go b/pkg/abi/linux/linux_abi_autogen_unsafe.go index 2535e17f3..16f5880e2 100644 --- a/pkg/abi/linux/linux_abi_autogen_unsafe.go +++ b/pkg/abi/linux/linux_abi_autogen_unsafe.go @@ -521,6 +521,99 @@ func UnmarshalUnsafeBPFInstructionSlice(dst []BPFInstruction, src []byte) (int, } // SizeBytes implements marshal.Marshallable.SizeBytes. +func (c *CapUserHeader) SizeBytes() int { + return 8 +} + +// MarshalBytes implements marshal.Marshallable.MarshalBytes. +func (c *CapUserHeader) MarshalBytes(dst []byte) { + usermem.ByteOrder.PutUint32(dst[:4], uint32(c.Version)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(c.Pid)) + dst = dst[4:] +} + +// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. +func (c *CapUserHeader) UnmarshalBytes(src []byte) { + c.Version = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + c.Pid = int32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] +} + +// Packed implements marshal.Marshallable.Packed. +//go:nosplit +func (c *CapUserHeader) Packed() bool { + return true +} + +// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. +func (c *CapUserHeader) MarshalUnsafe(dst []byte) { + safecopy.CopyIn(dst, unsafe.Pointer(c)) +} + +// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. +func (c *CapUserHeader) UnmarshalUnsafe(src []byte) { + safecopy.CopyOut(unsafe.Pointer(c), src) +} + +// CopyOutN implements marshal.Marshallable.CopyOutN. +//go:nosplit +func (c *CapUserHeader) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { + // Construct a slice backed by dst's underlying memory. + var buf []byte + hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) + hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(c))) + hdr.Len = c.SizeBytes() + hdr.Cap = c.SizeBytes() + + length, err := cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay. + // Since we bypassed the compiler's escape analysis, indicate that c + // must live until the use above. + runtime.KeepAlive(c) // escapes: replaced by intrinsic. + return length, err +} + +// CopyOut implements marshal.Marshallable.CopyOut. +//go:nosplit +func (c *CapUserHeader) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { + return c.CopyOutN(cc, addr, c.SizeBytes()) +} + +// CopyIn implements marshal.Marshallable.CopyIn. +//go:nosplit +func (c *CapUserHeader) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { + // Construct a slice backed by dst's underlying memory. + var buf []byte + hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) + hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(c))) + hdr.Len = c.SizeBytes() + hdr.Cap = c.SizeBytes() + + length, err := cc.CopyInBytes(addr, buf) // escapes: okay. + // Since we bypassed the compiler's escape analysis, indicate that c + // must live until the use above. + runtime.KeepAlive(c) // escapes: replaced by intrinsic. + return length, err +} + +// WriteTo implements io.WriterTo.WriteTo. +func (c *CapUserHeader) WriteTo(writer io.Writer) (int64, error) { + // Construct a slice backed by dst's underlying memory. + var buf []byte + hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) + hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(c))) + hdr.Len = c.SizeBytes() + hdr.Cap = c.SizeBytes() + + length, err := writer.Write(buf) + // Since we bypassed the compiler's escape analysis, indicate that c + // must live until the use above. + runtime.KeepAlive(c) // escapes: replaced by intrinsic. 
+ return int64(length), err +} + +// SizeBytes implements marshal.Marshallable.SizeBytes. func (c *CapUserData) SizeBytes() int { return 12 } @@ -704,99 +797,6 @@ func UnmarshalUnsafeCapUserDataSlice(dst []CapUserData, src []byte) (int, error) } // SizeBytes implements marshal.Marshallable.SizeBytes. -func (c *CapUserHeader) SizeBytes() int { - return 8 -} - -// MarshalBytes implements marshal.Marshallable.MarshalBytes. -func (c *CapUserHeader) MarshalBytes(dst []byte) { - usermem.ByteOrder.PutUint32(dst[:4], uint32(c.Version)) - dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(c.Pid)) - dst = dst[4:] -} - -// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. -func (c *CapUserHeader) UnmarshalBytes(src []byte) { - c.Version = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - c.Pid = int32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] -} - -// Packed implements marshal.Marshallable.Packed. -//go:nosplit -func (c *CapUserHeader) Packed() bool { - return true -} - -// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. -func (c *CapUserHeader) MarshalUnsafe(dst []byte) { - safecopy.CopyIn(dst, unsafe.Pointer(c)) -} - -// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. -func (c *CapUserHeader) UnmarshalUnsafe(src []byte) { - safecopy.CopyOut(unsafe.Pointer(c), src) -} - -// CopyOutN implements marshal.Marshallable.CopyOutN. -//go:nosplit -func (c *CapUserHeader) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { - // Construct a slice backed by dst's underlying memory. - var buf []byte - hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) - hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(c))) - hdr.Len = c.SizeBytes() - hdr.Cap = c.SizeBytes() - - length, err := cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay. - // Since we bypassed the compiler's escape analysis, indicate that c - // must live until the use above. - runtime.KeepAlive(c) // escapes: replaced by intrinsic. - return length, err -} - -// CopyOut implements marshal.Marshallable.CopyOut. -//go:nosplit -func (c *CapUserHeader) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { - return c.CopyOutN(cc, addr, c.SizeBytes()) -} - -// CopyIn implements marshal.Marshallable.CopyIn. -//go:nosplit -func (c *CapUserHeader) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { - // Construct a slice backed by dst's underlying memory. - var buf []byte - hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) - hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(c))) - hdr.Len = c.SizeBytes() - hdr.Cap = c.SizeBytes() - - length, err := cc.CopyInBytes(addr, buf) // escapes: okay. - // Since we bypassed the compiler's escape analysis, indicate that c - // must live until the use above. - runtime.KeepAlive(c) // escapes: replaced by intrinsic. - return length, err -} - -// WriteTo implements io.WriterTo.WriteTo. -func (c *CapUserHeader) WriteTo(writer io.Writer) (int64, error) { - // Construct a slice backed by dst's underlying memory. - var buf []byte - hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) - hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(c))) - hdr.Len = c.SizeBytes() - hdr.Cap = c.SizeBytes() - - length, err := writer.Write(buf) - // Since we bypassed the compiler's escape analysis, indicate that c - // must live until the use above. - runtime.KeepAlive(c) // escapes: replaced by intrinsic. - return int64(length), err -} - -// SizeBytes implements marshal.Marshallable.SizeBytes. 
func (f *Flock) SizeBytes() int { return 24 + 1*4 + @@ -1352,53 +1352,71 @@ func (s *Statfs) WriteTo(writer io.Writer) (int64, error) { } // SizeBytes implements marshal.Marshallable.SizeBytes. -func (f *FUSEInitIn) SizeBytes() int { - return 16 +func (f *FUSEGetAttrOut) SizeBytes() int { + return 16 + + (*FUSEAttr)(nil).SizeBytes() } // MarshalBytes implements marshal.Marshallable.MarshalBytes. -func (f *FUSEInitIn) MarshalBytes(dst []byte) { - usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Major)) - dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Minor)) - dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(f.MaxReadahead)) +func (f *FUSEGetAttrOut) MarshalBytes(dst []byte) { + usermem.ByteOrder.PutUint64(dst[:8], uint64(f.AttrValid)) + dst = dst[8:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(f.AttrValidNsec)) dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Flags)) + // Padding: dst[:sizeof(uint32)] ~= uint32(0) dst = dst[4:] + f.Attr.MarshalBytes(dst[:f.Attr.SizeBytes()]) + dst = dst[f.Attr.SizeBytes():] } // UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. -func (f *FUSEInitIn) UnmarshalBytes(src []byte) { - f.Major = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - f.Minor = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - f.MaxReadahead = uint32(usermem.ByteOrder.Uint32(src[:4])) +func (f *FUSEGetAttrOut) UnmarshalBytes(src []byte) { + f.AttrValid = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + f.AttrValidNsec = uint32(usermem.ByteOrder.Uint32(src[:4])) src = src[4:] - f.Flags = uint32(usermem.ByteOrder.Uint32(src[:4])) + // Padding: var _ uint32 ~= src[:sizeof(uint32)] src = src[4:] + f.Attr.UnmarshalBytes(src[:f.Attr.SizeBytes()]) + src = src[f.Attr.SizeBytes():] } // Packed implements marshal.Marshallable.Packed. //go:nosplit -func (f *FUSEInitIn) Packed() bool { - return true +func (f *FUSEGetAttrOut) Packed() bool { + return f.Attr.Packed() } // MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. -func (f *FUSEInitIn) MarshalUnsafe(dst []byte) { - safecopy.CopyIn(dst, unsafe.Pointer(f)) +func (f *FUSEGetAttrOut) MarshalUnsafe(dst []byte) { + if f.Attr.Packed() { + safecopy.CopyIn(dst, unsafe.Pointer(f)) + } else { + // Type FUSEGetAttrOut doesn't have a packed layout in memory, fallback to MarshalBytes. + f.MarshalBytes(dst) + } } // UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. -func (f *FUSEInitIn) UnmarshalUnsafe(src []byte) { - safecopy.CopyOut(unsafe.Pointer(f), src) +func (f *FUSEGetAttrOut) UnmarshalUnsafe(src []byte) { + if f.Attr.Packed() { + safecopy.CopyOut(unsafe.Pointer(f), src) + } else { + // Type FUSEGetAttrOut doesn't have a packed layout in memory, fallback to UnmarshalBytes. + f.UnmarshalBytes(src) + } } // CopyOutN implements marshal.Marshallable.CopyOutN. //go:nosplit -func (f *FUSEInitIn) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { +func (f *FUSEGetAttrOut) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { + if !f.Attr.Packed() { + // Type FUSEGetAttrOut doesn't have a packed layout in memory, fall back to MarshalBytes. + buf := cc.CopyScratchBuffer(f.SizeBytes()) // escapes: okay. + f.MarshalBytes(buf) // escapes: fallback. + return cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay. + } + // Construct a slice backed by dst's underlying memory. 
var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -1415,13 +1433,23 @@ func (f *FUSEInitIn) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit i // CopyOut implements marshal.Marshallable.CopyOut. //go:nosplit -func (f *FUSEInitIn) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (f *FUSEGetAttrOut) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { return f.CopyOutN(cc, addr, f.SizeBytes()) } // CopyIn implements marshal.Marshallable.CopyIn. //go:nosplit -func (f *FUSEInitIn) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (f *FUSEGetAttrOut) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { + if !f.Attr.Packed() { + // Type FUSEGetAttrOut doesn't have a packed layout in memory, fall back to UnmarshalBytes. + buf := cc.CopyScratchBuffer(f.SizeBytes()) // escapes: okay. + length, err := cc.CopyInBytes(addr, buf) // escapes: okay. + // Unmarshal unconditionally. If we had a short copy-in, this results in a + // partially unmarshalled struct. + f.UnmarshalBytes(buf) // escapes: fallback. + return length, err + } + // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -1437,7 +1465,15 @@ func (f *FUSEInitIn) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, err } // WriteTo implements io.WriterTo.WriteTo. -func (f *FUSEInitIn) WriteTo(writer io.Writer) (int64, error) { +func (f *FUSEGetAttrOut) WriteTo(writer io.Writer) (int64, error) { + if !f.Attr.Packed() { + // Type FUSEGetAttrOut doesn't have a packed layout in memory, fall back to MarshalBytes. + buf := make([]byte, f.SizeBytes()) + f.MarshalBytes(buf) + length, err := writer.Write(buf) + return int64(length), err + } + // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -1453,82 +1489,45 @@ func (f *FUSEInitIn) WriteTo(writer io.Writer) (int64, error) { } // SizeBytes implements marshal.Marshallable.SizeBytes. -func (f *FUSEInitOut) SizeBytes() int { - return 32 + - 4*8 +func (f *FUSEWriteOut) SizeBytes() int { + return 8 } // MarshalBytes implements marshal.Marshallable.MarshalBytes. -func (f *FUSEInitOut) MarshalBytes(dst []byte) { - usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Major)) - dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Minor)) - dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(f.MaxReadahead)) - dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Flags)) - dst = dst[4:] - usermem.ByteOrder.PutUint16(dst[:2], uint16(f.MaxBackground)) - dst = dst[2:] - usermem.ByteOrder.PutUint16(dst[:2], uint16(f.CongestionThreshold)) - dst = dst[2:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(f.MaxWrite)) +func (f *FUSEWriteOut) MarshalBytes(dst []byte) { + usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Size)) dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(f.TimeGran)) + // Padding: dst[:sizeof(uint32)] ~= uint32(0) dst = dst[4:] - usermem.ByteOrder.PutUint16(dst[:2], uint16(f.MaxPages)) - dst = dst[2:] - // Padding: dst[:sizeof(uint16)] ~= uint16(0) - dst = dst[2:] - // Padding: dst[:sizeof(uint32)*8] ~= [8]uint32{0} - dst = dst[4*(8):] } // UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. 
-func (f *FUSEInitOut) UnmarshalBytes(src []byte) { - f.Major = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - f.Minor = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - f.MaxReadahead = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - f.Flags = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - f.MaxBackground = uint16(usermem.ByteOrder.Uint16(src[:2])) - src = src[2:] - f.CongestionThreshold = uint16(usermem.ByteOrder.Uint16(src[:2])) - src = src[2:] - f.MaxWrite = uint32(usermem.ByteOrder.Uint32(src[:4])) +func (f *FUSEWriteOut) UnmarshalBytes(src []byte) { + f.Size = uint32(usermem.ByteOrder.Uint32(src[:4])) src = src[4:] - f.TimeGran = uint32(usermem.ByteOrder.Uint32(src[:4])) + // Padding: var _ uint32 ~= src[:sizeof(uint32)] src = src[4:] - f.MaxPages = uint16(usermem.ByteOrder.Uint16(src[:2])) - src = src[2:] - // Padding: var _ uint16 ~= src[:sizeof(uint16)] - src = src[2:] - // Padding: ~ copy([8]uint32(f._), src[:sizeof(uint32)*8]) - src = src[4*(8):] } // Packed implements marshal.Marshallable.Packed. //go:nosplit -func (f *FUSEInitOut) Packed() bool { +func (f *FUSEWriteOut) Packed() bool { return true } // MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. -func (f *FUSEInitOut) MarshalUnsafe(dst []byte) { +func (f *FUSEWriteOut) MarshalUnsafe(dst []byte) { safecopy.CopyIn(dst, unsafe.Pointer(f)) } // UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. -func (f *FUSEInitOut) UnmarshalUnsafe(src []byte) { +func (f *FUSEWriteOut) UnmarshalUnsafe(src []byte) { safecopy.CopyOut(unsafe.Pointer(f), src) } // CopyOutN implements marshal.Marshallable.CopyOutN. //go:nosplit -func (f *FUSEInitOut) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { +func (f *FUSEWriteOut) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -1545,13 +1544,13 @@ func (f *FUSEInitOut) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit // CopyOut implements marshal.Marshallable.CopyOut. //go:nosplit -func (f *FUSEInitOut) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (f *FUSEWriteOut) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { return f.CopyOutN(cc, addr, f.SizeBytes()) } // CopyIn implements marshal.Marshallable.CopyIn. //go:nosplit -func (f *FUSEInitOut) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (f *FUSEWriteOut) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -1567,7 +1566,7 @@ func (f *FUSEInitOut) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, er } // WriteTo implements io.WriterTo.WriteTo. -func (f *FUSEInitOut) WriteTo(writer io.Writer) (int64, error) { +func (f *FUSEWriteOut) WriteTo(writer io.Writer) (int64, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -1583,71 +1582,53 @@ func (f *FUSEInitOut) WriteTo(writer io.Writer) (int64, error) { } // SizeBytes implements marshal.Marshallable.SizeBytes. -func (f *FUSEGetAttrOut) SizeBytes() int { - return 16 + - (*FUSEAttr)(nil).SizeBytes() +func (f *FUSEReleaseIn) SizeBytes() int { + return 24 } // MarshalBytes implements marshal.Marshallable.MarshalBytes. 
-func (f *FUSEGetAttrOut) MarshalBytes(dst []byte) { - usermem.ByteOrder.PutUint64(dst[:8], uint64(f.AttrValid)) +func (f *FUSEReleaseIn) MarshalBytes(dst []byte) { + usermem.ByteOrder.PutUint64(dst[:8], uint64(f.Fh)) dst = dst[8:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(f.AttrValidNsec)) + usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Flags)) dst = dst[4:] - // Padding: dst[:sizeof(uint32)] ~= uint32(0) + usermem.ByteOrder.PutUint32(dst[:4], uint32(f.ReleaseFlags)) dst = dst[4:] - f.Attr.MarshalBytes(dst[:f.Attr.SizeBytes()]) - dst = dst[f.Attr.SizeBytes():] + usermem.ByteOrder.PutUint64(dst[:8], uint64(f.LockOwner)) + dst = dst[8:] } // UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. -func (f *FUSEGetAttrOut) UnmarshalBytes(src []byte) { - f.AttrValid = uint64(usermem.ByteOrder.Uint64(src[:8])) +func (f *FUSEReleaseIn) UnmarshalBytes(src []byte) { + f.Fh = uint64(usermem.ByteOrder.Uint64(src[:8])) src = src[8:] - f.AttrValidNsec = uint32(usermem.ByteOrder.Uint32(src[:4])) + f.Flags = uint32(usermem.ByteOrder.Uint32(src[:4])) src = src[4:] - // Padding: var _ uint32 ~= src[:sizeof(uint32)] + f.ReleaseFlags = uint32(usermem.ByteOrder.Uint32(src[:4])) src = src[4:] - f.Attr.UnmarshalBytes(src[:f.Attr.SizeBytes()]) - src = src[f.Attr.SizeBytes():] + f.LockOwner = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] } // Packed implements marshal.Marshallable.Packed. //go:nosplit -func (f *FUSEGetAttrOut) Packed() bool { - return f.Attr.Packed() +func (f *FUSEReleaseIn) Packed() bool { + return true } // MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. -func (f *FUSEGetAttrOut) MarshalUnsafe(dst []byte) { - if f.Attr.Packed() { - safecopy.CopyIn(dst, unsafe.Pointer(f)) - } else { - // Type FUSEGetAttrOut doesn't have a packed layout in memory, fallback to MarshalBytes. - f.MarshalBytes(dst) - } +func (f *FUSEReleaseIn) MarshalUnsafe(dst []byte) { + safecopy.CopyIn(dst, unsafe.Pointer(f)) } // UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. -func (f *FUSEGetAttrOut) UnmarshalUnsafe(src []byte) { - if f.Attr.Packed() { - safecopy.CopyOut(unsafe.Pointer(f), src) - } else { - // Type FUSEGetAttrOut doesn't have a packed layout in memory, fallback to UnmarshalBytes. - f.UnmarshalBytes(src) - } +func (f *FUSEReleaseIn) UnmarshalUnsafe(src []byte) { + safecopy.CopyOut(unsafe.Pointer(f), src) } // CopyOutN implements marshal.Marshallable.CopyOutN. //go:nosplit -func (f *FUSEGetAttrOut) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { - if !f.Attr.Packed() { - // Type FUSEGetAttrOut doesn't have a packed layout in memory, fall back to MarshalBytes. - buf := cc.CopyScratchBuffer(f.SizeBytes()) // escapes: okay. - f.MarshalBytes(buf) // escapes: fallback. - return cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay. - } - +func (f *FUSEReleaseIn) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -1664,23 +1645,13 @@ func (f *FUSEGetAttrOut) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, lim // CopyOut implements marshal.Marshallable.CopyOut. //go:nosplit -func (f *FUSEGetAttrOut) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (f *FUSEReleaseIn) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { return f.CopyOutN(cc, addr, f.SizeBytes()) } // CopyIn implements marshal.Marshallable.CopyIn. 
//go:nosplit -func (f *FUSEGetAttrOut) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { - if !f.Attr.Packed() { - // Type FUSEGetAttrOut doesn't have a packed layout in memory, fall back to UnmarshalBytes. - buf := cc.CopyScratchBuffer(f.SizeBytes()) // escapes: okay. - length, err := cc.CopyInBytes(addr, buf) // escapes: okay. - // Unmarshal unconditionally. If we had a short copy-in, this results in a - // partially unmarshalled struct. - f.UnmarshalBytes(buf) // escapes: fallback. - return length, err - } - +func (f *FUSEReleaseIn) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -1696,15 +1667,7 @@ func (f *FUSEGetAttrOut) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, } // WriteTo implements io.WriterTo.WriteTo. -func (f *FUSEGetAttrOut) WriteTo(writer io.Writer) (int64, error) { - if !f.Attr.Packed() { - // Type FUSEGetAttrOut doesn't have a packed layout in memory, fall back to MarshalBytes. - buf := make([]byte, f.SizeBytes()) - f.MarshalBytes(buf) - length, err := writer.Write(buf) - return int64(length), err - } - +func (f *FUSEReleaseIn) WriteTo(writer io.Writer) (int64, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -1720,49 +1683,45 @@ func (f *FUSEGetAttrOut) WriteTo(writer io.Writer) (int64, error) { } // SizeBytes implements marshal.Marshallable.SizeBytes. -func (f *FUSEOpenOut) SizeBytes() int { - return 16 +func (f *FUSEMkdirMeta) SizeBytes() int { + return 8 } // MarshalBytes implements marshal.Marshallable.MarshalBytes. -func (f *FUSEOpenOut) MarshalBytes(dst []byte) { - usermem.ByteOrder.PutUint64(dst[:8], uint64(f.Fh)) - dst = dst[8:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(f.OpenFlag)) +func (f *FUSEMkdirMeta) MarshalBytes(dst []byte) { + usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Mode)) dst = dst[4:] - // Padding: dst[:sizeof(uint32)] ~= uint32(0) + usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Umask)) dst = dst[4:] } // UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. -func (f *FUSEOpenOut) UnmarshalBytes(src []byte) { - f.Fh = uint64(usermem.ByteOrder.Uint64(src[:8])) - src = src[8:] - f.OpenFlag = uint32(usermem.ByteOrder.Uint32(src[:4])) +func (f *FUSEMkdirMeta) UnmarshalBytes(src []byte) { + f.Mode = uint32(usermem.ByteOrder.Uint32(src[:4])) src = src[4:] - // Padding: var _ uint32 ~= src[:sizeof(uint32)] + f.Umask = uint32(usermem.ByteOrder.Uint32(src[:4])) src = src[4:] } // Packed implements marshal.Marshallable.Packed. //go:nosplit -func (f *FUSEOpenOut) Packed() bool { +func (f *FUSEMkdirMeta) Packed() bool { return true } // MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. -func (f *FUSEOpenOut) MarshalUnsafe(dst []byte) { +func (f *FUSEMkdirMeta) MarshalUnsafe(dst []byte) { safecopy.CopyIn(dst, unsafe.Pointer(f)) } // UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. -func (f *FUSEOpenOut) UnmarshalUnsafe(src []byte) { +func (f *FUSEMkdirMeta) UnmarshalUnsafe(src []byte) { safecopy.CopyOut(unsafe.Pointer(f), src) } // CopyOutN implements marshal.Marshallable.CopyOutN. //go:nosplit -func (f *FUSEOpenOut) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { +func (f *FUSEMkdirMeta) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { // Construct a slice backed by dst's underlying memory. 
var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -1779,13 +1738,13 @@ func (f *FUSEOpenOut) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit // CopyOut implements marshal.Marshallable.CopyOut. //go:nosplit -func (f *FUSEOpenOut) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (f *FUSEMkdirMeta) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { return f.CopyOutN(cc, addr, f.SizeBytes()) } // CopyIn implements marshal.Marshallable.CopyIn. //go:nosplit -func (f *FUSEOpenOut) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (f *FUSEMkdirMeta) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -1801,7 +1760,7 @@ func (f *FUSEOpenOut) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, er } // WriteTo implements io.WriterTo.WriteTo. -func (f *FUSEOpenOut) WriteTo(writer io.Writer) (int64, error) { +func (f *FUSEMkdirMeta) WriteTo(writer io.Writer) (int64, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -1817,41 +1776,77 @@ func (f *FUSEOpenOut) WriteTo(writer io.Writer) (int64, error) { } // SizeBytes implements marshal.Marshallable.SizeBytes. -func (f *FUSEWriteIn) SizeBytes() int { - return 40 +func (f *FUSESetAttrIn) SizeBytes() int { + return 88 } // MarshalBytes implements marshal.Marshallable.MarshalBytes. -func (f *FUSEWriteIn) MarshalBytes(dst []byte) { +func (f *FUSESetAttrIn) MarshalBytes(dst []byte) { + usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Valid)) + dst = dst[4:] + // Padding: dst[:sizeof(uint32)] ~= uint32(0) + dst = dst[4:] usermem.ByteOrder.PutUint64(dst[:8], uint64(f.Fh)) dst = dst[8:] - usermem.ByteOrder.PutUint64(dst[:8], uint64(f.Offset)) + usermem.ByteOrder.PutUint64(dst[:8], uint64(f.Size)) dst = dst[8:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Size)) - dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(f.WriteFlags)) - dst = dst[4:] usermem.ByteOrder.PutUint64(dst[:8], uint64(f.LockOwner)) dst = dst[8:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Flags)) + usermem.ByteOrder.PutUint64(dst[:8], uint64(f.Atime)) + dst = dst[8:] + usermem.ByteOrder.PutUint64(dst[:8], uint64(f.Mtime)) + dst = dst[8:] + usermem.ByteOrder.PutUint64(dst[:8], uint64(f.Ctime)) + dst = dst[8:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(f.AtimeNsec)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(f.MtimeNsec)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(f.CtimeNsec)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Mode)) + dst = dst[4:] + // Padding: dst[:sizeof(uint32)] ~= uint32(0) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(f.UID)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(f.GID)) dst = dst[4:] // Padding: dst[:sizeof(uint32)] ~= uint32(0) dst = dst[4:] } // UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. 
-func (f *FUSEWriteIn) UnmarshalBytes(src []byte) { +func (f *FUSESetAttrIn) UnmarshalBytes(src []byte) { + f.Valid = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + // Padding: var _ uint32 ~= src[:sizeof(uint32)] + src = src[4:] f.Fh = uint64(usermem.ByteOrder.Uint64(src[:8])) src = src[8:] - f.Offset = uint64(usermem.ByteOrder.Uint64(src[:8])) + f.Size = uint64(usermem.ByteOrder.Uint64(src[:8])) src = src[8:] - f.Size = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - f.WriteFlags = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] f.LockOwner = uint64(usermem.ByteOrder.Uint64(src[:8])) src = src[8:] - f.Flags = uint32(usermem.ByteOrder.Uint32(src[:4])) + f.Atime = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + f.Mtime = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + f.Ctime = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + f.AtimeNsec = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + f.MtimeNsec = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + f.CtimeNsec = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + f.Mode = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + // Padding: var _ uint32 ~= src[:sizeof(uint32)] + src = src[4:] + f.UID = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + f.GID = uint32(usermem.ByteOrder.Uint32(src[:4])) src = src[4:] // Padding: var _ uint32 ~= src[:sizeof(uint32)] src = src[4:] @@ -1859,23 +1854,23 @@ func (f *FUSEWriteIn) UnmarshalBytes(src []byte) { // Packed implements marshal.Marshallable.Packed. //go:nosplit -func (f *FUSEWriteIn) Packed() bool { +func (f *FUSESetAttrIn) Packed() bool { return true } // MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. -func (f *FUSEWriteIn) MarshalUnsafe(dst []byte) { +func (f *FUSESetAttrIn) MarshalUnsafe(dst []byte) { safecopy.CopyIn(dst, unsafe.Pointer(f)) } // UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. -func (f *FUSEWriteIn) UnmarshalUnsafe(src []byte) { +func (f *FUSESetAttrIn) UnmarshalUnsafe(src []byte) { safecopy.CopyOut(unsafe.Pointer(f), src) } // CopyOutN implements marshal.Marshallable.CopyOutN. //go:nosplit -func (f *FUSEWriteIn) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { +func (f *FUSESetAttrIn) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -1892,13 +1887,13 @@ func (f *FUSEWriteIn) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit // CopyOut implements marshal.Marshallable.CopyOut. //go:nosplit -func (f *FUSEWriteIn) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (f *FUSESetAttrIn) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { return f.CopyOutN(cc, addr, f.SizeBytes()) } // CopyIn implements marshal.Marshallable.CopyIn. //go:nosplit -func (f *FUSEWriteIn) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (f *FUSESetAttrIn) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -1914,7 +1909,7 @@ func (f *FUSEWriteIn) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, er } // WriteTo implements io.WriterTo.WriteTo. 
-func (f *FUSEWriteIn) WriteTo(writer io.Writer) (int64, error) { +func (f *FUSESetAttrIn) WriteTo(writer io.Writer) (int64, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -1930,62 +1925,83 @@ func (f *FUSEWriteIn) WriteTo(writer io.Writer) (int64, error) { } // SizeBytes implements marshal.Marshallable.SizeBytes. -func (f *FUSEHeaderOut) SizeBytes() int { - return 8 + +func (f *FUSEHeaderIn) SizeBytes() int { + return 28 + + (*FUSEOpcode)(nil).SizeBytes() + (*FUSEOpID)(nil).SizeBytes() } // MarshalBytes implements marshal.Marshallable.MarshalBytes. -func (f *FUSEHeaderOut) MarshalBytes(dst []byte) { +func (f *FUSEHeaderIn) MarshalBytes(dst []byte) { usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Len)) dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Error)) - dst = dst[4:] + f.Opcode.MarshalBytes(dst[:f.Opcode.SizeBytes()]) + dst = dst[f.Opcode.SizeBytes():] f.Unique.MarshalBytes(dst[:f.Unique.SizeBytes()]) dst = dst[f.Unique.SizeBytes():] + usermem.ByteOrder.PutUint64(dst[:8], uint64(f.NodeID)) + dst = dst[8:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(f.UID)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(f.GID)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(f.PID)) + dst = dst[4:] + // Padding: dst[:sizeof(uint32)] ~= uint32(0) + dst = dst[4:] } // UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. -func (f *FUSEHeaderOut) UnmarshalBytes(src []byte) { +func (f *FUSEHeaderIn) UnmarshalBytes(src []byte) { f.Len = uint32(usermem.ByteOrder.Uint32(src[:4])) src = src[4:] - f.Error = int32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] + f.Opcode.UnmarshalBytes(src[:f.Opcode.SizeBytes()]) + src = src[f.Opcode.SizeBytes():] f.Unique.UnmarshalBytes(src[:f.Unique.SizeBytes()]) src = src[f.Unique.SizeBytes():] + f.NodeID = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + f.UID = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + f.GID = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + f.PID = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + // Padding: var _ uint32 ~= src[:sizeof(uint32)] + src = src[4:] } // Packed implements marshal.Marshallable.Packed. //go:nosplit -func (f *FUSEHeaderOut) Packed() bool { - return f.Unique.Packed() +func (f *FUSEHeaderIn) Packed() bool { + return f.Opcode.Packed() && f.Unique.Packed() } // MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. -func (f *FUSEHeaderOut) MarshalUnsafe(dst []byte) { - if f.Unique.Packed() { +func (f *FUSEHeaderIn) MarshalUnsafe(dst []byte) { + if f.Opcode.Packed() && f.Unique.Packed() { safecopy.CopyIn(dst, unsafe.Pointer(f)) } else { - // Type FUSEHeaderOut doesn't have a packed layout in memory, fallback to MarshalBytes. + // Type FUSEHeaderIn doesn't have a packed layout in memory, fallback to MarshalBytes. f.MarshalBytes(dst) } } // UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. -func (f *FUSEHeaderOut) UnmarshalUnsafe(src []byte) { - if f.Unique.Packed() { +func (f *FUSEHeaderIn) UnmarshalUnsafe(src []byte) { + if f.Opcode.Packed() && f.Unique.Packed() { safecopy.CopyOut(unsafe.Pointer(f), src) } else { - // Type FUSEHeaderOut doesn't have a packed layout in memory, fallback to UnmarshalBytes. + // Type FUSEHeaderIn doesn't have a packed layout in memory, fallback to UnmarshalBytes. f.UnmarshalBytes(src) } } // CopyOutN implements marshal.Marshallable.CopyOutN. 
//go:nosplit -func (f *FUSEHeaderOut) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { - if !f.Unique.Packed() { - // Type FUSEHeaderOut doesn't have a packed layout in memory, fall back to MarshalBytes. +func (f *FUSEHeaderIn) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { + if !f.Opcode.Packed() && f.Unique.Packed() { + // Type FUSEHeaderIn doesn't have a packed layout in memory, fall back to MarshalBytes. buf := cc.CopyScratchBuffer(f.SizeBytes()) // escapes: okay. f.MarshalBytes(buf) // escapes: fallback. return cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay. @@ -2007,15 +2023,15 @@ func (f *FUSEHeaderOut) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limi // CopyOut implements marshal.Marshallable.CopyOut. //go:nosplit -func (f *FUSEHeaderOut) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (f *FUSEHeaderIn) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { return f.CopyOutN(cc, addr, f.SizeBytes()) } // CopyIn implements marshal.Marshallable.CopyIn. //go:nosplit -func (f *FUSEHeaderOut) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { - if !f.Unique.Packed() { - // Type FUSEHeaderOut doesn't have a packed layout in memory, fall back to UnmarshalBytes. +func (f *FUSEHeaderIn) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { + if !f.Opcode.Packed() && f.Unique.Packed() { + // Type FUSEHeaderIn doesn't have a packed layout in memory, fall back to UnmarshalBytes. buf := cc.CopyScratchBuffer(f.SizeBytes()) // escapes: okay. length, err := cc.CopyInBytes(addr, buf) // escapes: okay. // Unmarshal unconditionally. If we had a short copy-in, this results in a @@ -2039,9 +2055,9 @@ func (f *FUSEHeaderOut) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, } // WriteTo implements io.WriterTo.WriteTo. -func (f *FUSEHeaderOut) WriteTo(writer io.Writer) (int64, error) { - if !f.Unique.Packed() { - // Type FUSEHeaderOut doesn't have a packed layout in memory, fall back to MarshalBytes. +func (f *FUSEHeaderIn) WriteTo(writer io.Writer) (int64, error) { + if !f.Opcode.Packed() && f.Unique.Packed() { + // Type FUSEHeaderIn doesn't have a packed layout in memory, fall back to MarshalBytes. buf := make([]byte, f.SizeBytes()) f.MarshalBytes(buf) length, err := writer.Write(buf) @@ -2063,53 +2079,49 @@ func (f *FUSEHeaderOut) WriteTo(writer io.Writer) (int64, error) { } // SizeBytes implements marshal.Marshallable.SizeBytes. -func (f *FUSEReleaseIn) SizeBytes() int { - return 24 +func (f *FUSEGetAttrIn) SizeBytes() int { + return 16 } // MarshalBytes implements marshal.Marshallable.MarshalBytes. -func (f *FUSEReleaseIn) MarshalBytes(dst []byte) { - usermem.ByteOrder.PutUint64(dst[:8], uint64(f.Fh)) - dst = dst[8:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Flags)) +func (f *FUSEGetAttrIn) MarshalBytes(dst []byte) { + usermem.ByteOrder.PutUint32(dst[:4], uint32(f.GetAttrFlags)) dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(f.ReleaseFlags)) + // Padding: dst[:sizeof(uint32)] ~= uint32(0) dst = dst[4:] - usermem.ByteOrder.PutUint64(dst[:8], uint64(f.LockOwner)) + usermem.ByteOrder.PutUint64(dst[:8], uint64(f.Fh)) dst = dst[8:] } // UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. 
-func (f *FUSEReleaseIn) UnmarshalBytes(src []byte) { - f.Fh = uint64(usermem.ByteOrder.Uint64(src[:8])) - src = src[8:] - f.Flags = uint32(usermem.ByteOrder.Uint32(src[:4])) +func (f *FUSEGetAttrIn) UnmarshalBytes(src []byte) { + f.GetAttrFlags = uint32(usermem.ByteOrder.Uint32(src[:4])) src = src[4:] - f.ReleaseFlags = uint32(usermem.ByteOrder.Uint32(src[:4])) + // Padding: var _ uint32 ~= src[:sizeof(uint32)] src = src[4:] - f.LockOwner = uint64(usermem.ByteOrder.Uint64(src[:8])) + f.Fh = uint64(usermem.ByteOrder.Uint64(src[:8])) src = src[8:] } // Packed implements marshal.Marshallable.Packed. //go:nosplit -func (f *FUSEReleaseIn) Packed() bool { +func (f *FUSEGetAttrIn) Packed() bool { return true } // MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. -func (f *FUSEReleaseIn) MarshalUnsafe(dst []byte) { +func (f *FUSEGetAttrIn) MarshalUnsafe(dst []byte) { safecopy.CopyIn(dst, unsafe.Pointer(f)) } // UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. -func (f *FUSEReleaseIn) UnmarshalUnsafe(src []byte) { +func (f *FUSEGetAttrIn) UnmarshalUnsafe(src []byte) { safecopy.CopyOut(unsafe.Pointer(f), src) } // CopyOutN implements marshal.Marshallable.CopyOutN. //go:nosplit -func (f *FUSEReleaseIn) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { +func (f *FUSEGetAttrIn) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -2126,13 +2138,13 @@ func (f *FUSEReleaseIn) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limi // CopyOut implements marshal.Marshallable.CopyOut. //go:nosplit -func (f *FUSEReleaseIn) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (f *FUSEGetAttrIn) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { return f.CopyOutN(cc, addr, f.SizeBytes()) } // CopyIn implements marshal.Marshallable.CopyIn. //go:nosplit -func (f *FUSEReleaseIn) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (f *FUSEGetAttrIn) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -2148,7 +2160,7 @@ func (f *FUSEReleaseIn) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, } // WriteTo implements io.WriterTo.WriteTo. -func (f *FUSEReleaseIn) WriteTo(writer io.Writer) (int64, error) { +func (f *FUSEGetAttrIn) WriteTo(writer io.Writer) (int64, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -2164,16 +2176,16 @@ func (f *FUSEReleaseIn) WriteTo(writer io.Writer) (int64, error) { } // SizeBytes implements marshal.Marshallable.SizeBytes. -func (f *FUSECreateMeta) SizeBytes() int { +func (f *FUSEMknodMeta) SizeBytes() int { return 16 } // MarshalBytes implements marshal.Marshallable.MarshalBytes. 
-func (f *FUSECreateMeta) MarshalBytes(dst []byte) { - usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Flags)) - dst = dst[4:] +func (f *FUSEMknodMeta) MarshalBytes(dst []byte) { usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Mode)) dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Rdev)) + dst = dst[4:] usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Umask)) dst = dst[4:] // Padding: dst[:sizeof(uint32)] ~= uint32(0) @@ -2181,11 +2193,11 @@ func (f *FUSECreateMeta) MarshalBytes(dst []byte) { } // UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. -func (f *FUSECreateMeta) UnmarshalBytes(src []byte) { - f.Flags = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] +func (f *FUSEMknodMeta) UnmarshalBytes(src []byte) { f.Mode = uint32(usermem.ByteOrder.Uint32(src[:4])) src = src[4:] + f.Rdev = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] f.Umask = uint32(usermem.ByteOrder.Uint32(src[:4])) src = src[4:] // Padding: var _ uint32 ~= src[:sizeof(uint32)] @@ -2194,23 +2206,23 @@ func (f *FUSECreateMeta) UnmarshalBytes(src []byte) { // Packed implements marshal.Marshallable.Packed. //go:nosplit -func (f *FUSECreateMeta) Packed() bool { +func (f *FUSEMknodMeta) Packed() bool { return true } // MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. -func (f *FUSECreateMeta) MarshalUnsafe(dst []byte) { +func (f *FUSEMknodMeta) MarshalUnsafe(dst []byte) { safecopy.CopyIn(dst, unsafe.Pointer(f)) } // UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. -func (f *FUSECreateMeta) UnmarshalUnsafe(src []byte) { +func (f *FUSEMknodMeta) UnmarshalUnsafe(src []byte) { safecopy.CopyOut(unsafe.Pointer(f), src) } // CopyOutN implements marshal.Marshallable.CopyOutN. //go:nosplit -func (f *FUSECreateMeta) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { +func (f *FUSEMknodMeta) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -2227,13 +2239,13 @@ func (f *FUSECreateMeta) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, lim // CopyOut implements marshal.Marshallable.CopyOut. //go:nosplit -func (f *FUSECreateMeta) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (f *FUSEMknodMeta) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { return f.CopyOutN(cc, addr, f.SizeBytes()) } // CopyIn implements marshal.Marshallable.CopyIn. //go:nosplit -func (f *FUSECreateMeta) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (f *FUSEMknodMeta) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -2249,7 +2261,7 @@ func (f *FUSECreateMeta) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, } // WriteTo implements io.WriterTo.WriteTo. -func (f *FUSECreateMeta) WriteTo(writer io.Writer) (int64, error) { +func (f *FUSEMknodMeta) WriteTo(writer io.Writer) (int64, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -2265,41 +2277,53 @@ func (f *FUSECreateMeta) WriteTo(writer io.Writer) (int64, error) { } // SizeBytes implements marshal.Marshallable.SizeBytes. 
-//go:nosplit -func (f *FUSEOpID) SizeBytes() int { - return 8 +func (f *FUSEDirentMeta) SizeBytes() int { + return 24 } // MarshalBytes implements marshal.Marshallable.MarshalBytes. -func (f *FUSEOpID) MarshalBytes(dst []byte) { - usermem.ByteOrder.PutUint64(dst[:8], uint64(*f)) +func (f *FUSEDirentMeta) MarshalBytes(dst []byte) { + usermem.ByteOrder.PutUint64(dst[:8], uint64(f.Ino)) + dst = dst[8:] + usermem.ByteOrder.PutUint64(dst[:8], uint64(f.Off)) + dst = dst[8:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(f.NameLen)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Type)) + dst = dst[4:] } // UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. -func (f *FUSEOpID) UnmarshalBytes(src []byte) { - *f = FUSEOpID(uint64(usermem.ByteOrder.Uint64(src[:8]))) +func (f *FUSEDirentMeta) UnmarshalBytes(src []byte) { + f.Ino = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + f.Off = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + f.NameLen = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + f.Type = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] } // Packed implements marshal.Marshallable.Packed. //go:nosplit -func (f *FUSEOpID) Packed() bool { - // Scalar newtypes are always packed. +func (f *FUSEDirentMeta) Packed() bool { return true } // MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. -func (f *FUSEOpID) MarshalUnsafe(dst []byte) { +func (f *FUSEDirentMeta) MarshalUnsafe(dst []byte) { safecopy.CopyIn(dst, unsafe.Pointer(f)) } // UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. -func (f *FUSEOpID) UnmarshalUnsafe(src []byte) { +func (f *FUSEDirentMeta) UnmarshalUnsafe(src []byte) { safecopy.CopyOut(unsafe.Pointer(f), src) } // CopyOutN implements marshal.Marshallable.CopyOutN. //go:nosplit -func (f *FUSEOpID) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { +func (f *FUSEDirentMeta) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -2316,13 +2340,13 @@ func (f *FUSEOpID) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int // CopyOut implements marshal.Marshallable.CopyOut. //go:nosplit -func (f *FUSEOpID) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (f *FUSEDirentMeta) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { return f.CopyOutN(cc, addr, f.SizeBytes()) } // CopyIn implements marshal.Marshallable.CopyIn. //go:nosplit -func (f *FUSEOpID) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (f *FUSEDirentMeta) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -2338,7 +2362,7 @@ func (f *FUSEOpID) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error } // WriteTo implements io.WriterTo.WriteTo. -func (f *FUSEOpID) WriteTo(w io.Writer) (int64, error) { +func (f *FUSEDirentMeta) WriteTo(writer io.Writer) (int64, error) { // Construct a slice backed by dst's underlying memory. 
var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -2346,7 +2370,7 @@ func (f *FUSEOpID) WriteTo(w io.Writer) (int64, error) { hdr.Len = f.SizeBytes() hdr.Cap = f.SizeBytes() - length, err := w.Write(buf) + length, err := writer.Write(buf) // Since we bypassed the compiler's escape analysis, indicate that f // must live until the use above. runtime.KeepAlive(f) // escapes: replaced by intrinsic. @@ -2354,47 +2378,77 @@ func (f *FUSEOpID) WriteTo(w io.Writer) (int64, error) { } // SizeBytes implements marshal.Marshallable.SizeBytes. -func (f *FUSEHeaderIn) SizeBytes() int { - return 28 + - (*FUSEOpcode)(nil).SizeBytes() + - (*FUSEOpID)(nil).SizeBytes() +func (f *FUSEAttr) SizeBytes() int { + return 88 } // MarshalBytes implements marshal.Marshallable.MarshalBytes. -func (f *FUSEHeaderIn) MarshalBytes(dst []byte) { - usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Len)) - dst = dst[4:] - f.Opcode.MarshalBytes(dst[:f.Opcode.SizeBytes()]) - dst = dst[f.Opcode.SizeBytes():] - f.Unique.MarshalBytes(dst[:f.Unique.SizeBytes()]) - dst = dst[f.Unique.SizeBytes():] - usermem.ByteOrder.PutUint64(dst[:8], uint64(f.NodeID)) +func (f *FUSEAttr) MarshalBytes(dst []byte) { + usermem.ByteOrder.PutUint64(dst[:8], uint64(f.Ino)) + dst = dst[8:] + usermem.ByteOrder.PutUint64(dst[:8], uint64(f.Size)) + dst = dst[8:] + usermem.ByteOrder.PutUint64(dst[:8], uint64(f.Blocks)) dst = dst[8:] + usermem.ByteOrder.PutUint64(dst[:8], uint64(f.Atime)) + dst = dst[8:] + usermem.ByteOrder.PutUint64(dst[:8], uint64(f.Mtime)) + dst = dst[8:] + usermem.ByteOrder.PutUint64(dst[:8], uint64(f.Ctime)) + dst = dst[8:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(f.AtimeNsec)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(f.MtimeNsec)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(f.CtimeNsec)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Mode)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Nlink)) + dst = dst[4:] usermem.ByteOrder.PutUint32(dst[:4], uint32(f.UID)) dst = dst[4:] usermem.ByteOrder.PutUint32(dst[:4], uint32(f.GID)) dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(f.PID)) + usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Rdev)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(f.BlkSize)) dst = dst[4:] // Padding: dst[:sizeof(uint32)] ~= uint32(0) dst = dst[4:] } // UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. 
-func (f *FUSEHeaderIn) UnmarshalBytes(src []byte) { - f.Len = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - f.Opcode.UnmarshalBytes(src[:f.Opcode.SizeBytes()]) - src = src[f.Opcode.SizeBytes():] - f.Unique.UnmarshalBytes(src[:f.Unique.SizeBytes()]) - src = src[f.Unique.SizeBytes():] - f.NodeID = uint64(usermem.ByteOrder.Uint64(src[:8])) +func (f *FUSEAttr) UnmarshalBytes(src []byte) { + f.Ino = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + f.Size = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + f.Blocks = uint64(usermem.ByteOrder.Uint64(src[:8])) src = src[8:] + f.Atime = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + f.Mtime = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + f.Ctime = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + f.AtimeNsec = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + f.MtimeNsec = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + f.CtimeNsec = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + f.Mode = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + f.Nlink = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] f.UID = uint32(usermem.ByteOrder.Uint32(src[:4])) src = src[4:] f.GID = uint32(usermem.ByteOrder.Uint32(src[:4])) src = src[4:] - f.PID = uint32(usermem.ByteOrder.Uint32(src[:4])) + f.Rdev = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + f.BlkSize = uint32(usermem.ByteOrder.Uint32(src[:4])) src = src[4:] // Padding: var _ uint32 ~= src[:sizeof(uint32)] src = src[4:] @@ -2402,40 +2456,23 @@ func (f *FUSEHeaderIn) UnmarshalBytes(src []byte) { // Packed implements marshal.Marshallable.Packed. //go:nosplit -func (f *FUSEHeaderIn) Packed() bool { - return f.Opcode.Packed() && f.Unique.Packed() +func (f *FUSEAttr) Packed() bool { + return true } // MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. -func (f *FUSEHeaderIn) MarshalUnsafe(dst []byte) { - if f.Opcode.Packed() && f.Unique.Packed() { - safecopy.CopyIn(dst, unsafe.Pointer(f)) - } else { - // Type FUSEHeaderIn doesn't have a packed layout in memory, fallback to MarshalBytes. - f.MarshalBytes(dst) - } +func (f *FUSEAttr) MarshalUnsafe(dst []byte) { + safecopy.CopyIn(dst, unsafe.Pointer(f)) } // UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. -func (f *FUSEHeaderIn) UnmarshalUnsafe(src []byte) { - if f.Opcode.Packed() && f.Unique.Packed() { - safecopy.CopyOut(unsafe.Pointer(f), src) - } else { - // Type FUSEHeaderIn doesn't have a packed layout in memory, fallback to UnmarshalBytes. - f.UnmarshalBytes(src) - } +func (f *FUSEAttr) UnmarshalUnsafe(src []byte) { + safecopy.CopyOut(unsafe.Pointer(f), src) } // CopyOutN implements marshal.Marshallable.CopyOutN. //go:nosplit -func (f *FUSEHeaderIn) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { - if !f.Opcode.Packed() && f.Unique.Packed() { - // Type FUSEHeaderIn doesn't have a packed layout in memory, fall back to MarshalBytes. - buf := cc.CopyScratchBuffer(f.SizeBytes()) // escapes: okay. - f.MarshalBytes(buf) // escapes: fallback. - return cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay. - } - +func (f *FUSEAttr) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { // Construct a slice backed by dst's underlying memory. 
var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -2452,23 +2489,13 @@ func (f *FUSEHeaderIn) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit // CopyOut implements marshal.Marshallable.CopyOut. //go:nosplit -func (f *FUSEHeaderIn) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (f *FUSEAttr) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { return f.CopyOutN(cc, addr, f.SizeBytes()) } // CopyIn implements marshal.Marshallable.CopyIn. //go:nosplit -func (f *FUSEHeaderIn) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { - if !f.Opcode.Packed() && f.Unique.Packed() { - // Type FUSEHeaderIn doesn't have a packed layout in memory, fall back to UnmarshalBytes. - buf := cc.CopyScratchBuffer(f.SizeBytes()) // escapes: okay. - length, err := cc.CopyInBytes(addr, buf) // escapes: okay. - // Unmarshal unconditionally. If we had a short copy-in, this results in a - // partially unmarshalled struct. - f.UnmarshalBytes(buf) // escapes: fallback. - return length, err - } - +func (f *FUSEAttr) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -2484,15 +2511,7 @@ func (f *FUSEHeaderIn) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, e } // WriteTo implements io.WriterTo.WriteTo. -func (f *FUSEHeaderIn) WriteTo(writer io.Writer) (int64, error) { - if !f.Opcode.Packed() && f.Unique.Packed() { - // Type FUSEHeaderIn doesn't have a packed layout in memory, fall back to MarshalBytes. - buf := make([]byte, f.SizeBytes()) - f.MarshalBytes(buf) - length, err := writer.Write(buf) - return int64(length), err - } - +func (f *FUSEAttr) WriteTo(writer io.Writer) (int64, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -2508,45 +2527,65 @@ func (f *FUSEHeaderIn) WriteTo(writer io.Writer) (int64, error) { } // SizeBytes implements marshal.Marshallable.SizeBytes. -func (f *FUSEWriteOut) SizeBytes() int { - return 8 +func (f *FUSEReadIn) SizeBytes() int { + return 40 } // MarshalBytes implements marshal.Marshallable.MarshalBytes. -func (f *FUSEWriteOut) MarshalBytes(dst []byte) { +func (f *FUSEReadIn) MarshalBytes(dst []byte) { + usermem.ByteOrder.PutUint64(dst[:8], uint64(f.Fh)) + dst = dst[8:] + usermem.ByteOrder.PutUint64(dst[:8], uint64(f.Offset)) + dst = dst[8:] usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Size)) dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(f.ReadFlags)) + dst = dst[4:] + usermem.ByteOrder.PutUint64(dst[:8], uint64(f.LockOwner)) + dst = dst[8:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Flags)) + dst = dst[4:] // Padding: dst[:sizeof(uint32)] ~= uint32(0) dst = dst[4:] } // UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. -func (f *FUSEWriteOut) UnmarshalBytes(src []byte) { +func (f *FUSEReadIn) UnmarshalBytes(src []byte) { + f.Fh = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + f.Offset = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] f.Size = uint32(usermem.ByteOrder.Uint32(src[:4])) src = src[4:] + f.ReadFlags = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + f.LockOwner = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + f.Flags = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] // Padding: var _ uint32 ~= src[:sizeof(uint32)] src = src[4:] } // Packed implements marshal.Marshallable.Packed. 
//go:nosplit -func (f *FUSEWriteOut) Packed() bool { +func (f *FUSEReadIn) Packed() bool { return true } // MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. -func (f *FUSEWriteOut) MarshalUnsafe(dst []byte) { +func (f *FUSEReadIn) MarshalUnsafe(dst []byte) { safecopy.CopyIn(dst, unsafe.Pointer(f)) } // UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. -func (f *FUSEWriteOut) UnmarshalUnsafe(src []byte) { +func (f *FUSEReadIn) UnmarshalUnsafe(src []byte) { safecopy.CopyOut(unsafe.Pointer(f), src) } // CopyOutN implements marshal.Marshallable.CopyOutN. //go:nosplit -func (f *FUSEWriteOut) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { +func (f *FUSEReadIn) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -2563,13 +2602,13 @@ func (f *FUSEWriteOut) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit // CopyOut implements marshal.Marshallable.CopyOut. //go:nosplit -func (f *FUSEWriteOut) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (f *FUSEReadIn) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { return f.CopyOutN(cc, addr, f.SizeBytes()) } // CopyIn implements marshal.Marshallable.CopyIn. //go:nosplit -func (f *FUSEWriteOut) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (f *FUSEReadIn) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -2585,7 +2624,7 @@ func (f *FUSEWriteOut) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, e } // WriteTo implements io.WriterTo.WriteTo. -func (f *FUSEWriteOut) WriteTo(writer io.Writer) (int64, error) { +func (f *FUSEReadIn) WriteTo(writer io.Writer) (int64, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -2601,53 +2640,82 @@ func (f *FUSEWriteOut) WriteTo(writer io.Writer) (int64, error) { } // SizeBytes implements marshal.Marshallable.SizeBytes. -func (f *FUSEMknodMeta) SizeBytes() int { - return 16 +func (f *FUSEInitOut) SizeBytes() int { + return 32 + + 4*8 } // MarshalBytes implements marshal.Marshallable.MarshalBytes. 
-func (f *FUSEMknodMeta) MarshalBytes(dst []byte) { - usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Mode)) +func (f *FUSEInitOut) MarshalBytes(dst []byte) { + usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Major)) dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Rdev)) + usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Minor)) dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Umask)) + usermem.ByteOrder.PutUint32(dst[:4], uint32(f.MaxReadahead)) dst = dst[4:] - // Padding: dst[:sizeof(uint32)] ~= uint32(0) + usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Flags)) dst = dst[4:] + usermem.ByteOrder.PutUint16(dst[:2], uint16(f.MaxBackground)) + dst = dst[2:] + usermem.ByteOrder.PutUint16(dst[:2], uint16(f.CongestionThreshold)) + dst = dst[2:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(f.MaxWrite)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(f.TimeGran)) + dst = dst[4:] + usermem.ByteOrder.PutUint16(dst[:2], uint16(f.MaxPages)) + dst = dst[2:] + // Padding: dst[:sizeof(uint16)] ~= uint16(0) + dst = dst[2:] + // Padding: dst[:sizeof(uint32)*8] ~= [8]uint32{0} + dst = dst[4*(8):] } // UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. -func (f *FUSEMknodMeta) UnmarshalBytes(src []byte) { - f.Mode = uint32(usermem.ByteOrder.Uint32(src[:4])) +func (f *FUSEInitOut) UnmarshalBytes(src []byte) { + f.Major = uint32(usermem.ByteOrder.Uint32(src[:4])) src = src[4:] - f.Rdev = uint32(usermem.ByteOrder.Uint32(src[:4])) + f.Minor = uint32(usermem.ByteOrder.Uint32(src[:4])) src = src[4:] - f.Umask = uint32(usermem.ByteOrder.Uint32(src[:4])) + f.MaxReadahead = uint32(usermem.ByteOrder.Uint32(src[:4])) src = src[4:] - // Padding: var _ uint32 ~= src[:sizeof(uint32)] + f.Flags = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + f.MaxBackground = uint16(usermem.ByteOrder.Uint16(src[:2])) + src = src[2:] + f.CongestionThreshold = uint16(usermem.ByteOrder.Uint16(src[:2])) + src = src[2:] + f.MaxWrite = uint32(usermem.ByteOrder.Uint32(src[:4])) src = src[4:] + f.TimeGran = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + f.MaxPages = uint16(usermem.ByteOrder.Uint16(src[:2])) + src = src[2:] + // Padding: var _ uint16 ~= src[:sizeof(uint16)] + src = src[2:] + // Padding: ~ copy([8]uint32(f._), src[:sizeof(uint32)*8]) + src = src[4*(8):] } // Packed implements marshal.Marshallable.Packed. //go:nosplit -func (f *FUSEMknodMeta) Packed() bool { +func (f *FUSEInitOut) Packed() bool { return true } // MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. -func (f *FUSEMknodMeta) MarshalUnsafe(dst []byte) { +func (f *FUSEInitOut) MarshalUnsafe(dst []byte) { safecopy.CopyIn(dst, unsafe.Pointer(f)) } // UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. -func (f *FUSEMknodMeta) UnmarshalUnsafe(src []byte) { +func (f *FUSEInitOut) UnmarshalUnsafe(src []byte) { safecopy.CopyOut(unsafe.Pointer(f), src) } // CopyOutN implements marshal.Marshallable.CopyOutN. //go:nosplit -func (f *FUSEMknodMeta) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { +func (f *FUSEInitOut) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -2664,13 +2732,13 @@ func (f *FUSEMknodMeta) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limi // CopyOut implements marshal.Marshallable.CopyOut. 
//go:nosplit -func (f *FUSEMknodMeta) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (f *FUSEInitOut) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { return f.CopyOutN(cc, addr, f.SizeBytes()) } // CopyIn implements marshal.Marshallable.CopyIn. //go:nosplit -func (f *FUSEMknodMeta) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (f *FUSEInitOut) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -2686,7 +2754,7 @@ func (f *FUSEMknodMeta) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, } // WriteTo implements io.WriterTo.WriteTo. -func (f *FUSEMknodMeta) WriteTo(writer io.Writer) (int64, error) { +func (f *FUSEInitOut) WriteTo(writer io.Writer) (int64, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -2702,41 +2770,45 @@ func (f *FUSEMknodMeta) WriteTo(writer io.Writer) (int64, error) { } // SizeBytes implements marshal.Marshallable.SizeBytes. -//go:nosplit -func (f *FUSEOpcode) SizeBytes() int { - return 4 +func (f *FUSEOpenIn) SizeBytes() int { + return 8 } // MarshalBytes implements marshal.Marshallable.MarshalBytes. -func (f *FUSEOpcode) MarshalBytes(dst []byte) { - usermem.ByteOrder.PutUint32(dst[:4], uint32(*f)) +func (f *FUSEOpenIn) MarshalBytes(dst []byte) { + usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Flags)) + dst = dst[4:] + // Padding: dst[:sizeof(uint32)] ~= uint32(0) + dst = dst[4:] } // UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. -func (f *FUSEOpcode) UnmarshalBytes(src []byte) { - *f = FUSEOpcode(uint32(usermem.ByteOrder.Uint32(src[:4]))) +func (f *FUSEOpenIn) UnmarshalBytes(src []byte) { + f.Flags = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + // Padding: var _ uint32 ~= src[:sizeof(uint32)] + src = src[4:] } // Packed implements marshal.Marshallable.Packed. //go:nosplit -func (f *FUSEOpcode) Packed() bool { - // Scalar newtypes are always packed. +func (f *FUSEOpenIn) Packed() bool { return true } // MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. -func (f *FUSEOpcode) MarshalUnsafe(dst []byte) { +func (f *FUSEOpenIn) MarshalUnsafe(dst []byte) { safecopy.CopyIn(dst, unsafe.Pointer(f)) } // UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. -func (f *FUSEOpcode) UnmarshalUnsafe(src []byte) { +func (f *FUSEOpenIn) UnmarshalUnsafe(src []byte) { safecopy.CopyOut(unsafe.Pointer(f), src) } // CopyOutN implements marshal.Marshallable.CopyOutN. //go:nosplit -func (f *FUSEOpcode) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { +func (f *FUSEOpenIn) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -2753,13 +2825,13 @@ func (f *FUSEOpcode) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit i // CopyOut implements marshal.Marshallable.CopyOut. //go:nosplit -func (f *FUSEOpcode) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (f *FUSEOpenIn) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { return f.CopyOutN(cc, addr, f.SizeBytes()) } // CopyIn implements marshal.Marshallable.CopyIn. 
//go:nosplit -func (f *FUSEOpcode) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (f *FUSEOpenIn) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -2775,7 +2847,7 @@ func (f *FUSEOpcode) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, err } // WriteTo implements io.WriterTo.WriteTo. -func (f *FUSEOpcode) WriteTo(w io.Writer) (int64, error) { +func (f *FUSEOpenIn) WriteTo(writer io.Writer) (int64, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -2783,7 +2855,7 @@ func (f *FUSEOpcode) WriteTo(w io.Writer) (int64, error) { hdr.Len = f.SizeBytes() hdr.Cap = f.SizeBytes() - length, err := w.Write(buf) + length, err := writer.Write(buf) // Since we bypassed the compiler's escape analysis, indicate that f // must live until the use above. runtime.KeepAlive(f) // escapes: replaced by intrinsic. @@ -2791,49 +2863,49 @@ func (f *FUSEOpcode) WriteTo(w io.Writer) (int64, error) { } // SizeBytes implements marshal.Marshallable.SizeBytes. -func (f *FUSEGetAttrIn) SizeBytes() int { +func (f *FUSEOpenOut) SizeBytes() int { return 16 } // MarshalBytes implements marshal.Marshallable.MarshalBytes. -func (f *FUSEGetAttrIn) MarshalBytes(dst []byte) { - usermem.ByteOrder.PutUint32(dst[:4], uint32(f.GetAttrFlags)) +func (f *FUSEOpenOut) MarshalBytes(dst []byte) { + usermem.ByteOrder.PutUint64(dst[:8], uint64(f.Fh)) + dst = dst[8:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(f.OpenFlag)) dst = dst[4:] // Padding: dst[:sizeof(uint32)] ~= uint32(0) dst = dst[4:] - usermem.ByteOrder.PutUint64(dst[:8], uint64(f.Fh)) - dst = dst[8:] } // UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. -func (f *FUSEGetAttrIn) UnmarshalBytes(src []byte) { - f.GetAttrFlags = uint32(usermem.ByteOrder.Uint32(src[:4])) +func (f *FUSEOpenOut) UnmarshalBytes(src []byte) { + f.Fh = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + f.OpenFlag = uint32(usermem.ByteOrder.Uint32(src[:4])) src = src[4:] // Padding: var _ uint32 ~= src[:sizeof(uint32)] src = src[4:] - f.Fh = uint64(usermem.ByteOrder.Uint64(src[:8])) - src = src[8:] } // Packed implements marshal.Marshallable.Packed. //go:nosplit -func (f *FUSEGetAttrIn) Packed() bool { +func (f *FUSEOpenOut) Packed() bool { return true } // MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. -func (f *FUSEGetAttrIn) MarshalUnsafe(dst []byte) { +func (f *FUSEOpenOut) MarshalUnsafe(dst []byte) { safecopy.CopyIn(dst, unsafe.Pointer(f)) } // UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. -func (f *FUSEGetAttrIn) UnmarshalUnsafe(src []byte) { +func (f *FUSEOpenOut) UnmarshalUnsafe(src []byte) { safecopy.CopyOut(unsafe.Pointer(f), src) } // CopyOutN implements marshal.Marshallable.CopyOutN. //go:nosplit -func (f *FUSEGetAttrIn) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { +func (f *FUSEOpenOut) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -2850,13 +2922,13 @@ func (f *FUSEGetAttrIn) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limi // CopyOut implements marshal.Marshallable.CopyOut. 
//go:nosplit -func (f *FUSEGetAttrIn) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (f *FUSEOpenOut) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { return f.CopyOutN(cc, addr, f.SizeBytes()) } // CopyIn implements marshal.Marshallable.CopyIn. //go:nosplit -func (f *FUSEGetAttrIn) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (f *FUSEOpenOut) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -2872,7 +2944,7 @@ func (f *FUSEGetAttrIn) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, } // WriteTo implements io.WriterTo.WriteTo. -func (f *FUSEGetAttrIn) WriteTo(writer io.Writer) (int64, error) { +func (f *FUSEOpenOut) WriteTo(writer io.Writer) (int64, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -2888,77 +2960,41 @@ func (f *FUSEGetAttrIn) WriteTo(writer io.Writer) (int64, error) { } // SizeBytes implements marshal.Marshallable.SizeBytes. -func (f *FUSEAttr) SizeBytes() int { - return 88 +func (f *FUSEWriteIn) SizeBytes() int { + return 40 } // MarshalBytes implements marshal.Marshallable.MarshalBytes. -func (f *FUSEAttr) MarshalBytes(dst []byte) { - usermem.ByteOrder.PutUint64(dst[:8], uint64(f.Ino)) - dst = dst[8:] - usermem.ByteOrder.PutUint64(dst[:8], uint64(f.Size)) - dst = dst[8:] - usermem.ByteOrder.PutUint64(dst[:8], uint64(f.Blocks)) - dst = dst[8:] - usermem.ByteOrder.PutUint64(dst[:8], uint64(f.Atime)) - dst = dst[8:] - usermem.ByteOrder.PutUint64(dst[:8], uint64(f.Mtime)) +func (f *FUSEWriteIn) MarshalBytes(dst []byte) { + usermem.ByteOrder.PutUint64(dst[:8], uint64(f.Fh)) dst = dst[8:] - usermem.ByteOrder.PutUint64(dst[:8], uint64(f.Ctime)) + usermem.ByteOrder.PutUint64(dst[:8], uint64(f.Offset)) dst = dst[8:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(f.AtimeNsec)) - dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(f.MtimeNsec)) - dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(f.CtimeNsec)) - dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Mode)) - dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Nlink)) - dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(f.UID)) - dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(f.GID)) + usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Size)) dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Rdev)) + usermem.ByteOrder.PutUint32(dst[:4], uint32(f.WriteFlags)) dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(f.BlkSize)) + usermem.ByteOrder.PutUint64(dst[:8], uint64(f.LockOwner)) + dst = dst[8:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Flags)) dst = dst[4:] // Padding: dst[:sizeof(uint32)] ~= uint32(0) dst = dst[4:] } // UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. 
-func (f *FUSEAttr) UnmarshalBytes(src []byte) { - f.Ino = uint64(usermem.ByteOrder.Uint64(src[:8])) - src = src[8:] - f.Size = uint64(usermem.ByteOrder.Uint64(src[:8])) - src = src[8:] - f.Blocks = uint64(usermem.ByteOrder.Uint64(src[:8])) - src = src[8:] - f.Atime = uint64(usermem.ByteOrder.Uint64(src[:8])) - src = src[8:] - f.Mtime = uint64(usermem.ByteOrder.Uint64(src[:8])) +func (f *FUSEWriteIn) UnmarshalBytes(src []byte) { + f.Fh = uint64(usermem.ByteOrder.Uint64(src[:8])) src = src[8:] - f.Ctime = uint64(usermem.ByteOrder.Uint64(src[:8])) + f.Offset = uint64(usermem.ByteOrder.Uint64(src[:8])) src = src[8:] - f.AtimeNsec = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - f.MtimeNsec = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - f.CtimeNsec = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - f.Mode = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - f.Nlink = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - f.UID = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - f.GID = uint32(usermem.ByteOrder.Uint32(src[:4])) + f.Size = uint32(usermem.ByteOrder.Uint32(src[:4])) src = src[4:] - f.Rdev = uint32(usermem.ByteOrder.Uint32(src[:4])) + f.WriteFlags = uint32(usermem.ByteOrder.Uint32(src[:4])) src = src[4:] - f.BlkSize = uint32(usermem.ByteOrder.Uint32(src[:4])) + f.LockOwner = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + f.Flags = uint32(usermem.ByteOrder.Uint32(src[:4])) src = src[4:] // Padding: var _ uint32 ~= src[:sizeof(uint32)] src = src[4:] @@ -2966,23 +3002,23 @@ func (f *FUSEAttr) UnmarshalBytes(src []byte) { // Packed implements marshal.Marshallable.Packed. //go:nosplit -func (f *FUSEAttr) Packed() bool { +func (f *FUSEWriteIn) Packed() bool { return true } // MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. -func (f *FUSEAttr) MarshalUnsafe(dst []byte) { +func (f *FUSEWriteIn) MarshalUnsafe(dst []byte) { safecopy.CopyIn(dst, unsafe.Pointer(f)) } // UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. -func (f *FUSEAttr) UnmarshalUnsafe(src []byte) { +func (f *FUSEWriteIn) UnmarshalUnsafe(src []byte) { safecopy.CopyOut(unsafe.Pointer(f), src) } // CopyOutN implements marshal.Marshallable.CopyOutN. //go:nosplit -func (f *FUSEAttr) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { +func (f *FUSEWriteIn) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -2999,13 +3035,13 @@ func (f *FUSEAttr) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int // CopyOut implements marshal.Marshallable.CopyOut. //go:nosplit -func (f *FUSEAttr) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (f *FUSEWriteIn) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { return f.CopyOutN(cc, addr, f.SizeBytes()) } // CopyIn implements marshal.Marshallable.CopyIn. //go:nosplit -func (f *FUSEAttr) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (f *FUSEWriteIn) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -3021,7 +3057,7 @@ func (f *FUSEAttr) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error } // WriteTo implements io.WriterTo.WriteTo. 
-func (f *FUSEAttr) WriteTo(writer io.Writer) (int64, error) { +func (f *FUSEWriteIn) WriteTo(writer io.Writer) (int64, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -3037,83 +3073,41 @@ func (f *FUSEAttr) WriteTo(writer io.Writer) (int64, error) { } // SizeBytes implements marshal.Marshallable.SizeBytes. -func (f *FUSEEntryOut) SizeBytes() int { - return 40 + - (*FUSEAttr)(nil).SizeBytes() +//go:nosplit +func (f *FUSEOpcode) SizeBytes() int { + return 4 } // MarshalBytes implements marshal.Marshallable.MarshalBytes. -func (f *FUSEEntryOut) MarshalBytes(dst []byte) { - usermem.ByteOrder.PutUint64(dst[:8], uint64(f.NodeID)) - dst = dst[8:] - usermem.ByteOrder.PutUint64(dst[:8], uint64(f.Generation)) - dst = dst[8:] - usermem.ByteOrder.PutUint64(dst[:8], uint64(f.EntryValid)) - dst = dst[8:] - usermem.ByteOrder.PutUint64(dst[:8], uint64(f.AttrValid)) - dst = dst[8:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(f.EntryValidNSec)) - dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(f.AttrValidNSec)) - dst = dst[4:] - f.Attr.MarshalBytes(dst[:f.Attr.SizeBytes()]) - dst = dst[f.Attr.SizeBytes():] +func (f *FUSEOpcode) MarshalBytes(dst []byte) { + usermem.ByteOrder.PutUint32(dst[:4], uint32(*f)) } // UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. -func (f *FUSEEntryOut) UnmarshalBytes(src []byte) { - f.NodeID = uint64(usermem.ByteOrder.Uint64(src[:8])) - src = src[8:] - f.Generation = uint64(usermem.ByteOrder.Uint64(src[:8])) - src = src[8:] - f.EntryValid = uint64(usermem.ByteOrder.Uint64(src[:8])) - src = src[8:] - f.AttrValid = uint64(usermem.ByteOrder.Uint64(src[:8])) - src = src[8:] - f.EntryValidNSec = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - f.AttrValidNSec = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - f.Attr.UnmarshalBytes(src[:f.Attr.SizeBytes()]) - src = src[f.Attr.SizeBytes():] +func (f *FUSEOpcode) UnmarshalBytes(src []byte) { + *f = FUSEOpcode(uint32(usermem.ByteOrder.Uint32(src[:4]))) } // Packed implements marshal.Marshallable.Packed. //go:nosplit -func (f *FUSEEntryOut) Packed() bool { - return f.Attr.Packed() +func (f *FUSEOpcode) Packed() bool { + // Scalar newtypes are always packed. + return true } // MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. -func (f *FUSEEntryOut) MarshalUnsafe(dst []byte) { - if f.Attr.Packed() { - safecopy.CopyIn(dst, unsafe.Pointer(f)) - } else { - // Type FUSEEntryOut doesn't have a packed layout in memory, fallback to MarshalBytes. - f.MarshalBytes(dst) - } +func (f *FUSEOpcode) MarshalUnsafe(dst []byte) { + safecopy.CopyIn(dst, unsafe.Pointer(f)) } // UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. -func (f *FUSEEntryOut) UnmarshalUnsafe(src []byte) { - if f.Attr.Packed() { - safecopy.CopyOut(unsafe.Pointer(f), src) - } else { - // Type FUSEEntryOut doesn't have a packed layout in memory, fallback to UnmarshalBytes. - f.UnmarshalBytes(src) - } +func (f *FUSEOpcode) UnmarshalUnsafe(src []byte) { + safecopy.CopyOut(unsafe.Pointer(f), src) } // CopyOutN implements marshal.Marshallable.CopyOutN. //go:nosplit -func (f *FUSEEntryOut) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { - if !f.Attr.Packed() { - // Type FUSEEntryOut doesn't have a packed layout in memory, fall back to MarshalBytes. - buf := cc.CopyScratchBuffer(f.SizeBytes()) // escapes: okay. - f.MarshalBytes(buf) // escapes: fallback. 
- return cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay. - } - +func (f *FUSEOpcode) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -3130,23 +3124,13 @@ func (f *FUSEEntryOut) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit // CopyOut implements marshal.Marshallable.CopyOut. //go:nosplit -func (f *FUSEEntryOut) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (f *FUSEOpcode) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { return f.CopyOutN(cc, addr, f.SizeBytes()) } // CopyIn implements marshal.Marshallable.CopyIn. //go:nosplit -func (f *FUSEEntryOut) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { - if !f.Attr.Packed() { - // Type FUSEEntryOut doesn't have a packed layout in memory, fall back to UnmarshalBytes. - buf := cc.CopyScratchBuffer(f.SizeBytes()) // escapes: okay. - length, err := cc.CopyInBytes(addr, buf) // escapes: okay. - // Unmarshal unconditionally. If we had a short copy-in, this results in a - // partially unmarshalled struct. - f.UnmarshalBytes(buf) // escapes: fallback. - return length, err - } - +func (f *FUSEOpcode) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -3162,15 +3146,7 @@ func (f *FUSEEntryOut) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, e } // WriteTo implements io.WriterTo.WriteTo. -func (f *FUSEEntryOut) WriteTo(writer io.Writer) (int64, error) { - if !f.Attr.Packed() { - // Type FUSEEntryOut doesn't have a packed layout in memory, fall back to MarshalBytes. - buf := make([]byte, f.SizeBytes()) - f.MarshalBytes(buf) - length, err := writer.Write(buf) - return int64(length), err - } - +func (f *FUSEOpcode) WriteTo(w io.Writer) (int64, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -3178,7 +3154,7 @@ func (f *FUSEEntryOut) WriteTo(writer io.Writer) (int64, error) { hdr.Len = f.SizeBytes() hdr.Cap = f.SizeBytes() - length, err := writer.Write(buf) + length, err := w.Write(buf) // Since we bypassed the compiler's escape analysis, indicate that f // must live until the use above. runtime.KeepAlive(f) // escapes: replaced by intrinsic. @@ -3186,45 +3162,41 @@ func (f *FUSEEntryOut) WriteTo(writer io.Writer) (int64, error) { } // SizeBytes implements marshal.Marshallable.SizeBytes. -func (f *FUSEOpenIn) SizeBytes() int { +//go:nosplit +func (f *FUSEOpID) SizeBytes() int { return 8 } // MarshalBytes implements marshal.Marshallable.MarshalBytes. -func (f *FUSEOpenIn) MarshalBytes(dst []byte) { - usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Flags)) - dst = dst[4:] - // Padding: dst[:sizeof(uint32)] ~= uint32(0) - dst = dst[4:] +func (f *FUSEOpID) MarshalBytes(dst []byte) { + usermem.ByteOrder.PutUint64(dst[:8], uint64(*f)) } // UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. -func (f *FUSEOpenIn) UnmarshalBytes(src []byte) { - f.Flags = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - // Padding: var _ uint32 ~= src[:sizeof(uint32)] - src = src[4:] +func (f *FUSEOpID) UnmarshalBytes(src []byte) { + *f = FUSEOpID(uint64(usermem.ByteOrder.Uint64(src[:8]))) } // Packed implements marshal.Marshallable.Packed. 
//go:nosplit -func (f *FUSEOpenIn) Packed() bool { +func (f *FUSEOpID) Packed() bool { + // Scalar newtypes are always packed. return true } // MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. -func (f *FUSEOpenIn) MarshalUnsafe(dst []byte) { +func (f *FUSEOpID) MarshalUnsafe(dst []byte) { safecopy.CopyIn(dst, unsafe.Pointer(f)) } // UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. -func (f *FUSEOpenIn) UnmarshalUnsafe(src []byte) { +func (f *FUSEOpID) UnmarshalUnsafe(src []byte) { safecopy.CopyOut(unsafe.Pointer(f), src) } // CopyOutN implements marshal.Marshallable.CopyOutN. //go:nosplit -func (f *FUSEOpenIn) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { +func (f *FUSEOpID) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -3241,13 +3213,13 @@ func (f *FUSEOpenIn) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit i // CopyOut implements marshal.Marshallable.CopyOut. //go:nosplit -func (f *FUSEOpenIn) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (f *FUSEOpID) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { return f.CopyOutN(cc, addr, f.SizeBytes()) } // CopyIn implements marshal.Marshallable.CopyIn. //go:nosplit -func (f *FUSEOpenIn) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (f *FUSEOpID) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -3263,7 +3235,7 @@ func (f *FUSEOpenIn) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, err } // WriteTo implements io.WriterTo.WriteTo. -func (f *FUSEOpenIn) WriteTo(writer io.Writer) (int64, error) { +func (f *FUSEOpID) WriteTo(w io.Writer) (int64, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -3271,7 +3243,7 @@ func (f *FUSEOpenIn) WriteTo(writer io.Writer) (int64, error) { hdr.Len = f.SizeBytes() hdr.Cap = f.SizeBytes() - length, err := writer.Write(buf) + length, err := w.Write(buf) // Since we bypassed the compiler's escape analysis, indicate that f // must live until the use above. runtime.KeepAlive(f) // escapes: replaced by intrinsic. @@ -3279,65 +3251,83 @@ func (f *FUSEOpenIn) WriteTo(writer io.Writer) (int64, error) { } // SizeBytes implements marshal.Marshallable.SizeBytes. -func (f *FUSEReadIn) SizeBytes() int { - return 40 +func (f *FUSEEntryOut) SizeBytes() int { + return 40 + + (*FUSEAttr)(nil).SizeBytes() } // MarshalBytes implements marshal.Marshallable.MarshalBytes. 
-func (f *FUSEReadIn) MarshalBytes(dst []byte) { - usermem.ByteOrder.PutUint64(dst[:8], uint64(f.Fh)) +func (f *FUSEEntryOut) MarshalBytes(dst []byte) { + usermem.ByteOrder.PutUint64(dst[:8], uint64(f.NodeID)) dst = dst[8:] - usermem.ByteOrder.PutUint64(dst[:8], uint64(f.Offset)) + usermem.ByteOrder.PutUint64(dst[:8], uint64(f.Generation)) dst = dst[8:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Size)) - dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(f.ReadFlags)) - dst = dst[4:] - usermem.ByteOrder.PutUint64(dst[:8], uint64(f.LockOwner)) + usermem.ByteOrder.PutUint64(dst[:8], uint64(f.EntryValid)) dst = dst[8:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Flags)) + usermem.ByteOrder.PutUint64(dst[:8], uint64(f.AttrValid)) + dst = dst[8:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(f.EntryValidNSec)) dst = dst[4:] - // Padding: dst[:sizeof(uint32)] ~= uint32(0) + usermem.ByteOrder.PutUint32(dst[:4], uint32(f.AttrValidNSec)) dst = dst[4:] + f.Attr.MarshalBytes(dst[:f.Attr.SizeBytes()]) + dst = dst[f.Attr.SizeBytes():] } // UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. -func (f *FUSEReadIn) UnmarshalBytes(src []byte) { - f.Fh = uint64(usermem.ByteOrder.Uint64(src[:8])) +func (f *FUSEEntryOut) UnmarshalBytes(src []byte) { + f.NodeID = uint64(usermem.ByteOrder.Uint64(src[:8])) src = src[8:] - f.Offset = uint64(usermem.ByteOrder.Uint64(src[:8])) + f.Generation = uint64(usermem.ByteOrder.Uint64(src[:8])) src = src[8:] - f.Size = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - f.ReadFlags = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - f.LockOwner = uint64(usermem.ByteOrder.Uint64(src[:8])) + f.EntryValid = uint64(usermem.ByteOrder.Uint64(src[:8])) src = src[8:] - f.Flags = uint32(usermem.ByteOrder.Uint32(src[:4])) + f.AttrValid = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + f.EntryValidNSec = uint32(usermem.ByteOrder.Uint32(src[:4])) src = src[4:] - // Padding: var _ uint32 ~= src[:sizeof(uint32)] + f.AttrValidNSec = uint32(usermem.ByteOrder.Uint32(src[:4])) src = src[4:] + f.Attr.UnmarshalBytes(src[:f.Attr.SizeBytes()]) + src = src[f.Attr.SizeBytes():] } // Packed implements marshal.Marshallable.Packed. //go:nosplit -func (f *FUSEReadIn) Packed() bool { - return true +func (f *FUSEEntryOut) Packed() bool { + return f.Attr.Packed() } // MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. -func (f *FUSEReadIn) MarshalUnsafe(dst []byte) { - safecopy.CopyIn(dst, unsafe.Pointer(f)) +func (f *FUSEEntryOut) MarshalUnsafe(dst []byte) { + if f.Attr.Packed() { + safecopy.CopyIn(dst, unsafe.Pointer(f)) + } else { + // Type FUSEEntryOut doesn't have a packed layout in memory, fallback to MarshalBytes. + f.MarshalBytes(dst) + } } // UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. -func (f *FUSEReadIn) UnmarshalUnsafe(src []byte) { - safecopy.CopyOut(unsafe.Pointer(f), src) +func (f *FUSEEntryOut) UnmarshalUnsafe(src []byte) { + if f.Attr.Packed() { + safecopy.CopyOut(unsafe.Pointer(f), src) + } else { + // Type FUSEEntryOut doesn't have a packed layout in memory, fallback to UnmarshalBytes. + f.UnmarshalBytes(src) + } } // CopyOutN implements marshal.Marshallable.CopyOutN. //go:nosplit -func (f *FUSEReadIn) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { +func (f *FUSEEntryOut) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { + if !f.Attr.Packed() { + // Type FUSEEntryOut doesn't have a packed layout in memory, fall back to MarshalBytes. 
+ buf := cc.CopyScratchBuffer(f.SizeBytes()) // escapes: okay. + f.MarshalBytes(buf) // escapes: fallback. + return cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay. + } + // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -3354,13 +3344,23 @@ func (f *FUSEReadIn) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit i // CopyOut implements marshal.Marshallable.CopyOut. //go:nosplit -func (f *FUSEReadIn) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (f *FUSEEntryOut) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { return f.CopyOutN(cc, addr, f.SizeBytes()) } // CopyIn implements marshal.Marshallable.CopyIn. //go:nosplit -func (f *FUSEReadIn) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (f *FUSEEntryOut) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { + if !f.Attr.Packed() { + // Type FUSEEntryOut doesn't have a packed layout in memory, fall back to UnmarshalBytes. + buf := cc.CopyScratchBuffer(f.SizeBytes()) // escapes: okay. + length, err := cc.CopyInBytes(addr, buf) // escapes: okay. + // Unmarshal unconditionally. If we had a short copy-in, this results in a + // partially unmarshalled struct. + f.UnmarshalBytes(buf) // escapes: fallback. + return length, err + } + // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -3376,7 +3376,15 @@ func (f *FUSEReadIn) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, err } // WriteTo implements io.WriterTo.WriteTo. -func (f *FUSEReadIn) WriteTo(writer io.Writer) (int64, error) { +func (f *FUSEEntryOut) WriteTo(writer io.Writer) (int64, error) { + if !f.Attr.Packed() { + // Type FUSEEntryOut doesn't have a packed layout in memory, fall back to MarshalBytes. + buf := make([]byte, f.SizeBytes()) + f.MarshalBytes(buf) + length, err := writer.Write(buf) + return int64(length), err + } + // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -3392,45 +3400,53 @@ func (f *FUSEReadIn) WriteTo(writer io.Writer) (int64, error) { } // SizeBytes implements marshal.Marshallable.SizeBytes. -func (f *FUSEMkdirMeta) SizeBytes() int { - return 8 +func (f *FUSECreateMeta) SizeBytes() int { + return 16 } // MarshalBytes implements marshal.Marshallable.MarshalBytes. -func (f *FUSEMkdirMeta) MarshalBytes(dst []byte) { +func (f *FUSECreateMeta) MarshalBytes(dst []byte) { + usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Flags)) + dst = dst[4:] usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Mode)) dst = dst[4:] usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Umask)) dst = dst[4:] + // Padding: dst[:sizeof(uint32)] ~= uint32(0) + dst = dst[4:] } // UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. -func (f *FUSEMkdirMeta) UnmarshalBytes(src []byte) { +func (f *FUSECreateMeta) UnmarshalBytes(src []byte) { + f.Flags = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] f.Mode = uint32(usermem.ByteOrder.Uint32(src[:4])) src = src[4:] f.Umask = uint32(usermem.ByteOrder.Uint32(src[:4])) src = src[4:] + // Padding: var _ uint32 ~= src[:sizeof(uint32)] + src = src[4:] } // Packed implements marshal.Marshallable.Packed. //go:nosplit -func (f *FUSEMkdirMeta) Packed() bool { +func (f *FUSECreateMeta) Packed() bool { return true } // MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. 
-func (f *FUSEMkdirMeta) MarshalUnsafe(dst []byte) { +func (f *FUSECreateMeta) MarshalUnsafe(dst []byte) { safecopy.CopyIn(dst, unsafe.Pointer(f)) } // UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. -func (f *FUSEMkdirMeta) UnmarshalUnsafe(src []byte) { +func (f *FUSECreateMeta) UnmarshalUnsafe(src []byte) { safecopy.CopyOut(unsafe.Pointer(f), src) } // CopyOutN implements marshal.Marshallable.CopyOutN. //go:nosplit -func (f *FUSEMkdirMeta) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { +func (f *FUSECreateMeta) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -3447,13 +3463,13 @@ func (f *FUSEMkdirMeta) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limi // CopyOut implements marshal.Marshallable.CopyOut. //go:nosplit -func (f *FUSEMkdirMeta) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (f *FUSECreateMeta) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { return f.CopyOutN(cc, addr, f.SizeBytes()) } // CopyIn implements marshal.Marshallable.CopyIn. //go:nosplit -func (f *FUSEMkdirMeta) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (f *FUSECreateMeta) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -3469,7 +3485,7 @@ func (f *FUSEMkdirMeta) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, } // WriteTo implements io.WriterTo.WriteTo. -func (f *FUSEMkdirMeta) WriteTo(writer io.Writer) (int64, error) { +func (f *FUSECreateMeta) WriteTo(writer io.Writer) (int64, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -3485,53 +3501,67 @@ func (f *FUSEMkdirMeta) WriteTo(writer io.Writer) (int64, error) { } // SizeBytes implements marshal.Marshallable.SizeBytes. -func (f *FUSEDirentMeta) SizeBytes() int { - return 24 +func (f *FUSEHeaderOut) SizeBytes() int { + return 8 + + (*FUSEOpID)(nil).SizeBytes() } // MarshalBytes implements marshal.Marshallable.MarshalBytes. -func (f *FUSEDirentMeta) MarshalBytes(dst []byte) { - usermem.ByteOrder.PutUint64(dst[:8], uint64(f.Ino)) - dst = dst[8:] - usermem.ByteOrder.PutUint64(dst[:8], uint64(f.Off)) - dst = dst[8:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(f.NameLen)) +func (f *FUSEHeaderOut) MarshalBytes(dst []byte) { + usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Len)) dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Type)) + usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Error)) dst = dst[4:] + f.Unique.MarshalBytes(dst[:f.Unique.SizeBytes()]) + dst = dst[f.Unique.SizeBytes():] } // UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. 
-func (f *FUSEDirentMeta) UnmarshalBytes(src []byte) { - f.Ino = uint64(usermem.ByteOrder.Uint64(src[:8])) - src = src[8:] - f.Off = uint64(usermem.ByteOrder.Uint64(src[:8])) - src = src[8:] - f.NameLen = uint32(usermem.ByteOrder.Uint32(src[:4])) +func (f *FUSEHeaderOut) UnmarshalBytes(src []byte) { + f.Len = uint32(usermem.ByteOrder.Uint32(src[:4])) src = src[4:] - f.Type = uint32(usermem.ByteOrder.Uint32(src[:4])) + f.Error = int32(usermem.ByteOrder.Uint32(src[:4])) src = src[4:] + f.Unique.UnmarshalBytes(src[:f.Unique.SizeBytes()]) + src = src[f.Unique.SizeBytes():] } // Packed implements marshal.Marshallable.Packed. //go:nosplit -func (f *FUSEDirentMeta) Packed() bool { - return true +func (f *FUSEHeaderOut) Packed() bool { + return f.Unique.Packed() } // MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. -func (f *FUSEDirentMeta) MarshalUnsafe(dst []byte) { - safecopy.CopyIn(dst, unsafe.Pointer(f)) +func (f *FUSEHeaderOut) MarshalUnsafe(dst []byte) { + if f.Unique.Packed() { + safecopy.CopyIn(dst, unsafe.Pointer(f)) + } else { + // Type FUSEHeaderOut doesn't have a packed layout in memory, fallback to MarshalBytes. + f.MarshalBytes(dst) + } } // UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. -func (f *FUSEDirentMeta) UnmarshalUnsafe(src []byte) { - safecopy.CopyOut(unsafe.Pointer(f), src) +func (f *FUSEHeaderOut) UnmarshalUnsafe(src []byte) { + if f.Unique.Packed() { + safecopy.CopyOut(unsafe.Pointer(f), src) + } else { + // Type FUSEHeaderOut doesn't have a packed layout in memory, fallback to UnmarshalBytes. + f.UnmarshalBytes(src) + } } // CopyOutN implements marshal.Marshallable.CopyOutN. //go:nosplit -func (f *FUSEDirentMeta) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { +func (f *FUSEHeaderOut) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { + if !f.Unique.Packed() { + // Type FUSEHeaderOut doesn't have a packed layout in memory, fall back to MarshalBytes. + buf := cc.CopyScratchBuffer(f.SizeBytes()) // escapes: okay. + f.MarshalBytes(buf) // escapes: fallback. + return cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay. + } + // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -3548,13 +3578,23 @@ func (f *FUSEDirentMeta) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, lim // CopyOut implements marshal.Marshallable.CopyOut. //go:nosplit -func (f *FUSEDirentMeta) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (f *FUSEHeaderOut) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { return f.CopyOutN(cc, addr, f.SizeBytes()) } // CopyIn implements marshal.Marshallable.CopyIn. //go:nosplit -func (f *FUSEDirentMeta) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (f *FUSEHeaderOut) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { + if !f.Unique.Packed() { + // Type FUSEHeaderOut doesn't have a packed layout in memory, fall back to UnmarshalBytes. + buf := cc.CopyScratchBuffer(f.SizeBytes()) // escapes: okay. + length, err := cc.CopyInBytes(addr, buf) // escapes: okay. + // Unmarshal unconditionally. If we had a short copy-in, this results in a + // partially unmarshalled struct. + f.UnmarshalBytes(buf) // escapes: fallback. + return length, err + } + // Construct a slice backed by dst's underlying memory. 
var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -3570,7 +3610,15 @@ func (f *FUSEDirentMeta) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, } // WriteTo implements io.WriterTo.WriteTo. -func (f *FUSEDirentMeta) WriteTo(writer io.Writer) (int64, error) { +func (f *FUSEHeaderOut) WriteTo(writer io.Writer) (int64, error) { + if !f.Unique.Packed() { + // Type FUSEHeaderOut doesn't have a packed layout in memory, fall back to MarshalBytes. + buf := make([]byte, f.SizeBytes()) + f.MarshalBytes(buf) + length, err := writer.Write(buf) + return int64(length), err + } + // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -3586,101 +3634,53 @@ func (f *FUSEDirentMeta) WriteTo(writer io.Writer) (int64, error) { } // SizeBytes implements marshal.Marshallable.SizeBytes. -func (f *FUSESetAttrIn) SizeBytes() int { - return 88 +func (f *FUSEInitIn) SizeBytes() int { + return 16 } // MarshalBytes implements marshal.Marshallable.MarshalBytes. -func (f *FUSESetAttrIn) MarshalBytes(dst []byte) { - usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Valid)) - dst = dst[4:] - // Padding: dst[:sizeof(uint32)] ~= uint32(0) - dst = dst[4:] - usermem.ByteOrder.PutUint64(dst[:8], uint64(f.Fh)) - dst = dst[8:] - usermem.ByteOrder.PutUint64(dst[:8], uint64(f.Size)) - dst = dst[8:] - usermem.ByteOrder.PutUint64(dst[:8], uint64(f.LockOwner)) - dst = dst[8:] - usermem.ByteOrder.PutUint64(dst[:8], uint64(f.Atime)) - dst = dst[8:] - usermem.ByteOrder.PutUint64(dst[:8], uint64(f.Mtime)) - dst = dst[8:] - usermem.ByteOrder.PutUint64(dst[:8], uint64(f.Ctime)) - dst = dst[8:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(f.AtimeNsec)) - dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(f.MtimeNsec)) - dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(f.CtimeNsec)) - dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Mode)) - dst = dst[4:] - // Padding: dst[:sizeof(uint32)] ~= uint32(0) +func (f *FUSEInitIn) MarshalBytes(dst []byte) { + usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Major)) dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(f.UID)) + usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Minor)) dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(f.GID)) + usermem.ByteOrder.PutUint32(dst[:4], uint32(f.MaxReadahead)) dst = dst[4:] - // Padding: dst[:sizeof(uint32)] ~= uint32(0) + usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Flags)) dst = dst[4:] } // UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. 
-func (f *FUSESetAttrIn) UnmarshalBytes(src []byte) { - f.Valid = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - // Padding: var _ uint32 ~= src[:sizeof(uint32)] - src = src[4:] - f.Fh = uint64(usermem.ByteOrder.Uint64(src[:8])) - src = src[8:] - f.Size = uint64(usermem.ByteOrder.Uint64(src[:8])) - src = src[8:] - f.LockOwner = uint64(usermem.ByteOrder.Uint64(src[:8])) - src = src[8:] - f.Atime = uint64(usermem.ByteOrder.Uint64(src[:8])) - src = src[8:] - f.Mtime = uint64(usermem.ByteOrder.Uint64(src[:8])) - src = src[8:] - f.Ctime = uint64(usermem.ByteOrder.Uint64(src[:8])) - src = src[8:] - f.AtimeNsec = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - f.MtimeNsec = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - f.CtimeNsec = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - f.Mode = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - // Padding: var _ uint32 ~= src[:sizeof(uint32)] +func (f *FUSEInitIn) UnmarshalBytes(src []byte) { + f.Major = uint32(usermem.ByteOrder.Uint32(src[:4])) src = src[4:] - f.UID = uint32(usermem.ByteOrder.Uint32(src[:4])) + f.Minor = uint32(usermem.ByteOrder.Uint32(src[:4])) src = src[4:] - f.GID = uint32(usermem.ByteOrder.Uint32(src[:4])) + f.MaxReadahead = uint32(usermem.ByteOrder.Uint32(src[:4])) src = src[4:] - // Padding: var _ uint32 ~= src[:sizeof(uint32)] + f.Flags = uint32(usermem.ByteOrder.Uint32(src[:4])) src = src[4:] } // Packed implements marshal.Marshallable.Packed. //go:nosplit -func (f *FUSESetAttrIn) Packed() bool { +func (f *FUSEInitIn) Packed() bool { return true } // MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. -func (f *FUSESetAttrIn) MarshalUnsafe(dst []byte) { +func (f *FUSEInitIn) MarshalUnsafe(dst []byte) { safecopy.CopyIn(dst, unsafe.Pointer(f)) } // UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. -func (f *FUSESetAttrIn) UnmarshalUnsafe(src []byte) { +func (f *FUSEInitIn) UnmarshalUnsafe(src []byte) { safecopy.CopyOut(unsafe.Pointer(f), src) } // CopyOutN implements marshal.Marshallable.CopyOutN. //go:nosplit -func (f *FUSESetAttrIn) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { +func (f *FUSEInitIn) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -3697,13 +3697,13 @@ func (f *FUSESetAttrIn) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limi // CopyOut implements marshal.Marshallable.CopyOut. //go:nosplit -func (f *FUSESetAttrIn) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (f *FUSEInitIn) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { return f.CopyOutN(cc, addr, f.SizeBytes()) } // CopyIn implements marshal.Marshallable.CopyIn. //go:nosplit -func (f *FUSESetAttrIn) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (f *FUSEInitIn) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -3719,7 +3719,7 @@ func (f *FUSESetAttrIn) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, } // WriteTo implements io.WriterTo.WriteTo. -func (f *FUSESetAttrIn) WriteTo(writer io.Writer) (int64, error) { +func (f *FUSEInitIn) WriteTo(writer io.Writer) (int64, error) { // Construct a slice backed by dst's underlying memory. 
var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -7730,70 +7730,62 @@ func (s *SignalfdSiginfo) WriteTo(writer io.Writer) (int64, error) { } // SizeBytes implements marshal.Marshallable.SizeBytes. -func (s *SockAddrLink) SizeBytes() int { +func (s *SockAddrInet6) SizeBytes() int { return 12 + - 1*8 + 1*16 } // MarshalBytes implements marshal.Marshallable.MarshalBytes. -func (s *SockAddrLink) MarshalBytes(dst []byte) { +func (s *SockAddrInet6) MarshalBytes(dst []byte) { usermem.ByteOrder.PutUint16(dst[:2], uint16(s.Family)) dst = dst[2:] - usermem.ByteOrder.PutUint16(dst[:2], uint16(s.Protocol)) + usermem.ByteOrder.PutUint16(dst[:2], uint16(s.Port)) dst = dst[2:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(s.InterfaceIndex)) + usermem.ByteOrder.PutUint32(dst[:4], uint32(s.Flowinfo)) dst = dst[4:] - usermem.ByteOrder.PutUint16(dst[:2], uint16(s.ARPHardwareType)) - dst = dst[2:] - dst[0] = byte(s.PacketType) - dst = dst[1:] - dst[0] = byte(s.HardwareAddrLen) - dst = dst[1:] - for idx := 0; idx < 8; idx++ { - dst[0] = byte(s.HardwareAddr[idx]) + for idx := 0; idx < 16; idx++ { + dst[0] = byte(s.Addr[idx]) dst = dst[1:] } + usermem.ByteOrder.PutUint32(dst[:4], uint32(s.Scope_id)) + dst = dst[4:] } // UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. -func (s *SockAddrLink) UnmarshalBytes(src []byte) { +func (s *SockAddrInet6) UnmarshalBytes(src []byte) { s.Family = uint16(usermem.ByteOrder.Uint16(src[:2])) src = src[2:] - s.Protocol = uint16(usermem.ByteOrder.Uint16(src[:2])) + s.Port = uint16(usermem.ByteOrder.Uint16(src[:2])) src = src[2:] - s.InterfaceIndex = int32(usermem.ByteOrder.Uint32(src[:4])) + s.Flowinfo = uint32(usermem.ByteOrder.Uint32(src[:4])) src = src[4:] - s.ARPHardwareType = uint16(usermem.ByteOrder.Uint16(src[:2])) - src = src[2:] - s.PacketType = src[0] - src = src[1:] - s.HardwareAddrLen = src[0] - src = src[1:] - for idx := 0; idx < 8; idx++ { - s.HardwareAddr[idx] = src[0] + for idx := 0; idx < 16; idx++ { + s.Addr[idx] = src[0] src = src[1:] } + s.Scope_id = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] } // Packed implements marshal.Marshallable.Packed. //go:nosplit -func (s *SockAddrLink) Packed() bool { +func (s *SockAddrInet6) Packed() bool { return true } // MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. -func (s *SockAddrLink) MarshalUnsafe(dst []byte) { +func (s *SockAddrInet6) MarshalUnsafe(dst []byte) { safecopy.CopyIn(dst, unsafe.Pointer(s)) } // UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. -func (s *SockAddrLink) UnmarshalUnsafe(src []byte) { +func (s *SockAddrInet6) UnmarshalUnsafe(src []byte) { safecopy.CopyOut(unsafe.Pointer(s), src) } // CopyOutN implements marshal.Marshallable.CopyOutN. //go:nosplit -func (s *SockAddrLink) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { +func (s *SockAddrInet6) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -7810,13 +7802,13 @@ func (s *SockAddrLink) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit // CopyOut implements marshal.Marshallable.CopyOut. //go:nosplit -func (s *SockAddrLink) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (s *SockAddrInet6) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { return s.CopyOutN(cc, addr, s.SizeBytes()) } // CopyIn implements marshal.Marshallable.CopyIn. 
//go:nosplit -func (s *SockAddrLink) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (s *SockAddrInet6) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -7832,7 +7824,7 @@ func (s *SockAddrLink) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, e } // WriteTo implements io.WriterTo.WriteTo. -func (s *SockAddrLink) WriteTo(writer io.Writer) (int64, error) { +func (s *SockAddrInet6) WriteTo(writer io.Writer) (int64, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -7848,50 +7840,70 @@ func (s *SockAddrLink) WriteTo(writer io.Writer) (int64, error) { } // SizeBytes implements marshal.Marshallable.SizeBytes. -func (s *SockAddrUnix) SizeBytes() int { - return 2 + - 1*UnixPathMax +func (s *SockAddrLink) SizeBytes() int { + return 12 + + 1*8 } // MarshalBytes implements marshal.Marshallable.MarshalBytes. -func (s *SockAddrUnix) MarshalBytes(dst []byte) { +func (s *SockAddrLink) MarshalBytes(dst []byte) { usermem.ByteOrder.PutUint16(dst[:2], uint16(s.Family)) dst = dst[2:] - for idx := 0; idx < UnixPathMax; idx++ { - dst[0] = byte(s.Path[idx]) + usermem.ByteOrder.PutUint16(dst[:2], uint16(s.Protocol)) + dst = dst[2:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(s.InterfaceIndex)) + dst = dst[4:] + usermem.ByteOrder.PutUint16(dst[:2], uint16(s.ARPHardwareType)) + dst = dst[2:] + dst[0] = byte(s.PacketType) + dst = dst[1:] + dst[0] = byte(s.HardwareAddrLen) + dst = dst[1:] + for idx := 0; idx < 8; idx++ { + dst[0] = byte(s.HardwareAddr[idx]) dst = dst[1:] } } // UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. -func (s *SockAddrUnix) UnmarshalBytes(src []byte) { +func (s *SockAddrLink) UnmarshalBytes(src []byte) { s.Family = uint16(usermem.ByteOrder.Uint16(src[:2])) src = src[2:] - for idx := 0; idx < UnixPathMax; idx++ { - s.Path[idx] = int8(src[0]) + s.Protocol = uint16(usermem.ByteOrder.Uint16(src[:2])) + src = src[2:] + s.InterfaceIndex = int32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + s.ARPHardwareType = uint16(usermem.ByteOrder.Uint16(src[:2])) + src = src[2:] + s.PacketType = src[0] + src = src[1:] + s.HardwareAddrLen = src[0] + src = src[1:] + for idx := 0; idx < 8; idx++ { + s.HardwareAddr[idx] = src[0] src = src[1:] } } // Packed implements marshal.Marshallable.Packed. //go:nosplit -func (s *SockAddrUnix) Packed() bool { +func (s *SockAddrLink) Packed() bool { return true } // MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. -func (s *SockAddrUnix) MarshalUnsafe(dst []byte) { +func (s *SockAddrLink) MarshalUnsafe(dst []byte) { safecopy.CopyIn(dst, unsafe.Pointer(s)) } // UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. -func (s *SockAddrUnix) UnmarshalUnsafe(src []byte) { +func (s *SockAddrLink) UnmarshalUnsafe(src []byte) { safecopy.CopyOut(unsafe.Pointer(s), src) } // CopyOutN implements marshal.Marshallable.CopyOutN. //go:nosplit -func (s *SockAddrUnix) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { +func (s *SockAddrLink) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { // Construct a slice backed by dst's underlying memory. 
var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -7908,13 +7920,13 @@ func (s *SockAddrUnix) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit // CopyOut implements marshal.Marshallable.CopyOut. //go:nosplit -func (s *SockAddrUnix) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (s *SockAddrLink) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { return s.CopyOutN(cc, addr, s.SizeBytes()) } // CopyIn implements marshal.Marshallable.CopyIn. //go:nosplit -func (s *SockAddrUnix) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (s *SockAddrLink) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -7930,7 +7942,7 @@ func (s *SockAddrUnix) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, e } // WriteTo implements io.WriterTo.WriteTo. -func (s *SockAddrUnix) WriteTo(writer io.Writer) (int64, error) { +func (s *SockAddrLink) WriteTo(writer io.Writer) (int64, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -8039,275 +8051,6 @@ func (l *Linger) WriteTo(writer io.Writer) (int64, error) { } // SizeBytes implements marshal.Marshallable.SizeBytes. -func (t *TCPInfo) SizeBytes() int { - return 192 -} - -// MarshalBytes implements marshal.Marshallable.MarshalBytes. -func (t *TCPInfo) MarshalBytes(dst []byte) { - dst[0] = byte(t.State) - dst = dst[1:] - dst[0] = byte(t.CaState) - dst = dst[1:] - dst[0] = byte(t.Retransmits) - dst = dst[1:] - dst[0] = byte(t.Probes) - dst = dst[1:] - dst[0] = byte(t.Backoff) - dst = dst[1:] - dst[0] = byte(t.Options) - dst = dst[1:] - dst[0] = byte(t.WindowScale) - dst = dst[1:] - dst[0] = byte(t.DeliveryRateAppLimited) - dst = dst[1:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(t.RTO)) - dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(t.ATO)) - dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(t.SndMss)) - dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(t.RcvMss)) - dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(t.Unacked)) - dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(t.Sacked)) - dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(t.Lost)) - dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(t.Retrans)) - dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(t.Fackets)) - dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(t.LastDataSent)) - dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(t.LastAckSent)) - dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(t.LastDataRecv)) - dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(t.LastAckRecv)) - dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(t.PMTU)) - dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(t.RcvSsthresh)) - dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(t.RTT)) - dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(t.RTTVar)) - dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(t.SndSsthresh)) - dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(t.SndCwnd)) - dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(t.Advmss)) - dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(t.Reordering)) - dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(t.RcvRTT)) - dst = dst[4:] - 
usermem.ByteOrder.PutUint32(dst[:4], uint32(t.RcvSpace)) - dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(t.TotalRetrans)) - dst = dst[4:] - usermem.ByteOrder.PutUint64(dst[:8], uint64(t.PacingRate)) - dst = dst[8:] - usermem.ByteOrder.PutUint64(dst[:8], uint64(t.MaxPacingRate)) - dst = dst[8:] - usermem.ByteOrder.PutUint64(dst[:8], uint64(t.BytesAcked)) - dst = dst[8:] - usermem.ByteOrder.PutUint64(dst[:8], uint64(t.BytesReceived)) - dst = dst[8:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(t.SegsOut)) - dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(t.SegsIn)) - dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(t.NotSentBytes)) - dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(t.MinRTT)) - dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(t.DataSegsIn)) - dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(t.DataSegsOut)) - dst = dst[4:] - usermem.ByteOrder.PutUint64(dst[:8], uint64(t.DeliveryRate)) - dst = dst[8:] - usermem.ByteOrder.PutUint64(dst[:8], uint64(t.BusyTime)) - dst = dst[8:] - usermem.ByteOrder.PutUint64(dst[:8], uint64(t.RwndLimited)) - dst = dst[8:] - usermem.ByteOrder.PutUint64(dst[:8], uint64(t.SndBufLimited)) - dst = dst[8:] -} - -// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. -func (t *TCPInfo) UnmarshalBytes(src []byte) { - t.State = uint8(src[0]) - src = src[1:] - t.CaState = uint8(src[0]) - src = src[1:] - t.Retransmits = uint8(src[0]) - src = src[1:] - t.Probes = uint8(src[0]) - src = src[1:] - t.Backoff = uint8(src[0]) - src = src[1:] - t.Options = uint8(src[0]) - src = src[1:] - t.WindowScale = uint8(src[0]) - src = src[1:] - t.DeliveryRateAppLimited = uint8(src[0]) - src = src[1:] - t.RTO = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - t.ATO = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - t.SndMss = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - t.RcvMss = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - t.Unacked = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - t.Sacked = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - t.Lost = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - t.Retrans = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - t.Fackets = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - t.LastDataSent = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - t.LastAckSent = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - t.LastDataRecv = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - t.LastAckRecv = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - t.PMTU = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - t.RcvSsthresh = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - t.RTT = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - t.RTTVar = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - t.SndSsthresh = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - t.SndCwnd = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - t.Advmss = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - t.Reordering = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - t.RcvRTT = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - t.RcvSpace = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - t.TotalRetrans = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - t.PacingRate = uint64(usermem.ByteOrder.Uint64(src[:8])) - src = src[8:] - 
t.MaxPacingRate = uint64(usermem.ByteOrder.Uint64(src[:8])) - src = src[8:] - t.BytesAcked = uint64(usermem.ByteOrder.Uint64(src[:8])) - src = src[8:] - t.BytesReceived = uint64(usermem.ByteOrder.Uint64(src[:8])) - src = src[8:] - t.SegsOut = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - t.SegsIn = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - t.NotSentBytes = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - t.MinRTT = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - t.DataSegsIn = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - t.DataSegsOut = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - t.DeliveryRate = uint64(usermem.ByteOrder.Uint64(src[:8])) - src = src[8:] - t.BusyTime = uint64(usermem.ByteOrder.Uint64(src[:8])) - src = src[8:] - t.RwndLimited = uint64(usermem.ByteOrder.Uint64(src[:8])) - src = src[8:] - t.SndBufLimited = uint64(usermem.ByteOrder.Uint64(src[:8])) - src = src[8:] -} - -// Packed implements marshal.Marshallable.Packed. -//go:nosplit -func (t *TCPInfo) Packed() bool { - return true -} - -// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. -func (t *TCPInfo) MarshalUnsafe(dst []byte) { - safecopy.CopyIn(dst, unsafe.Pointer(t)) -} - -// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. -func (t *TCPInfo) UnmarshalUnsafe(src []byte) { - safecopy.CopyOut(unsafe.Pointer(t), src) -} - -// CopyOutN implements marshal.Marshallable.CopyOutN. -//go:nosplit -func (t *TCPInfo) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { - // Construct a slice backed by dst's underlying memory. - var buf []byte - hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) - hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(t))) - hdr.Len = t.SizeBytes() - hdr.Cap = t.SizeBytes() - - length, err := cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay. - // Since we bypassed the compiler's escape analysis, indicate that t - // must live until the use above. - runtime.KeepAlive(t) // escapes: replaced by intrinsic. - return length, err -} - -// CopyOut implements marshal.Marshallable.CopyOut. -//go:nosplit -func (t *TCPInfo) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { - return t.CopyOutN(cc, addr, t.SizeBytes()) -} - -// CopyIn implements marshal.Marshallable.CopyIn. -//go:nosplit -func (t *TCPInfo) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { - // Construct a slice backed by dst's underlying memory. - var buf []byte - hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) - hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(t))) - hdr.Len = t.SizeBytes() - hdr.Cap = t.SizeBytes() - - length, err := cc.CopyInBytes(addr, buf) // escapes: okay. - // Since we bypassed the compiler's escape analysis, indicate that t - // must live until the use above. - runtime.KeepAlive(t) // escapes: replaced by intrinsic. - return length, err -} - -// WriteTo implements io.WriterTo.WriteTo. -func (t *TCPInfo) WriteTo(writer io.Writer) (int64, error) { - // Construct a slice backed by dst's underlying memory. - var buf []byte - hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) - hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(t))) - hdr.Len = t.SizeBytes() - hdr.Cap = t.SizeBytes() - - length, err := writer.Write(buf) - // Since we bypassed the compiler's escape analysis, indicate that t - // must live until the use above. - runtime.KeepAlive(t) // escapes: replaced by intrinsic. 
- return int64(length), err -} - -// SizeBytes implements marshal.Marshallable.SizeBytes. func (c *ControlMessageCredentials) SizeBytes() int { return 12 } @@ -8733,62 +8476,50 @@ func (i *Inet6Addr) WriteTo(w io.Writer) (int64, error) { } // SizeBytes implements marshal.Marshallable.SizeBytes. -func (s *SockAddrInet6) SizeBytes() int { - return 12 + - 1*16 +func (s *SockAddrUnix) SizeBytes() int { + return 2 + + 1*UnixPathMax } // MarshalBytes implements marshal.Marshallable.MarshalBytes. -func (s *SockAddrInet6) MarshalBytes(dst []byte) { +func (s *SockAddrUnix) MarshalBytes(dst []byte) { usermem.ByteOrder.PutUint16(dst[:2], uint16(s.Family)) dst = dst[2:] - usermem.ByteOrder.PutUint16(dst[:2], uint16(s.Port)) - dst = dst[2:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(s.Flowinfo)) - dst = dst[4:] - for idx := 0; idx < 16; idx++ { - dst[0] = byte(s.Addr[idx]) + for idx := 0; idx < UnixPathMax; idx++ { + dst[0] = byte(s.Path[idx]) dst = dst[1:] } - usermem.ByteOrder.PutUint32(dst[:4], uint32(s.Scope_id)) - dst = dst[4:] } // UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. -func (s *SockAddrInet6) UnmarshalBytes(src []byte) { +func (s *SockAddrUnix) UnmarshalBytes(src []byte) { s.Family = uint16(usermem.ByteOrder.Uint16(src[:2])) src = src[2:] - s.Port = uint16(usermem.ByteOrder.Uint16(src[:2])) - src = src[2:] - s.Flowinfo = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - for idx := 0; idx < 16; idx++ { - s.Addr[idx] = src[0] + for idx := 0; idx < UnixPathMax; idx++ { + s.Path[idx] = int8(src[0]) src = src[1:] } - s.Scope_id = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] } // Packed implements marshal.Marshallable.Packed. //go:nosplit -func (s *SockAddrInet6) Packed() bool { +func (s *SockAddrUnix) Packed() bool { return true } // MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. -func (s *SockAddrInet6) MarshalUnsafe(dst []byte) { +func (s *SockAddrUnix) MarshalUnsafe(dst []byte) { safecopy.CopyIn(dst, unsafe.Pointer(s)) } // UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. -func (s *SockAddrInet6) UnmarshalUnsafe(src []byte) { +func (s *SockAddrUnix) UnmarshalUnsafe(src []byte) { safecopy.CopyOut(unsafe.Pointer(s), src) } // CopyOutN implements marshal.Marshallable.CopyOutN. //go:nosplit -func (s *SockAddrInet6) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { +func (s *SockAddrUnix) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -8805,13 +8536,13 @@ func (s *SockAddrInet6) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limi // CopyOut implements marshal.Marshallable.CopyOut. //go:nosplit -func (s *SockAddrInet6) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (s *SockAddrUnix) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { return s.CopyOutN(cc, addr, s.SizeBytes()) } // CopyIn implements marshal.Marshallable.CopyIn. //go:nosplit -func (s *SockAddrInet6) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (s *SockAddrUnix) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -8827,7 +8558,7 @@ func (s *SockAddrInet6) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, } // WriteTo implements io.WriterTo.WriteTo. 
-func (s *SockAddrInet6) WriteTo(writer io.Writer) (int64, error) { +func (s *SockAddrUnix) WriteTo(writer io.Writer) (int64, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -8843,41 +8574,221 @@ func (s *SockAddrInet6) WriteTo(writer io.Writer) (int64, error) { } // SizeBytes implements marshal.Marshallable.SizeBytes. -//go:nosplit -func (t *TimeT) SizeBytes() int { - return 8 +func (t *TCPInfo) SizeBytes() int { + return 192 } // MarshalBytes implements marshal.Marshallable.MarshalBytes. -func (t *TimeT) MarshalBytes(dst []byte) { - usermem.ByteOrder.PutUint64(dst[:8], uint64(*t)) +func (t *TCPInfo) MarshalBytes(dst []byte) { + dst[0] = byte(t.State) + dst = dst[1:] + dst[0] = byte(t.CaState) + dst = dst[1:] + dst[0] = byte(t.Retransmits) + dst = dst[1:] + dst[0] = byte(t.Probes) + dst = dst[1:] + dst[0] = byte(t.Backoff) + dst = dst[1:] + dst[0] = byte(t.Options) + dst = dst[1:] + dst[0] = byte(t.WindowScale) + dst = dst[1:] + dst[0] = byte(t.DeliveryRateAppLimited) + dst = dst[1:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(t.RTO)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(t.ATO)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(t.SndMss)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(t.RcvMss)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(t.Unacked)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(t.Sacked)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(t.Lost)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(t.Retrans)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(t.Fackets)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(t.LastDataSent)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(t.LastAckSent)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(t.LastDataRecv)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(t.LastAckRecv)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(t.PMTU)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(t.RcvSsthresh)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(t.RTT)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(t.RTTVar)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(t.SndSsthresh)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(t.SndCwnd)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(t.Advmss)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(t.Reordering)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(t.RcvRTT)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(t.RcvSpace)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(t.TotalRetrans)) + dst = dst[4:] + usermem.ByteOrder.PutUint64(dst[:8], uint64(t.PacingRate)) + dst = dst[8:] + usermem.ByteOrder.PutUint64(dst[:8], uint64(t.MaxPacingRate)) + dst = dst[8:] + usermem.ByteOrder.PutUint64(dst[:8], uint64(t.BytesAcked)) + dst = dst[8:] + usermem.ByteOrder.PutUint64(dst[:8], uint64(t.BytesReceived)) + dst = dst[8:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(t.SegsOut)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(t.SegsIn)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(t.NotSentBytes)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(t.MinRTT)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(t.DataSegsIn)) + dst 
= dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(t.DataSegsOut)) + dst = dst[4:] + usermem.ByteOrder.PutUint64(dst[:8], uint64(t.DeliveryRate)) + dst = dst[8:] + usermem.ByteOrder.PutUint64(dst[:8], uint64(t.BusyTime)) + dst = dst[8:] + usermem.ByteOrder.PutUint64(dst[:8], uint64(t.RwndLimited)) + dst = dst[8:] + usermem.ByteOrder.PutUint64(dst[:8], uint64(t.SndBufLimited)) + dst = dst[8:] } // UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. -func (t *TimeT) UnmarshalBytes(src []byte) { - *t = TimeT(int64(usermem.ByteOrder.Uint64(src[:8]))) +func (t *TCPInfo) UnmarshalBytes(src []byte) { + t.State = uint8(src[0]) + src = src[1:] + t.CaState = uint8(src[0]) + src = src[1:] + t.Retransmits = uint8(src[0]) + src = src[1:] + t.Probes = uint8(src[0]) + src = src[1:] + t.Backoff = uint8(src[0]) + src = src[1:] + t.Options = uint8(src[0]) + src = src[1:] + t.WindowScale = uint8(src[0]) + src = src[1:] + t.DeliveryRateAppLimited = uint8(src[0]) + src = src[1:] + t.RTO = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + t.ATO = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + t.SndMss = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + t.RcvMss = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + t.Unacked = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + t.Sacked = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + t.Lost = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + t.Retrans = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + t.Fackets = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + t.LastDataSent = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + t.LastAckSent = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + t.LastDataRecv = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + t.LastAckRecv = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + t.PMTU = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + t.RcvSsthresh = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + t.RTT = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + t.RTTVar = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + t.SndSsthresh = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + t.SndCwnd = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + t.Advmss = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + t.Reordering = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + t.RcvRTT = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + t.RcvSpace = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + t.TotalRetrans = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + t.PacingRate = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + t.MaxPacingRate = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + t.BytesAcked = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + t.BytesReceived = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + t.SegsOut = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + t.SegsIn = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + t.NotSentBytes = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + t.MinRTT = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + t.DataSegsIn = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + t.DataSegsOut = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + t.DeliveryRate = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + 
t.BusyTime = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + t.RwndLimited = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + t.SndBufLimited = uint64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] } // Packed implements marshal.Marshallable.Packed. //go:nosplit -func (t *TimeT) Packed() bool { - // Scalar newtypes are always packed. +func (t *TCPInfo) Packed() bool { return true } // MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. -func (t *TimeT) MarshalUnsafe(dst []byte) { +func (t *TCPInfo) MarshalUnsafe(dst []byte) { safecopy.CopyIn(dst, unsafe.Pointer(t)) } // UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. -func (t *TimeT) UnmarshalUnsafe(src []byte) { +func (t *TCPInfo) UnmarshalUnsafe(src []byte) { safecopy.CopyOut(unsafe.Pointer(t), src) } // CopyOutN implements marshal.Marshallable.CopyOutN. //go:nosplit -func (t *TimeT) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { +func (t *TCPInfo) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -8894,13 +8805,13 @@ func (t *TimeT) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) ( // CopyOut implements marshal.Marshallable.CopyOut. //go:nosplit -func (t *TimeT) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (t *TCPInfo) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { return t.CopyOutN(cc, addr, t.SizeBytes()) } // CopyIn implements marshal.Marshallable.CopyIn. //go:nosplit -func (t *TimeT) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (t *TCPInfo) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -8916,7 +8827,7 @@ func (t *TimeT) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { } // WriteTo implements io.WriterTo.WriteTo. -func (t *TimeT) WriteTo(w io.Writer) (int64, error) { +func (t *TCPInfo) WriteTo(writer io.Writer) (int64, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -8924,7 +8835,7 @@ func (t *TimeT) WriteTo(w io.Writer) (int64, error) { hdr.Len = t.SizeBytes() hdr.Cap = t.SizeBytes() - length, err := w.Write(buf) + length, err := writer.Write(buf) // Since we bypassed the compiler's escape analysis, indicate that t // must live until the use above. runtime.KeepAlive(t) // escapes: replaced by intrinsic. @@ -8932,381 +8843,291 @@ func (t *TimeT) WriteTo(w io.Writer) (int64, error) { } // SizeBytes implements marshal.Marshallable.SizeBytes. -func (tv *Timeval) SizeBytes() int { +func (sxts *StatxTimestamp) SizeBytes() int { return 16 } // MarshalBytes implements marshal.Marshallable.MarshalBytes. -func (tv *Timeval) MarshalBytes(dst []byte) { - usermem.ByteOrder.PutUint64(dst[:8], uint64(tv.Sec)) - dst = dst[8:] - usermem.ByteOrder.PutUint64(dst[:8], uint64(tv.Usec)) +func (sxts *StatxTimestamp) MarshalBytes(dst []byte) { + usermem.ByteOrder.PutUint64(dst[:8], uint64(sxts.Sec)) dst = dst[8:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(sxts.Nsec)) + dst = dst[4:] + // Padding: dst[:sizeof(int32)] ~= int32(0) + dst = dst[4:] } // UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. 
-func (tv *Timeval) UnmarshalBytes(src []byte) { - tv.Sec = int64(usermem.ByteOrder.Uint64(src[:8])) - src = src[8:] - tv.Usec = int64(usermem.ByteOrder.Uint64(src[:8])) +func (sxts *StatxTimestamp) UnmarshalBytes(src []byte) { + sxts.Sec = int64(usermem.ByteOrder.Uint64(src[:8])) src = src[8:] + sxts.Nsec = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + // Padding: var _ int32 ~= src[:sizeof(int32)] + src = src[4:] } // Packed implements marshal.Marshallable.Packed. //go:nosplit -func (tv *Timeval) Packed() bool { +func (sxts *StatxTimestamp) Packed() bool { return true } // MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. -func (tv *Timeval) MarshalUnsafe(dst []byte) { - safecopy.CopyIn(dst, unsafe.Pointer(tv)) +func (sxts *StatxTimestamp) MarshalUnsafe(dst []byte) { + safecopy.CopyIn(dst, unsafe.Pointer(sxts)) } // UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. -func (tv *Timeval) UnmarshalUnsafe(src []byte) { - safecopy.CopyOut(unsafe.Pointer(tv), src) +func (sxts *StatxTimestamp) UnmarshalUnsafe(src []byte) { + safecopy.CopyOut(unsafe.Pointer(sxts), src) } // CopyOutN implements marshal.Marshallable.CopyOutN. //go:nosplit -func (tv *Timeval) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { +func (sxts *StatxTimestamp) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) - hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(tv))) - hdr.Len = tv.SizeBytes() - hdr.Cap = tv.SizeBytes() + hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(sxts))) + hdr.Len = sxts.SizeBytes() + hdr.Cap = sxts.SizeBytes() length, err := cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay. - // Since we bypassed the compiler's escape analysis, indicate that tv + // Since we bypassed the compiler's escape analysis, indicate that sxts // must live until the use above. - runtime.KeepAlive(tv) // escapes: replaced by intrinsic. + runtime.KeepAlive(sxts) // escapes: replaced by intrinsic. return length, err } // CopyOut implements marshal.Marshallable.CopyOut. //go:nosplit -func (tv *Timeval) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { - return tv.CopyOutN(cc, addr, tv.SizeBytes()) +func (sxts *StatxTimestamp) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { + return sxts.CopyOutN(cc, addr, sxts.SizeBytes()) } // CopyIn implements marshal.Marshallable.CopyIn. //go:nosplit -func (tv *Timeval) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (sxts *StatxTimestamp) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) - hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(tv))) - hdr.Len = tv.SizeBytes() - hdr.Cap = tv.SizeBytes() + hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(sxts))) + hdr.Len = sxts.SizeBytes() + hdr.Cap = sxts.SizeBytes() length, err := cc.CopyInBytes(addr, buf) // escapes: okay. - // Since we bypassed the compiler's escape analysis, indicate that tv + // Since we bypassed the compiler's escape analysis, indicate that sxts // must live until the use above. - runtime.KeepAlive(tv) // escapes: replaced by intrinsic. + runtime.KeepAlive(sxts) // escapes: replaced by intrinsic. return length, err } // WriteTo implements io.WriterTo.WriteTo. 
-func (tv *Timeval) WriteTo(writer io.Writer) (int64, error) { +func (sxts *StatxTimestamp) WriteTo(writer io.Writer) (int64, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) - hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(tv))) - hdr.Len = tv.SizeBytes() - hdr.Cap = tv.SizeBytes() + hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(sxts))) + hdr.Len = sxts.SizeBytes() + hdr.Cap = sxts.SizeBytes() length, err := writer.Write(buf) - // Since we bypassed the compiler's escape analysis, indicate that tv + // Since we bypassed the compiler's escape analysis, indicate that sxts // must live until the use above. - runtime.KeepAlive(tv) // escapes: replaced by intrinsic. + runtime.KeepAlive(sxts) // escapes: replaced by intrinsic. return int64(length), err } -// CopyTimevalSliceIn copies in a slice of Timeval objects from the task's memory. -func CopyTimevalSliceIn(cc marshal.CopyContext, addr usermem.Addr, dst []Timeval) (int, error) { - count := len(dst) - if count == 0 { - return 0, nil - } - size := (*Timeval)(nil).SizeBytes() - - ptr := unsafe.Pointer(&dst) - val := gohacks.Noescape(unsafe.Pointer((*reflect.SliceHeader)(ptr).Data)) - - // Construct a slice backed by dst's underlying memory. - var buf []byte - hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) - hdr.Data = uintptr(val) - hdr.Len = size * count - hdr.Cap = size * count - - length, err := cc.CopyInBytes(addr, buf) - // Since we bypassed the compiler's escape analysis, indicate that dst - // must live until the use above. - runtime.KeepAlive(dst) // escapes: replaced by intrinsic. - return length, err -} - -// CopyTimevalSliceOut copies a slice of Timeval objects to the task's memory. -func CopyTimevalSliceOut(cc marshal.CopyContext, addr usermem.Addr, src []Timeval) (int, error) { - count := len(src) - if count == 0 { - return 0, nil - } - size := (*Timeval)(nil).SizeBytes() - - ptr := unsafe.Pointer(&src) - val := gohacks.Noescape(unsafe.Pointer((*reflect.SliceHeader)(ptr).Data)) - - // Construct a slice backed by dst's underlying memory. - var buf []byte - hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) - hdr.Data = uintptr(val) - hdr.Len = size * count - hdr.Cap = size * count - - length, err := cc.CopyOutBytes(addr, buf) - // Since we bypassed the compiler's escape analysis, indicate that src - // must live until the use above. - runtime.KeepAlive(src) // escapes: replaced by intrinsic. - return length, err -} - -// MarshalUnsafeTimevalSlice is like Timeval.MarshalUnsafe, but for a []Timeval. -func MarshalUnsafeTimevalSlice(src []Timeval, dst []byte) (int, error) { - count := len(src) - if count == 0 { - return 0, nil - } - size := (*Timeval)(nil).SizeBytes() - - ptr := unsafe.Pointer(&src) - val := gohacks.Noescape(unsafe.Pointer((*reflect.SliceHeader)(ptr).Data)) - - length, err := safecopy.CopyIn(dst[:(size*count)], val) - // Since we bypassed the compiler's escape analysis, indicate that src - // must live until the use above. - runtime.KeepAlive(src) // escapes: replaced by intrinsic. - return length, err -} - -// UnmarshalUnsafeTimevalSlice is like Timeval.UnmarshalUnsafe, but for a []Timeval. 
-func UnmarshalUnsafeTimevalSlice(dst []Timeval, src []byte) (int, error) { - count := len(dst) - if count == 0 { - return 0, nil - } - size := (*Timeval)(nil).SizeBytes() - - ptr := unsafe.Pointer(&dst) - val := gohacks.Noescape(unsafe.Pointer((*reflect.SliceHeader)(ptr).Data)) - - length, err := safecopy.CopyOut(val, src[:(size*count)]) - // Since we bypassed the compiler's escape analysis, indicate that dst - // must live until the use above. - runtime.KeepAlive(dst) // escapes: replaced by intrinsic. - return length, err -} - // SizeBytes implements marshal.Marshallable.SizeBytes. -func (sxts *StatxTimestamp) SizeBytes() int { - return 16 +//go:nosplit +func (t *TimeT) SizeBytes() int { + return 8 } // MarshalBytes implements marshal.Marshallable.MarshalBytes. -func (sxts *StatxTimestamp) MarshalBytes(dst []byte) { - usermem.ByteOrder.PutUint64(dst[:8], uint64(sxts.Sec)) - dst = dst[8:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(sxts.Nsec)) - dst = dst[4:] - // Padding: dst[:sizeof(int32)] ~= int32(0) - dst = dst[4:] +func (t *TimeT) MarshalBytes(dst []byte) { + usermem.ByteOrder.PutUint64(dst[:8], uint64(*t)) } // UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. -func (sxts *StatxTimestamp) UnmarshalBytes(src []byte) { - sxts.Sec = int64(usermem.ByteOrder.Uint64(src[:8])) - src = src[8:] - sxts.Nsec = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - // Padding: var _ int32 ~= src[:sizeof(int32)] - src = src[4:] +func (t *TimeT) UnmarshalBytes(src []byte) { + *t = TimeT(int64(usermem.ByteOrder.Uint64(src[:8]))) } // Packed implements marshal.Marshallable.Packed. //go:nosplit -func (sxts *StatxTimestamp) Packed() bool { +func (t *TimeT) Packed() bool { + // Scalar newtypes are always packed. return true } // MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. -func (sxts *StatxTimestamp) MarshalUnsafe(dst []byte) { - safecopy.CopyIn(dst, unsafe.Pointer(sxts)) +func (t *TimeT) MarshalUnsafe(dst []byte) { + safecopy.CopyIn(dst, unsafe.Pointer(t)) } // UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. -func (sxts *StatxTimestamp) UnmarshalUnsafe(src []byte) { - safecopy.CopyOut(unsafe.Pointer(sxts), src) +func (t *TimeT) UnmarshalUnsafe(src []byte) { + safecopy.CopyOut(unsafe.Pointer(t), src) } // CopyOutN implements marshal.Marshallable.CopyOutN. //go:nosplit -func (sxts *StatxTimestamp) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { +func (t *TimeT) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) - hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(sxts))) - hdr.Len = sxts.SizeBytes() - hdr.Cap = sxts.SizeBytes() + hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(t))) + hdr.Len = t.SizeBytes() + hdr.Cap = t.SizeBytes() length, err := cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay. - // Since we bypassed the compiler's escape analysis, indicate that sxts + // Since we bypassed the compiler's escape analysis, indicate that t // must live until the use above. - runtime.KeepAlive(sxts) // escapes: replaced by intrinsic. + runtime.KeepAlive(t) // escapes: replaced by intrinsic. return length, err } // CopyOut implements marshal.Marshallable.CopyOut. 
//go:nosplit -func (sxts *StatxTimestamp) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { - return sxts.CopyOutN(cc, addr, sxts.SizeBytes()) +func (t *TimeT) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { + return t.CopyOutN(cc, addr, t.SizeBytes()) } // CopyIn implements marshal.Marshallable.CopyIn. //go:nosplit -func (sxts *StatxTimestamp) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (t *TimeT) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) - hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(sxts))) - hdr.Len = sxts.SizeBytes() - hdr.Cap = sxts.SizeBytes() + hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(t))) + hdr.Len = t.SizeBytes() + hdr.Cap = t.SizeBytes() length, err := cc.CopyInBytes(addr, buf) // escapes: okay. - // Since we bypassed the compiler's escape analysis, indicate that sxts + // Since we bypassed the compiler's escape analysis, indicate that t // must live until the use above. - runtime.KeepAlive(sxts) // escapes: replaced by intrinsic. + runtime.KeepAlive(t) // escapes: replaced by intrinsic. return length, err } // WriteTo implements io.WriterTo.WriteTo. -func (sxts *StatxTimestamp) WriteTo(writer io.Writer) (int64, error) { +func (t *TimeT) WriteTo(w io.Writer) (int64, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) - hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(sxts))) - hdr.Len = sxts.SizeBytes() - hdr.Cap = sxts.SizeBytes() + hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(t))) + hdr.Len = t.SizeBytes() + hdr.Cap = t.SizeBytes() - length, err := writer.Write(buf) - // Since we bypassed the compiler's escape analysis, indicate that sxts + length, err := w.Write(buf) + // Since we bypassed the compiler's escape analysis, indicate that t // must live until the use above. - runtime.KeepAlive(sxts) // escapes: replaced by intrinsic. + runtime.KeepAlive(t) // escapes: replaced by intrinsic. return int64(length), err } // SizeBytes implements marshal.Marshallable.SizeBytes. -func (ts *Timespec) SizeBytes() int { +func (tv *Timeval) SizeBytes() int { return 16 } // MarshalBytes implements marshal.Marshallable.MarshalBytes. -func (ts *Timespec) MarshalBytes(dst []byte) { - usermem.ByteOrder.PutUint64(dst[:8], uint64(ts.Sec)) +func (tv *Timeval) MarshalBytes(dst []byte) { + usermem.ByteOrder.PutUint64(dst[:8], uint64(tv.Sec)) dst = dst[8:] - usermem.ByteOrder.PutUint64(dst[:8], uint64(ts.Nsec)) + usermem.ByteOrder.PutUint64(dst[:8], uint64(tv.Usec)) dst = dst[8:] } // UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. -func (ts *Timespec) UnmarshalBytes(src []byte) { - ts.Sec = int64(usermem.ByteOrder.Uint64(src[:8])) +func (tv *Timeval) UnmarshalBytes(src []byte) { + tv.Sec = int64(usermem.ByteOrder.Uint64(src[:8])) src = src[8:] - ts.Nsec = int64(usermem.ByteOrder.Uint64(src[:8])) + tv.Usec = int64(usermem.ByteOrder.Uint64(src[:8])) src = src[8:] } // Packed implements marshal.Marshallable.Packed. //go:nosplit -func (ts *Timespec) Packed() bool { +func (tv *Timeval) Packed() bool { return true } // MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. 
-func (ts *Timespec) MarshalUnsafe(dst []byte) { - safecopy.CopyIn(dst, unsafe.Pointer(ts)) +func (tv *Timeval) MarshalUnsafe(dst []byte) { + safecopy.CopyIn(dst, unsafe.Pointer(tv)) } // UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. -func (ts *Timespec) UnmarshalUnsafe(src []byte) { - safecopy.CopyOut(unsafe.Pointer(ts), src) +func (tv *Timeval) UnmarshalUnsafe(src []byte) { + safecopy.CopyOut(unsafe.Pointer(tv), src) } // CopyOutN implements marshal.Marshallable.CopyOutN. //go:nosplit -func (ts *Timespec) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { +func (tv *Timeval) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) - hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(ts))) - hdr.Len = ts.SizeBytes() - hdr.Cap = ts.SizeBytes() + hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(tv))) + hdr.Len = tv.SizeBytes() + hdr.Cap = tv.SizeBytes() length, err := cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay. - // Since we bypassed the compiler's escape analysis, indicate that ts + // Since we bypassed the compiler's escape analysis, indicate that tv // must live until the use above. - runtime.KeepAlive(ts) // escapes: replaced by intrinsic. + runtime.KeepAlive(tv) // escapes: replaced by intrinsic. return length, err } // CopyOut implements marshal.Marshallable.CopyOut. //go:nosplit -func (ts *Timespec) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { - return ts.CopyOutN(cc, addr, ts.SizeBytes()) +func (tv *Timeval) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { + return tv.CopyOutN(cc, addr, tv.SizeBytes()) } // CopyIn implements marshal.Marshallable.CopyIn. //go:nosplit -func (ts *Timespec) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (tv *Timeval) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) - hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(ts))) - hdr.Len = ts.SizeBytes() - hdr.Cap = ts.SizeBytes() + hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(tv))) + hdr.Len = tv.SizeBytes() + hdr.Cap = tv.SizeBytes() length, err := cc.CopyInBytes(addr, buf) // escapes: okay. - // Since we bypassed the compiler's escape analysis, indicate that ts + // Since we bypassed the compiler's escape analysis, indicate that tv // must live until the use above. - runtime.KeepAlive(ts) // escapes: replaced by intrinsic. + runtime.KeepAlive(tv) // escapes: replaced by intrinsic. return length, err } // WriteTo implements io.WriterTo.WriteTo. -func (ts *Timespec) WriteTo(writer io.Writer) (int64, error) { +func (tv *Timeval) WriteTo(writer io.Writer) (int64, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) - hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(ts))) - hdr.Len = ts.SizeBytes() - hdr.Cap = ts.SizeBytes() + hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(tv))) + hdr.Len = tv.SizeBytes() + hdr.Cap = tv.SizeBytes() length, err := writer.Write(buf) - // Since we bypassed the compiler's escape analysis, indicate that ts + // Since we bypassed the compiler's escape analysis, indicate that tv // must live until the use above. - runtime.KeepAlive(ts) // escapes: replaced by intrinsic. 
+ runtime.KeepAlive(tv) // escapes: replaced by intrinsic. return int64(length), err } -// CopyTimespecSliceIn copies in a slice of Timespec objects from the task's memory. -func CopyTimespecSliceIn(cc marshal.CopyContext, addr usermem.Addr, dst []Timespec) (int, error) { +// CopyTimevalSliceIn copies in a slice of Timeval objects from the task's memory. +func CopyTimevalSliceIn(cc marshal.CopyContext, addr usermem.Addr, dst []Timeval) (int, error) { count := len(dst) if count == 0 { return 0, nil } - size := (*Timespec)(nil).SizeBytes() + size := (*Timeval)(nil).SizeBytes() ptr := unsafe.Pointer(&dst) val := gohacks.Noescape(unsafe.Pointer((*reflect.SliceHeader)(ptr).Data)) @@ -9325,13 +9146,13 @@ func CopyTimespecSliceIn(cc marshal.CopyContext, addr usermem.Addr, dst []Timesp return length, err } -// CopyTimespecSliceOut copies a slice of Timespec objects to the task's memory. -func CopyTimespecSliceOut(cc marshal.CopyContext, addr usermem.Addr, src []Timespec) (int, error) { +// CopyTimevalSliceOut copies a slice of Timeval objects to the task's memory. +func CopyTimevalSliceOut(cc marshal.CopyContext, addr usermem.Addr, src []Timeval) (int, error) { count := len(src) if count == 0 { return 0, nil } - size := (*Timespec)(nil).SizeBytes() + size := (*Timeval)(nil).SizeBytes() ptr := unsafe.Pointer(&src) val := gohacks.Noescape(unsafe.Pointer((*reflect.SliceHeader)(ptr).Data)) @@ -9350,13 +9171,13 @@ func CopyTimespecSliceOut(cc marshal.CopyContext, addr usermem.Addr, src []Times return length, err } -// MarshalUnsafeTimespecSlice is like Timespec.MarshalUnsafe, but for a []Timespec. -func MarshalUnsafeTimespecSlice(src []Timespec, dst []byte) (int, error) { +// MarshalUnsafeTimevalSlice is like Timeval.MarshalUnsafe, but for a []Timeval. +func MarshalUnsafeTimevalSlice(src []Timeval, dst []byte) (int, error) { count := len(src) if count == 0 { return 0, nil } - size := (*Timespec)(nil).SizeBytes() + size := (*Timeval)(nil).SizeBytes() ptr := unsafe.Pointer(&src) val := gohacks.Noescape(unsafe.Pointer((*reflect.SliceHeader)(ptr).Data)) @@ -9368,13 +9189,13 @@ func MarshalUnsafeTimespecSlice(src []Timespec, dst []byte) (int, error) { return length, err } -// UnmarshalUnsafeTimespecSlice is like Timespec.UnmarshalUnsafe, but for a []Timespec. -func UnmarshalUnsafeTimespecSlice(dst []Timespec, src []byte) (int, error) { +// UnmarshalUnsafeTimevalSlice is like Timeval.UnmarshalUnsafe, but for a []Timeval. +func UnmarshalUnsafeTimevalSlice(dst []Timeval, src []byte) (int, error) { count := len(dst) if count == 0 { return 0, nil } - size := (*Timespec)(nil).SizeBytes() + size := (*Timeval)(nil).SizeBytes() ptr := unsafe.Pointer(&dst) val := gohacks.Noescape(unsafe.Pointer((*reflect.SliceHeader)(ptr).Data)) @@ -10058,164 +9879,232 @@ func (u *Utime) WriteTo(writer io.Writer) (int64, error) { } // SizeBytes implements marshal.Marshallable.SizeBytes. -func (t *Termios) SizeBytes() int { - return 17 + - 1*NumControlCharacters +func (ts *Timespec) SizeBytes() int { + return 16 } // MarshalBytes implements marshal.Marshallable.MarshalBytes. 
-func (t *Termios) MarshalBytes(dst []byte) { - usermem.ByteOrder.PutUint32(dst[:4], uint32(t.InputFlags)) - dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(t.OutputFlags)) - dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(t.ControlFlags)) - dst = dst[4:] - usermem.ByteOrder.PutUint32(dst[:4], uint32(t.LocalFlags)) - dst = dst[4:] - dst[0] = byte(t.LineDiscipline) - dst = dst[1:] - for idx := 0; idx < NumControlCharacters; idx++ { - dst[0] = byte(t.ControlCharacters[idx]) - dst = dst[1:] - } +func (ts *Timespec) MarshalBytes(dst []byte) { + usermem.ByteOrder.PutUint64(dst[:8], uint64(ts.Sec)) + dst = dst[8:] + usermem.ByteOrder.PutUint64(dst[:8], uint64(ts.Nsec)) + dst = dst[8:] } // UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. -func (t *Termios) UnmarshalBytes(src []byte) { - t.InputFlags = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - t.OutputFlags = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - t.ControlFlags = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - t.LocalFlags = uint32(usermem.ByteOrder.Uint32(src[:4])) - src = src[4:] - t.LineDiscipline = uint8(src[0]) - src = src[1:] - for idx := 0; idx < NumControlCharacters; idx++ { - t.ControlCharacters[idx] = uint8(src[0]) - src = src[1:] - } +func (ts *Timespec) UnmarshalBytes(src []byte) { + ts.Sec = int64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] + ts.Nsec = int64(usermem.ByteOrder.Uint64(src[:8])) + src = src[8:] } // Packed implements marshal.Marshallable.Packed. //go:nosplit -func (t *Termios) Packed() bool { +func (ts *Timespec) Packed() bool { return true } // MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. -func (t *Termios) MarshalUnsafe(dst []byte) { - safecopy.CopyIn(dst, unsafe.Pointer(t)) +func (ts *Timespec) MarshalUnsafe(dst []byte) { + safecopy.CopyIn(dst, unsafe.Pointer(ts)) } // UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. -func (t *Termios) UnmarshalUnsafe(src []byte) { - safecopy.CopyOut(unsafe.Pointer(t), src) +func (ts *Timespec) UnmarshalUnsafe(src []byte) { + safecopy.CopyOut(unsafe.Pointer(ts), src) } // CopyOutN implements marshal.Marshallable.CopyOutN. //go:nosplit -func (t *Termios) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { +func (ts *Timespec) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) - hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(t))) - hdr.Len = t.SizeBytes() - hdr.Cap = t.SizeBytes() + hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(ts))) + hdr.Len = ts.SizeBytes() + hdr.Cap = ts.SizeBytes() length, err := cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay. - // Since we bypassed the compiler's escape analysis, indicate that t + // Since we bypassed the compiler's escape analysis, indicate that ts // must live until the use above. - runtime.KeepAlive(t) // escapes: replaced by intrinsic. + runtime.KeepAlive(ts) // escapes: replaced by intrinsic. return length, err } // CopyOut implements marshal.Marshallable.CopyOut. //go:nosplit -func (t *Termios) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { - return t.CopyOutN(cc, addr, t.SizeBytes()) +func (ts *Timespec) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { + return ts.CopyOutN(cc, addr, ts.SizeBytes()) } // CopyIn implements marshal.Marshallable.CopyIn. 
//go:nosplit -func (t *Termios) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (ts *Timespec) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) - hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(t))) - hdr.Len = t.SizeBytes() - hdr.Cap = t.SizeBytes() + hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(ts))) + hdr.Len = ts.SizeBytes() + hdr.Cap = ts.SizeBytes() length, err := cc.CopyInBytes(addr, buf) // escapes: okay. - // Since we bypassed the compiler's escape analysis, indicate that t + // Since we bypassed the compiler's escape analysis, indicate that ts // must live until the use above. - runtime.KeepAlive(t) // escapes: replaced by intrinsic. + runtime.KeepAlive(ts) // escapes: replaced by intrinsic. return length, err } // WriteTo implements io.WriterTo.WriteTo. -func (t *Termios) WriteTo(writer io.Writer) (int64, error) { +func (ts *Timespec) WriteTo(writer io.Writer) (int64, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) - hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(t))) - hdr.Len = t.SizeBytes() - hdr.Cap = t.SizeBytes() + hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(ts))) + hdr.Len = ts.SizeBytes() + hdr.Cap = ts.SizeBytes() length, err := writer.Write(buf) - // Since we bypassed the compiler's escape analysis, indicate that t + // Since we bypassed the compiler's escape analysis, indicate that ts // must live until the use above. - runtime.KeepAlive(t) // escapes: replaced by intrinsic. + runtime.KeepAlive(ts) // escapes: replaced by intrinsic. return int64(length), err } +// CopyTimespecSliceIn copies in a slice of Timespec objects from the task's memory. +func CopyTimespecSliceIn(cc marshal.CopyContext, addr usermem.Addr, dst []Timespec) (int, error) { + count := len(dst) + if count == 0 { + return 0, nil + } + size := (*Timespec)(nil).SizeBytes() + + ptr := unsafe.Pointer(&dst) + val := gohacks.Noescape(unsafe.Pointer((*reflect.SliceHeader)(ptr).Data)) + + // Construct a slice backed by dst's underlying memory. + var buf []byte + hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) + hdr.Data = uintptr(val) + hdr.Len = size * count + hdr.Cap = size * count + + length, err := cc.CopyInBytes(addr, buf) + // Since we bypassed the compiler's escape analysis, indicate that dst + // must live until the use above. + runtime.KeepAlive(dst) // escapes: replaced by intrinsic. + return length, err +} + +// CopyTimespecSliceOut copies a slice of Timespec objects to the task's memory. +func CopyTimespecSliceOut(cc marshal.CopyContext, addr usermem.Addr, src []Timespec) (int, error) { + count := len(src) + if count == 0 { + return 0, nil + } + size := (*Timespec)(nil).SizeBytes() + + ptr := unsafe.Pointer(&src) + val := gohacks.Noescape(unsafe.Pointer((*reflect.SliceHeader)(ptr).Data)) + + // Construct a slice backed by dst's underlying memory. + var buf []byte + hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) + hdr.Data = uintptr(val) + hdr.Len = size * count + hdr.Cap = size * count + + length, err := cc.CopyOutBytes(addr, buf) + // Since we bypassed the compiler's escape analysis, indicate that src + // must live until the use above. + runtime.KeepAlive(src) // escapes: replaced by intrinsic. + return length, err +} + +// MarshalUnsafeTimespecSlice is like Timespec.MarshalUnsafe, but for a []Timespec. 
+func MarshalUnsafeTimespecSlice(src []Timespec, dst []byte) (int, error) { + count := len(src) + if count == 0 { + return 0, nil + } + size := (*Timespec)(nil).SizeBytes() + + ptr := unsafe.Pointer(&src) + val := gohacks.Noescape(unsafe.Pointer((*reflect.SliceHeader)(ptr).Data)) + + length, err := safecopy.CopyIn(dst[:(size*count)], val) + // Since we bypassed the compiler's escape analysis, indicate that src + // must live until the use above. + runtime.KeepAlive(src) // escapes: replaced by intrinsic. + return length, err +} + +// UnmarshalUnsafeTimespecSlice is like Timespec.UnmarshalUnsafe, but for a []Timespec. +func UnmarshalUnsafeTimespecSlice(dst []Timespec, src []byte) (int, error) { + count := len(dst) + if count == 0 { + return 0, nil + } + size := (*Timespec)(nil).SizeBytes() + + ptr := unsafe.Pointer(&dst) + val := gohacks.Noescape(unsafe.Pointer((*reflect.SliceHeader)(ptr).Data)) + + length, err := safecopy.CopyOut(val, src[:(size*count)]) + // Since we bypassed the compiler's escape analysis, indicate that dst + // must live until the use above. + runtime.KeepAlive(dst) // escapes: replaced by intrinsic. + return length, err +} + // SizeBytes implements marshal.Marshallable.SizeBytes. -func (w *WindowSize) SizeBytes() int { - return 4 + - 1*4 +func (w *Winsize) SizeBytes() int { + return 8 } // MarshalBytes implements marshal.Marshallable.MarshalBytes. -func (w *WindowSize) MarshalBytes(dst []byte) { - usermem.ByteOrder.PutUint16(dst[:2], uint16(w.Rows)) +func (w *Winsize) MarshalBytes(dst []byte) { + usermem.ByteOrder.PutUint16(dst[:2], uint16(w.Row)) dst = dst[2:] - usermem.ByteOrder.PutUint16(dst[:2], uint16(w.Cols)) + usermem.ByteOrder.PutUint16(dst[:2], uint16(w.Col)) + dst = dst[2:] + usermem.ByteOrder.PutUint16(dst[:2], uint16(w.Xpixel)) + dst = dst[2:] + usermem.ByteOrder.PutUint16(dst[:2], uint16(w.Ypixel)) dst = dst[2:] - // Padding: dst[:sizeof(byte)*4] ~= [4]byte{0} - dst = dst[1*(4):] } // UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. -func (w *WindowSize) UnmarshalBytes(src []byte) { - w.Rows = uint16(usermem.ByteOrder.Uint16(src[:2])) +func (w *Winsize) UnmarshalBytes(src []byte) { + w.Row = uint16(usermem.ByteOrder.Uint16(src[:2])) src = src[2:] - w.Cols = uint16(usermem.ByteOrder.Uint16(src[:2])) + w.Col = uint16(usermem.ByteOrder.Uint16(src[:2])) + src = src[2:] + w.Xpixel = uint16(usermem.ByteOrder.Uint16(src[:2])) + src = src[2:] + w.Ypixel = uint16(usermem.ByteOrder.Uint16(src[:2])) src = src[2:] - // Padding: ~ copy([4]byte(w._), src[:sizeof(byte)*4]) - src = src[1*(4):] } // Packed implements marshal.Marshallable.Packed. //go:nosplit -func (w *WindowSize) Packed() bool { +func (w *Winsize) Packed() bool { return true } // MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. -func (w *WindowSize) MarshalUnsafe(dst []byte) { +func (w *Winsize) MarshalUnsafe(dst []byte) { safecopy.CopyIn(dst, unsafe.Pointer(w)) } // UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. -func (w *WindowSize) UnmarshalUnsafe(src []byte) { +func (w *Winsize) UnmarshalUnsafe(src []byte) { safecopy.CopyOut(unsafe.Pointer(w), src) } // CopyOutN implements marshal.Marshallable.CopyOutN. //go:nosplit -func (w *WindowSize) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { +func (w *Winsize) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { // Construct a slice backed by dst's underlying memory. 
var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -10232,13 +10121,13 @@ func (w *WindowSize) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit i // CopyOut implements marshal.Marshallable.CopyOut. //go:nosplit -func (w *WindowSize) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (w *Winsize) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { return w.CopyOutN(cc, addr, w.SizeBytes()) } // CopyIn implements marshal.Marshallable.CopyIn. //go:nosplit -func (w *WindowSize) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (w *Winsize) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -10254,7 +10143,7 @@ func (w *WindowSize) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, err } // WriteTo implements io.WriterTo.WriteTo. -func (w *WindowSize) WriteTo(writer io.Writer) (int64, error) { +func (w *Winsize) WriteTo(writer io.Writer) (int64, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -10270,53 +10159,164 @@ func (w *WindowSize) WriteTo(writer io.Writer) (int64, error) { } // SizeBytes implements marshal.Marshallable.SizeBytes. -func (w *Winsize) SizeBytes() int { - return 8 +func (t *Termios) SizeBytes() int { + return 17 + + 1*NumControlCharacters } // MarshalBytes implements marshal.Marshallable.MarshalBytes. -func (w *Winsize) MarshalBytes(dst []byte) { - usermem.ByteOrder.PutUint16(dst[:2], uint16(w.Row)) - dst = dst[2:] - usermem.ByteOrder.PutUint16(dst[:2], uint16(w.Col)) - dst = dst[2:] - usermem.ByteOrder.PutUint16(dst[:2], uint16(w.Xpixel)) +func (t *Termios) MarshalBytes(dst []byte) { + usermem.ByteOrder.PutUint32(dst[:4], uint32(t.InputFlags)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(t.OutputFlags)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(t.ControlFlags)) + dst = dst[4:] + usermem.ByteOrder.PutUint32(dst[:4], uint32(t.LocalFlags)) + dst = dst[4:] + dst[0] = byte(t.LineDiscipline) + dst = dst[1:] + for idx := 0; idx < NumControlCharacters; idx++ { + dst[0] = byte(t.ControlCharacters[idx]) + dst = dst[1:] + } +} + +// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. +func (t *Termios) UnmarshalBytes(src []byte) { + t.InputFlags = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + t.OutputFlags = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + t.ControlFlags = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + t.LocalFlags = uint32(usermem.ByteOrder.Uint32(src[:4])) + src = src[4:] + t.LineDiscipline = uint8(src[0]) + src = src[1:] + for idx := 0; idx < NumControlCharacters; idx++ { + t.ControlCharacters[idx] = uint8(src[0]) + src = src[1:] + } +} + +// Packed implements marshal.Marshallable.Packed. +//go:nosplit +func (t *Termios) Packed() bool { + return true +} + +// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. +func (t *Termios) MarshalUnsafe(dst []byte) { + safecopy.CopyIn(dst, unsafe.Pointer(t)) +} + +// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. +func (t *Termios) UnmarshalUnsafe(src []byte) { + safecopy.CopyOut(unsafe.Pointer(t), src) +} + +// CopyOutN implements marshal.Marshallable.CopyOutN. 
+//go:nosplit +func (t *Termios) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { + // Construct a slice backed by dst's underlying memory. + var buf []byte + hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) + hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(t))) + hdr.Len = t.SizeBytes() + hdr.Cap = t.SizeBytes() + + length, err := cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay. + // Since we bypassed the compiler's escape analysis, indicate that t + // must live until the use above. + runtime.KeepAlive(t) // escapes: replaced by intrinsic. + return length, err +} + +// CopyOut implements marshal.Marshallable.CopyOut. +//go:nosplit +func (t *Termios) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { + return t.CopyOutN(cc, addr, t.SizeBytes()) +} + +// CopyIn implements marshal.Marshallable.CopyIn. +//go:nosplit +func (t *Termios) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { + // Construct a slice backed by dst's underlying memory. + var buf []byte + hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) + hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(t))) + hdr.Len = t.SizeBytes() + hdr.Cap = t.SizeBytes() + + length, err := cc.CopyInBytes(addr, buf) // escapes: okay. + // Since we bypassed the compiler's escape analysis, indicate that t + // must live until the use above. + runtime.KeepAlive(t) // escapes: replaced by intrinsic. + return length, err +} + +// WriteTo implements io.WriterTo.WriteTo. +func (t *Termios) WriteTo(writer io.Writer) (int64, error) { + // Construct a slice backed by dst's underlying memory. + var buf []byte + hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) + hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(t))) + hdr.Len = t.SizeBytes() + hdr.Cap = t.SizeBytes() + + length, err := writer.Write(buf) + // Since we bypassed the compiler's escape analysis, indicate that t + // must live until the use above. + runtime.KeepAlive(t) // escapes: replaced by intrinsic. + return int64(length), err +} + +// SizeBytes implements marshal.Marshallable.SizeBytes. +func (w *WindowSize) SizeBytes() int { + return 4 + + 1*4 +} + +// MarshalBytes implements marshal.Marshallable.MarshalBytes. +func (w *WindowSize) MarshalBytes(dst []byte) { + usermem.ByteOrder.PutUint16(dst[:2], uint16(w.Rows)) dst = dst[2:] - usermem.ByteOrder.PutUint16(dst[:2], uint16(w.Ypixel)) + usermem.ByteOrder.PutUint16(dst[:2], uint16(w.Cols)) dst = dst[2:] + // Padding: dst[:sizeof(byte)*4] ~= [4]byte{0} + dst = dst[1*(4):] } // UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. -func (w *Winsize) UnmarshalBytes(src []byte) { - w.Row = uint16(usermem.ByteOrder.Uint16(src[:2])) - src = src[2:] - w.Col = uint16(usermem.ByteOrder.Uint16(src[:2])) - src = src[2:] - w.Xpixel = uint16(usermem.ByteOrder.Uint16(src[:2])) +func (w *WindowSize) UnmarshalBytes(src []byte) { + w.Rows = uint16(usermem.ByteOrder.Uint16(src[:2])) src = src[2:] - w.Ypixel = uint16(usermem.ByteOrder.Uint16(src[:2])) + w.Cols = uint16(usermem.ByteOrder.Uint16(src[:2])) src = src[2:] + // Padding: ~ copy([4]byte(w._), src[:sizeof(byte)*4]) + src = src[1*(4):] } // Packed implements marshal.Marshallable.Packed. //go:nosplit -func (w *Winsize) Packed() bool { +func (w *WindowSize) Packed() bool { return true } // MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. 
-func (w *Winsize) MarshalUnsafe(dst []byte) { +func (w *WindowSize) MarshalUnsafe(dst []byte) { safecopy.CopyIn(dst, unsafe.Pointer(w)) } // UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. -func (w *Winsize) UnmarshalUnsafe(src []byte) { +func (w *WindowSize) UnmarshalUnsafe(src []byte) { safecopy.CopyOut(unsafe.Pointer(w), src) } // CopyOutN implements marshal.Marshallable.CopyOutN. //go:nosplit -func (w *Winsize) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { +func (w *WindowSize) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -10333,13 +10333,13 @@ func (w *Winsize) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) // CopyOut implements marshal.Marshallable.CopyOut. //go:nosplit -func (w *Winsize) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (w *WindowSize) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) { return w.CopyOutN(cc, addr, w.SizeBytes()) } // CopyIn implements marshal.Marshallable.CopyIn. //go:nosplit -func (w *Winsize) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { +func (w *WindowSize) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) @@ -10355,7 +10355,7 @@ func (w *Winsize) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) } // WriteTo implements io.WriterTo.WriteTo. -func (w *Winsize) WriteTo(writer io.Writer) (int64, error) { +func (w *WindowSize) WriteTo(writer io.Writer) (int64, error) { // Construct a slice backed by dst's underlying memory. var buf []byte hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) |
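
The hunks above all follow the same generated marshalling pattern: a fixed SizeBytes, field-by-field MarshalBytes/UnmarshalBytes that advance the buffer with usermem.ByteOrder, and unsafe CopyIn/CopyOut/WriteTo paths that view the struct's own memory as a byte slice and pin it with runtime.KeepAlive. The sketch below is a minimal, hand-written illustration of that pattern and is not part of the generated file: demoHeader and asBytes are hypothetical names invented here, binary.LittleEndian stands in for usermem.ByteOrder, and unsafe.Slice stands in for the reflect.SliceHeader construction used by the generator.

package main

import (
	"encoding/binary"
	"fmt"
	"runtime"
	"unsafe"
)

// demoHeader mirrors the shape of a small, packed ABI struct.
type demoHeader struct {
	Version uint32
	Pid     int32
}

// SizeBytes reports the wire size of the struct.
func (h *demoHeader) SizeBytes() int { return 8 }

// MarshalBytes writes each field in order and advances dst as it goes,
// the same shape as the generated MarshalBytes implementations.
func (h *demoHeader) MarshalBytes(dst []byte) {
	binary.LittleEndian.PutUint32(dst[:4], h.Version)
	dst = dst[4:]
	binary.LittleEndian.PutUint32(dst[:4], uint32(h.Pid))
}

// UnmarshalBytes is the inverse: read each field and advance src.
func (h *demoHeader) UnmarshalBytes(src []byte) {
	h.Version = binary.LittleEndian.Uint32(src[:4])
	src = src[4:]
	h.Pid = int32(binary.LittleEndian.Uint32(src[:4]))
}

// asBytes views the struct's own memory as a byte slice, the same idea as
// the slice-header construction in the generated CopyIn/CopyOut/WriteTo
// methods. The caller must keep h alive while the returned slice is in use.
func asBytes(h *demoHeader) []byte {
	return unsafe.Slice((*byte)(unsafe.Pointer(h)), h.SizeBytes())
}

func main() {
	in := demoHeader{Version: 3, Pid: 42}
	buf := make([]byte, in.SizeBytes())
	in.MarshalBytes(buf)

	var out demoHeader
	out.UnmarshalBytes(buf)
	fmt.Println(out.Version, out.Pid) // 3 42

	// Zero-copy view of in's memory, only valid while in stays live.
	raw := asBytes(&in)
	fmt.Println(len(raw)) // 8
	runtime.KeepAlive(&in)
}

In the generated code the zero-copy view feeds cc.CopyOutBytes, cc.CopyInBytes, or writer.Write rather than being returned, but the lifetime rule is the same: because the slice bypasses escape analysis, the object must remain live until the copy completes, which is what the runtime.KeepAlive calls in each hunk guarantee.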