path: root/pkg/sentry/arch
Diffstat (limited to 'pkg/sentry/arch')
-rw-r--r--  pkg/sentry/arch/BUILD                                     50
-rw-r--r--  pkg/sentry/arch/arch_aarch64_abi_autogen_unsafe.go         9
-rw-r--r--  pkg/sentry/arch/arch_aarch64_state_autogen.go             72
-rw-r--r--  pkg/sentry/arch/arch_abi_autogen_unsafe.go               372
-rw-r--r--  pkg/sentry/arch/arch_amd64_abi_autogen_unsafe.go         407
-rw-r--r--  pkg/sentry/arch/arch_amd64_state_autogen.go               41
-rw-r--r--  pkg/sentry/arch/arch_arm64_abi_autogen_unsafe.go         592
-rw-r--r--  pkg/sentry/arch/arch_arm64_state_autogen.go               41
-rw-r--r--  pkg/sentry/arch/arch_state_autogen.go                    174
-rw-r--r--  pkg/sentry/arch/arch_unsafe_abi_autogen_unsafe.go          7
-rw-r--r--  pkg/sentry/arch/arch_unsafe_state_autogen.go               3
-rw-r--r--  pkg/sentry/arch/arch_x86_abi_autogen_unsafe.go            10
-rw-r--r--  pkg/sentry/arch/arch_x86_impl_abi_autogen_unsafe.go        9
-rw-r--r--  pkg/sentry/arch/arch_x86_impl_state_autogen.go            41
-rw-r--r--  pkg/sentry/arch/arch_x86_state_autogen.go                 37
-rw-r--r--  pkg/sentry/arch/registers.proto                           93
-rw-r--r--  pkg/sentry/arch/registers_go_proto/registers.pb.go       863
17 files changed, 2678 insertions, 143 deletions
diff --git a/pkg/sentry/arch/BUILD b/pkg/sentry/arch/BUILD
deleted file mode 100644
index 4af4d6e84..000000000
--- a/pkg/sentry/arch/BUILD
+++ /dev/null
@@ -1,50 +0,0 @@
-load("//tools:defs.bzl", "go_library", "proto_library")
-
-package(licenses = ["notice"])
-
-go_library(
- name = "arch",
- srcs = [
- "aligned.go",
- "arch.go",
- "arch_aarch64.go",
- "arch_amd64.go",
- "arch_amd64.s",
- "arch_arm64.go",
- "arch_state_x86.go",
- "arch_x86.go",
- "arch_x86_impl.go",
- "auxv.go",
- "signal.go",
- "signal_act.go",
- "signal_amd64.go",
- "signal_arm64.go",
- "signal_info.go",
- "signal_stack.go",
- "stack.go",
- "stack_unsafe.go",
- "syscalls_amd64.go",
- "syscalls_arm64.go",
- ],
- marshal = True,
- visibility = ["//:sandbox"],
- deps = [
- ":registers_go_proto",
- "//pkg/abi/linux",
- "//pkg/context",
- "//pkg/cpuid",
- "//pkg/log",
- "//pkg/marshal",
- "//pkg/marshal/primitive",
- "//pkg/sentry/limits",
- "//pkg/sync",
- "//pkg/syserror",
- "//pkg/usermem",
- ],
-)
-
-proto_library(
- name = "registers",
- srcs = ["registers.proto"],
- visibility = ["//visibility:public"],
-)
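
The deleted BUILD file is what used to drive code generation for this package: marshal = True runs go_marshal over the listed sources, and stateify processes any type annotated as savable. The files added below are the checked-in output of those generators. As a minimal sketch of the input side (reconstructed from the generated output below; the real definitions live in the deleted sources such as signal_stack.go), a type opts in with directives like:

    // +marshal
    // +stateify savable
    type SignalStack struct {
        Addr  uint64
        Flags uint32
        _     uint32 // explicit padding; keeps the layout packed
        Size  uint64
    }

go_marshal then emits the SizeBytes/MarshalBytes/CopyIn/CopyOut methods seen in arch_abi_autogen_unsafe.go, and stateify emits the StateSave/StateLoad methods seen in the *_state_autogen.go files.
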
diff --git a/pkg/sentry/arch/arch_aarch64_abi_autogen_unsafe.go b/pkg/sentry/arch/arch_aarch64_abi_autogen_unsafe.go
new file mode 100644
index 000000000..f7a3597d4
--- /dev/null
+++ b/pkg/sentry/arch/arch_aarch64_abi_autogen_unsafe.go
@@ -0,0 +1,9 @@
+// Automatically generated marshal implementation. See tools/go_marshal.
+
+// +build arm64
+
+package arch
+
+import (
+)
+
diff --git a/pkg/sentry/arch/arch_aarch64_state_autogen.go b/pkg/sentry/arch/arch_aarch64_state_autogen.go
new file mode 100644
index 000000000..817679b05
--- /dev/null
+++ b/pkg/sentry/arch/arch_aarch64_state_autogen.go
@@ -0,0 +1,72 @@
+// automatically generated by stateify.
+
+// +build arm64
+
+package arch
+
+import (
+ "gvisor.dev/gvisor/pkg/state"
+)
+
+func (r *Registers) StateTypeName() string {
+ return "pkg/sentry/arch.Registers"
+}
+
+func (r *Registers) StateFields() []string {
+ return []string{
+ "PtraceRegs",
+ "TPIDR_EL0",
+ }
+}
+
+func (r *Registers) beforeSave() {}
+
+func (r *Registers) StateSave(stateSinkObject state.Sink) {
+ r.beforeSave()
+ stateSinkObject.Save(0, &r.PtraceRegs)
+ stateSinkObject.Save(1, &r.TPIDR_EL0)
+}
+
+func (r *Registers) afterLoad() {}
+
+func (r *Registers) StateLoad(stateSourceObject state.Source) {
+ stateSourceObject.Load(0, &r.PtraceRegs)
+ stateSourceObject.Load(1, &r.TPIDR_EL0)
+}
+
+func (s *State) StateTypeName() string {
+ return "pkg/sentry/arch.State"
+}
+
+func (s *State) StateFields() []string {
+ return []string{
+ "Regs",
+ "aarch64FPState",
+ "FeatureSet",
+ "OrigR0",
+ }
+}
+
+func (s *State) beforeSave() {}
+
+func (s *State) StateSave(stateSinkObject state.Sink) {
+ s.beforeSave()
+ stateSinkObject.Save(0, &s.Regs)
+ stateSinkObject.Save(1, &s.aarch64FPState)
+ stateSinkObject.Save(2, &s.FeatureSet)
+ stateSinkObject.Save(3, &s.OrigR0)
+}
+
+func (s *State) afterLoad() {}
+
+func (s *State) StateLoad(stateSourceObject state.Source) {
+ stateSourceObject.Load(0, &s.Regs)
+ stateSourceObject.LoadWait(1, &s.aarch64FPState)
+ stateSourceObject.Load(2, &s.FeatureSet)
+ stateSourceObject.Load(3, &s.OrigR0)
+}
+
+func init() {
+ state.Register((*Registers)(nil))
+ state.Register((*State)(nil))
+}
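
The stateify output above illustrates the save/restore contract: StateFields fixes a positional index for each field, and StateSave/StateLoad address fields by that index, so field order must stay stable for checkpoint compatibility. Note that aarch64FPState is restored with LoadWait rather than Load, which defers the access until the source value has been fully loaded. A sketch of the annotated source that would produce this (field types and the state:"wait" tag syntax are assumptions inferred from the generated code and the package's deps):

    // +stateify savable
    type State struct {
        Regs           Registers
        aarch64FPState `state:"wait"` // restored via LoadWait above
        FeatureSet     *cpuid.FeatureSet
        OrigR0         uint64
    }
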
diff --git a/pkg/sentry/arch/arch_abi_autogen_unsafe.go b/pkg/sentry/arch/arch_abi_autogen_unsafe.go
new file mode 100644
index 000000000..fe68f921d
--- /dev/null
+++ b/pkg/sentry/arch/arch_abi_autogen_unsafe.go
@@ -0,0 +1,372 @@
+// Automatically generated marshal implementation. See tools/go_marshal.
+
+// +build 386 amd64 arm64
+
+package arch
+
+import (
+ "gvisor.dev/gvisor/pkg/abi/linux"
+ "gvisor.dev/gvisor/pkg/gohacks"
+ "gvisor.dev/gvisor/pkg/marshal"
+ "gvisor.dev/gvisor/pkg/safecopy"
+ "gvisor.dev/gvisor/pkg/usermem"
+ "io"
+ "reflect"
+ "runtime"
+ "unsafe"
+)
+
+// Marshallable types used by this file.
+var _ marshal.Marshallable = (*SignalAct)(nil)
+var _ marshal.Marshallable = (*SignalInfo)(nil)
+var _ marshal.Marshallable = (*SignalStack)(nil)
+var _ marshal.Marshallable = (*linux.SignalSet)(nil)
+
+// SizeBytes implements marshal.Marshallable.SizeBytes.
+func (s *SignalAct) SizeBytes() int {
+ return 24 +
+ (*linux.SignalSet)(nil).SizeBytes()
+}
+
+// MarshalBytes implements marshal.Marshallable.MarshalBytes.
+func (s *SignalAct) MarshalBytes(dst []byte) {
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Handler))
+ dst = dst[8:]
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Flags))
+ dst = dst[8:]
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Restorer))
+ dst = dst[8:]
+ s.Mask.MarshalBytes(dst[:s.Mask.SizeBytes()])
+ dst = dst[s.Mask.SizeBytes():]
+}
+
+// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes.
+func (s *SignalAct) UnmarshalBytes(src []byte) {
+ s.Handler = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ s.Flags = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ s.Restorer = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ s.Mask.UnmarshalBytes(src[:s.Mask.SizeBytes()])
+ src = src[s.Mask.SizeBytes():]
+}
+
+// Packed implements marshal.Marshallable.Packed.
+//go:nosplit
+func (s *SignalAct) Packed() bool {
+ return s.Mask.Packed()
+}
+
+// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.
+func (s *SignalAct) MarshalUnsafe(dst []byte) {
+ if s.Mask.Packed() {
+ safecopy.CopyIn(dst, unsafe.Pointer(s))
+ } else {
+ // Type SignalAct doesn't have a packed layout in memory, fallback to MarshalBytes.
+ s.MarshalBytes(dst)
+ }
+}
+
+// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe.
+func (s *SignalAct) UnmarshalUnsafe(src []byte) {
+ if s.Mask.Packed() {
+ safecopy.CopyOut(unsafe.Pointer(s), src)
+ } else {
+ // Type SignalAct doesn't have a packed layout in memory, fallback to UnmarshalBytes.
+ s.UnmarshalBytes(src)
+ }
+}
+
+// CopyOutN implements marshal.Marshallable.CopyOutN.
+//go:nosplit
+func (s *SignalAct) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) {
+ if !s.Mask.Packed() {
+ // Type SignalAct doesn't have a packed layout in memory, fall back to MarshalBytes.
+ buf := cc.CopyScratchBuffer(s.SizeBytes()) // escapes: okay.
+ s.MarshalBytes(buf) // escapes: fallback.
+ return cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay.
+ }
+
+ // Construct a slice backed by dst's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(s)))
+ hdr.Len = s.SizeBytes()
+ hdr.Cap = s.SizeBytes()
+
+ length, err := cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay.
+ // Since we bypassed the compiler's escape analysis, indicate that s
+ // must live until the use above.
+ runtime.KeepAlive(s) // escapes: replaced by intrinsic.
+ return length, err
+}
+
+// CopyOut implements marshal.Marshallable.CopyOut.
+//go:nosplit
+func (s *SignalAct) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) {
+ return s.CopyOutN(cc, addr, s.SizeBytes())
+}
+
+// CopyIn implements marshal.Marshallable.CopyIn.
+//go:nosplit
+func (s *SignalAct) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) {
+ if !s.Mask.Packed() {
+ // Type SignalAct doesn't have a packed layout in memory, fall back to UnmarshalBytes.
+ buf := cc.CopyScratchBuffer(s.SizeBytes()) // escapes: okay.
+ length, err := cc.CopyInBytes(addr, buf) // escapes: okay.
+ // Unmarshal unconditionally. If we had a short copy-in, this results in a
+ // partially unmarshalled struct.
+ s.UnmarshalBytes(buf) // escapes: fallback.
+ return length, err
+ }
+
+ // Construct a slice backed by dst's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(s)))
+ hdr.Len = s.SizeBytes()
+ hdr.Cap = s.SizeBytes()
+
+ length, err := cc.CopyInBytes(addr, buf) // escapes: okay.
+ // Since we bypassed the compiler's escape analysis, indicate that s
+ // must live until the use above.
+ runtime.KeepAlive(s) // escapes: replaced by intrinsic.
+ return length, err
+}
+
+// WriteTo implements io.WriterTo.WriteTo.
+func (s *SignalAct) WriteTo(writer io.Writer) (int64, error) {
+ if !s.Mask.Packed() {
+ // Type SignalAct doesn't have a packed layout in memory, fall back to MarshalBytes.
+ buf := make([]byte, s.SizeBytes())
+ s.MarshalBytes(buf)
+ length, err := writer.Write(buf)
+ return int64(length), err
+ }
+
+ // Construct a slice backed by dst's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(s)))
+ hdr.Len = s.SizeBytes()
+ hdr.Cap = s.SizeBytes()
+
+ length, err := writer.Write(buf)
+ // Since we bypassed the compiler's escape analysis, indicate that s
+ // must live until the use above.
+ runtime.KeepAlive(s) // escapes: replaced by intrinsic.
+ return int64(length), err
+}
+
+// SizeBytes implements marshal.Marshallable.SizeBytes.
+func (s *SignalStack) SizeBytes() int {
+ return 24
+}
+
+// MarshalBytes implements marshal.Marshallable.MarshalBytes.
+func (s *SignalStack) MarshalBytes(dst []byte) {
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Addr))
+ dst = dst[8:]
+ usermem.ByteOrder.PutUint32(dst[:4], uint32(s.Flags))
+ dst = dst[4:]
+ // Padding: dst[:sizeof(uint32)] ~= uint32(0)
+ dst = dst[4:]
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Size))
+ dst = dst[8:]
+}
+
+// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes.
+func (s *SignalStack) UnmarshalBytes(src []byte) {
+ s.Addr = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ s.Flags = uint32(usermem.ByteOrder.Uint32(src[:4]))
+ src = src[4:]
+ // Padding: var _ uint32 ~= src[:sizeof(uint32)]
+ src = src[4:]
+ s.Size = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+}
+
+// Packed implements marshal.Marshallable.Packed.
+//go:nosplit
+func (s *SignalStack) Packed() bool {
+ return true
+}
+
+// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.
+func (s *SignalStack) MarshalUnsafe(dst []byte) {
+ safecopy.CopyIn(dst, unsafe.Pointer(s))
+}
+
+// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe.
+func (s *SignalStack) UnmarshalUnsafe(src []byte) {
+ safecopy.CopyOut(unsafe.Pointer(s), src)
+}
+
+// CopyOutN implements marshal.Marshallable.CopyOutN.
+//go:nosplit
+func (s *SignalStack) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) {
+ // Construct a slice backed by dst's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(s)))
+ hdr.Len = s.SizeBytes()
+ hdr.Cap = s.SizeBytes()
+
+ length, err := cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay.
+ // Since we bypassed the compiler's escape analysis, indicate that s
+ // must live until the use above.
+ runtime.KeepAlive(s) // escapes: replaced by intrinsic.
+ return length, err
+}
+
+// CopyOut implements marshal.Marshallable.CopyOut.
+//go:nosplit
+func (s *SignalStack) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) {
+ return s.CopyOutN(cc, addr, s.SizeBytes())
+}
+
+// CopyIn implements marshal.Marshallable.CopyIn.
+//go:nosplit
+func (s *SignalStack) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) {
+ // Construct a slice backed by dst's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(s)))
+ hdr.Len = s.SizeBytes()
+ hdr.Cap = s.SizeBytes()
+
+ length, err := cc.CopyInBytes(addr, buf) // escapes: okay.
+ // Since we bypassed the compiler's escape analysis, indicate that s
+ // must live until the use above.
+ runtime.KeepAlive(s) // escapes: replaced by intrinsic.
+ return length, err
+}
+
+// WriteTo implements io.WriterTo.WriteTo.
+func (s *SignalStack) WriteTo(writer io.Writer) (int64, error) {
+ // Construct a slice backed by dst's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(s)))
+ hdr.Len = s.SizeBytes()
+ hdr.Cap = s.SizeBytes()
+
+ length, err := writer.Write(buf)
+ // Since we bypassed the compiler's escape analysis, indicate that s
+ // must live until the use above.
+ runtime.KeepAlive(s) // escapes: replaced by intrinsic.
+ return int64(length), err
+}
+
+// SizeBytes implements marshal.Marshallable.SizeBytes.
+func (s *SignalInfo) SizeBytes() int {
+ return 16 +
+ 1*(128-16)
+}
+
+// MarshalBytes implements marshal.Marshallable.MarshalBytes.
+func (s *SignalInfo) MarshalBytes(dst []byte) {
+ usermem.ByteOrder.PutUint32(dst[:4], uint32(s.Signo))
+ dst = dst[4:]
+ usermem.ByteOrder.PutUint32(dst[:4], uint32(s.Errno))
+ dst = dst[4:]
+ usermem.ByteOrder.PutUint32(dst[:4], uint32(s.Code))
+ dst = dst[4:]
+ // Padding: dst[:sizeof(uint32)] ~= uint32(0)
+ dst = dst[4:]
+ for idx := 0; idx < (128-16); idx++ {
+ dst[0] = byte(s.Fields[idx])
+ dst = dst[1:]
+ }
+}
+
+// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes.
+func (s *SignalInfo) UnmarshalBytes(src []byte) {
+ s.Signo = int32(usermem.ByteOrder.Uint32(src[:4]))
+ src = src[4:]
+ s.Errno = int32(usermem.ByteOrder.Uint32(src[:4]))
+ src = src[4:]
+ s.Code = int32(usermem.ByteOrder.Uint32(src[:4]))
+ src = src[4:]
+ // Padding: var _ uint32 ~= src[:sizeof(uint32)]
+ src = src[4:]
+ for idx := 0; idx < (128-16); idx++ {
+ s.Fields[idx] = src[0]
+ src = src[1:]
+ }
+}
+
+// Packed implements marshal.Marshallable.Packed.
+//go:nosplit
+func (s *SignalInfo) Packed() bool {
+ return true
+}
+
+// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.
+func (s *SignalInfo) MarshalUnsafe(dst []byte) {
+ safecopy.CopyIn(dst, unsafe.Pointer(s))
+}
+
+// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe.
+func (s *SignalInfo) UnmarshalUnsafe(src []byte) {
+ safecopy.CopyOut(unsafe.Pointer(s), src)
+}
+
+// CopyOutN implements marshal.Marshallable.CopyOutN.
+//go:nosplit
+func (s *SignalInfo) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) {
+ // Construct a slice backed by dst's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(s)))
+ hdr.Len = s.SizeBytes()
+ hdr.Cap = s.SizeBytes()
+
+ length, err := cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay.
+ // Since we bypassed the compiler's escape analysis, indicate that s
+ // must live until the use above.
+ runtime.KeepAlive(s) // escapes: replaced by intrinsic.
+ return length, err
+}
+
+// CopyOut implements marshal.Marshallable.CopyOut.
+//go:nosplit
+func (s *SignalInfo) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) {
+ return s.CopyOutN(cc, addr, s.SizeBytes())
+}
+
+// CopyIn implements marshal.Marshallable.CopyIn.
+//go:nosplit
+func (s *SignalInfo) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) {
+ // Construct a slice backed by dst's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(s)))
+ hdr.Len = s.SizeBytes()
+ hdr.Cap = s.SizeBytes()
+
+ length, err := cc.CopyInBytes(addr, buf) // escapes: okay.
+ // Since we bypassed the compiler's escape analysis, indicate that s
+ // must live until the use above.
+ runtime.KeepAlive(s) // escapes: replaced by intrinsic.
+ return length, err
+}
+
+// WriteTo implements io.WriterTo.WriteTo.
+func (s *SignalInfo) WriteTo(writer io.Writer) (int64, error) {
+ // Construct a slice backed by dst's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(s)))
+ hdr.Len = s.SizeBytes()
+ hdr.Cap = s.SizeBytes()
+
+ length, err := writer.Write(buf)
+ // Since we bypassed the compiler's escape analysis, indicate that s
+ // must live until the use above.
+ runtime.KeepAlive(s) // escapes: replaced by intrinsic.
+ return int64(length), err
+}
+
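
Every marshallable type above follows the same two-path scheme: Packed() reports whether the in-memory layout has no implicit padding; if it does, MarshalUnsafe/CopyOut alias the struct's memory directly (via a hand-built reflect.SliceHeader plus gohacks.Noescape, with runtime.KeepAlive pinning the object past the copy), and otherwise they fall back to the portable, field-by-field MarshalBytes. A standalone sketch of the aliasing trick using only the standard library (gohacks.Noescape is gvisor-internal and omitted here):

    package main

    import (
        "fmt"
        "reflect"
        "runtime"
        "unsafe"
    )

    type pair struct{ A, B uint64 } // no padding, so the layout is "packed"

    // asBytes returns a []byte view over p's memory without copying.
    func asBytes(p *pair) []byte {
        var buf []byte
        hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
        hdr.Data = uintptr(unsafe.Pointer(p))
        hdr.Len = int(unsafe.Sizeof(*p))
        hdr.Cap = int(unsafe.Sizeof(*p))
        return buf
    }

    func main() {
        p := &pair{A: 1, B: 2}
        b := asBytes(p)
        fmt.Println(len(b), b[0]) // 16 1 on a little-endian machine
        // The compiler cannot see through hdr.Data, so keep p alive
        // until the last use of b, exactly as the generated code does.
        runtime.KeepAlive(p)
    }
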
diff --git a/pkg/sentry/arch/arch_amd64_abi_autogen_unsafe.go b/pkg/sentry/arch/arch_amd64_abi_autogen_unsafe.go
new file mode 100644
index 000000000..466bf889a
--- /dev/null
+++ b/pkg/sentry/arch/arch_amd64_abi_autogen_unsafe.go
@@ -0,0 +1,407 @@
+// Automatically generated marshal implementation. See tools/go_marshal.
+
+// +build amd64
+// +build amd64
+// +build amd64
+
+package arch
+
+import (
+ "gvisor.dev/gvisor/pkg/abi/linux"
+ "gvisor.dev/gvisor/pkg/gohacks"
+ "gvisor.dev/gvisor/pkg/marshal"
+ "gvisor.dev/gvisor/pkg/safecopy"
+ "gvisor.dev/gvisor/pkg/usermem"
+ "io"
+ "reflect"
+ "runtime"
+ "unsafe"
+)
+
+// Marshallable types used by this file.
+var _ marshal.Marshallable = (*SignalContext64)(nil)
+var _ marshal.Marshallable = (*SignalStack)(nil)
+var _ marshal.Marshallable = (*UContext64)(nil)
+var _ marshal.Marshallable = (*linux.SignalSet)(nil)
+
+// SizeBytes implements marshal.Marshallable.SizeBytes.
+func (s *SignalContext64) SizeBytes() int {
+ return 184 +
+ (*linux.SignalSet)(nil).SizeBytes() +
+ 8*8
+}
+
+// MarshalBytes implements marshal.Marshallable.MarshalBytes.
+func (s *SignalContext64) MarshalBytes(dst []byte) {
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(s.R8))
+ dst = dst[8:]
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(s.R9))
+ dst = dst[8:]
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(s.R10))
+ dst = dst[8:]
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(s.R11))
+ dst = dst[8:]
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(s.R12))
+ dst = dst[8:]
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(s.R13))
+ dst = dst[8:]
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(s.R14))
+ dst = dst[8:]
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(s.R15))
+ dst = dst[8:]
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Rdi))
+ dst = dst[8:]
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Rsi))
+ dst = dst[8:]
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Rbp))
+ dst = dst[8:]
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Rbx))
+ dst = dst[8:]
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Rdx))
+ dst = dst[8:]
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Rax))
+ dst = dst[8:]
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Rcx))
+ dst = dst[8:]
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Rsp))
+ dst = dst[8:]
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Rip))
+ dst = dst[8:]
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Eflags))
+ dst = dst[8:]
+ usermem.ByteOrder.PutUint16(dst[:2], uint16(s.Cs))
+ dst = dst[2:]
+ usermem.ByteOrder.PutUint16(dst[:2], uint16(s.Gs))
+ dst = dst[2:]
+ usermem.ByteOrder.PutUint16(dst[:2], uint16(s.Fs))
+ dst = dst[2:]
+ usermem.ByteOrder.PutUint16(dst[:2], uint16(s.Ss))
+ dst = dst[2:]
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Err))
+ dst = dst[8:]
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Trapno))
+ dst = dst[8:]
+ s.Oldmask.MarshalBytes(dst[:s.Oldmask.SizeBytes()])
+ dst = dst[s.Oldmask.SizeBytes():]
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Cr2))
+ dst = dst[8:]
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Fpstate))
+ dst = dst[8:]
+ for idx := 0; idx < 8; idx++ {
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Reserved[idx]))
+ dst = dst[8:]
+ }
+}
+
+// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes.
+func (s *SignalContext64) UnmarshalBytes(src []byte) {
+ s.R8 = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ s.R9 = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ s.R10 = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ s.R11 = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ s.R12 = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ s.R13 = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ s.R14 = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ s.R15 = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ s.Rdi = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ s.Rsi = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ s.Rbp = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ s.Rbx = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ s.Rdx = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ s.Rax = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ s.Rcx = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ s.Rsp = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ s.Rip = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ s.Eflags = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ s.Cs = uint16(usermem.ByteOrder.Uint16(src[:2]))
+ src = src[2:]
+ s.Gs = uint16(usermem.ByteOrder.Uint16(src[:2]))
+ src = src[2:]
+ s.Fs = uint16(usermem.ByteOrder.Uint16(src[:2]))
+ src = src[2:]
+ s.Ss = uint16(usermem.ByteOrder.Uint16(src[:2]))
+ src = src[2:]
+ s.Err = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ s.Trapno = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ s.Oldmask.UnmarshalBytes(src[:s.Oldmask.SizeBytes()])
+ src = src[s.Oldmask.SizeBytes():]
+ s.Cr2 = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ s.Fpstate = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ for idx := 0; idx < 8; idx++ {
+ s.Reserved[idx] = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ }
+}
+
+// Packed implements marshal.Marshallable.Packed.
+//go:nosplit
+func (s *SignalContext64) Packed() bool {
+ return s.Oldmask.Packed()
+}
+
+// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.
+func (s *SignalContext64) MarshalUnsafe(dst []byte) {
+ if s.Oldmask.Packed() {
+ safecopy.CopyIn(dst, unsafe.Pointer(s))
+ } else {
+ // Type SignalContext64 doesn't have a packed layout in memory, fallback to MarshalBytes.
+ s.MarshalBytes(dst)
+ }
+}
+
+// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe.
+func (s *SignalContext64) UnmarshalUnsafe(src []byte) {
+ if s.Oldmask.Packed() {
+ safecopy.CopyOut(unsafe.Pointer(s), src)
+ } else {
+ // Type SignalContext64 doesn't have a packed layout in memory, fallback to UnmarshalBytes.
+ s.UnmarshalBytes(src)
+ }
+}
+
+// CopyOutN implements marshal.Marshallable.CopyOutN.
+//go:nosplit
+func (s *SignalContext64) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) {
+ if !s.Oldmask.Packed() {
+ // Type SignalContext64 doesn't have a packed layout in memory, fall back to MarshalBytes.
+ buf := cc.CopyScratchBuffer(s.SizeBytes()) // escapes: okay.
+ s.MarshalBytes(buf) // escapes: fallback.
+ return cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay.
+ }
+
+ // Construct a slice backed by dst's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(s)))
+ hdr.Len = s.SizeBytes()
+ hdr.Cap = s.SizeBytes()
+
+ length, err := cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay.
+ // Since we bypassed the compiler's escape analysis, indicate that s
+ // must live until the use above.
+ runtime.KeepAlive(s) // escapes: replaced by intrinsic.
+ return length, err
+}
+
+// CopyOut implements marshal.Marshallable.CopyOut.
+//go:nosplit
+func (s *SignalContext64) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) {
+ return s.CopyOutN(cc, addr, s.SizeBytes())
+}
+
+// CopyIn implements marshal.Marshallable.CopyIn.
+//go:nosplit
+func (s *SignalContext64) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) {
+ if !s.Oldmask.Packed() {
+ // Type SignalContext64 doesn't have a packed layout in memory, fall back to UnmarshalBytes.
+ buf := cc.CopyScratchBuffer(s.SizeBytes()) // escapes: okay.
+ length, err := cc.CopyInBytes(addr, buf) // escapes: okay.
+ // Unmarshal unconditionally. If we had a short copy-in, this results in a
+ // partially unmarshalled struct.
+ s.UnmarshalBytes(buf) // escapes: fallback.
+ return length, err
+ }
+
+ // Construct a slice backed by dst's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(s)))
+ hdr.Len = s.SizeBytes()
+ hdr.Cap = s.SizeBytes()
+
+ length, err := cc.CopyInBytes(addr, buf) // escapes: okay.
+ // Since we bypassed the compiler's escape analysis, indicate that s
+ // must live until the use above.
+ runtime.KeepAlive(s) // escapes: replaced by intrinsic.
+ return length, err
+}
+
+// WriteTo implements io.WriterTo.WriteTo.
+func (s *SignalContext64) WriteTo(writer io.Writer) (int64, error) {
+ if !s.Oldmask.Packed() {
+ // Type SignalContext64 doesn't have a packed layout in memory, fall back to MarshalBytes.
+ buf := make([]byte, s.SizeBytes())
+ s.MarshalBytes(buf)
+ length, err := writer.Write(buf)
+ return int64(length), err
+ }
+
+ // Construct a slice backed by dst's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(s)))
+ hdr.Len = s.SizeBytes()
+ hdr.Cap = s.SizeBytes()
+
+ length, err := writer.Write(buf)
+ // Since we bypassed the compiler's escape analysis, indicate that s
+ // must live until the use above.
+ runtime.KeepAlive(s) // escapes: replaced by intrinsic.
+ return int64(length), err
+}
+
+// SizeBytes implements marshal.Marshallable.SizeBytes.
+func (u *UContext64) SizeBytes() int {
+ return 16 +
+ (*SignalStack)(nil).SizeBytes() +
+ (*SignalContext64)(nil).SizeBytes() +
+ (*linux.SignalSet)(nil).SizeBytes()
+}
+
+// MarshalBytes implements marshal.Marshallable.MarshalBytes.
+func (u *UContext64) MarshalBytes(dst []byte) {
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(u.Flags))
+ dst = dst[8:]
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(u.Link))
+ dst = dst[8:]
+ u.Stack.MarshalBytes(dst[:u.Stack.SizeBytes()])
+ dst = dst[u.Stack.SizeBytes():]
+ u.MContext.MarshalBytes(dst[:u.MContext.SizeBytes()])
+ dst = dst[u.MContext.SizeBytes():]
+ u.Sigset.MarshalBytes(dst[:u.Sigset.SizeBytes()])
+ dst = dst[u.Sigset.SizeBytes():]
+}
+
+// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes.
+func (u *UContext64) UnmarshalBytes(src []byte) {
+ u.Flags = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ u.Link = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ u.Stack.UnmarshalBytes(src[:u.Stack.SizeBytes()])
+ src = src[u.Stack.SizeBytes():]
+ u.MContext.UnmarshalBytes(src[:u.MContext.SizeBytes()])
+ src = src[u.MContext.SizeBytes():]
+ u.Sigset.UnmarshalBytes(src[:u.Sigset.SizeBytes()])
+ src = src[u.Sigset.SizeBytes():]
+}
+
+// Packed implements marshal.Marshallable.Packed.
+//go:nosplit
+func (u *UContext64) Packed() bool {
+ return u.MContext.Packed() && u.Sigset.Packed() && u.Stack.Packed()
+}
+
+// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.
+func (u *UContext64) MarshalUnsafe(dst []byte) {
+ if u.MContext.Packed() && u.Sigset.Packed() && u.Stack.Packed() {
+ safecopy.CopyIn(dst, unsafe.Pointer(u))
+ } else {
+ // Type UContext64 doesn't have a packed layout in memory, fallback to MarshalBytes.
+ u.MarshalBytes(dst)
+ }
+}
+
+// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe.
+func (u *UContext64) UnmarshalUnsafe(src []byte) {
+ if u.MContext.Packed() && u.Sigset.Packed() && u.Stack.Packed() {
+ safecopy.CopyOut(unsafe.Pointer(u), src)
+ } else {
+ // Type UContext64 doesn't have a packed layout in memory, fallback to UnmarshalBytes.
+ u.UnmarshalBytes(src)
+ }
+}
+
+// CopyOutN implements marshal.Marshallable.CopyOutN.
+//go:nosplit
+func (u *UContext64) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) {
+ if !u.MContext.Packed() && u.Sigset.Packed() && u.Stack.Packed() {
+ // Type UContext64 doesn't have a packed layout in memory, fall back to MarshalBytes.
+ buf := cc.CopyScratchBuffer(u.SizeBytes()) // escapes: okay.
+ u.MarshalBytes(buf) // escapes: fallback.
+ return cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay.
+ }
+
+ // Construct a slice backed by dst's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(u)))
+ hdr.Len = u.SizeBytes()
+ hdr.Cap = u.SizeBytes()
+
+ length, err := cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay.
+ // Since we bypassed the compiler's escape analysis, indicate that u
+ // must live until the use above.
+ runtime.KeepAlive(u) // escapes: replaced by intrinsic.
+ return length, err
+}
+
+// CopyOut implements marshal.Marshallable.CopyOut.
+//go:nosplit
+func (u *UContext64) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) {
+ return u.CopyOutN(cc, addr, u.SizeBytes())
+}
+
+// CopyIn implements marshal.Marshallable.CopyIn.
+//go:nosplit
+func (u *UContext64) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) {
+ if !u.MContext.Packed() && u.Sigset.Packed() && u.Stack.Packed() {
+ // Type UContext64 doesn't have a packed layout in memory, fall back to UnmarshalBytes.
+ buf := cc.CopyScratchBuffer(u.SizeBytes()) // escapes: okay.
+ length, err := cc.CopyInBytes(addr, buf) // escapes: okay.
+ // Unmarshal unconditionally. If we had a short copy-in, this results in a
+ // partially unmarshalled struct.
+ u.UnmarshalBytes(buf) // escapes: fallback.
+ return length, err
+ }
+
+ // Construct a slice backed by dst's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(u)))
+ hdr.Len = u.SizeBytes()
+ hdr.Cap = u.SizeBytes()
+
+ length, err := cc.CopyInBytes(addr, buf) // escapes: okay.
+ // Since we bypassed the compiler's escape analysis, indicate that u
+ // must live until the use above.
+ runtime.KeepAlive(u) // escapes: replaced by intrinsic.
+ return length, err
+}
+
+// WriteTo implements io.WriterTo.WriteTo.
+func (u *UContext64) WriteTo(writer io.Writer) (int64, error) {
+ if !u.MContext.Packed() && u.Sigset.Packed() && u.Stack.Packed() {
+ // Type UContext64 doesn't have a packed layout in memory, fall back to MarshalBytes.
+ buf := make([]byte, u.SizeBytes())
+ u.MarshalBytes(buf)
+ length, err := writer.Write(buf)
+ return int64(length), err
+ }
+
+ // Construct a slice backed by dst's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(u)))
+ hdr.Len = u.SizeBytes()
+ hdr.Cap = u.SizeBytes()
+
+ length, err := writer.Write(buf)
+ // Since we bypassed the compiler's escape analysis, indicate that u
+ // must live until the use above.
+ runtime.KeepAlive(u) // escapes: replaced by intrinsic.
+ return int64(length), err
+}
+
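
The SizeBytes values compose arithmetically. On amd64, SignalContext64 is 184 bytes of fixed register, segment, and fault fields, plus 8 for the embedded linux.SignalSet, plus 8*8 for Reserved, i.e. 256 bytes; UContext64 is then 16 + 24 (SignalStack) + 256 + 8 = 304 bytes. A usage sketch grounded in the methods above (cc and addr are assumed to come from the calling task's context):

    var uc UContext64
    buf := make([]byte, uc.SizeBytes()) // 304 bytes on amd64
    uc.MarshalBytes(buf)                // portable, field-by-field encoding
    // Or copy straight out to user memory; this takes the zero-copy
    // path when uc.Packed() is true:
    // n, err := uc.CopyOut(cc, addr)
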
diff --git a/pkg/sentry/arch/arch_amd64_state_autogen.go b/pkg/sentry/arch/arch_amd64_state_autogen.go
new file mode 100644
index 000000000..7fe18338d
--- /dev/null
+++ b/pkg/sentry/arch/arch_amd64_state_autogen.go
@@ -0,0 +1,41 @@
+// automatically generated by stateify.
+
+// +build amd64
+// +build amd64
+// +build amd64
+
+package arch
+
+import (
+ "gvisor.dev/gvisor/pkg/state"
+)
+
+func (c *context64) StateTypeName() string {
+ return "pkg/sentry/arch.context64"
+}
+
+func (c *context64) StateFields() []string {
+ return []string{
+ "State",
+ "sigFPState",
+ }
+}
+
+func (c *context64) beforeSave() {}
+
+func (c *context64) StateSave(stateSinkObject state.Sink) {
+ c.beforeSave()
+ stateSinkObject.Save(0, &c.State)
+ stateSinkObject.Save(1, &c.sigFPState)
+}
+
+func (c *context64) afterLoad() {}
+
+func (c *context64) StateLoad(stateSourceObject state.Source) {
+ stateSourceObject.Load(0, &c.State)
+ stateSourceObject.Load(1, &c.sigFPState)
+}
+
+func init() {
+ state.Register((*context64)(nil))
+}
diff --git a/pkg/sentry/arch/arch_arm64_abi_autogen_unsafe.go b/pkg/sentry/arch/arch_arm64_abi_autogen_unsafe.go
new file mode 100644
index 000000000..b8667cdb9
--- /dev/null
+++ b/pkg/sentry/arch/arch_arm64_abi_autogen_unsafe.go
@@ -0,0 +1,592 @@
+// Automatically generated marshal implementation. See tools/go_marshal.
+
+// +build arm64
+// +build arm64
+// +build arm64
+
+package arch
+
+import (
+ "gvisor.dev/gvisor/pkg/abi/linux"
+ "gvisor.dev/gvisor/pkg/gohacks"
+ "gvisor.dev/gvisor/pkg/marshal"
+ "gvisor.dev/gvisor/pkg/safecopy"
+ "gvisor.dev/gvisor/pkg/usermem"
+ "io"
+ "reflect"
+ "runtime"
+ "unsafe"
+)
+
+// Marshallable types used by this file.
+var _ marshal.Marshallable = (*FpsimdContext)(nil)
+var _ marshal.Marshallable = (*SignalContext64)(nil)
+var _ marshal.Marshallable = (*SignalStack)(nil)
+var _ marshal.Marshallable = (*UContext64)(nil)
+var _ marshal.Marshallable = (*aarch64Ctx)(nil)
+var _ marshal.Marshallable = (*linux.SignalSet)(nil)
+
+// SizeBytes implements marshal.Marshallable.SizeBytes.
+func (a *aarch64Ctx) SizeBytes() int {
+ return 8
+}
+
+// MarshalBytes implements marshal.Marshallable.MarshalBytes.
+func (a *aarch64Ctx) MarshalBytes(dst []byte) {
+ usermem.ByteOrder.PutUint32(dst[:4], uint32(a.Magic))
+ dst = dst[4:]
+ usermem.ByteOrder.PutUint32(dst[:4], uint32(a.Size))
+ dst = dst[4:]
+}
+
+// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes.
+func (a *aarch64Ctx) UnmarshalBytes(src []byte) {
+ a.Magic = uint32(usermem.ByteOrder.Uint32(src[:4]))
+ src = src[4:]
+ a.Size = uint32(usermem.ByteOrder.Uint32(src[:4]))
+ src = src[4:]
+}
+
+// Packed implements marshal.Marshallable.Packed.
+//go:nosplit
+func (a *aarch64Ctx) Packed() bool {
+ return true
+}
+
+// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.
+func (a *aarch64Ctx) MarshalUnsafe(dst []byte) {
+ safecopy.CopyIn(dst, unsafe.Pointer(a))
+}
+
+// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe.
+func (a *aarch64Ctx) UnmarshalUnsafe(src []byte) {
+ safecopy.CopyOut(unsafe.Pointer(a), src)
+}
+
+// CopyOutN implements marshal.Marshallable.CopyOutN.
+//go:nosplit
+func (a *aarch64Ctx) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) {
+ // Construct a slice backed by dst's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(a)))
+ hdr.Len = a.SizeBytes()
+ hdr.Cap = a.SizeBytes()
+
+ length, err := cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay.
+ // Since we bypassed the compiler's escape analysis, indicate that a
+ // must live until the use above.
+ runtime.KeepAlive(a) // escapes: replaced by intrinsic.
+ return length, err
+}
+
+// CopyOut implements marshal.Marshallable.CopyOut.
+//go:nosplit
+func (a *aarch64Ctx) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) {
+ return a.CopyOutN(cc, addr, a.SizeBytes())
+}
+
+// CopyIn implements marshal.Marshallable.CopyIn.
+//go:nosplit
+func (a *aarch64Ctx) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) {
+ // Construct a slice backed by dst's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(a)))
+ hdr.Len = a.SizeBytes()
+ hdr.Cap = a.SizeBytes()
+
+ length, err := cc.CopyInBytes(addr, buf) // escapes: okay.
+ // Since we bypassed the compiler's escape analysis, indicate that a
+ // must live until the use above.
+ runtime.KeepAlive(a) // escapes: replaced by intrinsic.
+ return length, err
+}
+
+// WriteTo implements io.WriterTo.WriteTo.
+func (a *aarch64Ctx) WriteTo(writer io.Writer) (int64, error) {
+ // Construct a slice backed by dst's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(a)))
+ hdr.Len = a.SizeBytes()
+ hdr.Cap = a.SizeBytes()
+
+ length, err := writer.Write(buf)
+ // Since we bypassed the compiler's escape analysis, indicate that a
+ // must live until the use above.
+ runtime.KeepAlive(a) // escapes: replaced by intrinsic.
+ return int64(length), err
+}
+
+// SizeBytes implements marshal.Marshallable.SizeBytes.
+func (f *FpsimdContext) SizeBytes() int {
+ return 8 +
+ (*aarch64Ctx)(nil).SizeBytes() +
+ 8*64
+}
+
+// MarshalBytes implements marshal.Marshallable.MarshalBytes.
+func (f *FpsimdContext) MarshalBytes(dst []byte) {
+ f.Head.MarshalBytes(dst[:f.Head.SizeBytes()])
+ dst = dst[f.Head.SizeBytes():]
+ usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Fpsr))
+ dst = dst[4:]
+ usermem.ByteOrder.PutUint32(dst[:4], uint32(f.Fpcr))
+ dst = dst[4:]
+ for idx := 0; idx < 64; idx++ {
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(f.Vregs[idx]))
+ dst = dst[8:]
+ }
+}
+
+// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes.
+func (f *FpsimdContext) UnmarshalBytes(src []byte) {
+ f.Head.UnmarshalBytes(src[:f.Head.SizeBytes()])
+ src = src[f.Head.SizeBytes():]
+ f.Fpsr = uint32(usermem.ByteOrder.Uint32(src[:4]))
+ src = src[4:]
+ f.Fpcr = uint32(usermem.ByteOrder.Uint32(src[:4]))
+ src = src[4:]
+ for idx := 0; idx < 64; idx++ {
+ f.Vregs[idx] = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ }
+}
+
+// Packed implements marshal.Marshallable.Packed.
+//go:nosplit
+func (f *FpsimdContext) Packed() bool {
+ return f.Head.Packed()
+}
+
+// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.
+func (f *FpsimdContext) MarshalUnsafe(dst []byte) {
+ if f.Head.Packed() {
+ safecopy.CopyIn(dst, unsafe.Pointer(f))
+ } else {
+ // Type FpsimdContext doesn't have a packed layout in memory, fallback to MarshalBytes.
+ f.MarshalBytes(dst)
+ }
+}
+
+// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe.
+func (f *FpsimdContext) UnmarshalUnsafe(src []byte) {
+ if f.Head.Packed() {
+ safecopy.CopyOut(unsafe.Pointer(f), src)
+ } else {
+ // Type FpsimdContext doesn't have a packed layout in memory, fallback to UnmarshalBytes.
+ f.UnmarshalBytes(src)
+ }
+}
+
+// CopyOutN implements marshal.Marshallable.CopyOutN.
+//go:nosplit
+func (f *FpsimdContext) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) {
+ if !f.Head.Packed() {
+ // Type FpsimdContext doesn't have a packed layout in memory, fall back to MarshalBytes.
+ buf := cc.CopyScratchBuffer(f.SizeBytes()) // escapes: okay.
+ f.MarshalBytes(buf) // escapes: fallback.
+ return cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay.
+ }
+
+ // Construct a slice backed by dst's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(f)))
+ hdr.Len = f.SizeBytes()
+ hdr.Cap = f.SizeBytes()
+
+ length, err := cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay.
+ // Since we bypassed the compiler's escape analysis, indicate that f
+ // must live until the use above.
+ runtime.KeepAlive(f) // escapes: replaced by intrinsic.
+ return length, err
+}
+
+// CopyOut implements marshal.Marshallable.CopyOut.
+//go:nosplit
+func (f *FpsimdContext) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) {
+ return f.CopyOutN(cc, addr, f.SizeBytes())
+}
+
+// CopyIn implements marshal.Marshallable.CopyIn.
+//go:nosplit
+func (f *FpsimdContext) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) {
+ if !f.Head.Packed() {
+ // Type FpsimdContext doesn't have a packed layout in memory, fall back to UnmarshalBytes.
+ buf := cc.CopyScratchBuffer(f.SizeBytes()) // escapes: okay.
+ length, err := cc.CopyInBytes(addr, buf) // escapes: okay.
+ // Unmarshal unconditionally. If we had a short copy-in, this results in a
+ // partially unmarshalled struct.
+ f.UnmarshalBytes(buf) // escapes: fallback.
+ return length, err
+ }
+
+ // Construct a slice backed by dst's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(f)))
+ hdr.Len = f.SizeBytes()
+ hdr.Cap = f.SizeBytes()
+
+ length, err := cc.CopyInBytes(addr, buf) // escapes: okay.
+ // Since we bypassed the compiler's escape analysis, indicate that f
+ // must live until the use above.
+ runtime.KeepAlive(f) // escapes: replaced by intrinsic.
+ return length, err
+}
+
+// WriteTo implements io.WriterTo.WriteTo.
+func (f *FpsimdContext) WriteTo(writer io.Writer) (int64, error) {
+ if !f.Head.Packed() {
+ // Type FpsimdContext doesn't have a packed layout in memory, fall back to MarshalBytes.
+ buf := make([]byte, f.SizeBytes())
+ f.MarshalBytes(buf)
+ length, err := writer.Write(buf)
+ return int64(length), err
+ }
+
+ // Construct a slice backed by dst's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(f)))
+ hdr.Len = f.SizeBytes()
+ hdr.Cap = f.SizeBytes()
+
+ length, err := writer.Write(buf)
+ // Since we bypassed the compiler's escape analysis, indicate that f
+ // must live until the use above.
+ runtime.KeepAlive(f) // escapes: replaced by intrinsic.
+ return int64(length), err
+}
+
+// SizeBytes implements marshal.Marshallable.SizeBytes.
+func (u *UContext64) SizeBytes() int {
+ return 16 +
+ (*SignalStack)(nil).SizeBytes() +
+ (*linux.SignalSet)(nil).SizeBytes() +
+ 1*120 +
+ 1*8 +
+ (*SignalContext64)(nil).SizeBytes()
+}
+
+// MarshalBytes implements marshal.Marshallable.MarshalBytes.
+func (u *UContext64) MarshalBytes(dst []byte) {
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(u.Flags))
+ dst = dst[8:]
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(u.Link))
+ dst = dst[8:]
+ u.Stack.MarshalBytes(dst[:u.Stack.SizeBytes()])
+ dst = dst[u.Stack.SizeBytes():]
+ u.Sigset.MarshalBytes(dst[:u.Sigset.SizeBytes()])
+ dst = dst[u.Sigset.SizeBytes():]
+ for idx := 0; idx < 120; idx++ {
+ dst[0] = byte(u._pad[idx])
+ dst = dst[1:]
+ }
+ for idx := 0; idx < 8; idx++ {
+ dst[0] = byte(u._pad2[idx])
+ dst = dst[1:]
+ }
+ u.MContext.MarshalBytes(dst[:u.MContext.SizeBytes()])
+ dst = dst[u.MContext.SizeBytes():]
+}
+
+// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes.
+func (u *UContext64) UnmarshalBytes(src []byte) {
+ u.Flags = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ u.Link = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ u.Stack.UnmarshalBytes(src[:u.Stack.SizeBytes()])
+ src = src[u.Stack.SizeBytes():]
+ u.Sigset.UnmarshalBytes(src[:u.Sigset.SizeBytes()])
+ src = src[u.Sigset.SizeBytes():]
+ for idx := 0; idx < 120; idx++ {
+ u._pad[idx] = src[0]
+ src = src[1:]
+ }
+ for idx := 0; idx < 8; idx++ {
+ u._pad2[idx] = src[0]
+ src = src[1:]
+ }
+ u.MContext.UnmarshalBytes(src[:u.MContext.SizeBytes()])
+ src = src[u.MContext.SizeBytes():]
+}
+
+// Packed implements marshal.Marshallable.Packed.
+//go:nosplit
+func (u *UContext64) Packed() bool {
+ return u.MContext.Packed() && u.Sigset.Packed() && u.Stack.Packed()
+}
+
+// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.
+func (u *UContext64) MarshalUnsafe(dst []byte) {
+ if u.MContext.Packed() && u.Sigset.Packed() && u.Stack.Packed() {
+ safecopy.CopyIn(dst, unsafe.Pointer(u))
+ } else {
+ // Type UContext64 doesn't have a packed layout in memory, fallback to MarshalBytes.
+ u.MarshalBytes(dst)
+ }
+}
+
+// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe.
+func (u *UContext64) UnmarshalUnsafe(src []byte) {
+ if u.MContext.Packed() && u.Sigset.Packed() && u.Stack.Packed() {
+ safecopy.CopyOut(unsafe.Pointer(u), src)
+ } else {
+ // Type UContext64 doesn't have a packed layout in memory, fallback to UnmarshalBytes.
+ u.UnmarshalBytes(src)
+ }
+}
+
+// CopyOutN implements marshal.Marshallable.CopyOutN.
+//go:nosplit
+func (u *UContext64) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) {
+ if !u.MContext.Packed() && u.Sigset.Packed() && u.Stack.Packed() {
+ // Type UContext64 doesn't have a packed layout in memory, fall back to MarshalBytes.
+ buf := cc.CopyScratchBuffer(u.SizeBytes()) // escapes: okay.
+ u.MarshalBytes(buf) // escapes: fallback.
+ return cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay.
+ }
+
+ // Construct a slice backed by dst's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(u)))
+ hdr.Len = u.SizeBytes()
+ hdr.Cap = u.SizeBytes()
+
+ length, err := cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay.
+ // Since we bypassed the compiler's escape analysis, indicate that u
+ // must live until the use above.
+ runtime.KeepAlive(u) // escapes: replaced by intrinsic.
+ return length, err
+}
+
+// CopyOut implements marshal.Marshallable.CopyOut.
+//go:nosplit
+func (u *UContext64) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) {
+ return u.CopyOutN(cc, addr, u.SizeBytes())
+}
+
+// CopyIn implements marshal.Marshallable.CopyIn.
+//go:nosplit
+func (u *UContext64) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) {
+ if !u.MContext.Packed() && u.Sigset.Packed() && u.Stack.Packed() {
+ // Type UContext64 doesn't have a packed layout in memory, fall back to UnmarshalBytes.
+ buf := cc.CopyScratchBuffer(u.SizeBytes()) // escapes: okay.
+ length, err := cc.CopyInBytes(addr, buf) // escapes: okay.
+ // Unmarshal unconditionally. If we had a short copy-in, this results in a
+ // partially unmarshalled struct.
+ u.UnmarshalBytes(buf) // escapes: fallback.
+ return length, err
+ }
+
+ // Construct a slice backed by dst's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(u)))
+ hdr.Len = u.SizeBytes()
+ hdr.Cap = u.SizeBytes()
+
+ length, err := cc.CopyInBytes(addr, buf) // escapes: okay.
+ // Since we bypassed the compiler's escape analysis, indicate that u
+ // must live until the use above.
+ runtime.KeepAlive(u) // escapes: replaced by intrinsic.
+ return length, err
+}
+
+// WriteTo implements io.WriterTo.WriteTo.
+func (u *UContext64) WriteTo(writer io.Writer) (int64, error) {
+ if !u.MContext.Packed() && u.Sigset.Packed() && u.Stack.Packed() {
+ // Type UContext64 doesn't have a packed layout in memory, fall back to MarshalBytes.
+ buf := make([]byte, u.SizeBytes())
+ u.MarshalBytes(buf)
+ length, err := writer.Write(buf)
+ return int64(length), err
+ }
+
+ // Construct a slice backed by dst's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(u)))
+ hdr.Len = u.SizeBytes()
+ hdr.Cap = u.SizeBytes()
+
+ length, err := writer.Write(buf)
+ // Since we bypassed the compiler's escape analysis, indicate that u
+ // must live until the use above.
+ runtime.KeepAlive(u) // escapes: replaced by intrinsic.
+ return int64(length), err
+}
+
+// SizeBytes implements marshal.Marshallable.SizeBytes.
+func (s *SignalContext64) SizeBytes() int {
+ return 32 +
+ 8*31 +
+ 1*8 +
+ (*FpsimdContext)(nil).SizeBytes() +
+ 1*3568
+}
+
+// MarshalBytes implements marshal.Marshallable.MarshalBytes.
+func (s *SignalContext64) MarshalBytes(dst []byte) {
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(s.FaultAddr))
+ dst = dst[8:]
+ for idx := 0; idx < 31; idx++ {
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Regs[idx]))
+ dst = dst[8:]
+ }
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Sp))
+ dst = dst[8:]
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Pc))
+ dst = dst[8:]
+ usermem.ByteOrder.PutUint64(dst[:8], uint64(s.Pstate))
+ dst = dst[8:]
+ for idx := 0; idx < 8; idx++ {
+ dst[0] = byte(s._pad[idx])
+ dst = dst[1:]
+ }
+ s.Fpsimd64.MarshalBytes(dst[:s.Fpsimd64.SizeBytes()])
+ dst = dst[s.Fpsimd64.SizeBytes():]
+ for idx := 0; idx < 3568; idx++ {
+ dst[0] = byte(s.Reserved[idx])
+ dst = dst[1:]
+ }
+}
+
+// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes.
+func (s *SignalContext64) UnmarshalBytes(src []byte) {
+ s.FaultAddr = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ for idx := 0; idx < 31; idx++ {
+ s.Regs[idx] = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ }
+ s.Sp = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ s.Pc = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ s.Pstate = uint64(usermem.ByteOrder.Uint64(src[:8]))
+ src = src[8:]
+ for idx := 0; idx < 8; idx++ {
+ s._pad[idx] = src[0]
+ src = src[1:]
+ }
+ s.Fpsimd64.UnmarshalBytes(src[:s.Fpsimd64.SizeBytes()])
+ src = src[s.Fpsimd64.SizeBytes():]
+ for idx := 0; idx < 3568; idx++ {
+ s.Reserved[idx] = uint8(src[0])
+ src = src[1:]
+ }
+}
+
+// Packed implements marshal.Marshallable.Packed.
+//go:nosplit
+func (s *SignalContext64) Packed() bool {
+ return s.Fpsimd64.Packed()
+}
+
+// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.
+func (s *SignalContext64) MarshalUnsafe(dst []byte) {
+ if s.Fpsimd64.Packed() {
+ safecopy.CopyIn(dst, unsafe.Pointer(s))
+ } else {
+ // Type SignalContext64 doesn't have a packed layout in memory, fallback to MarshalBytes.
+ s.MarshalBytes(dst)
+ }
+}
+
+// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe.
+func (s *SignalContext64) UnmarshalUnsafe(src []byte) {
+ if s.Fpsimd64.Packed() {
+ safecopy.CopyOut(unsafe.Pointer(s), src)
+ } else {
+ // Type SignalContext64 doesn't have a packed layout in memory, fallback to UnmarshalBytes.
+ s.UnmarshalBytes(src)
+ }
+}
+
+// CopyOutN implements marshal.Marshallable.CopyOutN.
+//go:nosplit
+func (s *SignalContext64) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) {
+ if !s.Fpsimd64.Packed() {
+ // Type SignalContext64 doesn't have a packed layout in memory, fall back to MarshalBytes.
+ buf := cc.CopyScratchBuffer(s.SizeBytes()) // escapes: okay.
+ s.MarshalBytes(buf) // escapes: fallback.
+ return cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay.
+ }
+
+ // Construct a slice backed by dst's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(s)))
+ hdr.Len = s.SizeBytes()
+ hdr.Cap = s.SizeBytes()
+
+ length, err := cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay.
+ // Since we bypassed the compiler's escape analysis, indicate that s
+ // must live until the use above.
+ runtime.KeepAlive(s) // escapes: replaced by intrinsic.
+ return length, err
+}
+
+// CopyOut implements marshal.Marshallable.CopyOut.
+//go:nosplit
+func (s *SignalContext64) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) {
+ return s.CopyOutN(cc, addr, s.SizeBytes())
+}
+
+// CopyIn implements marshal.Marshallable.CopyIn.
+//go:nosplit
+func (s *SignalContext64) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) {
+ if !s.Fpsimd64.Packed() {
+ // Type SignalContext64 doesn't have a packed layout in memory, fall back to UnmarshalBytes.
+ buf := cc.CopyScratchBuffer(s.SizeBytes()) // escapes: okay.
+ length, err := cc.CopyInBytes(addr, buf) // escapes: okay.
+ // Unmarshal unconditionally. If we had a short copy-in, this results in a
+ // partially unmarshalled struct.
+ s.UnmarshalBytes(buf) // escapes: fallback.
+ return length, err
+ }
+
+ // Construct a slice backed by dst's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(s)))
+ hdr.Len = s.SizeBytes()
+ hdr.Cap = s.SizeBytes()
+
+ length, err := cc.CopyInBytes(addr, buf) // escapes: okay.
+ // Since we bypassed the compiler's escape analysis, indicate that s
+ // must live until the use above.
+ runtime.KeepAlive(s) // escapes: replaced by intrinsic.
+ return length, err
+}
+
+// WriteTo implements io.WriterTo.WriteTo.
+func (s *SignalContext64) WriteTo(writer io.Writer) (int64, error) {
+ if !s.Fpsimd64.Packed() {
+ // Type SignalContext64 doesn't have a packed layout in memory, fall back to MarshalBytes.
+ buf := make([]byte, s.SizeBytes())
+ s.MarshalBytes(buf)
+ length, err := writer.Write(buf)
+ return int64(length), err
+ }
+
+ // Construct a slice backed by dst's underlying memory.
+ var buf []byte
+ hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+ hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(s)))
+ hdr.Len = s.SizeBytes()
+ hdr.Cap = s.SizeBytes()
+
+ length, err := writer.Write(buf)
+ // Since we bypassed the compiler's escape analysis, indicate that s
+ // must live until the use above.
+ runtime.KeepAlive(s) // escapes: replaced by intrinsic.
+ return int64(length), err
+}
+
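
The arm64 sizes work out the same way:

    FpsimdContext   =  8 (aarch64Ctx) + 8 (Fpsr+Fpcr) + 8*64 (Vregs)            =  528
    SignalContext64 = 32 + 8*31 (Regs) + 8 (_pad) + 528 + 3568 (Reserved)       = 4384
    UContext64      = 16 + 24 (Stack) + 8 (Sigset) + 120 + 8 (pads) + 4384      = 4560

Note that 528 + 3568 = 4096: the fpsimd record sits at the head of what the Linux arm64 ABI declares as sigcontext's 4096-byte __reserved area, and the 120 + 8 padding bytes in UContext64 bring MContext to a 16-byte-aligned offset (176), matching the kernel's layout.
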
diff --git a/pkg/sentry/arch/arch_arm64_state_autogen.go b/pkg/sentry/arch/arch_arm64_state_autogen.go
new file mode 100644
index 000000000..bece3fa9e
--- /dev/null
+++ b/pkg/sentry/arch/arch_arm64_state_autogen.go
@@ -0,0 +1,41 @@
+// automatically generated by stateify.
+
+// +build arm64
+// +build arm64
+// +build arm64
+
+package arch
+
+import (
+ "gvisor.dev/gvisor/pkg/state"
+)
+
+func (c *context64) StateTypeName() string {
+ return "pkg/sentry/arch.context64"
+}
+
+func (c *context64) StateFields() []string {
+ return []string{
+ "State",
+ "sigFPState",
+ }
+}
+
+func (c *context64) beforeSave() {}
+
+func (c *context64) StateSave(stateSinkObject state.Sink) {
+ c.beforeSave()
+ stateSinkObject.Save(0, &c.State)
+ stateSinkObject.Save(1, &c.sigFPState)
+}
+
+func (c *context64) afterLoad() {}
+
+func (c *context64) StateLoad(stateSourceObject state.Source) {
+ stateSourceObject.Load(0, &c.State)
+ stateSourceObject.Load(1, &c.sigFPState)
+}
+
+func init() {
+ state.Register((*context64)(nil))
+}
diff --git a/pkg/sentry/arch/arch_state_autogen.go b/pkg/sentry/arch/arch_state_autogen.go
new file mode 100644
index 000000000..e0a1de627
--- /dev/null
+++ b/pkg/sentry/arch/arch_state_autogen.go
@@ -0,0 +1,174 @@
+// automatically generated by stateify.
+
+// +build 386 amd64 arm64
+
+package arch
+
+import (
+ "gvisor.dev/gvisor/pkg/state"
+)
+
+func (m *MmapLayout) StateTypeName() string {
+ return "pkg/sentry/arch.MmapLayout"
+}
+
+func (m *MmapLayout) StateFields() []string {
+ return []string{
+ "MinAddr",
+ "MaxAddr",
+ "BottomUpBase",
+ "TopDownBase",
+ "DefaultDirection",
+ "MaxStackRand",
+ }
+}
+
+func (m *MmapLayout) beforeSave() {}
+
+func (m *MmapLayout) StateSave(stateSinkObject state.Sink) {
+ m.beforeSave()
+ stateSinkObject.Save(0, &m.MinAddr)
+ stateSinkObject.Save(1, &m.MaxAddr)
+ stateSinkObject.Save(2, &m.BottomUpBase)
+ stateSinkObject.Save(3, &m.TopDownBase)
+ stateSinkObject.Save(4, &m.DefaultDirection)
+ stateSinkObject.Save(5, &m.MaxStackRand)
+}
+
+func (m *MmapLayout) afterLoad() {}
+
+func (m *MmapLayout) StateLoad(stateSourceObject state.Source) {
+ stateSourceObject.Load(0, &m.MinAddr)
+ stateSourceObject.Load(1, &m.MaxAddr)
+ stateSourceObject.Load(2, &m.BottomUpBase)
+ stateSourceObject.Load(3, &m.TopDownBase)
+ stateSourceObject.Load(4, &m.DefaultDirection)
+ stateSourceObject.Load(5, &m.MaxStackRand)
+}
+
+func (a *AuxEntry) StateTypeName() string {
+ return "pkg/sentry/arch.AuxEntry"
+}
+
+func (a *AuxEntry) StateFields() []string {
+ return []string{
+ "Key",
+ "Value",
+ }
+}
+
+func (a *AuxEntry) beforeSave() {}
+
+func (a *AuxEntry) StateSave(stateSinkObject state.Sink) {
+ a.beforeSave()
+ stateSinkObject.Save(0, &a.Key)
+ stateSinkObject.Save(1, &a.Value)
+}
+
+func (a *AuxEntry) afterLoad() {}
+
+func (a *AuxEntry) StateLoad(stateSourceObject state.Source) {
+ stateSourceObject.Load(0, &a.Key)
+ stateSourceObject.Load(1, &a.Value)
+}
+
+func (s *SignalAct) StateTypeName() string {
+ return "pkg/sentry/arch.SignalAct"
+}
+
+func (s *SignalAct) StateFields() []string {
+ return []string{
+ "Handler",
+ "Flags",
+ "Restorer",
+ "Mask",
+ }
+}
+
+func (s *SignalAct) beforeSave() {}
+
+func (s *SignalAct) StateSave(stateSinkObject state.Sink) {
+ s.beforeSave()
+ stateSinkObject.Save(0, &s.Handler)
+ stateSinkObject.Save(1, &s.Flags)
+ stateSinkObject.Save(2, &s.Restorer)
+ stateSinkObject.Save(3, &s.Mask)
+}
+
+func (s *SignalAct) afterLoad() {}
+
+func (s *SignalAct) StateLoad(stateSourceObject state.Source) {
+ stateSourceObject.Load(0, &s.Handler)
+ stateSourceObject.Load(1, &s.Flags)
+ stateSourceObject.Load(2, &s.Restorer)
+ stateSourceObject.Load(3, &s.Mask)
+}
+
+func (s *SignalStack) StateTypeName() string {
+ return "pkg/sentry/arch.SignalStack"
+}
+
+func (s *SignalStack) StateFields() []string {
+ return []string{
+ "Addr",
+ "Flags",
+ "Size",
+ }
+}
+
+func (s *SignalStack) beforeSave() {}
+
+func (s *SignalStack) StateSave(stateSinkObject state.Sink) {
+ s.beforeSave()
+ stateSinkObject.Save(0, &s.Addr)
+ stateSinkObject.Save(1, &s.Flags)
+ stateSinkObject.Save(2, &s.Size)
+}
+
+func (s *SignalStack) afterLoad() {}
+
+func (s *SignalStack) StateLoad(stateSourceObject state.Source) {
+ stateSourceObject.Load(0, &s.Addr)
+ stateSourceObject.Load(1, &s.Flags)
+ stateSourceObject.Load(2, &s.Size)
+}
+
+func (s *SignalInfo) StateTypeName() string {
+ return "pkg/sentry/arch.SignalInfo"
+}
+
+func (s *SignalInfo) StateFields() []string {
+ return []string{
+ "Signo",
+ "Errno",
+ "Code",
+ "Fields",
+ }
+}
+
+func (s *SignalInfo) beforeSave() {}
+
+func (s *SignalInfo) StateSave(stateSinkObject state.Sink) {
+ s.beforeSave()
+ stateSinkObject.Save(0, &s.Signo)
+ stateSinkObject.Save(1, &s.Errno)
+ stateSinkObject.Save(2, &s.Code)
+ stateSinkObject.Save(3, &s.Fields)
+}
+
+func (s *SignalInfo) afterLoad() {}
+
+func (s *SignalInfo) StateLoad(stateSourceObject state.Source) {
+ stateSourceObject.Load(0, &s.Signo)
+ stateSourceObject.Load(1, &s.Errno)
+ stateSourceObject.Load(2, &s.Code)
+ stateSourceObject.Load(3, &s.Fields)
+}
+
+func init() {
+ state.Register((*MmapLayout)(nil))
+ state.Register((*AuxEntry)(nil))
+ state.Register((*SignalAct)(nil))
+ state.Register((*SignalStack)(nil))
+ state.Register((*SignalInfo)(nil))
+}
diff --git a/pkg/sentry/arch/arch_unsafe_abi_autogen_unsafe.go b/pkg/sentry/arch/arch_unsafe_abi_autogen_unsafe.go
new file mode 100644
index 000000000..2167ed9c9
--- /dev/null
+++ b/pkg/sentry/arch/arch_unsafe_abi_autogen_unsafe.go
@@ -0,0 +1,7 @@
+// Automatically generated marshal implementation. See tools/go_marshal.
+
+package arch
+
+import (
+)
+
diff --git a/pkg/sentry/arch/arch_unsafe_state_autogen.go b/pkg/sentry/arch/arch_unsafe_state_autogen.go
new file mode 100644
index 000000000..9a45071b9
--- /dev/null
+++ b/pkg/sentry/arch/arch_unsafe_state_autogen.go
@@ -0,0 +1,3 @@
+// automatically generated by stateify.
+
+package arch
diff --git a/pkg/sentry/arch/arch_x86_abi_autogen_unsafe.go b/pkg/sentry/arch/arch_x86_abi_autogen_unsafe.go
new file mode 100644
index 000000000..521dcbb06
--- /dev/null
+++ b/pkg/sentry/arch/arch_x86_abi_autogen_unsafe.go
@@ -0,0 +1,9 @@
+// Automatically generated marshal implementation. See tools/go_marshal.
+
+// +build amd64 386
+
+package arch
+
+import (
+)
+
diff --git a/pkg/sentry/arch/arch_x86_impl_abi_autogen_unsafe.go b/pkg/sentry/arch/arch_x86_impl_abi_autogen_unsafe.go
new file mode 100644
index 000000000..4357dc093
--- /dev/null
+++ b/pkg/sentry/arch/arch_x86_impl_abi_autogen_unsafe.go
@@ -0,0 +1,9 @@
+// Automatically generated marshal implementation. See tools/go_marshal.
+
+// +build amd64 386
+
+package arch
+
+import (
+)
+
diff --git a/pkg/sentry/arch/arch_x86_impl_state_autogen.go b/pkg/sentry/arch/arch_x86_impl_state_autogen.go
new file mode 100644
index 000000000..e584c3f83
--- /dev/null
+++ b/pkg/sentry/arch/arch_x86_impl_state_autogen.go
@@ -0,0 +1,41 @@
+// automatically generated by stateify.
+
+// +build amd64 386
+
+package arch
+
+import (
+ "gvisor.dev/gvisor/pkg/state"
+)
+
+func (s *State) StateTypeName() string {
+ return "pkg/sentry/arch.State"
+}
+
+func (s *State) StateFields() []string {
+ return []string{
+ "Regs",
+ "x86FPState",
+ "FeatureSet",
+ }
+}
+
+func (s *State) beforeSave() {}
+
+func (s *State) StateSave(stateSinkObject state.Sink) {
+ s.beforeSave()
+ stateSinkObject.Save(0, &s.Regs)
+ stateSinkObject.Save(1, &s.x86FPState)
+ stateSinkObject.Save(2, &s.FeatureSet)
+}
+
+func (s *State) StateLoad(stateSourceObject state.Source) {
+ stateSourceObject.Load(0, &s.Regs)
+ stateSourceObject.LoadWait(1, &s.x86FPState)
+ stateSourceObject.Load(2, &s.FeatureSet)
+ stateSourceObject.AfterLoad(s.afterLoad)
+}
+
+func init() {
+ state.Register((*State)(nil))
+}
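
Unlike the plain Load calls in the other generated loaders in this change,
State.StateLoad above restores x86FPState with LoadWait and defers a fixup
through AfterLoad; the afterLoad hook itself is hand-written (which is why
no empty stub is generated here), so it can rebuild state that depends on
the fully loaded FP buffer. A sketch of that deferred-fixup pattern
(hypothetical type lazyState; not part of this change):

    package example

    import (
    	"gvisor.dev/gvisor/pkg/state"
    )

    // lazyState is a hypothetical type whose derived field must be
    // rebuilt only after its saved field has been fully restored.
    type lazyState struct {
    	raw     []byte
    	decoded string // derived from raw; not saved
    }

    func (l *lazyState) StateTypeName() string { return "example.lazyState" }
    func (l *lazyState) StateFields() []string { return []string{"raw"} }
    func (l *lazyState) beforeSave()           {}

    func (l *lazyState) StateSave(stateSinkObject state.Sink) {
    	l.beforeSave()
    	stateSinkObject.Save(0, &l.raw)
    }

    // afterLoad rebuilds derived state; for stateify types this hook
    // lives in the hand-written source, not the *_state_autogen.go file.
    func (l *lazyState) afterLoad() {
    	l.decoded = string(l.raw)
    }

    func (l *lazyState) StateLoad(stateSourceObject state.Source) {
    	stateSourceObject.LoadWait(0, &l.raw)    // block until raw is complete
    	stateSourceObject.AfterLoad(l.afterLoad) // run once all loads finish
    }

    func init() {
    	state.Register((*lazyState)(nil))
    }
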
diff --git a/pkg/sentry/arch/arch_x86_state_autogen.go b/pkg/sentry/arch/arch_x86_state_autogen.go
new file mode 100644
index 000000000..11378624b
--- /dev/null
+++ b/pkg/sentry/arch/arch_x86_state_autogen.go
@@ -0,0 +1,36 @@
+// automatically generated by stateify.
+
+// +build amd64 386
+
+package arch
+
+import (
+ "gvisor.dev/gvisor/pkg/state"
+)
+
+func (r *Registers) StateTypeName() string {
+ return "pkg/sentry/arch.Registers"
+}
+
+func (r *Registers) StateFields() []string {
+ return []string{
+ "PtraceRegs",
+ }
+}
+
+func (r *Registers) beforeSave() {}
+
+func (r *Registers) StateSave(stateSinkObject state.Sink) {
+ r.beforeSave()
+ stateSinkObject.Save(0, &r.PtraceRegs)
+}
+
+func (r *Registers) afterLoad() {}
+
+func (r *Registers) StateLoad(stateSourceObject state.Source) {
+ stateSourceObject.Load(0, &r.PtraceRegs)
+}
+
+func init() {
+ state.Register((*Registers)(nil))
+}
diff --git a/pkg/sentry/arch/registers.proto b/pkg/sentry/arch/registers.proto
deleted file mode 100644
index 2727ba08a..000000000
--- a/pkg/sentry/arch/registers.proto
+++ /dev/null
@@ -1,93 +0,0 @@
-// Copyright 2018 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-syntax = "proto3";
-
-package gvisor;
-
-message AMD64Registers {
- uint64 rax = 1;
- uint64 rbx = 2;
- uint64 rcx = 3;
- uint64 rdx = 4;
- uint64 rsi = 5;
- uint64 rdi = 6;
- uint64 rsp = 7;
- uint64 rbp = 8;
-
- uint64 r8 = 9;
- uint64 r9 = 10;
- uint64 r10 = 11;
- uint64 r11 = 12;
- uint64 r12 = 13;
- uint64 r13 = 14;
- uint64 r14 = 15;
- uint64 r15 = 16;
-
- uint64 rip = 17;
- uint64 rflags = 18;
- uint64 orig_rax = 19;
- uint64 cs = 20;
- uint64 ds = 21;
- uint64 es = 22;
- uint64 fs = 23;
- uint64 gs = 24;
- uint64 ss = 25;
- uint64 fs_base = 26;
- uint64 gs_base = 27;
-}
-
-message ARM64Registers {
- uint64 r0 = 1;
- uint64 r1 = 2;
- uint64 r2 = 3;
- uint64 r3 = 4;
- uint64 r4 = 5;
- uint64 r5 = 6;
- uint64 r6 = 7;
- uint64 r7 = 8;
- uint64 r8 = 9;
- uint64 r9 = 10;
- uint64 r10 = 11;
- uint64 r11 = 12;
- uint64 r12 = 13;
- uint64 r13 = 14;
- uint64 r14 = 15;
- uint64 r15 = 16;
- uint64 r16 = 17;
- uint64 r17 = 18;
- uint64 r18 = 19;
- uint64 r19 = 20;
- uint64 r20 = 21;
- uint64 r21 = 22;
- uint64 r22 = 23;
- uint64 r23 = 24;
- uint64 r24 = 25;
- uint64 r25 = 26;
- uint64 r26 = 27;
- uint64 r27 = 28;
- uint64 r28 = 29;
- uint64 r29 = 30;
- uint64 r30 = 31;
- uint64 sp = 32;
- uint64 pc = 33;
- uint64 pstate = 34;
- uint64 tls = 35;
-}
-message Registers {
- oneof arch {
- AMD64Registers amd64 = 1;
- ARM64Registers arm64 = 2;
- }
-}
diff --git a/pkg/sentry/arch/registers_go_proto/registers.pb.go b/pkg/sentry/arch/registers_go_proto/registers.pb.go
new file mode 100644
index 000000000..eb0d36273
--- /dev/null
+++ b/pkg/sentry/arch/registers_go_proto/registers.pb.go
@@ -0,0 +1,863 @@
+// Code generated by protoc-gen-go. DO NOT EDIT.
+// versions:
+// protoc-gen-go v1.25.0
+// protoc v3.9.0
+// source: pkg/sentry/arch/registers.proto
+
+package gvisor
+
+import (
+ proto "github.com/golang/protobuf/proto"
+ protoreflect "google.golang.org/protobuf/reflect/protoreflect"
+ protoimpl "google.golang.org/protobuf/runtime/protoimpl"
+ reflect "reflect"
+ sync "sync"
+)
+
+const (
+ // Verify that this generated code is sufficiently up-to-date.
+ _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
+ // Verify that runtime/protoimpl is sufficiently up-to-date.
+ _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
+)
+
+// This is a compile-time assertion that a sufficiently up-to-date version
+// of the legacy proto package is being used.
+const _ = proto.ProtoPackageIsVersion4
+
+type AMD64Registers struct {
+ state protoimpl.MessageState
+ sizeCache protoimpl.SizeCache
+ unknownFields protoimpl.UnknownFields
+
+ Rax uint64 `protobuf:"varint,1,opt,name=rax,proto3" json:"rax,omitempty"`
+ Rbx uint64 `protobuf:"varint,2,opt,name=rbx,proto3" json:"rbx,omitempty"`
+ Rcx uint64 `protobuf:"varint,3,opt,name=rcx,proto3" json:"rcx,omitempty"`
+ Rdx uint64 `protobuf:"varint,4,opt,name=rdx,proto3" json:"rdx,omitempty"`
+ Rsi uint64 `protobuf:"varint,5,opt,name=rsi,proto3" json:"rsi,omitempty"`
+ Rdi uint64 `protobuf:"varint,6,opt,name=rdi,proto3" json:"rdi,omitempty"`
+ Rsp uint64 `protobuf:"varint,7,opt,name=rsp,proto3" json:"rsp,omitempty"`
+ Rbp uint64 `protobuf:"varint,8,opt,name=rbp,proto3" json:"rbp,omitempty"`
+ R8 uint64 `protobuf:"varint,9,opt,name=r8,proto3" json:"r8,omitempty"`
+ R9 uint64 `protobuf:"varint,10,opt,name=r9,proto3" json:"r9,omitempty"`
+ R10 uint64 `protobuf:"varint,11,opt,name=r10,proto3" json:"r10,omitempty"`
+ R11 uint64 `protobuf:"varint,12,opt,name=r11,proto3" json:"r11,omitempty"`
+ R12 uint64 `protobuf:"varint,13,opt,name=r12,proto3" json:"r12,omitempty"`
+ R13 uint64 `protobuf:"varint,14,opt,name=r13,proto3" json:"r13,omitempty"`
+ R14 uint64 `protobuf:"varint,15,opt,name=r14,proto3" json:"r14,omitempty"`
+ R15 uint64 `protobuf:"varint,16,opt,name=r15,proto3" json:"r15,omitempty"`
+ Rip uint64 `protobuf:"varint,17,opt,name=rip,proto3" json:"rip,omitempty"`
+ Rflags uint64 `protobuf:"varint,18,opt,name=rflags,proto3" json:"rflags,omitempty"`
+ OrigRax uint64 `protobuf:"varint,19,opt,name=orig_rax,json=origRax,proto3" json:"orig_rax,omitempty"`
+ Cs uint64 `protobuf:"varint,20,opt,name=cs,proto3" json:"cs,omitempty"`
+ Ds uint64 `protobuf:"varint,21,opt,name=ds,proto3" json:"ds,omitempty"`
+ Es uint64 `protobuf:"varint,22,opt,name=es,proto3" json:"es,omitempty"`
+ Fs uint64 `protobuf:"varint,23,opt,name=fs,proto3" json:"fs,omitempty"`
+ Gs uint64 `protobuf:"varint,24,opt,name=gs,proto3" json:"gs,omitempty"`
+ Ss uint64 `protobuf:"varint,25,opt,name=ss,proto3" json:"ss,omitempty"`
+ FsBase uint64 `protobuf:"varint,26,opt,name=fs_base,json=fsBase,proto3" json:"fs_base,omitempty"`
+ GsBase uint64 `protobuf:"varint,27,opt,name=gs_base,json=gsBase,proto3" json:"gs_base,omitempty"`
+}
+
+func (x *AMD64Registers) Reset() {
+ *x = AMD64Registers{}
+ if protoimpl.UnsafeEnabled {
+ mi := &file_pkg_sentry_arch_registers_proto_msgTypes[0]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
+}
+
+func (x *AMD64Registers) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*AMD64Registers) ProtoMessage() {}
+
+func (x *AMD64Registers) ProtoReflect() protoreflect.Message {
+ mi := &file_pkg_sentry_arch_registers_proto_msgTypes[0]
+ if protoimpl.UnsafeEnabled && x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use AMD64Registers.ProtoReflect.Descriptor instead.
+func (*AMD64Registers) Descriptor() ([]byte, []int) {
+ return file_pkg_sentry_arch_registers_proto_rawDescGZIP(), []int{0}
+}
+
+func (x *AMD64Registers) GetRax() uint64 {
+ if x != nil {
+ return x.Rax
+ }
+ return 0
+}
+
+func (x *AMD64Registers) GetRbx() uint64 {
+ if x != nil {
+ return x.Rbx
+ }
+ return 0
+}
+
+func (x *AMD64Registers) GetRcx() uint64 {
+ if x != nil {
+ return x.Rcx
+ }
+ return 0
+}
+
+func (x *AMD64Registers) GetRdx() uint64 {
+ if x != nil {
+ return x.Rdx
+ }
+ return 0
+}
+
+func (x *AMD64Registers) GetRsi() uint64 {
+ if x != nil {
+ return x.Rsi
+ }
+ return 0
+}
+
+func (x *AMD64Registers) GetRdi() uint64 {
+ if x != nil {
+ return x.Rdi
+ }
+ return 0
+}
+
+func (x *AMD64Registers) GetRsp() uint64 {
+ if x != nil {
+ return x.Rsp
+ }
+ return 0
+}
+
+func (x *AMD64Registers) GetRbp() uint64 {
+ if x != nil {
+ return x.Rbp
+ }
+ return 0
+}
+
+func (x *AMD64Registers) GetR8() uint64 {
+ if x != nil {
+ return x.R8
+ }
+ return 0
+}
+
+func (x *AMD64Registers) GetR9() uint64 {
+ if x != nil {
+ return x.R9
+ }
+ return 0
+}
+
+func (x *AMD64Registers) GetR10() uint64 {
+ if x != nil {
+ return x.R10
+ }
+ return 0
+}
+
+func (x *AMD64Registers) GetR11() uint64 {
+ if x != nil {
+ return x.R11
+ }
+ return 0
+}
+
+func (x *AMD64Registers) GetR12() uint64 {
+ if x != nil {
+ return x.R12
+ }
+ return 0
+}
+
+func (x *AMD64Registers) GetR13() uint64 {
+ if x != nil {
+ return x.R13
+ }
+ return 0
+}
+
+func (x *AMD64Registers) GetR14() uint64 {
+ if x != nil {
+ return x.R14
+ }
+ return 0
+}
+
+func (x *AMD64Registers) GetR15() uint64 {
+ if x != nil {
+ return x.R15
+ }
+ return 0
+}
+
+func (x *AMD64Registers) GetRip() uint64 {
+ if x != nil {
+ return x.Rip
+ }
+ return 0
+}
+
+func (x *AMD64Registers) GetRflags() uint64 {
+ if x != nil {
+ return x.Rflags
+ }
+ return 0
+}
+
+func (x *AMD64Registers) GetOrigRax() uint64 {
+ if x != nil {
+ return x.OrigRax
+ }
+ return 0
+}
+
+func (x *AMD64Registers) GetCs() uint64 {
+ if x != nil {
+ return x.Cs
+ }
+ return 0
+}
+
+func (x *AMD64Registers) GetDs() uint64 {
+ if x != nil {
+ return x.Ds
+ }
+ return 0
+}
+
+func (x *AMD64Registers) GetEs() uint64 {
+ if x != nil {
+ return x.Es
+ }
+ return 0
+}
+
+func (x *AMD64Registers) GetFs() uint64 {
+ if x != nil {
+ return x.Fs
+ }
+ return 0
+}
+
+func (x *AMD64Registers) GetGs() uint64 {
+ if x != nil {
+ return x.Gs
+ }
+ return 0
+}
+
+func (x *AMD64Registers) GetSs() uint64 {
+ if x != nil {
+ return x.Ss
+ }
+ return 0
+}
+
+func (x *AMD64Registers) GetFsBase() uint64 {
+ if x != nil {
+ return x.FsBase
+ }
+ return 0
+}
+
+func (x *AMD64Registers) GetGsBase() uint64 {
+ if x != nil {
+ return x.GsBase
+ }
+ return 0
+}
+
+type ARM64Registers struct {
+ state protoimpl.MessageState
+ sizeCache protoimpl.SizeCache
+ unknownFields protoimpl.UnknownFields
+
+ R0 uint64 `protobuf:"varint,1,opt,name=r0,proto3" json:"r0,omitempty"`
+ R1 uint64 `protobuf:"varint,2,opt,name=r1,proto3" json:"r1,omitempty"`
+ R2 uint64 `protobuf:"varint,3,opt,name=r2,proto3" json:"r2,omitempty"`
+ R3 uint64 `protobuf:"varint,4,opt,name=r3,proto3" json:"r3,omitempty"`
+ R4 uint64 `protobuf:"varint,5,opt,name=r4,proto3" json:"r4,omitempty"`
+ R5 uint64 `protobuf:"varint,6,opt,name=r5,proto3" json:"r5,omitempty"`
+ R6 uint64 `protobuf:"varint,7,opt,name=r6,proto3" json:"r6,omitempty"`
+ R7 uint64 `protobuf:"varint,8,opt,name=r7,proto3" json:"r7,omitempty"`
+ R8 uint64 `protobuf:"varint,9,opt,name=r8,proto3" json:"r8,omitempty"`
+ R9 uint64 `protobuf:"varint,10,opt,name=r9,proto3" json:"r9,omitempty"`
+ R10 uint64 `protobuf:"varint,11,opt,name=r10,proto3" json:"r10,omitempty"`
+ R11 uint64 `protobuf:"varint,12,opt,name=r11,proto3" json:"r11,omitempty"`
+ R12 uint64 `protobuf:"varint,13,opt,name=r12,proto3" json:"r12,omitempty"`
+ R13 uint64 `protobuf:"varint,14,opt,name=r13,proto3" json:"r13,omitempty"`
+ R14 uint64 `protobuf:"varint,15,opt,name=r14,proto3" json:"r14,omitempty"`
+ R15 uint64 `protobuf:"varint,16,opt,name=r15,proto3" json:"r15,omitempty"`
+ R16 uint64 `protobuf:"varint,17,opt,name=r16,proto3" json:"r16,omitempty"`
+ R17 uint64 `protobuf:"varint,18,opt,name=r17,proto3" json:"r17,omitempty"`
+ R18 uint64 `protobuf:"varint,19,opt,name=r18,proto3" json:"r18,omitempty"`
+ R19 uint64 `protobuf:"varint,20,opt,name=r19,proto3" json:"r19,omitempty"`
+ R20 uint64 `protobuf:"varint,21,opt,name=r20,proto3" json:"r20,omitempty"`
+ R21 uint64 `protobuf:"varint,22,opt,name=r21,proto3" json:"r21,omitempty"`
+ R22 uint64 `protobuf:"varint,23,opt,name=r22,proto3" json:"r22,omitempty"`
+ R23 uint64 `protobuf:"varint,24,opt,name=r23,proto3" json:"r23,omitempty"`
+ R24 uint64 `protobuf:"varint,25,opt,name=r24,proto3" json:"r24,omitempty"`
+ R25 uint64 `protobuf:"varint,26,opt,name=r25,proto3" json:"r25,omitempty"`
+ R26 uint64 `protobuf:"varint,27,opt,name=r26,proto3" json:"r26,omitempty"`
+ R27 uint64 `protobuf:"varint,28,opt,name=r27,proto3" json:"r27,omitempty"`
+ R28 uint64 `protobuf:"varint,29,opt,name=r28,proto3" json:"r28,omitempty"`
+ R29 uint64 `protobuf:"varint,30,opt,name=r29,proto3" json:"r29,omitempty"`
+ R30 uint64 `protobuf:"varint,31,opt,name=r30,proto3" json:"r30,omitempty"`
+ Sp uint64 `protobuf:"varint,32,opt,name=sp,proto3" json:"sp,omitempty"`
+ Pc uint64 `protobuf:"varint,33,opt,name=pc,proto3" json:"pc,omitempty"`
+ Pstate uint64 `protobuf:"varint,34,opt,name=pstate,proto3" json:"pstate,omitempty"`
+ Tls uint64 `protobuf:"varint,35,opt,name=tls,proto3" json:"tls,omitempty"`
+}
+
+func (x *ARM64Registers) Reset() {
+ *x = ARM64Registers{}
+ if protoimpl.UnsafeEnabled {
+ mi := &file_pkg_sentry_arch_registers_proto_msgTypes[1]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
+}
+
+func (x *ARM64Registers) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*ARM64Registers) ProtoMessage() {}
+
+func (x *ARM64Registers) ProtoReflect() protoreflect.Message {
+ mi := &file_pkg_sentry_arch_registers_proto_msgTypes[1]
+ if protoimpl.UnsafeEnabled && x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use ARM64Registers.ProtoReflect.Descriptor instead.
+func (*ARM64Registers) Descriptor() ([]byte, []int) {
+ return file_pkg_sentry_arch_registers_proto_rawDescGZIP(), []int{1}
+}
+
+func (x *ARM64Registers) GetR0() uint64 {
+ if x != nil {
+ return x.R0
+ }
+ return 0
+}
+
+func (x *ARM64Registers) GetR1() uint64 {
+ if x != nil {
+ return x.R1
+ }
+ return 0
+}
+
+func (x *ARM64Registers) GetR2() uint64 {
+ if x != nil {
+ return x.R2
+ }
+ return 0
+}
+
+func (x *ARM64Registers) GetR3() uint64 {
+ if x != nil {
+ return x.R3
+ }
+ return 0
+}
+
+func (x *ARM64Registers) GetR4() uint64 {
+ if x != nil {
+ return x.R4
+ }
+ return 0
+}
+
+func (x *ARM64Registers) GetR5() uint64 {
+ if x != nil {
+ return x.R5
+ }
+ return 0
+}
+
+func (x *ARM64Registers) GetR6() uint64 {
+ if x != nil {
+ return x.R6
+ }
+ return 0
+}
+
+func (x *ARM64Registers) GetR7() uint64 {
+ if x != nil {
+ return x.R7
+ }
+ return 0
+}
+
+func (x *ARM64Registers) GetR8() uint64 {
+ if x != nil {
+ return x.R8
+ }
+ return 0
+}
+
+func (x *ARM64Registers) GetR9() uint64 {
+ if x != nil {
+ return x.R9
+ }
+ return 0
+}
+
+func (x *ARM64Registers) GetR10() uint64 {
+ if x != nil {
+ return x.R10
+ }
+ return 0
+}
+
+func (x *ARM64Registers) GetR11() uint64 {
+ if x != nil {
+ return x.R11
+ }
+ return 0
+}
+
+func (x *ARM64Registers) GetR12() uint64 {
+ if x != nil {
+ return x.R12
+ }
+ return 0
+}
+
+func (x *ARM64Registers) GetR13() uint64 {
+ if x != nil {
+ return x.R13
+ }
+ return 0
+}
+
+func (x *ARM64Registers) GetR14() uint64 {
+ if x != nil {
+ return x.R14
+ }
+ return 0
+}
+
+func (x *ARM64Registers) GetR15() uint64 {
+ if x != nil {
+ return x.R15
+ }
+ return 0
+}
+
+func (x *ARM64Registers) GetR16() uint64 {
+ if x != nil {
+ return x.R16
+ }
+ return 0
+}
+
+func (x *ARM64Registers) GetR17() uint64 {
+ if x != nil {
+ return x.R17
+ }
+ return 0
+}
+
+func (x *ARM64Registers) GetR18() uint64 {
+ if x != nil {
+ return x.R18
+ }
+ return 0
+}
+
+func (x *ARM64Registers) GetR19() uint64 {
+ if x != nil {
+ return x.R19
+ }
+ return 0
+}
+
+func (x *ARM64Registers) GetR20() uint64 {
+ if x != nil {
+ return x.R20
+ }
+ return 0
+}
+
+func (x *ARM64Registers) GetR21() uint64 {
+ if x != nil {
+ return x.R21
+ }
+ return 0
+}
+
+func (x *ARM64Registers) GetR22() uint64 {
+ if x != nil {
+ return x.R22
+ }
+ return 0
+}
+
+func (x *ARM64Registers) GetR23() uint64 {
+ if x != nil {
+ return x.R23
+ }
+ return 0
+}
+
+func (x *ARM64Registers) GetR24() uint64 {
+ if x != nil {
+ return x.R24
+ }
+ return 0
+}
+
+func (x *ARM64Registers) GetR25() uint64 {
+ if x != nil {
+ return x.R25
+ }
+ return 0
+}
+
+func (x *ARM64Registers) GetR26() uint64 {
+ if x != nil {
+ return x.R26
+ }
+ return 0
+}
+
+func (x *ARM64Registers) GetR27() uint64 {
+ if x != nil {
+ return x.R27
+ }
+ return 0
+}
+
+func (x *ARM64Registers) GetR28() uint64 {
+ if x != nil {
+ return x.R28
+ }
+ return 0
+}
+
+func (x *ARM64Registers) GetR29() uint64 {
+ if x != nil {
+ return x.R29
+ }
+ return 0
+}
+
+func (x *ARM64Registers) GetR30() uint64 {
+ if x != nil {
+ return x.R30
+ }
+ return 0
+}
+
+func (x *ARM64Registers) GetSp() uint64 {
+ if x != nil {
+ return x.Sp
+ }
+ return 0
+}
+
+func (x *ARM64Registers) GetPc() uint64 {
+ if x != nil {
+ return x.Pc
+ }
+ return 0
+}
+
+func (x *ARM64Registers) GetPstate() uint64 {
+ if x != nil {
+ return x.Pstate
+ }
+ return 0
+}
+
+func (x *ARM64Registers) GetTls() uint64 {
+ if x != nil {
+ return x.Tls
+ }
+ return 0
+}
+
+type Registers struct {
+ state protoimpl.MessageState
+ sizeCache protoimpl.SizeCache
+ unknownFields protoimpl.UnknownFields
+
+ // Types that are assignable to Arch:
+ // *Registers_Amd64
+ // *Registers_Arm64
+ Arch isRegisters_Arch `protobuf_oneof:"arch"`
+}
+
+func (x *Registers) Reset() {
+ *x = Registers{}
+ if protoimpl.UnsafeEnabled {
+ mi := &file_pkg_sentry_arch_registers_proto_msgTypes[2]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
+}
+
+func (x *Registers) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*Registers) ProtoMessage() {}
+
+func (x *Registers) ProtoReflect() protoreflect.Message {
+ mi := &file_pkg_sentry_arch_registers_proto_msgTypes[2]
+ if protoimpl.UnsafeEnabled && x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use Registers.ProtoReflect.Descriptor instead.
+func (*Registers) Descriptor() ([]byte, []int) {
+ return file_pkg_sentry_arch_registers_proto_rawDescGZIP(), []int{2}
+}
+
+func (m *Registers) GetArch() isRegisters_Arch {
+ if m != nil {
+ return m.Arch
+ }
+ return nil
+}
+
+func (x *Registers) GetAmd64() *AMD64Registers {
+ if x, ok := x.GetArch().(*Registers_Amd64); ok {
+ return x.Amd64
+ }
+ return nil
+}
+
+func (x *Registers) GetArm64() *ARM64Registers {
+ if x, ok := x.GetArch().(*Registers_Arm64); ok {
+ return x.Arm64
+ }
+ return nil
+}
+
+type isRegisters_Arch interface {
+ isRegisters_Arch()
+}
+
+type Registers_Amd64 struct {
+ Amd64 *AMD64Registers `protobuf:"bytes,1,opt,name=amd64,proto3,oneof"`
+}
+
+type Registers_Arm64 struct {
+ Arm64 *ARM64Registers `protobuf:"bytes,2,opt,name=arm64,proto3,oneof"`
+}
+
+func (*Registers_Amd64) isRegisters_Arch() {}
+
+func (*Registers_Arm64) isRegisters_Arch() {}
+
+var File_pkg_sentry_arch_registers_proto protoreflect.FileDescriptor
+
+var file_pkg_sentry_arch_registers_proto_rawDesc = []byte{
+ 0x0a, 0x1f, 0x70, 0x6b, 0x67, 0x2f, 0x73, 0x65, 0x6e, 0x74, 0x72, 0x79, 0x2f, 0x61, 0x72, 0x63,
+ 0x68, 0x2f, 0x72, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74,
+ 0x6f, 0x12, 0x06, 0x67, 0x76, 0x69, 0x73, 0x6f, 0x72, 0x22, 0x83, 0x04, 0x0a, 0x0e, 0x41, 0x4d,
+ 0x44, 0x36, 0x34, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x73, 0x12, 0x10, 0x0a, 0x03,
+ 0x72, 0x61, 0x78, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x03, 0x72, 0x61, 0x78, 0x12, 0x10,
+ 0x0a, 0x03, 0x72, 0x62, 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x52, 0x03, 0x72, 0x62, 0x78,
+ 0x12, 0x10, 0x0a, 0x03, 0x72, 0x63, 0x78, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x52, 0x03, 0x72,
+ 0x63, 0x78, 0x12, 0x10, 0x0a, 0x03, 0x72, 0x64, 0x78, 0x18, 0x04, 0x20, 0x01, 0x28, 0x04, 0x52,
+ 0x03, 0x72, 0x64, 0x78, 0x12, 0x10, 0x0a, 0x03, 0x72, 0x73, 0x69, 0x18, 0x05, 0x20, 0x01, 0x28,
+ 0x04, 0x52, 0x03, 0x72, 0x73, 0x69, 0x12, 0x10, 0x0a, 0x03, 0x72, 0x64, 0x69, 0x18, 0x06, 0x20,
+ 0x01, 0x28, 0x04, 0x52, 0x03, 0x72, 0x64, 0x69, 0x12, 0x10, 0x0a, 0x03, 0x72, 0x73, 0x70, 0x18,
+ 0x07, 0x20, 0x01, 0x28, 0x04, 0x52, 0x03, 0x72, 0x73, 0x70, 0x12, 0x10, 0x0a, 0x03, 0x72, 0x62,
+ 0x70, 0x18, 0x08, 0x20, 0x01, 0x28, 0x04, 0x52, 0x03, 0x72, 0x62, 0x70, 0x12, 0x0e, 0x0a, 0x02,
+ 0x72, 0x38, 0x18, 0x09, 0x20, 0x01, 0x28, 0x04, 0x52, 0x02, 0x72, 0x38, 0x12, 0x0e, 0x0a, 0x02,
+ 0x72, 0x39, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x04, 0x52, 0x02, 0x72, 0x39, 0x12, 0x10, 0x0a, 0x03,
+ 0x72, 0x31, 0x30, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x04, 0x52, 0x03, 0x72, 0x31, 0x30, 0x12, 0x10,
+ 0x0a, 0x03, 0x72, 0x31, 0x31, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x04, 0x52, 0x03, 0x72, 0x31, 0x31,
+ 0x12, 0x10, 0x0a, 0x03, 0x72, 0x31, 0x32, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x04, 0x52, 0x03, 0x72,
+ 0x31, 0x32, 0x12, 0x10, 0x0a, 0x03, 0x72, 0x31, 0x33, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x04, 0x52,
+ 0x03, 0x72, 0x31, 0x33, 0x12, 0x10, 0x0a, 0x03, 0x72, 0x31, 0x34, 0x18, 0x0f, 0x20, 0x01, 0x28,
+ 0x04, 0x52, 0x03, 0x72, 0x31, 0x34, 0x12, 0x10, 0x0a, 0x03, 0x72, 0x31, 0x35, 0x18, 0x10, 0x20,
+ 0x01, 0x28, 0x04, 0x52, 0x03, 0x72, 0x31, 0x35, 0x12, 0x10, 0x0a, 0x03, 0x72, 0x69, 0x70, 0x18,
+ 0x11, 0x20, 0x01, 0x28, 0x04, 0x52, 0x03, 0x72, 0x69, 0x70, 0x12, 0x16, 0x0a, 0x06, 0x72, 0x66,
+ 0x6c, 0x61, 0x67, 0x73, 0x18, 0x12, 0x20, 0x01, 0x28, 0x04, 0x52, 0x06, 0x72, 0x66, 0x6c, 0x61,
+ 0x67, 0x73, 0x12, 0x19, 0x0a, 0x08, 0x6f, 0x72, 0x69, 0x67, 0x5f, 0x72, 0x61, 0x78, 0x18, 0x13,
+ 0x20, 0x01, 0x28, 0x04, 0x52, 0x07, 0x6f, 0x72, 0x69, 0x67, 0x52, 0x61, 0x78, 0x12, 0x0e, 0x0a,
+ 0x02, 0x63, 0x73, 0x18, 0x14, 0x20, 0x01, 0x28, 0x04, 0x52, 0x02, 0x63, 0x73, 0x12, 0x0e, 0x0a,
+ 0x02, 0x64, 0x73, 0x18, 0x15, 0x20, 0x01, 0x28, 0x04, 0x52, 0x02, 0x64, 0x73, 0x12, 0x0e, 0x0a,
+ 0x02, 0x65, 0x73, 0x18, 0x16, 0x20, 0x01, 0x28, 0x04, 0x52, 0x02, 0x65, 0x73, 0x12, 0x0e, 0x0a,
+ 0x02, 0x66, 0x73, 0x18, 0x17, 0x20, 0x01, 0x28, 0x04, 0x52, 0x02, 0x66, 0x73, 0x12, 0x0e, 0x0a,
+ 0x02, 0x67, 0x73, 0x18, 0x18, 0x20, 0x01, 0x28, 0x04, 0x52, 0x02, 0x67, 0x73, 0x12, 0x0e, 0x0a,
+ 0x02, 0x73, 0x73, 0x18, 0x19, 0x20, 0x01, 0x28, 0x04, 0x52, 0x02, 0x73, 0x73, 0x12, 0x17, 0x0a,
+ 0x07, 0x66, 0x73, 0x5f, 0x62, 0x61, 0x73, 0x65, 0x18, 0x1a, 0x20, 0x01, 0x28, 0x04, 0x52, 0x06,
+ 0x66, 0x73, 0x42, 0x61, 0x73, 0x65, 0x12, 0x17, 0x0a, 0x07, 0x67, 0x73, 0x5f, 0x62, 0x61, 0x73,
+ 0x65, 0x18, 0x1b, 0x20, 0x01, 0x28, 0x04, 0x52, 0x06, 0x67, 0x73, 0x42, 0x61, 0x73, 0x65, 0x22,
+ 0xf4, 0x04, 0x0a, 0x0e, 0x41, 0x52, 0x4d, 0x36, 0x34, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65,
+ 0x72, 0x73, 0x12, 0x0e, 0x0a, 0x02, 0x72, 0x30, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x02,
+ 0x72, 0x30, 0x12, 0x0e, 0x0a, 0x02, 0x72, 0x31, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x52, 0x02,
+ 0x72, 0x31, 0x12, 0x0e, 0x0a, 0x02, 0x72, 0x32, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x52, 0x02,
+ 0x72, 0x32, 0x12, 0x0e, 0x0a, 0x02, 0x72, 0x33, 0x18, 0x04, 0x20, 0x01, 0x28, 0x04, 0x52, 0x02,
+ 0x72, 0x33, 0x12, 0x0e, 0x0a, 0x02, 0x72, 0x34, 0x18, 0x05, 0x20, 0x01, 0x28, 0x04, 0x52, 0x02,
+ 0x72, 0x34, 0x12, 0x0e, 0x0a, 0x02, 0x72, 0x35, 0x18, 0x06, 0x20, 0x01, 0x28, 0x04, 0x52, 0x02,
+ 0x72, 0x35, 0x12, 0x0e, 0x0a, 0x02, 0x72, 0x36, 0x18, 0x07, 0x20, 0x01, 0x28, 0x04, 0x52, 0x02,
+ 0x72, 0x36, 0x12, 0x0e, 0x0a, 0x02, 0x72, 0x37, 0x18, 0x08, 0x20, 0x01, 0x28, 0x04, 0x52, 0x02,
+ 0x72, 0x37, 0x12, 0x0e, 0x0a, 0x02, 0x72, 0x38, 0x18, 0x09, 0x20, 0x01, 0x28, 0x04, 0x52, 0x02,
+ 0x72, 0x38, 0x12, 0x0e, 0x0a, 0x02, 0x72, 0x39, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x04, 0x52, 0x02,
+ 0x72, 0x39, 0x12, 0x10, 0x0a, 0x03, 0x72, 0x31, 0x30, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x04, 0x52,
+ 0x03, 0x72, 0x31, 0x30, 0x12, 0x10, 0x0a, 0x03, 0x72, 0x31, 0x31, 0x18, 0x0c, 0x20, 0x01, 0x28,
+ 0x04, 0x52, 0x03, 0x72, 0x31, 0x31, 0x12, 0x10, 0x0a, 0x03, 0x72, 0x31, 0x32, 0x18, 0x0d, 0x20,
+ 0x01, 0x28, 0x04, 0x52, 0x03, 0x72, 0x31, 0x32, 0x12, 0x10, 0x0a, 0x03, 0x72, 0x31, 0x33, 0x18,
+ 0x0e, 0x20, 0x01, 0x28, 0x04, 0x52, 0x03, 0x72, 0x31, 0x33, 0x12, 0x10, 0x0a, 0x03, 0x72, 0x31,
+ 0x34, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x04, 0x52, 0x03, 0x72, 0x31, 0x34, 0x12, 0x10, 0x0a, 0x03,
+ 0x72, 0x31, 0x35, 0x18, 0x10, 0x20, 0x01, 0x28, 0x04, 0x52, 0x03, 0x72, 0x31, 0x35, 0x12, 0x10,
+ 0x0a, 0x03, 0x72, 0x31, 0x36, 0x18, 0x11, 0x20, 0x01, 0x28, 0x04, 0x52, 0x03, 0x72, 0x31, 0x36,
+ 0x12, 0x10, 0x0a, 0x03, 0x72, 0x31, 0x37, 0x18, 0x12, 0x20, 0x01, 0x28, 0x04, 0x52, 0x03, 0x72,
+ 0x31, 0x37, 0x12, 0x10, 0x0a, 0x03, 0x72, 0x31, 0x38, 0x18, 0x13, 0x20, 0x01, 0x28, 0x04, 0x52,
+ 0x03, 0x72, 0x31, 0x38, 0x12, 0x10, 0x0a, 0x03, 0x72, 0x31, 0x39, 0x18, 0x14, 0x20, 0x01, 0x28,
+ 0x04, 0x52, 0x03, 0x72, 0x31, 0x39, 0x12, 0x10, 0x0a, 0x03, 0x72, 0x32, 0x30, 0x18, 0x15, 0x20,
+ 0x01, 0x28, 0x04, 0x52, 0x03, 0x72, 0x32, 0x30, 0x12, 0x10, 0x0a, 0x03, 0x72, 0x32, 0x31, 0x18,
+ 0x16, 0x20, 0x01, 0x28, 0x04, 0x52, 0x03, 0x72, 0x32, 0x31, 0x12, 0x10, 0x0a, 0x03, 0x72, 0x32,
+ 0x32, 0x18, 0x17, 0x20, 0x01, 0x28, 0x04, 0x52, 0x03, 0x72, 0x32, 0x32, 0x12, 0x10, 0x0a, 0x03,
+ 0x72, 0x32, 0x33, 0x18, 0x18, 0x20, 0x01, 0x28, 0x04, 0x52, 0x03, 0x72, 0x32, 0x33, 0x12, 0x10,
+ 0x0a, 0x03, 0x72, 0x32, 0x34, 0x18, 0x19, 0x20, 0x01, 0x28, 0x04, 0x52, 0x03, 0x72, 0x32, 0x34,
+ 0x12, 0x10, 0x0a, 0x03, 0x72, 0x32, 0x35, 0x18, 0x1a, 0x20, 0x01, 0x28, 0x04, 0x52, 0x03, 0x72,
+ 0x32, 0x35, 0x12, 0x10, 0x0a, 0x03, 0x72, 0x32, 0x36, 0x18, 0x1b, 0x20, 0x01, 0x28, 0x04, 0x52,
+ 0x03, 0x72, 0x32, 0x36, 0x12, 0x10, 0x0a, 0x03, 0x72, 0x32, 0x37, 0x18, 0x1c, 0x20, 0x01, 0x28,
+ 0x04, 0x52, 0x03, 0x72, 0x32, 0x37, 0x12, 0x10, 0x0a, 0x03, 0x72, 0x32, 0x38, 0x18, 0x1d, 0x20,
+ 0x01, 0x28, 0x04, 0x52, 0x03, 0x72, 0x32, 0x38, 0x12, 0x10, 0x0a, 0x03, 0x72, 0x32, 0x39, 0x18,
+ 0x1e, 0x20, 0x01, 0x28, 0x04, 0x52, 0x03, 0x72, 0x32, 0x39, 0x12, 0x10, 0x0a, 0x03, 0x72, 0x33,
+ 0x30, 0x18, 0x1f, 0x20, 0x01, 0x28, 0x04, 0x52, 0x03, 0x72, 0x33, 0x30, 0x12, 0x0e, 0x0a, 0x02,
+ 0x73, 0x70, 0x18, 0x20, 0x20, 0x01, 0x28, 0x04, 0x52, 0x02, 0x73, 0x70, 0x12, 0x0e, 0x0a, 0x02,
+ 0x70, 0x63, 0x18, 0x21, 0x20, 0x01, 0x28, 0x04, 0x52, 0x02, 0x70, 0x63, 0x12, 0x16, 0x0a, 0x06,
+ 0x70, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x22, 0x20, 0x01, 0x28, 0x04, 0x52, 0x06, 0x70, 0x73,
+ 0x74, 0x61, 0x74, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x74, 0x6c, 0x73, 0x18, 0x23, 0x20, 0x01, 0x28,
+ 0x04, 0x52, 0x03, 0x74, 0x6c, 0x73, 0x22, 0x73, 0x0a, 0x09, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74,
+ 0x65, 0x72, 0x73, 0x12, 0x2e, 0x0a, 0x05, 0x61, 0x6d, 0x64, 0x36, 0x34, 0x18, 0x01, 0x20, 0x01,
+ 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x67, 0x76, 0x69, 0x73, 0x6f, 0x72, 0x2e, 0x41, 0x4d, 0x44, 0x36,
+ 0x34, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x73, 0x48, 0x00, 0x52, 0x05, 0x61, 0x6d,
+ 0x64, 0x36, 0x34, 0x12, 0x2e, 0x0a, 0x05, 0x61, 0x72, 0x6d, 0x36, 0x34, 0x18, 0x02, 0x20, 0x01,
+ 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x67, 0x76, 0x69, 0x73, 0x6f, 0x72, 0x2e, 0x41, 0x52, 0x4d, 0x36,
+ 0x34, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x73, 0x48, 0x00, 0x52, 0x05, 0x61, 0x72,
+ 0x6d, 0x36, 0x34, 0x42, 0x06, 0x0a, 0x04, 0x61, 0x72, 0x63, 0x68, 0x62, 0x06, 0x70, 0x72, 0x6f,
+ 0x74, 0x6f, 0x33,
+}
+
+var (
+ file_pkg_sentry_arch_registers_proto_rawDescOnce sync.Once
+ file_pkg_sentry_arch_registers_proto_rawDescData = file_pkg_sentry_arch_registers_proto_rawDesc
+)
+
+func file_pkg_sentry_arch_registers_proto_rawDescGZIP() []byte {
+ file_pkg_sentry_arch_registers_proto_rawDescOnce.Do(func() {
+ file_pkg_sentry_arch_registers_proto_rawDescData = protoimpl.X.CompressGZIP(file_pkg_sentry_arch_registers_proto_rawDescData)
+ })
+ return file_pkg_sentry_arch_registers_proto_rawDescData
+}
+
+var file_pkg_sentry_arch_registers_proto_msgTypes = make([]protoimpl.MessageInfo, 3)
+var file_pkg_sentry_arch_registers_proto_goTypes = []interface{}{
+ (*AMD64Registers)(nil), // 0: gvisor.AMD64Registers
+ (*ARM64Registers)(nil), // 1: gvisor.ARM64Registers
+ (*Registers)(nil), // 2: gvisor.Registers
+}
+var file_pkg_sentry_arch_registers_proto_depIdxs = []int32{
+ 0, // 0: gvisor.Registers.amd64:type_name -> gvisor.AMD64Registers
+ 1, // 1: gvisor.Registers.arm64:type_name -> gvisor.ARM64Registers
+ 2, // [2:2] is the sub-list for method output_type
+ 2, // [2:2] is the sub-list for method input_type
+ 2, // [2:2] is the sub-list for extension type_name
+ 2, // [2:2] is the sub-list for extension extendee
+ 0, // [0:2] is the sub-list for field type_name
+}
+
+func init() { file_pkg_sentry_arch_registers_proto_init() }
+func file_pkg_sentry_arch_registers_proto_init() {
+ if File_pkg_sentry_arch_registers_proto != nil {
+ return
+ }
+ if !protoimpl.UnsafeEnabled {
+ file_pkg_sentry_arch_registers_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
+ switch v := v.(*AMD64Registers); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_pkg_sentry_arch_registers_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
+ switch v := v.(*ARM64Registers); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_pkg_sentry_arch_registers_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
+ switch v := v.(*Registers); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ }
+ file_pkg_sentry_arch_registers_proto_msgTypes[2].OneofWrappers = []interface{}{
+ (*Registers_Amd64)(nil),
+ (*Registers_Arm64)(nil),
+ }
+ type x struct{}
+ out := protoimpl.TypeBuilder{
+ File: protoimpl.DescBuilder{
+ GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
+ RawDescriptor: file_pkg_sentry_arch_registers_proto_rawDesc,
+ NumEnums: 0,
+ NumMessages: 3,
+ NumExtensions: 0,
+ NumServices: 0,
+ },
+ GoTypes: file_pkg_sentry_arch_registers_proto_goTypes,
+ DependencyIndexes: file_pkg_sentry_arch_registers_proto_depIdxs,
+ MessageInfos: file_pkg_sentry_arch_registers_proto_msgTypes,
+ }.Build()
+ File_pkg_sentry_arch_registers_proto = out.File
+ file_pkg_sentry_arch_registers_proto_rawDesc = nil
+ file_pkg_sentry_arch_registers_proto_goTypes = nil
+ file_pkg_sentry_arch_registers_proto_depIdxs = nil
+}
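
The oneof wrappers near the end of this generated file (Registers_Amd64,
Registers_Arm64) stand in for the oneof arch field of the deleted
registers.proto above. A usage sketch, assuming the
gvisor.dev/gvisor/pkg/sentry/arch/registers_go_proto import path implied
by the go_proto target (hypothetical program; not part of this change):

    package main

    import (
    	"fmt"

    	proto "github.com/golang/protobuf/proto"
    	pb "gvisor.dev/gvisor/pkg/sentry/arch/registers_go_proto"
    )

    func main() {
    	// Populate the amd64 branch of the oneof.
    	regs := &pb.Registers{
    		Arch: &pb.Registers_Amd64{
    			Amd64: &pb.AMD64Registers{Rax: 0x2a, Rip: 0x401000},
    		},
    	}

    	// Round-trip through the wire format.
    	buf, err := proto.Marshal(regs)
    	if err != nil {
    		panic(err)
    	}
    	var decoded pb.Registers
    	if err := proto.Unmarshal(buf, &decoded); err != nil {
    		panic(err)
    	}

    	// GetAmd64 returns nil if the other branch (or none) is set.
    	if amd64 := decoded.GetAmd64(); amd64 != nil {
    		fmt.Printf("rax=%#x rip=%#x\n", amd64.GetRax(), amd64.GetRip())
    	}
    }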