Diffstat (limited to 'tools')
-rw-r--r--  tools/defs.bzl                                       |  12
-rw-r--r--  tools/go_marshal/gomarshal/generator.go              |  47
-rw-r--r--  tools/go_marshal/gomarshal/generator_interfaces.go   | 146
-rw-r--r--  tools/go_marshal/gomarshal/generator_tests.go        |  50
-rw-r--r--  tools/go_marshal/gomarshal/util.go                   |  30
-rw-r--r--  tools/go_marshal/marshal/BUILD                       |   3
-rw-r--r--  tools/go_marshal/marshal/marshal.go                  |  50
-rw-r--r--  tools/go_marshal/test/BUILD                          |  14
-rw-r--r--  tools/go_marshal/test/benchmark_test.go              |   2
-rw-r--r--  tools/go_marshal/test/escape.go                      | 114
10 files changed, 426 insertions(+), 42 deletions(-)
diff --git a/tools/defs.bzl b/tools/defs.bzl
index 46249f9c4..39f035f12 100644
--- a/tools/defs.bzl
+++ b/tools/defs.bzl
@@ -117,10 +117,10 @@ def go_library(name, srcs, deps = [], imports = [], stateify = True, marshal = F
# First, we need to segregate the input files via the special suffixes,
# and calculate the final output set.
state_sets = calculate_sets(srcs)
- for (suffix, srcs) in state_sets.items():
+ for (suffix, src_subset) in state_sets.items():
go_stateify(
name = name + suffix + "_state_autogen_with_imports",
- srcs = srcs,
+ srcs = src_subset,
imports = imports,
package = full_pkg,
out = name + suffix + "_state_autogen_with_imports.go",
@@ -140,10 +140,10 @@ def go_library(name, srcs, deps = [], imports = [], stateify = True, marshal = F
if marshal:
# See above.
marshal_sets = calculate_sets(srcs)
- for (suffix, srcs) in marshal_sets.items():
+ for (suffix, src_subset) in marshal_sets.items():
go_marshal(
name = name + suffix + "_abi_autogen",
- srcs = srcs,
+ srcs = src_subset,
debug = False,
imports = imports,
package = name,
@@ -172,11 +172,11 @@ def go_library(name, srcs, deps = [], imports = [], stateify = True, marshal = F
# See above.
marshal_sets = calculate_sets(srcs)
- for (suffix, srcs) in marshal_sets.items():
+ for (suffix, _) in marshal_sets.items():
_go_test(
name = name + suffix + "_abi_autogen_test",
srcs = [name + suffix + "_abi_autogen_test.go"],
- library = ":" + name + suffix,
+ library = ":" + name,
deps = marshal_test_deps,
**kwargs
)
diff --git a/tools/go_marshal/gomarshal/generator.go b/tools/go_marshal/gomarshal/generator.go
index 0b3f600fe..0294ba5ba 100644
--- a/tools/go_marshal/gomarshal/generator.go
+++ b/tools/go_marshal/gomarshal/generator.go
@@ -34,9 +34,9 @@ const (
usermemImport = "gvisor.dev/gvisor/pkg/usermem"
)
-// List of identifiers we use in generated code, that may conflict a
-// similarly-named source identifier. Avoid problems by refusing the generate
-// code when we see these.
+// List of identifiers we use in generated code that may conflict with a
+// similarly-named source identifier. Abort gracefully when we see these to
+// avoid potentially confusing compilation failures in generated code.
//
// This only applies to import aliases at the moment. All other identifiers
// are qualified by a receiver argument, since they're struct fields.
@@ -44,10 +44,20 @@ const (
// All receivers are single letters, so we don't allow import aliases to be a
// single letter.
var badIdents = []string{
- "src", "srcs", "dst", "dsts", "blk", "buf", "err",
+ "addr", "blk", "buf", "dst", "dsts", "err", "hdr", "len", "ptr", "src", "srcs", "task", "val",
// All single-letter identifiers.
}
+// Constructed from badIdents in init().
+var badIdentsMap map[string]struct{}
+
+func init() {
+ badIdentsMap = make(map[string]struct{})
+ for _, ident := range badIdents {
+ badIdentsMap[ident] = struct{}{}
+ }
+}
+
// Generator drives code generation for a single invocation of the go_marshal
// utility.
//
@@ -88,16 +98,20 @@ func NewGenerator(srcs []string, out, outTest, pkg string, imports []string) (*G
}
for _, i := range imports {
// All imports on the extra imports list are unconditionally marked as
- // used, so they're always added to the generated code.
+ // used, so that they're always added to the generated code.
g.imports.add(i).markUsed()
}
- g.imports.add(marshalImport).markUsed()
- // The follow imports may or may not be used by the generated
- // code, depending what's required for the target types. Don't
- // mark these imports as used by default.
- g.imports.add(usermemImport)
- g.imports.add(safecopyImport)
+
+ // The following imports may or may not be used by the generated code,
+ // depending on what's required for the target types. Don't mark these as
+ // used by default.
+ g.imports.add("io")
+ g.imports.add("reflect")
+ g.imports.add("runtime")
g.imports.add("unsafe")
+ g.imports.add(marshalImport)
+ g.imports.add(safecopyImport)
+ g.imports.add(usermemImport)
return &g, nil
}
@@ -111,7 +125,7 @@ func (g *Generator) writeHeader() error {
// Emit build tags.
if t := tags.Aggregate(g.inputs); len(t) > 0 {
b.emit(strings.Join(t.Lines(), "\n"))
- b.emit("\n")
+ b.emit("\n\n")
}
// Package header.
@@ -229,11 +243,6 @@ func (g *Generator) collectMarshallabeTypes(a *ast.File, f *token.FileSet) []*as
// identifiers in the generated code don't conflict with any imported package
// names.
func (g *Generator) collectImports(a *ast.File, f *token.FileSet) map[string]importStmt {
- badImportNames := make(map[string]bool)
- for _, i := range badIdents {
- badImportNames[i] = true
- }
-
is := make(map[string]importStmt)
for _, decl := range a.Decls {
gdecl, ok := decl.(*ast.GenDecl)
@@ -250,7 +259,7 @@ func (g *Generator) collectImports(a *ast.File, f *token.FileSet) map[string]imp
if len(i.name) == 1 {
abortAt(f.Position(spec.Pos()), fmt.Sprintf("Import has a single character local name '%s'; this may conflict with code generated by go_marshal, use a multi-character import alias", i.name))
}
- if badImportNames[i.name] {
+ if _, ok := badIdentsMap[i.name]; ok {
abortAt(f.Position(spec.Pos()), fmt.Sprintf("Import name '%s' is likely to conflict with code generated by go_marshal, use a different import alias", i.name))
}
}
@@ -371,6 +380,7 @@ func (g *Generator) writeTests(ts []*testGenerator) error {
return err
}
+ // Collect and write test import statements.
imports := newImportTable()
for _, t := range ts {
imports.merge(t.imports)
@@ -380,6 +390,7 @@ func (g *Generator) writeTests(ts []*testGenerator) error {
return err
}
+ // Write test functions.
for _, t := range ts {
if err := t.write(g.outputTest); err != nil {
return err
diff --git a/tools/go_marshal/gomarshal/generator_interfaces.go b/tools/go_marshal/gomarshal/generator_interfaces.go
index a712c14dc..3aa299ccd 100644
--- a/tools/go_marshal/gomarshal/generator_interfaces.go
+++ b/tools/go_marshal/gomarshal/generator_interfaces.go
@@ -301,7 +301,7 @@ func (g *interfaceGenerator) emitMarshallable() {
primitiveSize += size
} else {
g.recordUsedMarshallable(t.Name)
- dynamicSizeTerms = append(dynamicSizeTerms, fmt.Sprintf("%s.SizeBytes()", g.fieldAccessor(n)))
+ dynamicSizeTerms = append(dynamicSizeTerms, fmt.Sprintf("(*%s)(nil).SizeBytes()", t.Name))
}
},
selector: func(n, tX, tSel *ast.Ident) {
@@ -504,4 +504,148 @@ func (g *interfaceGenerator) emitMarshallable() {
})
g.emit("}\n\n")
+ g.emit("// CopyOut implements marshal.Marshallable.CopyOut.\n")
+ g.recordUsedImport("marshal")
+ g.recordUsedImport("usermem")
+ g.emit("func (%s *%s) CopyOut(task marshal.Task, addr usermem.Addr) (int, error) {\n", g.r, g.typeName())
+ g.inIndent(func() {
+ fallback := func() {
+ g.emit("// Type %s doesn't have a packed layout in memory, fall back to MarshalBytes.\n", g.typeName())
+ g.emit("buf := task.CopyScratchBuffer(%s.SizeBytes())\n", g.r)
+ g.emit("%s.MarshalBytes(buf)\n", g.r)
+ g.emit("return task.CopyOutBytes(addr, buf)\n")
+ }
+ if thisPacked {
+ g.recordUsedImport("reflect")
+ g.recordUsedImport("runtime")
+ g.recordUsedImport("unsafe")
+ if cond, ok := g.areFieldsPackedExpression(); ok {
+ g.emit("if !%s {\n", cond)
+ g.inIndent(fallback)
+ g.emit("}\n\n")
+ }
+ // Fast serialization.
+ g.emit("// Bypass escape analysis on %s. The no-op arithmetic operation on the\n", g.r)
+ g.emit("// pointer makes the compiler think val doesn't depend on %s.\n", g.r)
+ g.emit("// See src/runtime/stubs.go:noescape() in the golang toolchain.\n")
+ g.emit("ptr := unsafe.Pointer(%s)\n", g.r)
+ g.emit("val := uintptr(ptr)\n")
+ g.emit("val = val^0\n\n")
+
+ g.emit("// Construct a slice backed by %s's underlying memory.\n", g.r)
+ g.emit("var buf []byte\n")
+ g.emit("hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))\n")
+ g.emit("hdr.Data = val\n")
+ g.emit("hdr.Len = %s.SizeBytes()\n", g.r)
+ g.emit("hdr.Cap = %s.SizeBytes()\n\n", g.r)
+
+ g.emit("len, err := task.CopyOutBytes(addr, buf)\n")
+ g.emit("// Since we bypassed the compiler's escape analysis, indicate that %s\n", g.r)
+ g.emit("// must live until after the CopyOutBytes.\n")
+ g.emit("runtime.KeepAlive(%s)\n", g.r)
+ g.emit("return len, err\n")
+ } else {
+ fallback()
+ }
+ })
+ g.emit("}\n\n")
+
+ g.emit("// CopyIn implements marshal.Marshallable.CopyIn.\n")
+ g.recordUsedImport("marshal")
+ g.recordUsedImport("usermem")
+ g.emit("func (%s *%s) CopyIn(task marshal.Task, addr usermem.Addr) (int, error) {\n", g.r, g.typeName())
+ g.inIndent(func() {
+ fallback := func() {
+ g.emit("// Type %s doesn't have a packed layout in memory, fall back to UnmarshalBytes.\n", g.typeName())
+ g.emit("buf := task.CopyScratchBuffer(%s.SizeBytes())\n", g.r)
+ g.emit("n, err := task.CopyInBytes(addr, buf)\n")
+ g.emit("if err != nil {\n")
+ g.inIndent(func() {
+ g.emit("return n, err\n")
+ })
+ g.emit("}\n")
+
+ g.emit("%s.UnmarshalBytes(buf)\n", g.r)
+ g.emit("return n, nil\n")
+ }
+ if thisPacked {
+ g.recordUsedImport("reflect")
+ g.recordUsedImport("runtime")
+ g.recordUsedImport("unsafe")
+ if cond, ok := g.areFieldsPackedExpression(); ok {
+ g.emit("if !%s {\n", cond)
+ g.inIndent(fallback)
+ g.emit("}\n\n")
+ }
+ // Fast deserialization.
+ g.emit("// Bypass escape analysis on %s. The no-op arithmetic operation on the\n", g.r)
+ g.emit("// pointer makes the compiler think val doesn't depend on %s.\n", g.r)
+ g.emit("// See src/runtime/stubs.go:noescape() in the golang toolchain.\n")
+ g.emit("ptr := unsafe.Pointer(%s)\n", g.r)
+ g.emit("val := uintptr(ptr)\n")
+ g.emit("val = val^0\n\n")
+
+ g.emit("// Construct a slice backed by %s's underlying memory.\n", g.r)
+ g.emit("var buf []byte\n")
+ g.emit("hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))\n")
+ g.emit("hdr.Data = val\n")
+ g.emit("hdr.Len = %s.SizeBytes()\n", g.r)
+ g.emit("hdr.Cap = %s.SizeBytes()\n\n", g.r)
+
+ g.emit("len, err := task.CopyInBytes(addr, buf)\n")
+ g.emit("// Since we bypassed the compiler's escape analysis, indicate that %s\n", g.r)
+ g.emit("// must live until after the CopyInBytes.\n")
+ g.emit("runtime.KeepAlive(%s)\n", g.r)
+ g.emit("return len, err\n")
+ } else {
+ fallback()
+ }
+ })
+ g.emit("}\n\n")
+
+ g.emit("// WriteTo implements io.WriterTo.WriteTo.\n")
+ g.recordUsedImport("io")
+ g.emit("func (%s *%s) WriteTo(w io.Writer) (int64, error) {\n", g.r, g.typeName())
+ g.inIndent(func() {
+ fallback := func() {
+ g.emit("// Type %s doesn't have a packed layout in memory, fall back to MarshalBytes.\n", g.typeName())
+ g.emit("buf := make([]byte, %s.SizeBytes())\n", g.r)
+ g.emit("%s.MarshalBytes(buf)\n", g.r)
+ g.emit("n, err := w.Write(buf)\n")
+ g.emit("return int64(n), err\n")
+ }
+ if thisPacked {
+ g.recordUsedImport("reflect")
+ g.recordUsedImport("runtime")
+ g.recordUsedImport("unsafe")
+ if cond, ok := g.areFieldsPackedExpression(); ok {
+ g.emit("if !%s {\n", cond)
+ g.inIndent(fallback)
+ g.emit("}\n\n")
+ }
+ // Fast serialization.
+ g.emit("// Bypass escape analysis on %s. The no-op arithmetic operation on the\n", g.r)
+ g.emit("// pointer makes the compiler think val doesn't depend on %s.\n", g.r)
+ g.emit("// See src/runtime/stubs.go:noescape() in the golang toolchain.\n")
+ g.emit("ptr := unsafe.Pointer(%s)\n", g.r)
+ g.emit("val := uintptr(ptr)\n")
+ g.emit("val = val^0\n\n")
+
+ g.emit("// Construct a slice backed by %s's underlying memory.\n", g.r)
+ g.emit("var buf []byte\n")
+ g.emit("hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))\n")
+ g.emit("hdr.Data = val\n")
+ g.emit("hdr.Len = %s.SizeBytes()\n", g.r)
+ g.emit("hdr.Cap = %s.SizeBytes()\n\n", g.r)
+
+ g.emit("len, err := w.Write(buf)\n")
+ g.emit("// Since we bypassed the compiler's escape analysis, indicate that %s\n", g.r)
+ g.emit("// must live until after the Write.\n")
+ g.emit("runtime.KeepAlive(%s)\n", g.r)
+ g.emit("return int64(len), err\n")
+ } else {
+ fallback()
+ }
+ })
+ g.emit("}\n\n")
}
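
For reference, a minimal hand-written sketch of the fast path that the generated CopyOut above boils down to, assuming a hypothetical packed type Foo and a stand-in copyOutBytes helper (neither is part of this change):

// What the generated fast path reduces to for a hypothetical packed type Foo.
// Foo and copyOutBytes are illustrative only.
package main

import (
	"fmt"
	"reflect"
	"runtime"
	"unsafe"
)

// Foo stands in for a packed marshallable type with a fixed 16-byte layout.
type Foo struct {
	A uint64
	B uint64
}

// SizeBytes mirrors the generated constant-size implementation.
func (f *Foo) SizeBytes() int { return 16 }

// copyOutBytes stands in for task.CopyOutBytes; here it only reports length.
func copyOutBytes(b []byte) (int, error) { return len(b), nil }

// CopyOut aliases f's memory as a byte slice instead of marshalling into a
// scratch buffer, following the generated fast path.
func (f *Foo) CopyOut() (int, error) {
	// Bypass escape analysis on f. The no-op arithmetic on the pointer makes
	// the compiler think val doesn't depend on f.
	// See src/runtime/stubs.go:noescape() in the golang toolchain.
	ptr := unsafe.Pointer(f)
	val := uintptr(ptr)
	val = val ^ 0

	// Construct a slice backed by f's underlying memory.
	var buf []byte
	hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
	hdr.Data = val
	hdr.Len = f.SizeBytes()
	hdr.Cap = f.SizeBytes()

	n, err := copyOutBytes(buf)
	// Since we bypassed the compiler's escape analysis, indicate that f must
	// live until after the copy.
	runtime.KeepAlive(f)
	return n, err
}

func main() {
	f := Foo{A: 1, B: 2}
	n, err := (&f).CopyOut()
	fmt.Println(n, err)
}
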
diff --git a/tools/go_marshal/gomarshal/generator_tests.go b/tools/go_marshal/gomarshal/generator_tests.go
index bcda17c3b..8c28b00d0 100644
--- a/tools/go_marshal/gomarshal/generator_tests.go
+++ b/tools/go_marshal/gomarshal/generator_tests.go
@@ -22,9 +22,11 @@ import (
)
var standardImports = []string{
+ "bytes",
"fmt",
"reflect",
"testing",
+
"gvisor.dev/gvisor/tools/go_marshal/analysis",
}
@@ -116,26 +118,64 @@ func (g *testGenerator) emitTestMarshalUnmarshalPreservesData() {
g.emit("y.UnmarshalBytes(buf)\n")
g.emit("if !reflect.DeepEqual(x, y) {\n")
g.inIndent(func() {
- g.emit("t.Fatal(fmt.Sprintf(\"Data corrupted across Marshal/Unmarshal cycle:\\nBefore: %%+v\\nAfter: %%+v\\n\", x, y))\n")
+ g.emit("t.Fatal(fmt.Sprintf(\"Data corrupted across MarshalBytes/UnmarshalBytes cycle:\\nBefore: %+v\\nAfter: %+v\\n\", x, y))\n")
})
g.emit("}\n")
g.emit("yUnsafe.UnmarshalBytes(bufUnsafe)\n")
g.emit("if !reflect.DeepEqual(x, yUnsafe) {\n")
g.inIndent(func() {
- g.emit("t.Fatal(fmt.Sprintf(\"Data corrupted across MarshalUnsafe/Unmarshal cycle:\\nBefore: %%+v\\nAfter: %%+v\\n\", x, yUnsafe))\n")
+ g.emit("t.Fatal(fmt.Sprintf(\"Data corrupted across MarshalUnsafe/UnmarshalBytes cycle:\\nBefore: %+v\\nAfter: %+v\\n\", x, yUnsafe))\n")
})
g.emit("}\n\n")
g.emit("z.UnmarshalUnsafe(buf)\n")
g.emit("if !reflect.DeepEqual(x, z) {\n")
g.inIndent(func() {
- g.emit("t.Fatal(fmt.Sprintf(\"Data corrupted across Marshal/UnmarshalUnsafe cycle:\\nBefore: %%+v\\nAfter: %%+v\\n\", x, z))\n")
+ g.emit("t.Fatal(fmt.Sprintf(\"Data corrupted across MarshalBytes/UnmarshalUnsafe cycle:\\nBefore: %+v\\nAfter: %+v\\n\", x, z))\n")
})
g.emit("}\n")
g.emit("zUnsafe.UnmarshalUnsafe(bufUnsafe)\n")
g.emit("if !reflect.DeepEqual(x, zUnsafe) {\n")
g.inIndent(func() {
- g.emit("t.Fatal(fmt.Sprintf(\"Data corrupted across MarshalUnsafe/UnmarshalUnsafe cycle:\\nBefore: %%+v\\nAfter: %%+v\\n\", x, zUnsafe))\n")
+ g.emit("t.Fatal(fmt.Sprintf(\"Data corrupted across MarshalUnsafe/UnmarshalUnsafe cycle:\\nBefore: %+v\\nAfter: %+v\\n\", x, zUnsafe))\n")
+ })
+ g.emit("}\n")
+ })
+}
+
+func (g *testGenerator) emitTestWriteToUnmarshalPreservesData() {
+ g.inTestFunction("TestWriteToUnmarshalPreservesData", func() {
+ g.emit("var x, y, yUnsafe %s\n", g.typeName())
+ g.emit("analysis.RandomizeValue(&x)\n\n")
+
+ g.emit("var buf bytes.Buffer\n\n")
+
+ g.emit("x.WriteTo(&buf)\n")
+ g.emit("y.UnmarshalBytes(buf.Bytes())\n\n")
+ g.emit("yUnsafe.UnmarshalUnsafe(buf.Bytes())\n\n")
+
+ g.emit("if !reflect.DeepEqual(x, y) {\n")
+ g.inIndent(func() {
+ g.emit("t.Fatal(fmt.Sprintf(\"Data corrupted across WriteTo/UnmarshalBytes cycle:\\nBefore: %+v\\nAfter: %+v\\n\", x, y))\n")
+ })
+ g.emit("}\n")
+ g.emit("if !reflect.DeepEqual(x, yUnsafe) {\n")
+ g.inIndent(func() {
+ g.emit("t.Fatal(fmt.Sprintf(\"Data corrupted across WriteTo/UnmarshalUnsafe cycle:\\nBefore: %+v\\nAfter: %+v\\n\", x, yUnsafe))\n")
+ })
+ g.emit("}\n")
+ })
+}
+
+func (g *testGenerator) emitTestSizeBytesOnTypedNilPtr() {
+ g.inTestFunction("TestSizeBytesOnTypedNilPtr", func() {
+ g.emit("var x %s\n", g.typeName())
+ g.emit("sizeFromConcrete := x.SizeBytes()\n")
+ g.emit("sizeFromTypedNilPtr := (*%s)(nil).SizeBytes()\n\n", g.typeName())
+
+ g.emit("if sizeFromTypedNilPtr != sizeFromConcrete {\n")
+ g.inIndent(func() {
+ g.emit("t.Fatalf(\"SizeBytes() on typed nil pointer (%v) doesn't match size returned by a concrete object (%v).\\n\", sizeFromTypedNilPtr, sizeFromConcrete)\n")
})
g.emit("}\n")
})
@@ -145,6 +185,8 @@ func (g *testGenerator) emitTests() {
g.emitTestNonZeroSize()
g.emitTestSuspectAlignment()
g.emitTestMarshalUnmarshalPreservesData()
+ g.emitTestWriteToUnmarshalPreservesData()
+ g.emitTestSizeBytesOnTypedNilPtr()
}
func (g *testGenerator) write(out io.Writer) error {
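
For reference, a hand-written sketch of what the new TestSizeBytesOnTypedNilPtr emitted above expands to for a hypothetical 16-byte type Foo (illustrative, not actual generator output):

package test_sketch

import "testing"

// Foo stands in for a marshallable type whose generated SizeBytes never
// dereferences the receiver, so calling it on a typed nil pointer is safe.
type Foo struct {
	A uint64
	B uint64
}

func (f *Foo) SizeBytes() int { return 16 }

func TestSizeBytesOnTypedNilPtr(t *testing.T) {
	var x Foo
	sizeFromConcrete := x.SizeBytes()
	sizeFromTypedNilPtr := (*Foo)(nil).SizeBytes()

	if sizeFromTypedNilPtr != sizeFromConcrete {
		t.Fatalf("SizeBytes() on typed nil pointer (%v) doesn't match size returned by a concrete object (%v).\n", sizeFromTypedNilPtr, sizeFromConcrete)
	}
}
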
diff --git a/tools/go_marshal/gomarshal/util.go b/tools/go_marshal/gomarshal/util.go
index 967537abf..e2bca4e7c 100644
--- a/tools/go_marshal/gomarshal/util.go
+++ b/tools/go_marshal/gomarshal/util.go
@@ -219,6 +219,11 @@ type sourceBuffer struct {
b bytes.Buffer
}
+func (b *sourceBuffer) reset() {
+ b.indent = 0
+ b.b.Reset()
+}
+
func (b *sourceBuffer) incIndent() {
b.indent++
}
@@ -305,7 +310,7 @@ func (i *importStmt) markUsed() {
}
func (i *importStmt) equivalent(other *importStmt) bool {
- return i == other
+ return i.name == other.name && i.path == other.path && i.aliased == other.aliased
}
// importTable represents a collection of importStmts.
@@ -324,7 +329,7 @@ func newImportTable() *importTable {
// result in a panic.
func (i *importTable) merge(other *importTable) {
for name, im := range other.is {
- if dup, ok := i.is[name]; ok && dup.equivalent(im) {
+ if dup, ok := i.is[name]; ok && !dup.equivalent(im) {
panic(fmt.Sprintf("Found colliding import statements: ours: %+v, other's: %+v", dup, im))
}
@@ -332,16 +337,27 @@ func (i *importTable) merge(other *importTable) {
}
}
+func (i *importTable) addStmt(s *importStmt) *importStmt {
+ if old, ok := i.is[s.name]; ok && !old.equivalent(s) {
+ // A collision should always be between an import inserted by the
+ // go-marshal tool and an import from the original source file (assuming
+ // the original source file was valid). We could theoretically handle
+ // the collision by assigning a local name to our import. However, this
+ // would need to be plumbed throughout the generator. Given that
+ // collisions should be rare, simply panic on collision.
+ panic(fmt.Sprintf("Import collision: old: %s as %v; new: %v as %v", old.path, old.name, s.path, s.name))
+ }
+ i.is[s.name] = s
+ return s
+}
+
func (i *importTable) add(s string) *importStmt {
n := newImport(s)
- i.is[n.name] = n
- return n
+ return i.addStmt(n)
}
func (i *importTable) addFromSpec(spec *ast.ImportSpec, f *token.FileSet) *importStmt {
- n := newImportFromSpec(spec, f)
- i.is[n.name] = n
- return n
+ return i.addStmt(newImportFromSpec(spec, f))
}
// Marks the import named n as used. If no such import is in the table, returns
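
A minimal sketch of the collision policy addStmt enforces above, using a trimmed-down importStmt and illustrative import names (not the real gomarshal types):

package main

import "fmt"

// importStmt is a stand-in holding only the fields compared by equivalent().
type importStmt struct {
	name, path string
	aliased    bool
}

func (i *importStmt) equivalent(other *importStmt) bool {
	return i.name == other.name && i.path == other.path && i.aliased == other.aliased
}

type importTable struct {
	is map[string]*importStmt
}

// addStmt panics only when two non-equivalent imports want the same local
// name; re-adding an equivalent import is a harmless no-op.
func (t *importTable) addStmt(s *importStmt) *importStmt {
	if old, ok := t.is[s.name]; ok && !old.equivalent(s) {
		panic(fmt.Sprintf("Import collision: old: %s as %v; new: %v as %v", old.path, old.name, s.path, s.name))
	}
	t.is[s.name] = s
	return s
}

func main() {
	t := &importTable{is: make(map[string]*importStmt)}
	t.addStmt(&importStmt{name: "usermem", path: "gvisor.dev/gvisor/pkg/usermem"})
	t.addStmt(&importStmt{name: "usermem", path: "gvisor.dev/gvisor/pkg/usermem"})
	fmt.Println(len(t.is))
}
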
diff --git a/tools/go_marshal/marshal/BUILD b/tools/go_marshal/marshal/BUILD
index ad508c72f..bacfaa5a4 100644
--- a/tools/go_marshal/marshal/BUILD
+++ b/tools/go_marshal/marshal/BUILD
@@ -10,4 +10,7 @@ go_library(
visibility = [
"//:sandbox",
],
+ deps = [
+ "//pkg/usermem",
+ ],
)
diff --git a/tools/go_marshal/marshal/marshal.go b/tools/go_marshal/marshal/marshal.go
index a313a27ed..20353850d 100644
--- a/tools/go_marshal/marshal/marshal.go
+++ b/tools/go_marshal/marshal/marshal.go
@@ -20,10 +20,38 @@
// tools/go_marshal. See the go_marshal README for details.
package marshal
+import (
+ "io"
+
+ "gvisor.dev/gvisor/pkg/usermem"
+)
+
+// Task provides a subset of kernel.Task, used in marshalling. We don't import
+// the kernel package directly to avoid a circular dependency.
+type Task interface {
+ // CopyScratchBuffer provides a task goroutine-local scratch buffer. See
+ // kernel.CopyScratchBuffer.
+ CopyScratchBuffer(size int) []byte
+
+ // CopyOutBytes writes the contents of b to the task's memory. See
+ // kernel.CopyOutBytes.
+ CopyOutBytes(addr usermem.Addr, b []byte) (int, error)
+
+ // CopyInBytes reads the contents of the task's memory to b. See
+ // kernel.CopyInBytes.
+ CopyInBytes(addr usermem.Addr, b []byte) (int, error)
+}
+
// Marshallable represents a type that can be marshalled to and from memory.
type Marshallable interface {
+ io.WriterTo
+
// SizeBytes is the size of the memory representation of a type in
// marshalled form.
+ //
+ // SizeBytes must handle a nil receiver. Practically, this means SizeBytes
+ // cannot dereference any fields on the object implementing it (but will
+ // likely make use of the type of these fields).
SizeBytes() int
// MarshalBytes serializes a copy of a type to dst. dst must be at least
@@ -48,13 +76,27 @@ type Marshallable interface {
// MarshalBytes.
MarshalUnsafe(dst []byte)
- // UnmarshalUnsafe deserializes a type directly to the underlying memory
- // allocated for the object by the runtime.
+ // UnmarshalUnsafe deserializes a type by directly copying to the underlying
+ // memory allocated for the object by the runtime.
//
// This allows much faster unmarshalling of types which have no implicit
// padding, see Marshallable.Packed. When Packed would return false,
// UnmarshalUnsafe should fall back to the safer but slower unmarshal
- // mechanism implemented in UnmarshalBytes (usually by calling
- // UnmarshalBytes directly).
+ // mechanism implemented in UnmarshalBytes.
UnmarshalUnsafe(src []byte)
+
+ // CopyIn deserializes a Marshallable type from a task's memory. This may
+ // only be called from a task goroutine. This is more efficient than calling
+ // UnmarshalUnsafe on Marshallable.Packed types, as the type being
+ // marshalled does not escape. The implementation should avoid creating
+ // extra copies in memory by directly deserializing to the object's
+ // underlying memory.
+ CopyIn(task Task, addr usermem.Addr) (int, error)
+
+ // CopyOut serializes a Marshallable type to a task's memory. This may only
+ // be called from a task goroutine. This is more efficient than calling
+ // MarshalUnsafe on Marshallable.Packed types, as the type being serialized
+ // does not escape. The implementation should avoid creating extra copies in
+ // memory by directly serializing from the object's underlying memory.
+ CopyOut(task Task, addr usermem.Addr) (int, error)
}
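
As a usage illustration, a minimal marshal.Task implementation backed by an in-process byte slice, round-tripping a value through the new CopyOut/CopyIn methods; sliceTask and roundTrip are hypothetical helpers, while test.Stat comes from the test package used elsewhere in this change:

package main

import (
	"fmt"

	"gvisor.dev/gvisor/pkg/usermem"
	"gvisor.dev/gvisor/tools/go_marshal/marshal"
	"gvisor.dev/gvisor/tools/go_marshal/test"
)

// sliceTask implements marshal.Task with a byte slice standing in for task
// memory; enough to exercise CopyIn/CopyOut without a real kernel.Task.
type sliceTask struct {
	mem []byte
}

func (t *sliceTask) CopyScratchBuffer(size int) []byte { return make([]byte, size) }

func (t *sliceTask) CopyOutBytes(addr usermem.Addr, b []byte) (int, error) {
	return copy(t.mem[int(addr):], b), nil
}

func (t *sliceTask) CopyInBytes(addr usermem.Addr, b []byte) (int, error) {
	return copy(b, t.mem[int(addr):]), nil
}

// roundTrip copies m out to task memory at addr, then back in again.
func roundTrip(task marshal.Task, m marshal.Marshallable, addr usermem.Addr) error {
	if _, err := m.CopyOut(task, addr); err != nil {
		return err
	}
	_, err := m.CopyIn(task, addr)
	return err
}

func main() {
	task := &sliceTask{mem: make([]byte, 4096)}
	var stat test.Stat
	fmt.Println(roundTrip(task, &stat, usermem.Addr(0)))
}
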
diff --git a/tools/go_marshal/test/BUILD b/tools/go_marshal/test/BUILD
index e345e3a8e..f27c5ce52 100644
--- a/tools/go_marshal/test/BUILD
+++ b/tools/go_marshal/test/BUILD
@@ -1,4 +1,4 @@
-load("//tools:defs.bzl", "go_library", "go_test")
+load("//tools:defs.bzl", "go_binary", "go_library", "go_test")
licenses(["notice"])
@@ -27,3 +27,15 @@ go_library(
marshal = True,
deps = ["//tools/go_marshal/test/external"],
)
+
+go_binary(
+ name = "escape",
+ testonly = 1,
+ srcs = ["escape.go"],
+ gc_goopts = ["-m"],
+ deps = [
+ ":test",
+ "//pkg/usermem",
+ "//tools/go_marshal/marshal",
+ ],
+)
diff --git a/tools/go_marshal/test/benchmark_test.go b/tools/go_marshal/test/benchmark_test.go
index e12403741..c79defe9e 100644
--- a/tools/go_marshal/test/benchmark_test.go
+++ b/tools/go_marshal/test/benchmark_test.go
@@ -24,7 +24,7 @@ import (
"gvisor.dev/gvisor/pkg/binary"
"gvisor.dev/gvisor/pkg/usermem"
"gvisor.dev/gvisor/tools/go_marshal/analysis"
- test "gvisor.dev/gvisor/tools/go_marshal/test"
+ "gvisor.dev/gvisor/tools/go_marshal/test"
)
// Marshalling using the standard encoding/binary package.
diff --git a/tools/go_marshal/test/escape.go b/tools/go_marshal/test/escape.go
new file mode 100644
index 000000000..184f05ea3
--- /dev/null
+++ b/tools/go_marshal/test/escape.go
@@ -0,0 +1,114 @@
+// Copyright 2020 The gVisor Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// This binary provides a convenient target for analyzing how the go-marshal
+// API causes its various arguments to escape to the heap. To use, build and
+// observe the output from the go compiler's escape analysis:
+//
+// $ bazel build :escape
+// ...
+// escape.go:67:2: moved to heap: task
+// escape.go:77:31: make([]byte, size) escapes to heap
+// escape.go:87:31: make([]byte, size) escapes to heap
+// escape.go:96:6: moved to heap: stat
+// ...
+//
+// This is not an automated test, but simply a minimal binary for easy analysis.
+package main
+
+import (
+ "gvisor.dev/gvisor/pkg/usermem"
+ "gvisor.dev/gvisor/tools/go_marshal/marshal"
+ "gvisor.dev/gvisor/tools/go_marshal/test"
+)
+
+// dummyTask implements marshal.Task.
+type dummyTask struct {
+}
+
+func (*dummyTask) CopyScratchBuffer(size int) []byte {
+ return make([]byte, size)
+}
+
+func (*dummyTask) CopyOutBytes(addr usermem.Addr, b []byte) (int, error) {
+ return len(b), nil
+}
+
+func (*dummyTask) CopyInBytes(addr usermem.Addr, b []byte) (int, error) {
+ return len(b), nil
+}
+
+func (task *dummyTask) MarshalBytes(addr usermem.Addr, marshallable marshal.Marshallable) {
+ buf := task.CopyScratchBuffer(marshallable.SizeBytes())
+ marshallable.MarshalBytes(buf)
+ task.CopyOutBytes(addr, buf)
+}
+
+func (task *dummyTask) MarshalUnsafe(addr usermem.Addr, marshallable marshal.Marshallable) {
+ buf := task.CopyScratchBuffer(marshallable.SizeBytes())
+ marshallable.MarshalUnsafe(buf)
+ task.CopyOutBytes(addr, buf)
+}
+
+// Expected escapes:
+// - task: passed to marshal.Marshallable.CopyOut as the marshal.Task interface.
+func doCopyOut() {
+ task := dummyTask{}
+ var stat test.Stat
+ stat.CopyOut(&task, usermem.Addr(0xf000ba12))
+}
+
+// Expected escapes:
+// - buf: make allocates on the heap.
+func doMarshalBytesDirect() {
+ task := dummyTask{}
+ var stat test.Stat
+ buf := task.CopyScratchBuffer(stat.SizeBytes())
+ stat.MarshalBytes(buf)
+ task.CopyOutBytes(usermem.Addr(0xf000ba12), buf)
+}
+
+// Expected escapes:
+// - buf: make allocates on the heap.
+func doMarshalUnsafeDirect() {
+ task := dummyTask{}
+ var stat test.Stat
+ buf := task.CopyScratchBuffer(stat.SizeBytes())
+ stat.MarshalUnsafe(buf)
+ task.CopyOutBytes(usermem.Addr(0xf000ba12), buf)
+}
+
+// Expected escapes:
+// - stat: passed to dummyTask.MarshalBytes as the marshal.Marshallable interface.
+func doMarshalBytesViaMarshallable() {
+ task := dummyTask{}
+ var stat test.Stat
+ task.MarshalBytes(usermem.Addr(0xf000ba12), &stat)
+}
+
+// Expected escapes:
+// - stat: passed to dummyTask.MarshalUnsafe as the marshal.Marshallable interface.
+func doMarshalUnsafeViaMarshallable() {
+ task := dummyTask{}
+ var stat test.Stat
+ task.MarshalUnsafe(usermem.Addr(0xf000ba12), &stat)
+}
+
+func main() {
+ doCopyOut()
+ doMarshalBytesDirect()
+ doMarshalUnsafeDirect()
+ doMarshalBytesViaMarshallable()
+ doMarshalUnsafeViaMarshallable()
+}