Diffstat (limited to 'tools/checkescape')
-rw-r--r--  tools/checkescape/checkescape.go   | 54
-rw-r--r--  tools/checkescape/test1/test1.go   | 13
2 files changed, 39 insertions(+), 28 deletions(-)
diff --git a/tools/checkescape/checkescape.go b/tools/checkescape/checkescape.go
index e5a7e23c7..8eeabbc3d 100644
--- a/tools/checkescape/checkescape.go
+++ b/tools/checkescape/checkescape.go
@@ -27,7 +27,7 @@
// heap: A direct allocation is made on the heap (hard).
// builtin: A call is made to a built-in allocation function (hard).
// stack: A stack split as part of a function preamble (soft).
-// interface: A call is made via an interface whicy *may* escape (soft).
+// interface: A call is made via an interface which *may* escape (soft).
// dynamic: A dynamic function is dispatched which *may* escape (soft).
//
// To use the package, annotate a function-level comment with either the
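The +checkescape annotation described in this comment is the form exercised by test1.go later in this diff. A minimal sketch of how a function opts in; the package, type, and function names here are hypothetical:

    // Package example is a hypothetical user of the checkescape analyzer.
    package example

    // Counter is a hypothetical type used only for illustration.
    type Counter struct {
        n int
    }

    // Bump is asserted to produce no hard escapes (heap allocations or
    // builtin allocation calls); checkescape verifies this against the
    // objdump of the compiled binary.
    // +checkescape:all,hard
    //go:nosplit
    func Bump(c *Counter) {
        c.n++ // updates in place; no allocation expected
    }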
@@ -404,27 +404,27 @@ func loadObjdump() (map[string][]string, error) {
// This is because some of the functions (duffzero) may have
// jump targets in the middle of the function itself.
funcsAllowed := map[string]struct{}{
- "runtime.duffzero": struct{}{},
- "runtime.duffcopy": struct{}{},
- "runtime.racefuncenter": struct{}{},
- "runtime.gcWriteBarrier": struct{}{},
- "runtime.retpolineAX": struct{}{},
- "runtime.retpolineBP": struct{}{},
- "runtime.retpolineBX": struct{}{},
- "runtime.retpolineCX": struct{}{},
- "runtime.retpolineDI": struct{}{},
- "runtime.retpolineDX": struct{}{},
- "runtime.retpolineR10": struct{}{},
- "runtime.retpolineR11": struct{}{},
- "runtime.retpolineR12": struct{}{},
- "runtime.retpolineR13": struct{}{},
- "runtime.retpolineR14": struct{}{},
- "runtime.retpolineR15": struct{}{},
- "runtime.retpolineR8": struct{}{},
- "runtime.retpolineR9": struct{}{},
- "runtime.retpolineSI": struct{}{},
- "runtime.stackcheck": struct{}{},
- "runtime.settls": struct{}{},
+ "runtime.duffzero": {},
+ "runtime.duffcopy": {},
+ "runtime.racefuncenter": {},
+ "runtime.gcWriteBarrier": {},
+ "runtime.retpolineAX": {},
+ "runtime.retpolineBP": {},
+ "runtime.retpolineBX": {},
+ "runtime.retpolineCX": {},
+ "runtime.retpolineDI": {},
+ "runtime.retpolineDX": {},
+ "runtime.retpolineR10": {},
+ "runtime.retpolineR11": {},
+ "runtime.retpolineR12": {},
+ "runtime.retpolineR13": {},
+ "runtime.retpolineR14": {},
+ "runtime.retpolineR15": {},
+ "runtime.retpolineR8": {},
+ "runtime.retpolineR9": {},
+ "runtime.retpolineSI": {},
+ "runtime.stackcheck": {},
+ "runtime.settls": {},
}
addrsAllowed := make(map[string]struct{})
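The hunk above is purely stylistic: when a composite literal appears where its type is already known, such as the values of this map[string]struct{}, Go allows the element type to be elided, so struct{}{} becomes {}. A small standalone illustration of the same form, with hypothetical names:

    package example

    // allowed mirrors the shape of funcsAllowed above: the value type
    // (struct{}) is fixed by the map's type, so each value can be
    // written {} rather than struct{}{}.
    var allowed = map[string]struct{}{
        "runtime.duffzero": {},
        "runtime.duffcopy": {},
    }

    // isAllowed reports whether name is in the allowed set.
    func isAllowed(name string) bool {
        _, ok := allowed[name]
        return ok
    }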
@@ -618,12 +618,12 @@ func findReasons(pass *analysis.Pass, fdecl *ast.FuncDecl) ([]EscapeReason, bool
// run performs the analysis.
func run(pass *analysis.Pass, localEscapes bool) (interface{}, error) {
- calls, err := loadObjdump()
- if err != nil {
+ calls, callsErr := loadObjdump()
+ if callsErr != nil {
// Note that if this analysis fails, then we don't actually
// fail the analyzer itself. We simply report every possible
// escape. In most cases this will work just fine.
- log.Printf("WARNING: unable to load objdump: %v", err)
+ log.Printf("WARNING: unable to load objdump: %v", callsErr)
}
allEscapes := make(map[string][]Escapes)
mergedEscapes := make(map[string]Escapes)
@@ -645,10 +645,10 @@ func run(pass *analysis.Pass, localEscapes bool) (interface{}, error) {
}
hasCall := func(inst poser) (string, bool) {
p := linePosition(inst, nil)
- if calls == nil {
+ if callsErr != nil {
// See above: we don't have access to the binary
// itself, so need to include every possible call.
- return "(possible)", true
+ return fmt.Sprintf("(possible, unable to load objdump: %v)", callsErr), true
}
s, ok := calls[p.Simplified()]
if !ok {
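Taken together, the two checkescape.go hunks above keep the error from loadObjdump around (callsErr) instead of testing calls == nil, so the fallback reason can say why every call site is being treated as a possible escape. A compact sketch of that pattern, with hypothetical helper names:

    package example

    import (
        "fmt"
        "log"
    )

    // loadCalls stands in for loadObjdump; it can fail, for example when
    // the compiled binary is not available. (Hypothetical helper.)
    func loadCalls() (map[string][]string, error) {
        return nil, fmt.Errorf("binary not found")
    }

    func analyze(site string) {
        calls, callsErr := loadCalls()
        if callsErr != nil {
            // Soft failure: warn and continue, treating every call
            // site as a possible escape rather than failing outright.
            log.Printf("WARNING: unable to load calls: %v", callsErr)
        }

        hasCall := func(key string) (string, bool) {
            if callsErr != nil {
                // No call data; carry the reason in the message
                // rather than a bare "(possible)".
                return fmt.Sprintf("(possible, unable to load calls: %v)", callsErr), true
            }
            s, ok := calls[key]
            if !ok {
                return "", false
            }
            return fmt.Sprintf("%v", s), true
        }

        if reason, ok := hasCall(site); ok {
            fmt.Println(reason)
        }
    }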
diff --git a/tools/checkescape/test1/test1.go b/tools/checkescape/test1/test1.go
index 27991649f..f46eba39b 100644
--- a/tools/checkescape/test1/test1.go
+++ b/tools/checkescape/test1/test1.go
@@ -36,17 +36,20 @@ func (t Type) Foo() {
fmt.Printf("%v", t) // Never executed.
}
+// InterfaceFunction is passed an interface argument.
// +checkescape:all,hard
//go:nosplit
func InterfaceFunction(i Interface) {
// Do nothing; exported for tests.
}
+// TypeFunction is passed a concrete pointer argument.
// +checkescape:all,hard
//go:nosplit
func TypeFunction(t *Type) {
}
+// BuiltinMap creates a new map.
// +mustescape:local,builtin
//go:noinline
//go:nosplit
@@ -61,7 +64,8 @@ func builtinMapRec(x int) map[string]bool {
return BuiltinMap(x)
}
-// +temustescapestescape:local,builtin
+// BuiltinClosure returns a closure around x.
+// +mustescape:local,builtin
//go:noinline
//go:nosplit
func BuiltinClosure(x int) func() {
@@ -77,6 +81,7 @@ func builtinClosureRec(x int) func() {
return BuiltinClosure(x)
}
+// BuiltinMakeSlice makes a new slice.
// +mustescape:local,builtin
//go:noinline
//go:nosplit
@@ -91,6 +96,7 @@ func builtinMakeSliceRec(x int) []byte {
return BuiltinMakeSlice(x)
}
+// BuiltinAppend calls append on a slice.
// +mustescape:local,builtin
//go:noinline
//go:nosplit
@@ -105,6 +111,7 @@ func builtinAppendRec() []byte {
return BuiltinAppend(nil)
}
+// BuiltinChan makes a channel.
// +mustescape:local,builtin
//go:noinline
//go:nosplit
@@ -119,6 +126,7 @@ func builtinChanRec() chan int {
return BuiltinChan()
}
+// Heap performs an explicit heap allocation.
// +mustescape:local,heap
//go:noinline
//go:nosplit
@@ -134,6 +142,7 @@ func heapRec() *Type {
return Heap()
}
+// Dispatch dispatches via an interface.
// +mustescape:local,interface
//go:noinline
//go:nosplit
@@ -148,6 +157,7 @@ func dispatchRec(i Interface) {
Dispatch(i)
}
+// Dynamic invokes a dynamic function.
// +mustescape:local,dynamic
//go:noinline
//go:nosplit
@@ -167,6 +177,7 @@ func dynamicRec(f func()) {
func internalFunc() {
}
+// Split includes a guaranteed stack split.
// +mustescape:local,stack
//go:noinline
func Split() {
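Throughout test1.go, each exported, annotated function such as BuiltinMap is paired with an unexported *Rec wrapper (builtinMapRec above) that simply calls it, so escapes can be checked both at the local site and through a call. A sketch of that shape; the function bodies are illustrative and the wrapper's own annotations are not shown:

    // BuiltinMap creates a new map.
    // +mustescape:local,builtin
    //go:noinline
    //go:nosplit
    func BuiltinMap(x int) map[string]bool {
        return make(map[string]bool, x) // builtin allocation must escape
    }

    // builtinMapRec exercises the same escape through a call.
    func builtinMapRec(x int) map[string]bool {
        return BuiltinMap(x)
    }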