Diffstat (limited to 'tools/nogo')
-rw-r--r--  tools/nogo/BUILD              |  11
-rw-r--r--  tools/nogo/analyzers.go       |   6
-rw-r--r--  tools/nogo/build.go           |   3
-rw-r--r--  tools/nogo/check/main.go      |  18
-rw-r--r--  tools/nogo/config.go          |  18
-rw-r--r--  tools/nogo/config_test.go     | 301
-rw-r--r--  tools/nogo/defs.bzl           |  44
-rw-r--r--  tools/nogo/filter/main.go     |   2
-rw-r--r--  tools/nogo/findings.go        |   2
-rw-r--r--  tools/nogo/nogo.go            | 148
-rw-r--r--  tools/nogo/objdump/BUILD      |  10
-rw-r--r--  tools/nogo/objdump/objdump.go |  96
12 files changed, 611 insertions(+), 48 deletions(-)
diff --git a/tools/nogo/BUILD b/tools/nogo/BUILD
index 6c6f604b5..d72821377 100644
--- a/tools/nogo/BUILD
+++ b/tools/nogo/BUILD
@@ -1,4 +1,4 @@
-load("//tools:defs.bzl", "bzl_library", "go_library", "select_goarch", "select_goos")
+load("//tools:defs.bzl", "bzl_library", "go_library", "go_test", "select_goarch", "select_goos")
load("//tools/nogo:defs.bzl", "nogo_objdump_tool", "nogo_stdlib", "nogo_target")
package(licenses = ["notice"])
@@ -35,8 +35,10 @@ go_library(
visibility = ["//:sandbox"],
deps = [
"//tools/checkescape",
+ "//tools/checklinkname",
"//tools/checklocks",
"//tools/checkunsafe",
+ "//tools/nogo/objdump",
"//tools/worker",
"@co_honnef_go_tools//staticcheck:go_default_library",
"@co_honnef_go_tools//stylecheck:go_default_library",
@@ -68,9 +70,16 @@ go_library(
"@org_golang_x_tools//go/analysis/passes/unsafeptr:go_default_library",
"@org_golang_x_tools//go/analysis/passes/unusedresult:go_default_library",
"@org_golang_x_tools//go/gcexportdata:go_default_library",
+ "@org_golang_x_tools//go/types/objectpath:go_default_library",
],
)
+go_test(
+ name = "nogo_test",
+ srcs = ["config_test.go"],
+ library = ":nogo",
+)
+
bzl_library(
name = "defs_bzl",
srcs = ["defs.bzl"],
diff --git a/tools/nogo/analyzers.go b/tools/nogo/analyzers.go
index 2b3c03fec..db8bbdb8a 100644
--- a/tools/nogo/analyzers.go
+++ b/tools/nogo/analyzers.go
@@ -47,6 +47,7 @@ import (
"honnef.co/go/tools/stylecheck"
"gvisor.dev/gvisor/tools/checkescape"
+ "gvisor.dev/gvisor/tools/checklinkname"
"gvisor.dev/gvisor/tools/checklocks"
"gvisor.dev/gvisor/tools/checkunsafe"
)
@@ -80,6 +81,7 @@ var AllAnalyzers = []*analysis.Analyzer{
unusedresult.Analyzer,
checkescape.Analyzer,
checkunsafe.Analyzer,
+ checklinkname.Analyzer,
checklocks.Analyzer,
}
@@ -115,11 +117,11 @@ func register(all []*analysis.Analyzer) {
func init() {
// Add all staticcheck analyzers.
for _, a := range staticcheck.Analyzers {
- AllAnalyzers = append(AllAnalyzers, a)
+ AllAnalyzers = append(AllAnalyzers, a.Analyzer)
}
// Add all stylecheck analyzers.
for _, a := range stylecheck.Analyzers {
- AllAnalyzers = append(AllAnalyzers, a)
+ AllAnalyzers = append(AllAnalyzers, a.Analyzer)
}
// Register lists.
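
Note on the loop change above: in staticcheck 2021.1 and later, staticcheck.Analyzers and stylecheck.Analyzers are slices of wrapper values whose Analyzer field carries the underlying *analysis.Analyzer, which is why the element can no longer be appended directly. A minimal standalone sketch of the same registration pattern, assuming that staticcheck version:

    package main

    import (
        "fmt"

        "golang.org/x/tools/go/analysis"
        "honnef.co/go/tools/staticcheck"
        "honnef.co/go/tools/stylecheck"
    )

    func main() {
        var all []*analysis.Analyzer
        // Each entry is a wrapper carrying documentation metadata; only the
        // embedded *analysis.Analyzer is needed for registration.
        for _, a := range staticcheck.Analyzers {
            all = append(all, a.Analyzer)
        }
        for _, a := range stylecheck.Analyzers {
            all = append(all, a.Analyzer)
        }
        for _, a := range all {
            fmt.Println(a.Name)
        }
    }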
diff --git a/tools/nogo/build.go b/tools/nogo/build.go
index d173cff1f..4067bb480 100644
--- a/tools/nogo/build.go
+++ b/tools/nogo/build.go
@@ -12,6 +12,9 @@
// See the License for the specific language governing permissions and
// limitations under the License.
+//go:build go1.1
+// +build go1.1
+
package nogo
import (
diff --git a/tools/nogo/check/main.go b/tools/nogo/check/main.go
index 3a6c3fb08..0e7e92965 100644
--- a/tools/nogo/check/main.go
+++ b/tools/nogo/check/main.go
@@ -62,7 +62,8 @@ func run([]string) int {
// Check & load the configuration.
if *packageFile != "" && *stdlibFile != "" {
- log.Fatalf("unable to perform stdlib and package analysis; provide only one!")
+ fmt.Fprintf(os.Stderr, "unable to perform stdlib and package analysis; provide only one!")
+ return 1
}
// Run the configuration.
@@ -75,18 +76,21 @@ func run([]string) int {
c := loadConfig(*packageFile, new(nogo.PackageConfig)).(*nogo.PackageConfig)
findings, factData, err = nogo.CheckPackage(c, nogo.AllAnalyzers, nil)
} else {
- log.Fatalf("please provide at least one of package or stdlib!")
+ fmt.Fprintf(os.Stderr, "please provide at least one of package or stdlib!")
+ return 1
}
// Check that analysis was successful.
if err != nil {
- log.Fatalf("error performing analysis: %v", err)
+ fmt.Fprintf(os.Stderr, "error performing analysis: %v", err)
+ return 1
}
// Save facts.
if *factsOutput != "" {
if err := ioutil.WriteFile(*factsOutput, factData, 0644); err != nil {
- log.Fatalf("error saving findings to %q: %v", *factsOutput, err)
+ fmt.Fprintf(os.Stderr, "error saving findings to %q: %v", *factsOutput, err)
+ return 1
}
}
@@ -94,10 +98,12 @@ func run([]string) int {
if *findingsOutput != "" {
w, err := os.OpenFile(*findingsOutput, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0644)
if err != nil {
- log.Fatalf("error opening output file %q: %v", *findingsOutput, err)
+ fmt.Fprintf(os.Stderr, "error opening output file %q: %v", *findingsOutput, err)
+ return 1
}
if err := nogo.WriteFindingsTo(w, findings, false /* json */); err != nil {
- log.Fatalf("error writing findings to %q: %v", *findingsOutput, err)
+ fmt.Fprintf(os.Stderr, "error writing findings to %q: %v", *findingsOutput, err)
+ return 1
}
} else {
for _, finding := range findings {
diff --git a/tools/nogo/config.go b/tools/nogo/config.go
index 6436f9d34..ee2533610 100644
--- a/tools/nogo/config.go
+++ b/tools/nogo/config.go
@@ -186,16 +186,19 @@ func (a AnalyzerConfig) merge(other AnalyzerConfig) {
}
}
-func (a AnalyzerConfig) shouldReport(groupConfig *Group, fullPos, msg string) bool {
+// shouldReport returns whether the finding should be reported or suppressed.
+// It returns !ok if there is no configuration sufficient to decide one way or
+// another.
+func (a AnalyzerConfig) shouldReport(groupConfig *Group, fullPos, msg string) (report, ok bool) {
gc, ok := a[groupConfig.Name]
if !ok {
- return groupConfig.Default
+ return false, false
}
// Note that if a section appears for a particular group
// for a particular analyzer, then it will now be enabled,
// and the group default no longer applies.
- return gc.shouldReport(fullPos, msg)
+ return gc.shouldReport(fullPos, msg), true
}
// Config is a nogo configuration.
@@ -298,7 +301,8 @@ func (c *Config) ShouldReport(finding Finding) bool {
}
// Suppress via global rule?
- if !c.Global.shouldReport(groupConfig, fullPos, finding.Message) {
+ report, ok := c.Global.shouldReport(groupConfig, fullPos, finding.Message)
+ if ok && !report {
return false
}
@@ -307,5 +311,9 @@ func (c *Config) ShouldReport(finding Finding) bool {
if !ok {
return groupConfig.Default
}
- return ac.shouldReport(groupConfig, fullPos, finding.Message)
+ report, ok = ac.shouldReport(groupConfig, fullPos, finding.Message)
+ if !ok {
+ return groupConfig.Default
+ }
+ return report
}
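
Taken together, these config.go changes give Config.ShouldReport a three-stage resolution order. The sketch below is a compact restatement, not code from the package; the resolve helper is hypothetical, would have to live inside package nogo to compile, and elides the group-matching and enabled checks that ShouldReport performs first:

    // Illustrative only: mirrors the resolution order of Config.ShouldReport
    // using the new two-value shouldReport signature.
    func resolve(global, analyzer AnalyzerConfig, group *Group, fullPos, msg string) bool {
        if report, ok := global.shouldReport(group, fullPos, msg); ok && !report {
            return false // An explicit global suppression always wins.
        }
        if report, ok := analyzer.shouldReport(group, fullPos, msg); ok {
            return report // The analyzer-specific config has an opinion.
        }
        return group.Default // Otherwise fall back to the group default.
    }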
diff --git a/tools/nogo/config_test.go b/tools/nogo/config_test.go
new file mode 100644
index 000000000..685cffbec
--- /dev/null
+++ b/tools/nogo/config_test.go
@@ -0,0 +1,301 @@
+// Copyright 2021 The gVisor Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+package nogo
+
+import (
+ "go/token"
+ "testing"
+)
+
+// TestShouldReport validates the suppression behavior of Config.ShouldReport.
+func TestShouldReport(t *testing.T) {
+ config := &Config{
+ Groups: []Group{
+ {
+ Name: "default-enabled",
+ Regex: "^default-enabled/",
+ Default: true,
+ },
+ {
+ Name: "default-disabled",
+ Regex: "^default-disabled/",
+ Default: false,
+ },
+ {
+ Name: "default-disabled-omitted-from-global",
+ Regex: "^default-disabled-omitted-from-global/",
+ Default: false,
+ },
+ },
+ Global: AnalyzerConfig{
+ "default-enabled": &ItemConfig{
+ Exclude: []string{"excluded.go"},
+ Suppress: []string{"suppressed"},
+ },
+ "default-disabled": &ItemConfig{
+ Exclude: []string{"excluded.go"},
+ Suppress: []string{"suppressed"},
+ },
+ // Omitting default-disabled-omitted-from-global here
+ // has no effect on configuration below.
+ },
+ Analyzers: map[AnalyzerName]AnalyzerConfig{
+ "analyzer-suppressions": AnalyzerConfig{
+ // Suppress some.
+ "default-enabled": &ItemConfig{
+ Exclude: []string{"limited-exclude.go"},
+ Suppress: []string{"limited suppress"},
+ },
+ // Enable all.
+ "default-disabled": nil,
+ },
+ "enabled-for-default-disabled": AnalyzerConfig{
+ "default-disabled": nil,
+ "default-disabled-omitted-from-global": nil,
+ },
+ },
+ }
+
+ if err := config.Compile(); err != nil {
+ t.Fatalf("Compile(%+v) = %v, want nil", config, err)
+ }
+
+ cases := []struct {
+ name string
+ finding Finding
+ want bool
+ }{
+ {
+ name: "enabled",
+ finding: Finding{
+ Category: "foo",
+ Position: token.Position{
+ Filename: "default-enabled/file.go",
+ Offset: 0,
+ Line: 1,
+ Column: 1,
+ },
+ Message: "message",
+ },
+ want: true,
+ },
+ {
+ name: "ungrouped",
+ finding: Finding{
+ Category: "foo",
+ Position: token.Position{
+ Filename: "ungrouped/file.go",
+ Offset: 0,
+ Line: 1,
+ Column: 1,
+ },
+ Message: "message",
+ },
+ want: true,
+ },
+ {
+ name: "suppressed",
+ finding: Finding{
+ Category: "foo",
+ Position: token.Position{
+ Filename: "default-enabled/file.go",
+ Offset: 0,
+ Line: 1,
+ Column: 1,
+ },
+ Message: "message suppressed",
+ },
+ want: false,
+ },
+ {
+ name: "excluded",
+ finding: Finding{
+ Category: "foo",
+ Position: token.Position{
+ Filename: "default-enabled/excluded.go",
+ Offset: 0,
+ Line: 1,
+ Column: 1,
+ },
+ Message: "message",
+ },
+ want: false,
+ },
+ {
+ name: "disabled",
+ finding: Finding{
+ Category: "foo",
+ Position: token.Position{
+ Filename: "default-disabled/file.go",
+ Offset: 0,
+ Line: 1,
+ Column: 1,
+ },
+ Message: "message",
+ },
+ want: false,
+ },
+ {
+ name: "analyzer suppressed",
+ finding: Finding{
+ Category: "analyzer-suppressions",
+ Position: token.Position{
+ Filename: "default-enabled/file.go",
+ Offset: 0,
+ Line: 1,
+ Column: 1,
+ },
+ Message: "message limited suppress",
+ },
+ want: false,
+ },
+ {
+ name: "analyzer suppressed not global",
+ finding: Finding{
+ // Doesn't apply outside of analyzer-suppressions.
+ Category: "foo",
+ Position: token.Position{
+ Filename: "default-enabled/file.go",
+ Offset: 0,
+ Line: 1,
+ Column: 1,
+ },
+ Message: "message limited suppress",
+ },
+ want: true,
+ },
+ {
+ name: "analyzer suppressed grouped",
+ finding: Finding{
+ Category: "analyzer-suppressions",
+ Position: token.Position{
+ // Doesn't apply outside of default-enabled.
+ Filename: "default-disabled/file.go",
+ Offset: 0,
+ Line: 1,
+ Column: 1,
+ },
+ Message: "message limited suppress",
+ },
+ want: true,
+ },
+ {
+ name: "analyzer excluded",
+ finding: Finding{
+ Category: "analyzer-suppressions",
+ Position: token.Position{
+ Filename: "default-enabled/limited-exclude.go",
+ Offset: 0,
+ Line: 1,
+ Column: 1,
+ },
+ Message: "message",
+ },
+ want: false,
+ },
+ {
+ name: "analyzer excluded not global",
+ finding: Finding{
+ // Doesn't apply outside of analyzer-suppressions.
+ Category: "foo",
+ Position: token.Position{
+ Filename: "default-enabled/limited-exclude.go",
+ Offset: 0,
+ Line: 1,
+ Column: 1,
+ },
+ Message: "message",
+ },
+ want: true,
+ },
+ {
+ name: "analyzer excluded grouped",
+ finding: Finding{
+ Category: "analyzer-suppressions",
+ Position: token.Position{
+ // Doesn't apply outside of default-enabled.
+ Filename: "default-disabled/limited-exclude.go",
+ Offset: 0,
+ Line: 1,
+ Column: 1,
+ },
+ Message: "message",
+ },
+ want: true,
+ },
+ {
+ name: "disabled-omitted",
+ finding: Finding{
+ Category: "foo",
+ Position: token.Position{
+ Filename: "default-disabled-omitted-from-global/file.go",
+ Offset: 0,
+ Line: 1,
+ Column: 1,
+ },
+ Message: "message",
+ },
+ want: false,
+ },
+ {
+ name: "default enabled applies to customized analyzer",
+ finding: Finding{
+ Category: "enabled-for-default-disabled",
+ Position: token.Position{
+ Filename: "default-enabled/file.go",
+ Offset: 0,
+ Line: 1,
+ Column: 1,
+ },
+ Message: "message",
+ },
+ want: true,
+ },
+ {
+ name: "default overridden in customized analyzer",
+ finding: Finding{
+ Category: "enabled-for-default-disabled",
+ Position: token.Position{
+ Filename: "default-disabled/file.go",
+ Offset: 0,
+ Line: 1,
+ Column: 1,
+ },
+ Message: "message",
+ },
+ want: true,
+ },
+ {
+ name: "default overridden in customized analyzer even when omitted from global",
+ finding: Finding{
+ Category: "enabled-for-default-disabled",
+ Position: token.Position{
+ Filename: "default-disabled-omitted-from-global/file.go",
+ Offset: 0,
+ Line: 1,
+ Column: 1,
+ },
+ Message: "message",
+ },
+ want: true,
+ },
+ }
+ for _, tc := range cases {
+ t.Run(tc.name, func(t *testing.T) {
+ if got := config.ShouldReport(tc.finding); got != tc.want {
+ t.Errorf("ShouldReport(%+v) = %v, want %v", tc.finding, got, tc.want)
+ }
+ })
+ }
+}
diff --git a/tools/nogo/defs.bzl b/tools/nogo/defs.bzl
index ddf5816a6..dc9a8b24e 100644
--- a/tools/nogo/defs.bzl
+++ b/tools/nogo/defs.bzl
@@ -160,6 +160,11 @@ def _nogo_stdlib_impl(ctx):
return [NogoStdlibInfo(
facts = facts,
raw_findings = raw_findings,
+ ), DefaultInfo(
+ # Declare the facts and findings as default outputs. This is not
+ # strictly required, but ensures that the target still performs analysis
+ # when built directly rather than just indirectly via a nogo_test.
+ files = depset([facts, raw_findings]),
)]
nogo_stdlib = go_rule(
@@ -198,6 +203,22 @@ NogoInfo = provider(
},
)
+def _select_objfile(files):
+ """Returns (.a file, .x file, is_archive).
+
+ If no .a file is available, then the first .x file will be returned
+ instead, and vice versa. If neither is available, then the first provided
+ file will be returned."""
+ a_files = [f for f in files if f.path.endswith(".a")]
+ x_files = [f for f in files if f.path.endswith(".x")]
+ if not len(x_files) and not len(a_files):
+ return (files[0], files[0], False)
+ if not len(x_files):
+ x_files = a_files
+ if not len(a_files):
+ a_files = x_files
+ return a_files[0], x_files[0], True
+
def _nogo_aspect_impl(target, ctx):
# If this is a nogo rule itself (and not the shadow of a go_library or
# go_binary rule created by such a rule), then we simply return nothing.
@@ -232,20 +253,14 @@ def _nogo_aspect_impl(target, ctx):
deps = deps + info.deps
# Start with all target files and srcs as input.
- inputs = target.files.to_list() + srcs
+ binaries = target.files.to_list()
+ inputs = binaries + srcs
# Generate a shell script that dumps the binary. Annoyingly, this seems
# necessary as the context in which a run_shell command runs does not seem
# to cleanly allow us to redirect stdout to the actual output file. Perhaps
# I'm missing something here, but the intermediate script does work.
- binaries = target.files.to_list()
- objfiles = [f for f in binaries if f.path.endswith(".a")]
- if len(objfiles) > 0:
- # Prefer the .a files for go_library targets.
- target_objfile = objfiles[0]
- else:
- # Use the raw binary for go_binary and go_test targets.
- target_objfile = binaries[0]
+ target_objfile, target_xfile, has_objfile = _select_objfile(binaries)
inputs.append(target_objfile)
# Extract the importpath for this package.
@@ -274,10 +289,8 @@ def _nogo_aspect_impl(target, ctx):
# Configure where to find the binary & fact files. Note that this will
# use .x and .a regardless of whether this is a go_binary rule, since
# these dependencies must be go_library rules.
- x_files = [f.path for f in info.binaries if f.path.endswith(".x")]
- if not len(x_files):
- x_files = [f.path for f in info.binaries if f.path.endswith(".a")]
- import_map[info.importpath] = x_files[0]
+ _, x_file, _ = _select_objfile(info.binaries)
+ import_map[info.importpath] = x_file.path
fact_map[info.importpath] = info.facts.path
# Collect all findings; duplicates are resolved at the end.
@@ -287,6 +300,11 @@ def _nogo_aspect_impl(target, ctx):
inputs.append(info.facts)
inputs += info.binaries
+ # Add the module itself, for the type sanity check. This applies only to
+ # the libraries, and not binaries or tests.
+ if has_objfile:
+ import_map[importpath] = target_xfile.path
+
# Add the standard library facts.
stdlib_info = ctx.attr._nogo_stdlib[NogoStdlibInfo]
stdlib_facts = stdlib_info.facts
diff --git a/tools/nogo/filter/main.go b/tools/nogo/filter/main.go
index d50336b9b..4a925d03c 100644
--- a/tools/nogo/filter/main.go
+++ b/tools/nogo/filter/main.go
@@ -12,7 +12,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.
-// Binary check is the nogo entrypoint.
+// Binary filter filters and reports nogo findings.
package main
import (
diff --git a/tools/nogo/findings.go b/tools/nogo/findings.go
index 329a7062e..a73bf1a09 100644
--- a/tools/nogo/findings.go
+++ b/tools/nogo/findings.go
@@ -109,7 +109,7 @@ func ExtractFindingsFromFile(filename string, asJSON bool) (FindingSet, error) {
return ExtractFindingsFrom(r, asJSON)
}
-// ExtractFindingsFromBytes loads findings from bytes.
+// ExtractFindingsFrom loads findings from an io.Reader.
func ExtractFindingsFrom(r io.Reader, asJSON bool) (findings FindingSet, err error) {
if asJSON {
dec := json.NewDecoder(r)
diff --git a/tools/nogo/nogo.go b/tools/nogo/nogo.go
index acee7c8bc..d95d7652f 100644
--- a/tools/nogo/nogo.go
+++ b/tools/nogo/nogo.go
@@ -41,9 +41,10 @@ import (
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/internal/facts"
"golang.org/x/tools/go/gcexportdata"
+ "golang.org/x/tools/go/types/objectpath"
// Special case: flags live here and change overall behavior.
- "gvisor.dev/gvisor/tools/checkescape"
+ "gvisor.dev/gvisor/tools/nogo/objdump"
"gvisor.dev/gvisor/tools/worker"
)
@@ -216,6 +217,11 @@ func (i *importer) Import(path string) (*types.Package, error) {
}
}
+ // Check the cache.
+ if pkg, ok := i.cache[path]; ok && pkg.Complete() {
+ return pkg, nil
+ }
+
// Actually load the data.
realPath, ok := i.ImportMap[path]
var (
@@ -327,6 +333,9 @@ func CheckStdlib(config *StdlibConfig, analyzers []*analysis.Analyzer) (allFindi
// Closure to check a single package.
localStdlibFacts := make(stdlibFacts)
localStdlibErrs := make(map[string]error)
+ stdlibCachedFacts.Lookup([]string{""}, func() worker.Sizer {
+ return localStdlibFacts
+ })
var checkOne func(pkg string) error // Recursive.
checkOne = func(pkg string) error {
// Is this already done?
@@ -355,11 +364,11 @@ func CheckStdlib(config *StdlibConfig, analyzers []*analysis.Analyzer) (allFindi
}
// Provide the input.
- oldReader := checkescape.Reader
- checkescape.Reader = rc // For analysis.
+ oldReader := objdump.Reader
+ objdump.Reader = rc // For analysis.
defer func() {
rc.Close()
- checkescape.Reader = oldReader // Restore.
+ objdump.Reader = oldReader // Restore.
}()
// Run the analysis.
@@ -406,6 +415,56 @@ func CheckStdlib(config *StdlibConfig, analyzers []*analysis.Analyzer) (allFindi
return allFindings, buf.Bytes(), nil
}
+// sanityCheckScope checks that all objects in astScope map to the correct
+// objects in binaryTypes. Note that we don't check whether the sets are the
+// same; we only care about the fidelity of objects in astScope.
+//
+// When an inconsistency is identified, we record it in the astToBinary map.
+// This allows us to dynamically replace facts and correct for the issue.
+func sanityCheckScope(astScope *types.Scope, binaryTypes *types.Package, binaryScope *types.Scope, astToBinary map[types.Object]types.Object) error {
+ for _, x := range astScope.Names() {
+ fe := astScope.Lookup(x)
+ path, err := objectpath.For(fe)
+ if err != nil {
+ continue // Not an encoded object.
+ }
+ se, err := objectpath.Object(binaryTypes, path)
+ if err != nil {
+ continue // May be unused, see below.
+ }
+ if fe.Id() != se.Id() {
+ // These types are incompatible. This means that when
+ // this objectpath is loading from the binaryTypes (for
+ // dependencies) it will resolve to a fact for that
+ // type. We don't actually care about this error since
+ // we do the rewrite below, but may as well alert.
+ log.Printf("WARNING: Object %s is a victim of go/issues/44195.", fe.Id())
+ }
+ se = binaryScope.Lookup(x)
+ if se == nil {
+ // The object may not be present in the export data if
+ // it is package internal. This is fine, as nothing
+ // outside of this package can use these symbols.
+ continue
+ }
+ // Save the translation.
+ astToBinary[fe] = se
+ }
+ for i := 0; i < astScope.NumChildren(); i++ {
+ if err := sanityCheckScope(astScope.Child(i), binaryTypes, binaryScope, astToBinary); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+// sanityCheckTypes checks that the types in astTypes are consistent with
+// those in binaryTypes, recording object translations in astToBinary. See
+// sanityCheckScope.
+func sanityCheckTypes(astTypes, binaryTypes *types.Package, astToBinary map[types.Object]types.Object) error {
+ return sanityCheckScope(astTypes.Scope(), binaryTypes, binaryTypes.Scope(), astToBinary)
+}
+
// CheckPackage runs all given analyzers.
//
// The implementation was adapted from [1], which was in turn adapted from [2].
@@ -450,17 +509,46 @@ func CheckPackage(config *PackageConfig, analyzers []*analysis.Analyzer, importC
Scopes: make(map[ast.Node]*types.Scope),
Selections: make(map[*ast.SelectorExpr]*types.Selection),
}
- types, err := typeConfig.Check(config.ImportPath, imp.fset, syntax, typesInfo)
+ astTypes, err := typeConfig.Check(config.ImportPath, imp.fset, syntax, typesInfo)
if err != nil && imp.lastErr != ErrSkip {
return nil, nil, fmt.Errorf("error checking types: %w", err)
}
- // Load all package facts.
- facts, err := facts.Decode(types, config.factLoader)
+ // Load all facts using the astTypes, although it may need reconciling
+ // later on. See the fact functions below.
+ astFacts, err := facts.Decode(astTypes, config.factLoader)
if err != nil {
return nil, nil, fmt.Errorf("error decoding facts: %w", err)
}
+ // Sanity check all types and record metadata to prevent
+ // https://github.com/golang/go/issues/44195.
+ //
+ // This block loads the binary types, whose encoding will be well
+ // defined and aligned with any downstream consumers. Below in the fact
+ // functions for the analysis, we export facts to both the astFacts
+ // and the binaryFacts if available. The binaryFacts are the final
+ // encoded facts in order to ensure compatibility. We keep the
+ // intermediate astTypes in order to allow exporting and importing
+ // within the local package under analysis.
+ var (
+ astToBinary = make(map[types.Object]types.Object)
+ binaryFacts *facts.Set
+ )
+ if _, ok := config.ImportMap[config.ImportPath]; ok {
+ binaryTypes, err := imp.Import(config.ImportPath)
+ if err != nil {
+ return nil, nil, fmt.Errorf("error loading self: %w", err)
+ }
+ if err := sanityCheckTypes(astTypes, binaryTypes, astToBinary); err != nil {
+ return nil, nil, fmt.Errorf("error sanity checking types: %w", err)
+ }
+ binaryFacts, err = facts.Decode(binaryTypes, config.factLoader)
+ if err != nil {
+ return nil, nil, fmt.Errorf("error decoding facts: %w", err)
+ }
+ }
+
// Register fact types and establish dependencies between analyzers.
// The visit closure will execute recursively, and populate results
// will all required analysis results.
@@ -479,15 +567,15 @@ func CheckPackage(config *PackageConfig, analyzers []*analysis.Analyzer, importC
}
// Run the analysis.
- factFilter := make(map[reflect.Type]bool)
+ localFactsFilter := make(map[reflect.Type]bool)
for _, f := range a.FactTypes {
- factFilter[reflect.TypeOf(f)] = true
+ localFactsFilter[reflect.TypeOf(f)] = true
}
p := &analysis.Pass{
Analyzer: a,
Fset: imp.fset,
Files: syntax,
- Pkg: types,
+ Pkg: astTypes,
TypesInfo: typesInfo,
ResultOf: results, // All results.
Report: func(d analysis.Diagnostic) {
@@ -497,13 +585,29 @@ func CheckPackage(config *PackageConfig, analyzers []*analysis.Analyzer, importC
Message: d.Message,
})
},
- ImportPackageFact: facts.ImportPackageFact,
- ExportPackageFact: facts.ExportPackageFact,
- ImportObjectFact: facts.ImportObjectFact,
- ExportObjectFact: facts.ExportObjectFact,
- AllPackageFacts: func() []analysis.PackageFact { return facts.AllPackageFacts(factFilter) },
- AllObjectFacts: func() []analysis.ObjectFact { return facts.AllObjectFacts(factFilter) },
- TypesSizes: typesSizes,
+ ImportPackageFact: astFacts.ImportPackageFact,
+ ExportPackageFact: func(fact analysis.Fact) {
+ astFacts.ExportPackageFact(fact)
+ if binaryFacts != nil {
+ binaryFacts.ExportPackageFact(fact)
+ }
+ },
+ ImportObjectFact: astFacts.ImportObjectFact,
+ ExportObjectFact: func(obj types.Object, fact analysis.Fact) {
+ astFacts.ExportObjectFact(obj, fact)
+ // Note that if no object is recorded in
+ // astToBinary and binaryFacts != nil, then the
+ // object doesn't appear in the exported data.
+ // It was likely an internal object to the
+ // package, and there is no meaningful
+ // downstream consumer of the fact.
+ if binaryObj, ok := astToBinary[obj]; ok && binaryFacts != nil {
+ binaryFacts.ExportObjectFact(binaryObj, fact)
+ }
+ },
+ AllPackageFacts: func() []analysis.PackageFact { return astFacts.AllPackageFacts(localFactsFilter) },
+ AllObjectFacts: func() []analysis.ObjectFact { return astFacts.AllObjectFacts(localFactsFilter) },
+ TypesSizes: typesSizes,
}
result, err := a.Run(p)
if err != nil {
@@ -528,8 +632,14 @@ func CheckPackage(config *PackageConfig, analyzers []*analysis.Analyzer, importC
}
}
- // Return all findings.
- return findings, facts.Encode(), nil
+ // Return all findings. Note that we prefer to return the binary facts
+ // if available, so that downstream consumers of these facts will find
+ // that the export aligns with the internal type details. See the block
+ // above with the call to sanityCheckTypes.
+ if binaryFacts != nil {
+ return findings, binaryFacts.Encode(), nil
+ }
+ return findings, astFacts.Encode(), nil
}
func init() {
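
The sanity check introduced above relies on golang.org/x/tools/go/types/objectpath to correlate the source-checked package (astTypes) with its compiled export data (binaryTypes). The following is a simplified, standalone sketch of that correlation; it assumes astPkg comes from the type checker over the package sources and binPkg from gcexportdata for the same import path, and it omits the Id comparison and package-internal handling done in sanityCheckScope:

    package sanity

    import (
        "go/types"

        "golang.org/x/tools/go/types/objectpath"
    )

    // mapObjects returns, for every encodable object in astPkg's package
    // scope, the corresponding object in binPkg. Objects that cannot be
    // encoded, or that are absent from the export data, are skipped.
    func mapObjects(astPkg, binPkg *types.Package) map[types.Object]types.Object {
        out := make(map[types.Object]types.Object)
        scope := astPkg.Scope()
        for _, name := range scope.Names() {
            astObj := scope.Lookup(name)
            path, err := objectpath.For(astObj)
            if err != nil {
                continue // Not an encodable object.
            }
            binObj, err := objectpath.Object(binPkg, path)
            if err != nil {
                continue // Not present in the export data.
            }
            out[astObj] = binObj
        }
        return out
    }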
diff --git a/tools/nogo/objdump/BUILD b/tools/nogo/objdump/BUILD
new file mode 100644
index 000000000..da56efdf7
--- /dev/null
+++ b/tools/nogo/objdump/BUILD
@@ -0,0 +1,10 @@
+load("//tools:defs.bzl", "go_library")
+
+package(licenses = ["notice"])
+
+go_library(
+ name = "objdump",
+ srcs = ["objdump.go"],
+ nogo = False,
+ visibility = ["//tools:__subpackages__"],
+)
diff --git a/tools/nogo/objdump/objdump.go b/tools/nogo/objdump/objdump.go
new file mode 100644
index 000000000..48484abf3
--- /dev/null
+++ b/tools/nogo/objdump/objdump.go
@@ -0,0 +1,96 @@
+// Copyright 2020 The gVisor Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package objdump is a wrapper around relevant objdump flags.
+package objdump
+
+import (
+ "flag"
+ "fmt"
+ "io"
+ "os"
+ "os/exec"
+)
+
+var (
+ // Binary is the binary under analysis.
+ //
+ // See Reader, below.
+ binary = flag.String("binary", "", "binary under analysis")
+
+ // Reader is the input stream.
+ //
+ // This may be set instead of Binary.
+ Reader io.Reader
+
+ // objdumpTool is the tool used to dump a binary.
+ objdumpTool = flag.String("objdump_tool", "", "tool used to dump a binary")
+)
+
+// LoadRaw reads the raw object output.
+func LoadRaw(fn func(r io.Reader) error) error {
+ var r io.Reader
+ if *binary != "" {
+ f, err := os.Open(*binary)
+ if err != nil {
+ return err
+ }
+ defer f.Close()
+ r = f
+ } else if Reader != nil {
+ r = Reader
+ } else {
+ // We have no input stream.
+ return fmt.Errorf("no binary or reader provided")
+ }
+ return fn(r)
+}
+
+// Load reads the objdump output.
+func Load(fn func(r io.Reader) error) error {
+ var (
+ args []string
+ stdin io.Reader
+ )
+ if *binary != "" {
+ args = append(args, *binary)
+ } else if Reader != nil {
+ stdin = Reader
+ } else {
+ // We have no input stream or binary.
+ return fmt.Errorf("no binary or reader provided")
+ }
+
+ // Construct our command.
+ cmd := exec.Command(*objdumpTool, args...)
+ cmd.Stdin = stdin
+ cmd.Stderr = os.Stderr
+ out, err := cmd.StdoutPipe()
+ if err != nil {
+ return err
+ }
+ if err := cmd.Start(); err != nil {
+ return err
+ }
+
+ // Call the user hook.
+ userErr := fn(out)
+
+ // Wait for the dump to finish.
+ if err := cmd.Wait(); userErr == nil && err != nil {
+ return err
+ }
+
+ return userErr
+}
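
For context, a hypothetical caller of this package, sketching how a consumer such as the checkescape analyzer might read the dump via objdump.Load; the scanning body is illustrative only, and the -binary and -objdump_tool flags defined above are expected to be provided on the command line:

    package main

    import (
        "bufio"
        "flag"
        "fmt"
        "io"
        "log"

        "gvisor.dev/gvisor/tools/nogo/objdump"
    )

    func main() {
        flag.Parse() // Picks up the -binary and -objdump_tool flags defined by the package.

        err := objdump.Load(func(r io.Reader) error {
            s := bufio.NewScanner(r)
            s.Buffer(make([]byte, 0, 1024*1024), 1024*1024) // Disassembly lines can be long.
            for s.Scan() {
                fmt.Println(s.Text()) // Process one disassembly line at a time.
            }
            return s.Err()
        })
        if err != nil {
            log.Fatalf("objdump failed: %v", err)
        }
    }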