Diffstat (limited to 'tools/nogo')
-rw-r--r-- | tools/nogo/BUILD        |  20
-rw-r--r-- | tools/nogo/build.go     |   9
-rw-r--r-- | tools/nogo/check/BUILD  |   1
-rw-r--r-- | tools/nogo/config.go    |  13
-rw-r--r-- | tools/nogo/data/BUILD   |  10
-rw-r--r-- | tools/nogo/data/data.go |  21
-rw-r--r-- | tools/nogo/defs.bzl     | 286
-rw-r--r-- | tools/nogo/matchers.go  |  24
-rw-r--r-- | tools/nogo/nogo.go      | 406
-rw-r--r-- | tools/nogo/register.go  |   3
-rw-r--r-- | tools/nogo/util/BUILD   |   9
-rw-r--r-- | tools/nogo/util/util.go |  85
12 files changed, 710 insertions, 177 deletions
diff --git a/tools/nogo/BUILD b/tools/nogo/BUILD index c21b09511..9f1fcd9c7 100644 --- a/tools/nogo/BUILD +++ b/tools/nogo/BUILD @@ -1,7 +1,18 @@ -load("//tools:defs.bzl", "go_library") +load("//tools:defs.bzl", "bzl_library", "go_library") +load("//tools/nogo:defs.bzl", "nogo_dump_tool", "nogo_stdlib") package(licenses = ["notice"]) +nogo_dump_tool( + name = "dump_tool", + visibility = ["//visibility:public"], +) + +nogo_stdlib( + name = "stdlib", + visibility = ["//visibility:public"], +) + go_library( name = "nogo", srcs = [ @@ -16,7 +27,6 @@ go_library( deps = [ "//tools/checkescape", "//tools/checkunsafe", - "//tools/nogo/data", "@org_golang_x_tools//go/analysis:go_tool_library", "@org_golang_x_tools//go/analysis/internal/facts:go_tool_library", "@org_golang_x_tools//go/analysis/passes/asmdecl:go_tool_library", @@ -47,3 +57,9 @@ go_library( "@org_golang_x_tools//go/gcexportdata:go_tool_library", ], ) + +bzl_library( + name = "defs_bzl", + srcs = ["defs.bzl"], + visibility = ["//visibility:private"], +) diff --git a/tools/nogo/build.go b/tools/nogo/build.go index 1c0d08661..39c2ae418 100644 --- a/tools/nogo/build.go +++ b/tools/nogo/build.go @@ -26,11 +26,18 @@ var ( // and should not have any special prefix applied. internalPrefix = fmt.Sprintf("^") + // internalDefault is applied when no paths are provided. + internalDefault = fmt.Sprintf("%s/.*", notPath("external")) + // externalPrefix is external workspace packages. externalPrefix = "^external/" ) // findStdPkg needs to find the bundled standard library packages. -func findStdPkg(path, GOOS, GOARCH string) (io.ReadCloser, error) { +func findStdPkg(GOOS, GOARCH, path string) (io.ReadCloser, error) { + if path == "C" { + // Cgo builds cannot be analyzed. Skip. + return nil, ErrSkip + } return os.Open(fmt.Sprintf("external/go_sdk/pkg/%s_%s/%s.a", GOOS, GOARCH, path)) } diff --git a/tools/nogo/check/BUILD b/tools/nogo/check/BUILD index e2d76cd5c..21ba2c306 100644 --- a/tools/nogo/check/BUILD +++ b/tools/nogo/check/BUILD @@ -7,6 +7,7 @@ package(licenses = ["notice"]) go_binary( name = "check", srcs = ["main.go"], + nogo = False, visibility = ["//visibility:public"], deps = ["//tools/nogo"], ) diff --git a/tools/nogo/config.go b/tools/nogo/config.go index 6958fca69..cfe7b4aa4 100644 --- a/tools/nogo/config.go +++ b/tools/nogo/config.go @@ -84,6 +84,14 @@ var analyzerConfig = map[*analysis.Analyzer]matcher{ externalExcluded( ".*protobuf/.*.go", // Bad conversions. ".*flate/huffman_bit_writer.go", // Bad conversion. + + // Runtime internal violations. + ".*reflect/value.go", + ".*encoding/xml/xml.go", + ".*runtime/pprof/internal/profile/proto.go", + ".*fmt/scan.go", + ".*go/types/conversions.go", + ".*golang.org/x/net/dns/dnsmessage/message.go", ), ), shadow.Analyzer: disableMatches(), // Disabled for now. @@ -114,3 +122,8 @@ var analyzerConfig = map[*analysis.Analyzer]matcher{ checkescape.Analyzer: internalMatches(), checkunsafe.Analyzer: internalMatches(), } + +var escapesConfig = map[*analysis.Analyzer]matcher{ + // Informational only: include all packages. 
+ checkescape.EscapeAnalyzer: alwaysMatches(), +} diff --git a/tools/nogo/data/BUILD b/tools/nogo/data/BUILD deleted file mode 100644 index b7564cc44..000000000 --- a/tools/nogo/data/BUILD +++ /dev/null @@ -1,10 +0,0 @@ -load("//tools:defs.bzl", "go_library") - -package(licenses = ["notice"]) - -go_library( - name = "data", - srcs = ["data.go"], - nogo = False, - visibility = ["//tools:__subpackages__"], -) diff --git a/tools/nogo/data/data.go b/tools/nogo/data/data.go deleted file mode 100644 index eb84d0d27..000000000 --- a/tools/nogo/data/data.go +++ /dev/null @@ -1,21 +0,0 @@ -// Copyright 2019 The gVisor Authors. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Package data contains shared data for nogo analysis. -// -// This is used to break a dependency cycle. -package data - -// Objdump is the dumped binary under analysis. -var Objdump string diff --git a/tools/nogo/defs.bzl b/tools/nogo/defs.bzl index 6560b57c8..480438047 100644 --- a/tools/nogo/defs.bzl +++ b/tools/nogo/defs.bzl @@ -1,6 +1,107 @@ """Nogo rules.""" -load("//tools/bazeldefs:defs.bzl", "go_context", "go_importpath", "go_rule") +load("//tools/bazeldefs:defs.bzl", "go_context", "go_importpath", "go_rule", "go_test_library") + +def _nogo_dump_tool_impl(ctx): + # Extract the Go context. + go_ctx = go_context(ctx) + + # Construct the magic dump command. + # + # Note that in some cases, the input is being fed into the tool via stdin. + # Unfortunately, the Go objdump tool expects to see a seekable file [1], so + # we need the tool to handle this case by creating a temporary file. + # + # [1] https://github.com/golang/go/issues/41051 + env_prefix = " ".join(["%s=%s" % (key, value) for (key, value) in go_ctx.env.items()]) + dumper = ctx.actions.declare_file(ctx.label.name) + ctx.actions.write(dumper, "\n".join([ + "#!/bin/bash", + "set -euo pipefail", + "if [[ $# -eq 0 ]]; then", + " T=$(mktemp -u -t libXXXXXX.a)", + " cat /dev/stdin > ${T}", + "else", + " T=$1;", + "fi", + "%s %s tool objdump ${T}" % ( + env_prefix, + go_ctx.go.path, + ), + "if [[ $# -eq 0 ]]; then", + " rm -rf ${T}", + "fi", + "", + ]), is_executable = True) + + # Include the full runfiles. + return [DefaultInfo( + runfiles = ctx.runfiles(files = go_ctx.runfiles.to_list()), + executable = dumper, + )] + +nogo_dump_tool = go_rule( + rule, + implementation = _nogo_dump_tool_impl, +) + +# NogoStdlibInfo is the set of standard library facts. +NogoStdlibInfo = provider( + "information for nogo analysis (standard library facts)", + fields = { + "facts": "serialized standard library facts", + "findings": "package findings (if relevant)", + }, +) + +def _nogo_stdlib_impl(ctx): + # Extract the Go context. + go_ctx = go_context(ctx) + + # Build the standard library facts. 
+ facts = ctx.actions.declare_file(ctx.label.name + ".facts") + findings = ctx.actions.declare_file(ctx.label.name + ".findings") + config = struct( + Srcs = [f.path for f in go_ctx.stdlib_srcs], + GOOS = go_ctx.goos, + GOARCH = go_ctx.goarch, + Tags = go_ctx.tags, + ) + config_file = ctx.actions.declare_file(ctx.label.name + ".cfg") + ctx.actions.write(config_file, config.to_json()) + ctx.actions.run( + inputs = [config_file] + go_ctx.stdlib_srcs, + outputs = [facts, findings], + tools = depset(go_ctx.runfiles.to_list() + ctx.files._dump_tool), + executable = ctx.files._nogo[0], + mnemonic = "GoStandardLibraryAnalysis", + progress_message = "Analyzing Go Standard Library", + arguments = go_ctx.nogo_args + [ + "-dump_tool=%s" % ctx.files._dump_tool[0].path, + "-stdlib=%s" % config_file.path, + "-findings=%s" % findings.path, + "-facts=%s" % facts.path, + ], + ) + + # Return the stdlib facts as output. + return [NogoStdlibInfo( + facts = facts, + findings = findings, + )] + +nogo_stdlib = go_rule( + rule, + implementation = _nogo_stdlib_impl, + attrs = { + "_nogo": attr.label( + default = "//tools/nogo/check:check", + ), + "_dump_tool": attr.label( + default = "//tools/nogo:dump_tool", + ), + }, +) # NogoInfo is the serialized set of package facts for a nogo analysis. # @@ -8,10 +109,14 @@ load("//tools/bazeldefs:defs.bzl", "go_context", "go_importpath", "go_rule") # with the source files as input. Note however, that the individual nogo rules # are simply stubs that enter into the shadow dependency tree (the "aspect"). NogoInfo = provider( + "information for nogo analysis", fields = { "facts": "serialized package facts", + "findings": "package findings (if relevant)", "importpath": "package import path", "binaries": "package binary files", + "srcs": "original source files (for go_test support)", + "deps": "original deps (for go_test support)", }, ) @@ -21,15 +126,29 @@ def _nogo_aspect_impl(target, ctx): # All work is done in the shadow properties for go rules. For a proto # library, we simply skip the analysis portion but still need to return a # valid NogoInfo to reference the generated binary. - if ctx.rule.kind == "go_library": + if ctx.rule.kind in ("go_library", "go_binary", "go_test", "go_tool_library"): srcs = ctx.rule.files.srcs - elif ctx.rule.kind == "go_proto_library" or ctx.rule.kind == "go_wrap_cc": + deps = ctx.rule.attr.deps + elif ctx.rule.kind in ("go_proto_library", "go_wrap_cc"): srcs = [] + deps = ctx.rule.attr.deps else: return [NogoInfo()] - # Construct the Go environment from the go_context.env dictionary. - env_prefix = " ".join(["%s=%s" % (key, value) for (key, value) in go_context(ctx).env.items()]) + # Extract the Go context. + go_ctx = go_context(ctx) + + # If we're using the "library" attribute, then we need to aggregate the + # original library sources and dependencies into this target to perform + # proper type analysis. + if ctx.rule.kind == "go_test": + library = go_test_library(ctx.rule) + if library != None: + info = library[NogoInfo] + if hasattr(info, "srcs"): + srcs = srcs + info.srcs + if hasattr(info, "deps"): + deps = deps + info.deps # Start with all target files and srcs as input. inputs = target.files.to_list() + srcs @@ -39,48 +158,30 @@ def _nogo_aspect_impl(target, ctx): # to cleanly allow us redirect stdout to the actual output file. Perhaps # I'm missing something here, but the intermediate script does work. 
binaries = target.files.to_list() - disasm_file = ctx.actions.declare_file(target.label.name + ".out") - dumper = ctx.actions.declare_file("%s-dumper" % ctx.label.name) - ctx.actions.write(dumper, "\n".join([ - "#!/bin/bash", - "%s %s tool objdump %s > %s\n" % ( - env_prefix, - go_context(ctx).go.path, - [f.path for f in binaries if f.path.endswith(".a")][0], - disasm_file.path, - ), - ]), is_executable = True) - ctx.actions.run( - inputs = binaries, - outputs = [disasm_file], - tools = go_context(ctx).runfiles, - mnemonic = "GoObjdump", - progress_message = "Objdump %s" % target.label, - executable = dumper, - ) - inputs.append(disasm_file) + objfiles = [f for f in binaries if f.path.endswith(".a")] + if len(objfiles) > 0: + # Prefer the .a files for go_library targets. + target_objfile = objfiles[0] + else: + # Use the raw binary for go_binary and go_test targets. + target_objfile = binaries[0] + inputs.append(target_objfile) # Extract the importpath for this package. - importpath = go_importpath(target) - - # The nogo tool requires a configfile serialized in JSON format to do its - # work. This must line up with the nogo.Config fields. - facts = ctx.actions.declare_file(target.label.name + ".facts") - config = struct( - ImportPath = importpath, - GoFiles = [src.path for src in srcs if src.path.endswith(".go")], - NonGoFiles = [src.path for src in srcs if not src.path.endswith(".go")], - GOOS = go_context(ctx).goos, - GOARCH = go_context(ctx).goarch, - Tags = go_context(ctx).tags, - FactMap = {}, # Constructed below. - ImportMap = {}, # Constructed below. - FactOutput = facts.path, - Objdump = disasm_file.path, - ) + if ctx.rule.kind == "go_test": + # If this is a test, then it will not be imported by anything else. + # We can safely set the importapth to just "test". Note that this + # is necessary if the library also imports the core library (in + # addition to including the sources directly), which happens in + # some complex cases (seccomp_victim). + importpath = "test" + else: + importpath = go_importpath(target) # Collect all info from shadow dependencies. - for dep in ctx.rule.attr.deps: + fact_map = dict() + import_map = dict() + for dep in deps: # There will be no file attribute set for all transitive dependencies # that are not go_library or go_binary rules, such as a proto rules. # This is handled by the ctx.rule.kind check above. @@ -94,45 +195,83 @@ def _nogo_aspect_impl(target, ctx): x_files = [f.path for f in info.binaries if f.path.endswith(".x")] if not len(x_files): x_files = [f.path for f in info.binaries if f.path.endswith(".a")] - config.ImportMap[info.importpath] = x_files[0] - config.FactMap[info.importpath] = info.facts.path + import_map[info.importpath] = x_files[0] + fact_map[info.importpath] = info.facts.path # Ensure the above are available as inputs. inputs.append(info.facts) inputs += info.binaries - # Write the configuration and run the tool. + # Add the standard library facts. + stdlib_facts = ctx.attr._nogo_stdlib[NogoStdlibInfo].facts + inputs.append(stdlib_facts) + + # The nogo tool operates on a configuration serialized in JSON format. 
+ facts = ctx.actions.declare_file(target.label.name + ".facts") + findings = ctx.actions.declare_file(target.label.name + ".findings") + escapes = ctx.actions.declare_file(target.label.name + ".escapes") + config = struct( + ImportPath = importpath, + GoFiles = [src.path for src in srcs if src.path.endswith(".go")], + NonGoFiles = [src.path for src in srcs if not src.path.endswith(".go")], + GOOS = go_ctx.goos, + GOARCH = go_ctx.goarch, + Tags = go_ctx.tags, + FactMap = fact_map, + ImportMap = import_map, + StdlibFacts = stdlib_facts.path, + ) config_file = ctx.actions.declare_file(target.label.name + ".cfg") ctx.actions.write(config_file, config.to_json()) inputs.append(config_file) - - # Run the nogo tool itself. ctx.actions.run( inputs = inputs, - outputs = [facts], - tools = go_context(ctx).runfiles, + outputs = [facts, findings, escapes], + tools = depset(go_ctx.runfiles.to_list() + ctx.files._dump_tool), executable = ctx.files._nogo[0], mnemonic = "GoStaticAnalysis", progress_message = "Analyzing %s" % target.label, - arguments = ["-config=%s" % config_file.path], + arguments = go_ctx.nogo_args + [ + "-binary=%s" % target_objfile.path, + "-dump_tool=%s" % ctx.files._dump_tool[0].path, + "-package=%s" % config_file.path, + "-findings=%s" % findings.path, + "-facts=%s" % facts.path, + "-escapes=%s" % escapes.path, + ], ) # Return the package facts as output. - return [NogoInfo( - facts = facts, - importpath = importpath, - binaries = binaries, - )] + return [ + NogoInfo( + facts = facts, + findings = findings, + importpath = importpath, + binaries = binaries, + srcs = srcs, + deps = deps, + ), + OutputGroupInfo( + # Expose all findings (should just be a single file). This can be + # used for build analysis of the nogo findings. + nogo_findings = depset([findings]), + # Expose all escape analysis findings (see above). + nogo_escapes = depset([escapes]), + ), + ] nogo_aspect = go_rule( aspect, implementation = _nogo_aspect_impl, - attr_aspects = ["deps"], + attr_aspects = [ + "deps", + "library", + "embed", + ], attrs = { - "_nogo": attr.label( - default = "//tools/nogo/check:check", - allow_single_file = True, - ), + "_nogo": attr.label(default = "//tools/nogo/check:check"), + "_nogo_stdlib": attr.label(default = "//tools/nogo:stdlib"), + "_dump_tool": attr.label(default = "//tools/nogo:dump_tool"), }, ) @@ -144,13 +283,26 @@ def _nogo_test_impl(ctx): # this way so that any test applied is effectively pushed down to all # upstream dependencies through the aspect. inputs = [] + findings = [] runner = ctx.actions.declare_file("%s-executer" % ctx.label.name) runner_content = ["#!/bin/bash"] for dep in ctx.attr.deps: + # Extract the findings. info = dep[NogoInfo] - inputs.append(info.facts) + inputs.append(info.findings) + findings.append(info.findings) + + # Include all source files, transitively. This will make this target + # "directly affected" for the purpose of build analysis. + inputs += info.srcs - # Draw a sweet unicode checkmark with the package name (in green). + # If there are findings, dump them and fail. + runner_content.append("if [[ -s \"%s\" ]]; then cat \"%s\" && exit 1; fi" % ( + info.findings.short_path, + info.findings.short_path, + )) + + # Otherwise, draw a sweet unicode checkmark with the package name (in green). 
runner_content.append("echo -e \"\\033[0;32m\\xE2\\x9C\\x94\\033[0;31m\\033[0m %s\"" % info.importpath) runner_content.append("exit 0\n") ctx.actions.write(runner, "\n".join(runner_content), is_executable = True) @@ -167,6 +319,10 @@ _nogo_test = rule( test = True, ) -def nogo_test(**kwargs): +def nogo_test(name, **kwargs): tags = kwargs.pop("tags", []) + ["nogo"] - _nogo_test(tags = tags, **kwargs) + _nogo_test( + name = name, + tags = tags, + **kwargs + ) diff --git a/tools/nogo/matchers.go b/tools/nogo/matchers.go index 57a250501..5c39be630 100644 --- a/tools/nogo/matchers.go +++ b/tools/nogo/matchers.go @@ -16,7 +16,6 @@ package nogo import ( "go/token" - "path/filepath" "regexp" "strings" @@ -44,11 +43,30 @@ type pathRegexps struct { func buildRegexps(prefix string, args ...string) []*regexp.Regexp { result := make([]*regexp.Regexp, 0, len(args)) for _, arg := range args { - result = append(result, regexp.MustCompile(filepath.Join(prefix, arg))) + result = append(result, regexp.MustCompile(prefix+arg)) } return result } +// notPath works around the lack of backtracking. +// +// It is used to construct a regular expression for non-matching components. +func notPath(name string) string { + sb := strings.Builder{} + sb.WriteString("(") + for i := range name { + if i > 0 { + sb.WriteString("|") + } + sb.WriteString(name[:i]) + sb.WriteString("[^") + sb.WriteByte(name[i]) + sb.WriteString("/][^/]*") + } + sb.WriteString(")") + return sb.String() +} + // ShouldReport implements matcher.ShouldReport. func (p *pathRegexps) ShouldReport(d analysis.Diagnostic, fs *token.FileSet) bool { fullPos := fs.Position(d.Pos).String() @@ -79,7 +97,7 @@ func externalExcluded(paths ...string) *pathRegexps { // internalMatches returns a path matcher for internal packages. func internalMatches() *pathRegexps { return &pathRegexps{ - expr: buildRegexps(internalPrefix, ".*"), + expr: buildRegexps(internalPrefix, internalDefault), include: true, } } diff --git a/tools/nogo/nogo.go b/tools/nogo/nogo.go index 203cdf688..120fdcff5 100644 --- a/tools/nogo/nogo.go +++ b/tools/nogo/nogo.go @@ -20,6 +20,7 @@ package nogo import ( "encoding/json" + "errors" "flag" "fmt" "go/ast" @@ -31,50 +32,89 @@ import ( "io/ioutil" "log" "os" + "path" "path/filepath" "reflect" + "strings" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/internal/facts" "golang.org/x/tools/go/gcexportdata" - "gvisor.dev/gvisor/tools/nogo/data" + + // Special case: flags live here and change overall behavior. + "gvisor.dev/gvisor/tools/checkescape" ) -// pkgConfig is serialized as the configuration. +// stdlibConfig is serialized as the configuration. // -// This contains everything required for the analysis. -type pkgConfig struct { - ImportPath string - GoFiles []string - NonGoFiles []string - Tags []string - GOOS string - GOARCH string - ImportMap map[string]string - FactMap map[string]string - FactOutput string - Objdump string +// This contains everything required for stdlib analysis. +type stdlibConfig struct { + Srcs []string + GOOS string + GOARCH string + Tags []string } -// loadFacts finds and loads facts per FactMap. -func (c *pkgConfig) loadFacts(path string) ([]byte, error) { - realPath, ok := c.FactMap[path] - if !ok { - return nil, nil // No facts available. - } +// packageConfig is serialized as the configuration. +// +// This contains everything required for single package analysis. 
+type packageConfig struct { + ImportPath string + GoFiles []string + NonGoFiles []string + Tags []string + GOOS string + GOARCH string + ImportMap map[string]string + FactMap map[string]string + StdlibFacts string +} - // Read the files file. - data, err := ioutil.ReadFile(realPath) - if err != nil { - return nil, err +// loader is a fact-loader function. +type loader func(string) ([]byte, error) + +// saver is a fact-saver function. +type saver func([]byte) error + +// factLoader returns a function that loads facts. +// +// This resolves all standard library facts and imported package facts up +// front. The returned loader function will never return an error, only +// empty facts. +// +// This is done because all stdlib data is stored together, and we don't want +// to load this data many times over. +func (c *packageConfig) factLoader() (loader, error) { + allFacts := make(map[string][]byte) + if c.StdlibFacts != "" { + data, err := ioutil.ReadFile(c.StdlibFacts) + if err != nil { + return nil, fmt.Errorf("error loading stdlib facts from %q: %w", c.StdlibFacts, err) + } + var stdlibFacts map[string][]byte + if err := json.Unmarshal(data, &stdlibFacts); err != nil { + return nil, fmt.Errorf("error loading stdlib facts: %w", err) + } + for pkg, data := range stdlibFacts { + allFacts[pkg] = data + } + } + for pkg, file := range c.FactMap { + data, err := ioutil.ReadFile(file) + if err != nil { + return nil, fmt.Errorf("error loading %q: %w", file, err) + } + allFacts[pkg] = data } - return data, nil + return func(path string) ([]byte, error) { + return allFacts[path], nil + }, nil } // shouldInclude indicates whether the file should be included. // // NOTE: This does only basic parsing of tags. -func (c *pkgConfig) shouldInclude(path string) (bool, error) { +func (c *packageConfig) shouldInclude(path string) (bool, error) { ctx := build.Default ctx.GOOS = c.GOOS ctx.GOARCH = c.GOARCH @@ -88,9 +128,11 @@ func (c *pkgConfig) shouldInclude(path string) (bool, error) { // files, and the facts. Note that this importer implementation will always // pass when a given package is not available. type importer struct { - pkgConfig - fset *token.FileSet - cache map[string]*types.Package + *packageConfig + fset *token.FileSet + cache map[string]*types.Package + lastErr error + callback func(string) error } // Import implements types.Importer.Import. @@ -101,6 +143,17 @@ func (i *importer) Import(path string) (*types.Package, error) { // analyzers are specifically looking for this. return types.Unsafe, nil } + + // Call the internal callback. This is used to resolve loading order + // for the standard library. See checkStdlib. + if i.callback != nil { + if err := i.callback(path); err != nil { + i.lastErr = err + return nil, err + } + } + + // Actually load the data. realPath, ok := i.ImportMap[path] var ( rc io.ReadCloser @@ -109,12 +162,13 @@ func (i *importer) Import(path string) (*types.Package, error) { if !ok { // Not found in the import path. Attempt to find the package // via the standard library. - rc, err = findStdPkg(path, i.GOOS, i.GOARCH) + rc, err = findStdPkg(i.GOOS, i.GOARCH, path) } else { // Open the file. rc, err = os.Open(realPath) } if err != nil { + i.lastErr = err return nil, err } defer rc.Close() @@ -128,6 +182,154 @@ func (i *importer) Import(path string) (*types.Package, error) { return gcexportdata.Read(r, i.fset, i.cache, path) } +// ErrSkip indicates the package should be skipped. +var ErrSkip = errors.New("skipped") + +// checkStdlib checks the standard library. 
+// +// This constructs a synthetic package configuration for each library in the +// standard library sources, and call checkPackage repeatedly. +// +// Note that not all parts of the source are expected to build. We skip obvious +// test files, and cmd files, which should not be dependencies. +func checkStdlib(config *stdlibConfig, ac map[*analysis.Analyzer]matcher) ([]string, []byte, error) { + if len(config.Srcs) == 0 { + return nil, nil, nil + } + + // Ensure all paths are normalized. + for i := 0; i < len(config.Srcs); i++ { + config.Srcs[i] = path.Clean(config.Srcs[i]) + } + + // Calculate the root source directory. This is always a directory + // named 'src', of which we simply take the first we find. This is a + // bit fragile, but works for all currently known Go source + // configurations. + // + // Note that there may be extra files outside of the root source + // directory; we simply ignore those. + rootSrcPrefix := "" + for _, file := range config.Srcs { + const src = "/src/" + i := strings.Index(file, src) + if i == -1 { + // Superfluous file. + continue + } + + // Index of first character after /src/. + i += len(src) + rootSrcPrefix = file[:i] + break + } + + // Aggregate all files by directory. + packages := make(map[string]*packageConfig) + for _, file := range config.Srcs { + if !strings.HasPrefix(file, rootSrcPrefix) { + // Superflouous file. + continue + } + + d := path.Dir(file) + if len(rootSrcPrefix) >= len(d) { + continue // Not a file. + } + pkg := d[len(rootSrcPrefix):] + // Skip cmd packages and obvious test files: see above. + if strings.HasPrefix(pkg, "cmd/") || strings.HasSuffix(file, "_test.go") { + continue + } + c, ok := packages[pkg] + if !ok { + c = &packageConfig{ + ImportPath: pkg, + GOOS: config.GOOS, + GOARCH: config.GOARCH, + Tags: config.Tags, + } + packages[pkg] = c + } + // Add the files appropriately. Note that they will be further + // filtered by architecture and build tags below, so this need + // not be done immediately. + if strings.HasSuffix(file, ".go") { + c.GoFiles = append(c.GoFiles, file) + } else { + c.NonGoFiles = append(c.NonGoFiles, file) + } + } + + // Closure to check a single package. + allFindings := make([]string, 0) + stdlibFacts := make(map[string][]byte) + var checkOne func(pkg string) error // Recursive. + checkOne = func(pkg string) error { + // Is this already done? + if _, ok := stdlibFacts[pkg]; ok { + return nil + } + + // Lookup the configuration. + config, ok := packages[pkg] + if !ok { + return nil // Not known. + } + + // Find the binary package, and provide to objdump. + rc, err := findStdPkg(config.GOOS, config.GOARCH, pkg) + if err != nil { + // If there's no binary for this package, it is likely + // not built with the distribution. That's fine, we can + // just skip analysis. + return nil + } + + // Provide the input. + oldReader := checkescape.Reader + checkescape.Reader = rc // For analysis. + defer func() { + rc.Close() + checkescape.Reader = oldReader // Restore. + }() + + // Run the analysis. + findings, factData, err := checkPackage(config, ac, checkOne) + if err != nil { + // If we can't analyze a package from the standard library, + // then we skip it. It will simply not have any findings. + return nil + } + stdlibFacts[pkg] = factData + allFindings = append(allFindings, findings...) + return nil + } + + // Check all packages. + // + // Note that this may call checkOne recursively, so it's not guaranteed + // to evaluate in the order provided here. 
We do ensure however, that + // all packages are evaluated. + for pkg := range packages { + checkOne(pkg) + } + + // Sanity check. + if len(stdlibFacts) == 0 { + return nil, nil, fmt.Errorf("no stdlib facts found: misconfiguration?") + } + + // Write out all findings. + factData, err := json.Marshal(stdlibFacts) + if err != nil { + return nil, nil, fmt.Errorf("error saving stdlib facts: %w", err) + } + + // Return all findings. + return allFindings, factData, nil +} + // checkPackage runs all analyzers. // // The implementation was adapted from [1], which was in turn adpated from [2]. @@ -136,11 +338,12 @@ func (i *importer) Import(path string) (*types.Package, error) { // // [1] bazelbuid/rules_go/tools/builders/nogo_main.go // [2] golang.org/x/tools/go/checker/internal/checker -func checkPackage(config pkgConfig) ([]string, error) { +func checkPackage(config *packageConfig, ac map[*analysis.Analyzer]matcher, importCallback func(string) error) ([]string, []byte, error) { imp := &importer{ - pkgConfig: config, - fset: token.NewFileSet(), - cache: make(map[string]*types.Package), + packageConfig: config, + fset: token.NewFileSet(), + cache: make(map[string]*types.Package), + callback: importCallback, } // Load all source files. @@ -148,14 +351,14 @@ func checkPackage(config pkgConfig) ([]string, error) { for _, file := range config.GoFiles { include, err := config.shouldInclude(file) if err != nil { - return nil, fmt.Errorf("error evaluating file %q: %v", file, err) + return nil, nil, fmt.Errorf("error evaluating file %q: %v", file, err) } if !include { continue } s, err := parser.ParseFile(imp.fset, file, nil, parser.ParseComments) if err != nil { - return nil, fmt.Errorf("error parsing file %q: %v", file, err) + return nil, nil, fmt.Errorf("error parsing file %q: %v", file, err) } syntax = append(syntax, s) } @@ -172,18 +375,19 @@ func checkPackage(config pkgConfig) ([]string, error) { Selections: make(map[*ast.SelectorExpr]*types.Selection), } types, err := typeConfig.Check(config.ImportPath, imp.fset, syntax, typesInfo) - if err != nil { - return nil, fmt.Errorf("error checking types: %v", err) + if err != nil && imp.lastErr != ErrSkip { + return nil, nil, fmt.Errorf("error checking types: %w", err) } // Load all package facts. - facts, err := facts.Decode(types, config.loadFacts) + loader, err := config.factLoader() if err != nil { - return nil, fmt.Errorf("error decoding facts: %v", err) + return nil, nil, fmt.Errorf("error loading facts: %w", err) + } + facts, err := facts.Decode(types, loader) + if err != nil { + return nil, nil, fmt.Errorf("error decoding facts: %w", err) } - - // Set the binary global for use. - data.Objdump = config.Objdump // Register fact types and establish dependencies between analyzers. // The visit closure will execute recursively, and populate results @@ -204,7 +408,7 @@ func checkPackage(config pkgConfig) ([]string, error) { } // Prepare the matcher. - m := analyzerConfig[a] + m := ac[a] report := func(d analysis.Diagnostic) { if m.ShouldReport(d, imp.fset) { diagnostics[a] = append(diagnostics[a], d) @@ -245,18 +449,13 @@ func checkPackage(config pkgConfig) ([]string, error) { return nil // Success. } - // Visit all analysis recursively. - for a, _ := range analyzerConfig { - if err := visit(a); err != nil { - return nil, err // Already has context. + // Visit all analyzers recursively. + for a, _ := range ac { + if imp.lastErr == ErrSkip { + continue // No local analysis. } - } - - // Write the output file. 
- if config.FactOutput != "" { - factData := facts.Encode() - if err := ioutil.WriteFile(config.FactOutput, factData, 0644); err != nil { - return nil, fmt.Errorf("error: unable to open facts output %q: %v", config.FactOutput, err) + if err := visit(a); err != nil { + return nil, nil, err // Already has context. } } @@ -270,47 +469,104 @@ func checkPackage(config pkgConfig) ([]string, error) { } // Return all findings. - return findings, nil + factData := facts.Encode() + return findings, factData, nil } var ( - configFile = flag.String("config", "", "configuration file (in JSON format)") + packageFile = flag.String("package", "", "package configuration file (in JSON format)") + stdlibFile = flag.String("stdlib", "", "stdlib configuration file (in JSON format)") + findingsOutput = flag.String("findings", "", "output file (or stdout, if not specified)") + factsOutput = flag.String("facts", "", "output file for facts (optional)") + escapesOutput = flag.String("escapes", "", "output file for escapes (optional)") ) -// Main is the entrypoint; it should be called directly from main. -// -// N.B. This package registers it's own flags. -func Main() { - // Parse all flags. - flag.Parse() - +func loadConfig(file string, config interface{}) interface{} { // Load the configuration. - f, err := os.Open(*configFile) + f, err := os.Open(file) if err != nil { - log.Fatalf("unable to open configuration %q: %v", *configFile, err) + log.Fatalf("unable to open configuration %q: %v", file, err) } defer f.Close() - config := new(pkgConfig) dec := json.NewDecoder(f) dec.DisallowUnknownFields() if err := dec.Decode(config); err != nil { log.Fatalf("unable to decode configuration: %v", err) } + return config +} + +// Main is the entrypoint; it should be called directly from main. +// +// N.B. This package registers it's own flags. +func Main() { + // Parse all flags. + flag.Parse() + + var ( + findings []string + factData []byte + err error + ) + + // Check the configuration. + if *packageFile != "" && *stdlibFile != "" { + log.Fatalf("unable to perform stdlib and package analysis; provide only one!") + } else if *stdlibFile != "" { + // Perform basic analysis. + c := loadConfig(*stdlibFile, new(stdlibConfig)).(*stdlibConfig) + findings, factData, err = checkStdlib(c, analyzerConfig) + } else if *packageFile != "" { + // Perform basic analysis. + c := loadConfig(*packageFile, new(packageConfig)).(*packageConfig) + findings, factData, err = checkPackage(c, analyzerConfig, nil) + // Do we need to do escape analysis? + if *escapesOutput != "" { + escapes, _, err := checkPackage(c, escapesConfig, nil) + if err != nil { + log.Fatalf("error performing escape analysis: %v", err) + } + f, err := os.OpenFile(*escapesOutput, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0644) + if err != nil { + log.Fatalf("unable to open output %q: %v", *escapesOutput, err) + } + defer f.Close() + for _, escape := range escapes { + fmt.Fprintf(f, "%s\n", escape) + } + } + } else { + log.Fatalf("please provide at least one of package or stdlib!") + } + + // Save facts. + if *factsOutput != "" { + if err := ioutil.WriteFile(*factsOutput, factData, 0644); err != nil { + log.Fatalf("error saving findings to %q: %v", *factsOutput, err) + } + } - // Process the package. - findings, err := checkPackage(*config) + // Open the output file. 
+ var w io.Writer = os.Stdout + if *findingsOutput != "" { + f, err := os.OpenFile(*findingsOutput, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0644) + if err != nil { + log.Fatalf("unable to open output %q: %v", *findingsOutput, err) + } + defer f.Close() + w = f + } + + // Handle findings & errors. if err != nil { log.Fatalf("error checking package: %v", err) } - - // No findings? if len(findings) == 0 { - os.Exit(0) + return } - // Print findings and exit with non-zero code. + // Print findings. for _, finding := range findings { - fmt.Fprintf(os.Stdout, "%s\n", finding) + fmt.Fprintf(w, "%s\n", finding) } - os.Exit(1) } diff --git a/tools/nogo/register.go b/tools/nogo/register.go index 62b499661..34b173937 100644 --- a/tools/nogo/register.go +++ b/tools/nogo/register.go @@ -26,6 +26,9 @@ func analyzers() (all []*analysis.Analyzer) { for a, _ := range analyzerConfig { all = append(all, a) } + for a, _ := range escapesConfig { + all = append(all, a) + } return all } diff --git a/tools/nogo/util/BUILD b/tools/nogo/util/BUILD new file mode 100644 index 000000000..7ab340b51 --- /dev/null +++ b/tools/nogo/util/BUILD @@ -0,0 +1,9 @@ +load("//tools:defs.bzl", "go_library") + +package(licenses = ["notice"]) + +go_library( + name = "util", + srcs = ["util.go"], + visibility = ["//visibility:public"], +) diff --git a/tools/nogo/util/util.go b/tools/nogo/util/util.go new file mode 100644 index 000000000..919fec799 --- /dev/null +++ b/tools/nogo/util/util.go @@ -0,0 +1,85 @@ +// Copyright 2019 The gVisor Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package util contains nogo-related utilities. +package util + +import ( + "fmt" + "io/ioutil" + "regexp" + "strconv" + "strings" +) + +// findingRegexp is used to parse findings. +var findingRegexp = regexp.MustCompile(`([a-zA-Z0-9_\/\.-]+): (-|([a-zA-Z0-9_\/\.-]+):([0-9]+)(:([0-9]+))?): (.*)`) + +const ( + categoryIndex = 1 + fullPathAndLineIndex = 2 + fullPathIndex = 3 + lineIndex = 4 + messageIndex = 7 +) + +// Finding is a single finding. +type Finding struct { + Category string + Path string + Line int + Message string +} + +// ExtractFindingsFromFile loads findings from a file. +func ExtractFindingsFromFile(filename string) ([]Finding, error) { + content, err := ioutil.ReadFile(filename) + if err != nil { + return nil, err + } + return ExtractFindingsFromBytes(content) +} + +// ExtractFindingsFromBytes loads findings from bytes. +func ExtractFindingsFromBytes(content []byte) (findings []Finding, err error) { + lines := strings.Split(string(content), "\n") + for _, singleLine := range lines { + // Skip blank lines. + singleLine = strings.TrimSpace(singleLine) + if singleLine == "" { + continue + } + m := findingRegexp.FindStringSubmatch(singleLine) + if m == nil { + // We shouldn't see findings like this. + return findings, fmt.Errorf("poorly formated line: %v", singleLine) + } + if m[fullPathAndLineIndex] == "-" { + continue // No source file available. + } + // Cleanup the message. 
+ message := m[messageIndex] + message = strings.Replace(message, " → ", "\n → ", -1) + message = strings.Replace(message, " or ", "\n or ", -1) + // Construct a new annotation. + lineNumber, _ := strconv.ParseUint(m[lineIndex], 10, 32) + findings = append(findings, Finding{ + Category: m[categoryIndex], + Path: m[fullPathIndex], + Line: int(lineNumber), + Message: message, + }) + } + return findings, nil +} |
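
The new tools/nogo/util package parses findings emitted in the form "category: path:line[:column]: message" (a "-" in the position field marks findings with no source file, which are skipped). A short usage sketch, not part of this change, assuming the library is importable under the usual gvisor.dev/gvisor module path; the finding line itself is made up:

package main

import (
	"fmt"

	"gvisor.dev/gvisor/tools/nogo/util"
)

func main() {
	// A hypothetical findings file with a single finding line.
	sample := []byte("checkescape: pkg/sentry/kernel/task.go:123: escape analysis finding\n")

	findings, err := util.ExtractFindingsFromBytes(sample)
	if err != nil {
		panic(err)
	}
	for _, f := range findings {
		// Prints: checkescape pkg/sentry/kernel/task.go:123 escape analysis finding
		fmt.Printf("%s %s:%d %s\n", f.Category, f.Path, f.Line, f.Message)
	}
}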
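
The notPath helper added in matchers.go above works around the lack of regexp backtracking by expanding a name into an alternation of path components that differ from it; build.go uses it to form internalDefault, so internal paths are analyzed while anything under external/ is skipped by default. A minimal standalone sketch of that behavior (not part of this change; the helper body is copied from the diff above and the sample positions are hypothetical):

package main

import (
	"fmt"
	"regexp"
	"strings"
)

// notPath mirrors the helper from tools/nogo/matchers.go above: it builds an
// alternation matching any single path component other than name.
func notPath(name string) string {
	sb := strings.Builder{}
	sb.WriteString("(")
	for i := range name {
		if i > 0 {
			sb.WriteString("|")
		}
		sb.WriteString(name[:i])
		sb.WriteString("[^")
		sb.WriteByte(name[i])
		sb.WriteString("/][^/]*")
	}
	sb.WriteString(")")
	return sb.String()
}

func main() {
	// internalDefault in build.go is notPath("external") + "/.*"; internalMatches
	// prepends the "^" internalPrefix, so the effective expression is:
	expr := regexp.MustCompile("^" + notPath("external") + "/.*")

	// Hypothetical diagnostic positions, for illustration only.
	fmt.Println(expr.MatchString("pkg/sentry/kernel/task.go:42:1"))          // true: internal path, reported
	fmt.Println(expr.MatchString("external/org_golang_x_tools/foo.go:10:2")) // false: external path, skipped
}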