Diffstat (limited to 'tools')
-rw-r--r--  tools/BUILD | 11
-rw-r--r--  tools/bazel.mk | 205
-rw-r--r--  tools/bazeldefs/BUILD | 69
-rw-r--r--  tools/bazeldefs/cc.bzl | 43
-rw-r--r--  tools/bazeldefs/defs.bzl | 40
-rw-r--r--  tools/bazeldefs/go.bzl | 142
-rw-r--r--  tools/bazeldefs/pkg.bzl | 6
-rw-r--r--  tools/bazeldefs/platforms.bzl | 9
-rw-r--r--  tools/bazeldefs/tags.bzl | 56
-rw-r--r--  tools/bigquery/BUILD | 16
-rw-r--r--  tools/bigquery/bigquery.go | 144
-rw-r--r--  tools/checkescape/BUILD | 15
-rw-r--r--  tools/checkescape/checkescape.go | 918
-rw-r--r--  tools/checkescape/test1/BUILD | 9
-rw-r--r--  tools/checkescape/test1/test1.go | 181
-rw-r--r--  tools/checkescape/test2/BUILD | 9
-rw-r--r--  tools/checkescape/test2/test2.go | 89
-rw-r--r--  tools/checkunsafe/BUILD | 13
-rw-r--r--  tools/checkunsafe/check_unsafe.go | 56
-rw-r--r--  tools/defs.bzl | 316
-rw-r--r--  tools/github/BUILD | 15
-rw-r--r--  tools/github/main.go | 186
-rw-r--r--  tools/github/nogo/BUILD | 16
-rw-r--r--  tools/github/nogo/nogo.go | 132
-rw-r--r--  tools/github/reviver/BUILD | 27
-rw-r--r--  tools/github/reviver/github.go | 173
-rw-r--r--  tools/github/reviver/github_test.go | 55
-rw-r--r--  tools/github/reviver/reviver.go | 192
-rw-r--r--  tools/github/reviver/reviver_test.go | 97
-rwxr-xr-x  tools/go_branch.sh | 132
-rw-r--r--  tools/go_generics/BUILD | 20
-rw-r--r--  tools/go_generics/defs.bzl | 127
-rw-r--r--  tools/go_generics/generics.go | 286
-rw-r--r--  tools/go_generics/globals/BUILD | 13
-rw-r--r--  tools/go_generics/globals/globals_visitor.go | 597
-rw-r--r--  tools/go_generics/globals/scope.go | 84
-rw-r--r--  tools/go_generics/go_merge/BUILD | 9
-rw-r--r--  tools/go_generics/go_merge/main.go | 141
-rw-r--r--  tools/go_generics/imports.go | 158
-rw-r--r--  tools/go_generics/remove.go | 105
-rw-r--r--  tools/go_generics/rules_tests/BUILD | 43
-rw-r--r--  tools/go_generics/rules_tests/template.go | 42
-rw-r--r--  tools/go_generics/rules_tests/template_test.go | 48
-rw-r--r--  tools/go_generics/tests/BUILD | 7
-rw-r--r--  tools/go_generics/tests/all_stmts/BUILD | 16
-rw-r--r--  tools/go_generics/tests/all_stmts/input.go | 290
-rw-r--r--  tools/go_generics/tests/all_stmts/output.go | 288
-rw-r--r--  tools/go_generics/tests/all_types/BUILD | 16
-rw-r--r--  tools/go_generics/tests/all_types/input.go | 45
-rw-r--r--  tools/go_generics/tests/all_types/lib/lib.go | 17
-rw-r--r--  tools/go_generics/tests/all_types/output.go | 43
-rw-r--r--  tools/go_generics/tests/anon/BUILD | 18
-rw-r--r--  tools/go_generics/tests/anon/input.go | 46
-rw-r--r--  tools/go_generics/tests/anon/output.go | 42
-rw-r--r--  tools/go_generics/tests/consts/BUILD | 23
-rw-r--r--  tools/go_generics/tests/consts/input.go | 26
-rw-r--r--  tools/go_generics/tests/consts/output.go | 26
-rw-r--r--  tools/go_generics/tests/defs.bzl | 67
-rw-r--r--  tools/go_generics/tests/imports/BUILD | 24
-rw-r--r--  tools/go_generics/tests/imports/input.go | 24
-rw-r--r--  tools/go_generics/tests/imports/output.go | 27
-rw-r--r--  tools/go_generics/tests/remove_typedef/BUILD | 16
-rw-r--r--  tools/go_generics/tests/remove_typedef/input.go | 37
-rw-r--r--  tools/go_generics/tests/remove_typedef/output.go | 29
-rw-r--r--  tools/go_generics/tests/simple/BUILD | 17
-rw-r--r--  tools/go_generics/tests/simple/input.go | 45
-rw-r--r--  tools/go_generics/tests/simple/output.go | 43
-rw-r--r--  tools/go_marshal/BUILD | 25
-rw-r--r--  tools/go_marshal/README.md | 130
-rw-r--r--  tools/go_marshal/analysis/BUILD | 12
-rw-r--r--  tools/go_marshal/analysis/analysis_unsafe.go | 179
-rw-r--r--  tools/go_marshal/defs.bzl | 68
-rw-r--r--  tools/go_marshal/gomarshal/BUILD | 21
-rw-r--r--  tools/go_marshal/gomarshal/generator.go | 526
-rw-r--r--  tools/go_marshal/gomarshal/generator_interfaces.go | 276
-rw-r--r--  tools/go_marshal/gomarshal/generator_interfaces_array_newtype.go | 146
-rw-r--r--  tools/go_marshal/gomarshal/generator_interfaces_primitive_newtype.go | 289
-rw-r--r--  tools/go_marshal/gomarshal/generator_interfaces_struct.go | 619
-rw-r--r--  tools/go_marshal/gomarshal/generator_tests.go | 233
-rw-r--r--  tools/go_marshal/gomarshal/util.go | 503
-rw-r--r--  tools/go_marshal/main.go | 73
-rw-r--r--  tools/go_marshal/test/BUILD | 44
-rw-r--r--  tools/go_marshal/test/benchmark_test.go | 220
-rw-r--r--  tools/go_marshal/test/escape/BUILD | 14
-rw-r--r--  tools/go_marshal/test/escape/escape.go | 99
-rw-r--r--  tools/go_marshal/test/external/BUILD | 11
-rw-r--r--  tools/go_marshal/test/external/external.go | 31
-rw-r--r--  tools/go_marshal/test/marshal_test.go | 515
-rw-r--r--  tools/go_marshal/test/test.go | 200
-rw-r--r--  tools/go_stateify/BUILD | 16
-rw-r--r--  tools/go_stateify/defs.bzl | 60
-rw-r--r--  tools/go_stateify/main.go | 470
-rw-r--r--  tools/installers/BUILD | 41
-rwxr-xr-x  tools/installers/containerd.sh | 114
-rwxr-xr-x  tools/installers/head.sh | 27
-rwxr-xr-x  tools/installers/images.sh | 24
-rwxr-xr-x  tools/installers/master.sh | 34
-rwxr-xr-x  tools/installers/shim.sh | 33
-rwxr-xr-x  tools/make_apt.sh | 144
-rwxr-xr-x  tools/make_release.sh | 81
-rw-r--r--  tools/nogo/BUILD | 74
-rw-r--r--  tools/nogo/README.md | 31
-rw-r--r--  tools/nogo/analyzers.go | 131
-rw-r--r--  tools/nogo/build.go | 30
-rw-r--r--  tools/nogo/check/BUILD | 11
-rw-r--r--  tools/nogo/check/main.go | 114
-rw-r--r--  tools/nogo/config.go | 261
-rw-r--r--  tools/nogo/defs.bzl | 426
-rw-r--r--  tools/nogo/filter/BUILD | 14
-rw-r--r--  tools/nogo/filter/main.go | 131
-rw-r--r--  tools/nogo/findings.go | 63
-rw-r--r--  tools/nogo/nogo.go | 473
-rw-r--r--  tools/parsers/BUILD | 41
-rw-r--r--  tools/parsers/go_parser.go | 150
-rw-r--r--  tools/parsers/go_parser_test.go | 169
-rw-r--r--  tools/parsers/parser_main.go | 129
-rw-r--r--  tools/rules_go.patch | 14
-rwxr-xr-x  tools/tag_release.sh | 84
-rw-r--r--  tools/tags/BUILD | 11
-rw-r--r--  tools/tags/tags.go | 89
-rw-r--r--  tools/vm/BUILD | 63
-rw-r--r--  tools/vm/README.md | 48
-rwxr-xr-x  tools/vm/build.sh | 117
-rw-r--r--  tools/vm/defs.bzl | 202
-rwxr-xr-x  tools/vm/execute.sh | 160
-rw-r--r--  tools/vm/test.cc | 27
-rwxr-xr-x  tools/vm/ubuntu1604/10_core.sh | 43
-rwxr-xr-x  tools/vm/ubuntu1604/15_gcloud.sh | 50
-rwxr-xr-x  tools/vm/ubuntu1604/20_bazel.sh | 38
-rwxr-xr-x  tools/vm/ubuntu1604/30_docker.sh | 64
-rwxr-xr-x  tools/vm/ubuntu1604/40_kokoro.sh | 72
-rw-r--r--  tools/vm/ubuntu1604/BUILD | 7
-rw-r--r--  tools/vm/ubuntu1804/BUILD | 7
-rwxr-xr-x  tools/vm/zone.sh | 17
-rwxr-xr-x  tools/workspace_status.sh | 18
135 files changed, 0 insertions, 14934 deletions
diff --git a/tools/BUILD b/tools/BUILD
deleted file mode 100644
index faf310676..000000000
--- a/tools/BUILD
+++ /dev/null
@@ -1,11 +0,0 @@
-load("//tools:defs.bzl", "bzl_library")
-
-package(licenses = ["notice"])
-
-bzl_library(
- name = "defs_bzl",
- srcs = ["defs.bzl"],
- visibility = [
- "//:sandbox",
- ],
-)
diff --git a/tools/bazel.mk b/tools/bazel.mk
deleted file mode 100644
index 3a7de427f..000000000
--- a/tools/bazel.mk
+++ /dev/null
@@ -1,205 +0,0 @@
-#!/usr/bin/make -f
-
-# Copyright 2018 The gVisor Authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Make hacks.
-EMPTY :=
-SPACE := $(EMPTY) $(EMPTY)
-
-# See base Makefile.
-SHELL=/bin/bash -o pipefail
-BRANCH_NAME := $(shell (git branch --show-current 2>/dev/null || \
- git rev-parse --abbrev-ref HEAD 2>/dev/null) | \
- xargs -n 1 basename 2>/dev/null)
-BUILD_ROOTS := bazel-bin/ bazel-out/
-
-# Bazel container configuration (see below).
-USER := $(shell whoami)
-HASH := $(shell readlink -m $(CURDIR) | md5sum | cut -c1-8)
-BUILDER_BASE := gvisor.dev/images/default
-BUILDER_IMAGE := gvisor.dev/images/builder
-BUILDER_NAME := gvisor-builder-$(HASH)
-DOCKER_NAME := gvisor-bazel-$(HASH)
-DOCKER_PRIVILEGED := --privileged
-BAZEL_CACHE := $(shell readlink -m ~/.cache/bazel/)
-GCLOUD_CONFIG := $(shell readlink -m ~/.config/gcloud/)
-DOCKER_SOCKET := /var/run/docker.sock
-DOCKER_CONFIG := /etc/docker/daemon.json
-
-# Bazel flags.
-BAZEL := bazel $(STARTUP_OPTIONS)
-OPTIONS += --color=no --curses=no
-
-# Basic options.
-UID := $(shell id -u ${USER})
-GID := $(shell id -g ${USER})
-USERADD_OPTIONS :=
-FULL_DOCKER_RUN_OPTIONS := $(DOCKER_RUN_OPTIONS)
-FULL_DOCKER_RUN_OPTIONS += --user $(UID):$(GID)
-FULL_DOCKER_RUN_OPTIONS += --entrypoint ""
-FULL_DOCKER_RUN_OPTIONS += --init
-FULL_DOCKER_RUN_OPTIONS += -v "$(BAZEL_CACHE):$(BAZEL_CACHE)"
-FULL_DOCKER_RUN_OPTIONS += -v "$(GCLOUD_CONFIG):$(GCLOUD_CONFIG)"
-FULL_DOCKER_RUN_OPTIONS += -v "/tmp:/tmp"
-FULL_DOCKER_EXEC_OPTIONS := --user $(UID):$(GID)
-FULL_DOCKER_EXEC_OPTIONS += --interactive
-ifeq (true,$(shell [[ -t 0 ]] && echo true))
-FULL_DOCKER_EXEC_OPTIONS += --tty
-endif
-
-# Add basic UID/GID options.
-#
-# Note that USERADD_DOCKER and GROUPADD_DOCKER are both defined as "deferred"
-# variables in Make terminology, that is they will be expanded at time of use
-# and may include other variables, including those defined below.
-#
-# NOTE: we pass -l to useradd below because otherwise you can hit a bug
-# best described here:
-# https://github.com/moby/moby/issues/5419#issuecomment-193876183
-# TLDR; trying to add to /var/log/lastlog (sparse file) runs the machine out
-# of disk space.
-ifneq ($(UID),0)
-USERADD_DOCKER += useradd -l --uid $(UID) --non-unique --no-create-home \
- --gid $(GID) $(USERADD_OPTIONS) -d $(HOME) $(USER) &&
-endif
-ifneq ($(GID),0)
-GROUPADD_DOCKER += groupadd --gid $(GID) --non-unique $(USER) &&
-endif
-
-# Add docker passthrough options.
-ifneq ($(DOCKER_PRIVILEGED),)
-FULL_DOCKER_RUN_OPTIONS += -v "$(DOCKER_SOCKET):$(DOCKER_SOCKET)"
-# TODO(gvisor.dev/issue/1624): Remove docker config volume. This is required
-# temporarily for checking VFS1 vs VFS2 by some tests.
-FULL_DOCKER_RUN_OPTIONS += -v "$(DOCKER_CONFIG):$(DOCKER_CONFIG)"
-FULL_DOCKER_RUN_OPTIONS += $(DOCKER_PRIVILEGED)
-FULL_DOCKER_EXEC_OPTIONS += $(DOCKER_PRIVILEGED)
-DOCKER_GROUP := $(shell stat -c '%g' $(DOCKER_SOCKET))
-ifneq ($(GID),$(DOCKER_GROUP))
-USERADD_OPTIONS += --groups $(DOCKER_GROUP)
-GROUPADD_DOCKER += groupadd --gid $(DOCKER_GROUP) --non-unique docker-$(HASH) &&
-FULL_DOCKER_RUN_OPTIONS += --group-add $(DOCKER_GROUP)
-endif
-endif
-
-# Add KVM passthrough options.
-ifneq (,$(wildcard /dev/kvm))
-FULL_DOCKER_RUN_OPTIONS += --device=/dev/kvm
-KVM_GROUP := $(shell stat -c '%g' /dev/kvm)
-ifneq ($(GID),$(KVM_GROUP))
-USERADD_OPTIONS += --groups $(KVM_GROUP)
-GROUPADD_DOCKER += groupadd --gid $(KVM_GROUP) --non-unique kvm-$(HASH) &&
-FULL_DOCKER_RUN_OPTIONS += --group-add $(KVM_GROUP)
-endif
-endif
-
-# Load the appropriate config.
-ifneq (,$(BAZEL_CONFIG))
-OPTIONS += --config=$(BAZEL_CONFIG)
-endif
-
-bazel-image: load-default
- @if docker ps --all | grep $(BUILDER_NAME); then docker rm -f $(BUILDER_NAME); fi
- docker run --user 0:0 --entrypoint "" --name $(BUILDER_NAME) \
- $(BUILDER_BASE) \
- sh -c "$(GROUPADD_DOCKER) \
- $(USERADD_DOCKER) \
- if [[ -e /dev/kvm ]]; then chmod a+rw /dev/kvm; fi"
- docker commit $(BUILDER_NAME) $(BUILDER_IMAGE)
- @docker rm -f $(BUILDER_NAME)
-.PHONY: bazel-image
-
-##
-## Bazel helpers.
-##
-## This file supports targets that wrap bazel in a running Docker
-## container to simplify development. Some options are available to
-## control the behavior of this container:
-## USER - The in-container user.
-## DOCKER_RUN_OPTIONS - Options for the container (default: --privileged, required for tests).
-## DOCKER_NAME - The container name (default: gvisor-bazel-HASH).
-## BAZEL_CACHE - The bazel cache directory (default: detected).
-## GCLOUD_CONFIG - The gcloud config directory (default: detected).
-## DOCKER_SOCKET - The Docker socket (default: detected).
-##
-bazel-server-start: bazel-image ## Starts the bazel server.
- @mkdir -p $(BAZEL_CACHE)
- @mkdir -p $(GCLOUD_CONFIG)
- @if docker ps --all | grep $(DOCKER_NAME); then docker rm -f $(DOCKER_NAME); fi
- # This command runs a bazel server, and the container sticks around
- # until the bazel server exits. This should ensure that it does not
- # exit in the middle of running a build, but also it won't stick around
- # forever. The build commands wrap around an appropriate exec into the
- # container in order to perform work via the bazel client.
- docker run -d --rm --name $(DOCKER_NAME) \
- -v "$(CURDIR):$(CURDIR)" \
- --workdir "$(CURDIR)" \
- $(FULL_DOCKER_RUN_OPTIONS) \
- $(BUILDER_IMAGE) \
- sh -c "tail -f --pid=\$$($(BAZEL) info server_pid) /dev/null"
-.PHONY: bazel-server-start
-
-bazel-shutdown: ## Shuts down a running bazel server.
- @docker exec $(FULL_DOCKER_EXEC_OPTIONS) $(DOCKER_NAME) $(BAZEL) shutdown; \
- rc=$$?; docker kill $(DOCKER_NAME) || [[ $$rc -ne 0 ]]
-.PHONY: bazel-shutdown
-
-bazel-alias: ## Emits an alias that can be used within the shell.
- @echo "alias bazel='docker exec $(FULL_DOCKER_EXEC_OPTIONS) $(DOCKER_NAME) bazel'"
-.PHONY: bazel-alias
-
-bazel-server: ## Ensures that the server exists. Used as an internal target.
- @docker exec $(FULL_DOCKER_EXEC_OPTIONS) $(DOCKER_NAME) true || $(MAKE) bazel-server-start
-.PHONY: bazel-server
-
-build_cmd = docker exec $(FULL_DOCKER_EXEC_OPTIONS) $(DOCKER_NAME) sh -o pipefail -c '$(BAZEL) build $(OPTIONS) "$(TARGETS)"'
-
-build_paths = $(build_cmd) 2>&1 \
- | tee /proc/self/fd/2 \
- | grep -A1 -E '^Target' \
- | grep -E '^ ($(subst $(SPACE),|,$(BUILD_ROOTS)))' \
- | sed "s/ /\n/g" \
- | strings -n 10 \
- | awk '{$$1=$$1};1' \
- | xargs -n 1 -I {} readlink -f "{}" \
- | xargs -n 1 -I {} sh -c "$(1)"
-
-build: bazel-server
- @$(call build_cmd)
-.PHONY: build
-
-copy: bazel-server
-ifeq (,$(DESTINATION))
- $(error Destination not provided.)
-endif
- @$(call build_paths,cp -fa {} $(DESTINATION))
-
-run: bazel-server
- @$(call build_paths,{} $(ARGS))
-.PHONY: run
-
-sudo: bazel-server
- @$(call build_paths,sudo -E {} $(ARGS))
-.PHONY: sudo
-
-test: OPTIONS += --test_output=errors --keep_going --verbose_failures=true
-test: bazel-server
- @docker exec $(FULL_DOCKER_EXEC_OPTIONS) $(DOCKER_NAME) $(BAZEL) test $(OPTIONS) $(TARGETS)
-.PHONY: test
-
-query:
- @$(MAKE) bazel-server >&2 # If we need to start, ensure stdout is not polluted.
- @docker exec $(FULL_DOCKER_EXEC_OPTIONS) $(DOCKER_NAME) sh -o pipefail -c '$(BAZEL) query $(OPTIONS) "$(TARGETS)" 2>/dev/null'
-.PHONY: query
diff --git a/tools/bazeldefs/BUILD b/tools/bazeldefs/BUILD
deleted file mode 100644
index d043caf06..000000000
--- a/tools/bazeldefs/BUILD
+++ /dev/null
@@ -1,69 +0,0 @@
-load("//tools:defs.bzl", "bzl_library", "rbe_platform", "rbe_toolchain")
-
-package(licenses = ["notice"])
-
-# In bazel, no special support is required for loopback networking. This is
-# just a dummy data target that does not change the test environment.
-genrule(
- name = "loopback",
- outs = ["loopback.txt"],
- cmd = "touch $@",
- visibility = ["//:sandbox"],
-)
-
-# We need to define a bazel platform and toolchain to specify dockerPrivileged
-# and dockerRunAsRoot options; they are required to run tests on the RBE
-# cluster in Kokoro.
-rbe_platform(
- name = "rbe_ubuntu1604",
- constraint_values = [
- "@bazel_tools//platforms:x86_64",
- "@bazel_tools//platforms:linux",
- "@bazel_tools//tools/cpp:clang",
- "@bazel_toolchains//constraints:xenial",
- "@bazel_toolchains//constraints/sanitizers:support_msan",
- ],
- remote_execution_properties = """
- properties: {
- name: "container-image"
- value:"docker://gcr.io/cloud-marketplace/google/rbe-ubuntu16-04@sha256:b516a2d69537cb40a7c6a7d92d0008abb29fba8725243772bdaf2c83f1be2272"
- }
- properties: {
- name: "dockerAddCapabilities"
- value: "SYS_ADMIN"
- }
- properties: {
- name: "dockerPrivileged"
- value: "true"
- }
- """,
-)
-
-rbe_toolchain(
- name = "cc-toolchain-clang-x86_64-default",
- exec_compatible_with = [],
- tags = [
- "manual",
- ],
- target_compatible_with = [],
- toolchain = "@bazel_toolchains//configs/ubuntu16_04_clang/11.0.0/bazel_3.1.0/cc:cc-compiler-k8",
- toolchain_type = "@bazel_tools//tools/cpp:toolchain_type",
-)
-
-bzl_library(
- name = "platforms_bzl",
- srcs = ["platforms.bzl"],
- visibility = ["//visibility:private"],
-)
-
-bzl_library(
- name = "tags_bzl",
- srcs = ["tags.bzl"],
- visibility = ["//visibility:private"],
-)
-
-bzl_library(
- name = "defs_bzl",
- srcs = ["defs.bzl"],
- visibility = ["//visibility:private"],
-)
diff --git a/tools/bazeldefs/cc.bzl b/tools/bazeldefs/cc.bzl
deleted file mode 100644
index 7f41a0142..000000000
--- a/tools/bazeldefs/cc.bzl
+++ /dev/null
@@ -1,43 +0,0 @@
-"""C++ rules."""
-
-load("@bazel_tools//tools/cpp:cc_flags_supplier.bzl", _cc_flags_supplier = "cc_flags_supplier")
-load("@rules_cc//cc:defs.bzl", _cc_binary = "cc_binary", _cc_library = "cc_library", _cc_proto_library = "cc_proto_library", _cc_test = "cc_test")
-load("@com_github_grpc_grpc//bazel:cc_grpc_library.bzl", _cc_grpc_library = "cc_grpc_library")
-
-cc_library = _cc_library
-cc_flags_supplier = _cc_flags_supplier
-cc_proto_library = _cc_proto_library
-cc_test = _cc_test
-cc_toolchain = "@bazel_tools//tools/cpp:current_cc_toolchain"
-gtest = "@com_google_googletest//:gtest"
-gbenchmark = "@com_google_benchmark//:benchmark"
-grpcpp = "@com_github_grpc_grpc//:grpc++"
-vdso_linker_option = "-fuse-ld=gold "
-
-def cc_grpc_library(name, **kwargs):
- _cc_grpc_library(name = name, grpc_only = True, **kwargs)
-
-def cc_binary(name, static = False, **kwargs):
- """Run cc_binary.
-
- Args:
- name: name of the target.
- static: make a static binary if True
- **kwargs: the rest of the args.
- """
- if static:
- # How to statically link a c++ program that uses threads, like for gRPC:
- # https://gcc.gnu.org/legacy-ml/gcc-help/2010-05/msg00029.html
- if "linkopts" not in kwargs:
- kwargs["linkopts"] = []
- kwargs["linkopts"] += [
- "-static",
- "-lstdc++",
- "-Wl,--whole-archive",
- "-lpthread",
- "-Wl,--no-whole-archive",
- ]
- _cc_binary(
- name = name,
- **kwargs
- )
diff --git a/tools/bazeldefs/defs.bzl b/tools/bazeldefs/defs.bzl
deleted file mode 100644
index ba186aace..000000000
--- a/tools/bazeldefs/defs.bzl
+++ /dev/null
@@ -1,40 +0,0 @@
-"""Meta and miscellaneous rules."""
-
-load("@bazel_skylib//rules:build_test.bzl", _build_test = "build_test")
-load("@bazel_skylib//:bzl_library.bzl", _bzl_library = "bzl_library")
-
-build_test = _build_test
-bzl_library = _bzl_library
-loopback = "//tools/bazeldefs:loopback"
-rbe_platform = native.platform
-rbe_toolchain = native.toolchain
-
-def short_path(path):
- return path
-
-def proto_library(name, has_services = None, **kwargs):
- native.proto_library(
- name = name,
- **kwargs
- )
-
-def select_arch(amd64 = "amd64", arm64 = "arm64", default = None, **kwargs):
- values = {
- "@bazel_tools//src/conditions:linux_x86_64": amd64,
- "@bazel_tools//src/conditions:linux_aarch64": arm64,
- }
- if default:
- values["//conditions:default"] = default
- return select(values, **kwargs)
-
-def select_system(linux = ["__linux__"], **kwargs):
- return linux # Only Linux supported.
-
-def default_installer():
- return None
-
-def default_net_util():
- return [] # Nothing needed.
-
-def coreutil():
- return [] # Nothing needed.
diff --git a/tools/bazeldefs/go.bzl b/tools/bazeldefs/go.bzl
deleted file mode 100644
index 661c9727e..000000000
--- a/tools/bazeldefs/go.bzl
+++ /dev/null
@@ -1,142 +0,0 @@
-"""Go rules."""
-
-load("@bazel_gazelle//:def.bzl", _gazelle = "gazelle")
-load("@io_bazel_rules_go//go:def.bzl", "GoLibrary", _go_binary = "go_binary", _go_context = "go_context", _go_embed_data = "go_embed_data", _go_library = "go_library", _go_path = "go_path", _go_test = "go_test")
-load("@io_bazel_rules_go//proto:def.bzl", _go_grpc_library = "go_grpc_library", _go_proto_library = "go_proto_library")
-load("//tools/bazeldefs:defs.bzl", "select_arch", "select_system")
-
-gazelle = _gazelle
-go_embed_data = _go_embed_data
-go_path = _go_path
-
-def _go_proto_or_grpc_library(go_library_func, name, **kwargs):
- deps = [
- dep.replace("_proto", "_go_proto")
- for dep in (kwargs.pop("deps", []) or [])
- ]
- go_library_func(
- name = name + "_go_proto",
- importpath = "gvisor.dev/gvisor/" + native.package_name() + "/" + name + "_go_proto",
- proto = ":" + name + "_proto",
- deps = deps,
- **kwargs
- )
-
-def go_proto_library(name, **kwargs):
- _go_proto_or_grpc_library(_go_proto_library, name, **kwargs)
-
-def go_grpc_and_proto_libraries(name, **kwargs):
- _go_proto_or_grpc_library(_go_grpc_library, name, **kwargs)
-
-def go_binary(name, static = False, pure = False, x_defs = None, **kwargs):
- """Build a go binary.
-
- Args:
- name: name of the target.
- static: build a static binary.
- pure: build without cgo.
- x_defs: additional definitions.
- **kwargs: rest of the arguments are passed to _go_binary.
- """
- if static:
- kwargs["static"] = "on"
- if pure:
- kwargs["pure"] = "on"
- _go_binary(
- name = name,
- x_defs = x_defs,
- **kwargs
- )
-
-def go_importpath(target):
- """Returns the importpath for the target."""
- return target[GoLibrary].importpath
-
-def go_library(name, **kwargs):
- _go_library(
- name = name,
- importpath = "gvisor.dev/gvisor/" + native.package_name(),
- **kwargs
- )
-
-def go_test(name, pure = False, library = None, **kwargs):
- """Build a go test.
-
- Args:
- name: name of the output binary.
- pure: should it be built without cgo.
- library: the library to embed.
- **kwargs: rest of the arguments to pass to _go_test.
- """
- if pure:
- kwargs["pure"] = "on"
- if library:
- kwargs["embed"] = [library]
- _go_test(
- name = name,
- **kwargs
- )
-
-def go_rule(rule, implementation, **kwargs):
- """Wraps a rule definition with Go attributes.
-
- Args:
- rule: rule function (typically rule or aspect).
- implementation: implementation function.
- **kwargs: other arguments to pass to rule.
-
- Returns:
- The result of invoking the rule.
- """
- attrs = kwargs.pop("attrs", dict())
- attrs["_go_context_data"] = attr.label(default = "@io_bazel_rules_go//:go_context_data")
- attrs["_stdlib"] = attr.label(default = "@io_bazel_rules_go//:stdlib")
- toolchains = kwargs.get("toolchains", []) + ["@io_bazel_rules_go//go:toolchain"]
- return rule(implementation, attrs = attrs, toolchains = toolchains, **kwargs)
-
-def go_embed_libraries(target):
- if hasattr(target.attr, "embed"):
- return target.attr.embed
- return []
-
-def go_context(ctx, goos = None, goarch = None, std = False):
- """Extracts a standard Go context struct.
-
- Args:
- ctx: the starlark context (required).
- goos: the GOOS value.
- goarch: the GOARCH value.
- std: ignored.
-
- Returns:
- A context Go struct with pointers to Go toolchain components.
- """
-
- # We don't change anything for the standard library analysis. All Go files
- # are available in all instances. Note that this includes the standard
- # library sources, which are analyzed by nogo.
- go_ctx = _go_context(ctx)
- if goos == None:
- goos = go_ctx.sdk.goos
- elif goos != go_ctx.sdk.goos:
- fail("Internal GOOS (%s) doesn't match GoSdk GOOS (%s)." % (goos, go_ctx.sdk.goos))
- if goarch == None:
- goarch = go_ctx.sdk.goarch
- elif goarch != go_ctx.sdk.goarch:
- fail("Internal GOARCH (%s) doesn't match GoSdk GOARCH (%s)." % (goarch, go_ctx.sdk.goarch))
- return struct(
- go = go_ctx.go,
- env = go_ctx.env,
- nogo_args = [],
- stdlib_srcs = go_ctx.sdk.srcs,
- runfiles = depset([go_ctx.go] + go_ctx.sdk.srcs + go_ctx.sdk.tools + go_ctx.stdlib.libs),
- goos = go_ctx.sdk.goos,
- goarch = go_ctx.sdk.goarch,
- tags = go_ctx.tags,
- )
-
-def select_goarch():
- return select_arch(arm64 = "arm64", amd64 = "amd64")
-
-def select_goos():
- return select_system(linux = "linux")
diff --git a/tools/bazeldefs/pkg.bzl b/tools/bazeldefs/pkg.bzl
deleted file mode 100644
index 56317d93f..000000000
--- a/tools/bazeldefs/pkg.bzl
+++ /dev/null
@@ -1,6 +0,0 @@
-"""Packaging rules."""
-
-load("@rules_pkg//:pkg.bzl", _pkg_deb = "pkg_deb", _pkg_tar = "pkg_tar")
-
-pkg_deb = _pkg_deb
-pkg_tar = _pkg_tar
diff --git a/tools/bazeldefs/platforms.bzl b/tools/bazeldefs/platforms.bzl
deleted file mode 100644
index 165b22311..000000000
--- a/tools/bazeldefs/platforms.bzl
+++ /dev/null
@@ -1,9 +0,0 @@
-"""List of platforms."""
-
-# Platform to associated tags.
-platforms = {
- "ptrace": [],
- "kvm": [],
-}
-
-default_platform = "ptrace"
diff --git a/tools/bazeldefs/tags.bzl b/tools/bazeldefs/tags.bzl
deleted file mode 100644
index f5d7a7b21..000000000
--- a/tools/bazeldefs/tags.bzl
+++ /dev/null
@@ -1,56 +0,0 @@
-"""List of special Go suffixes."""
-
-def explode(tagset, suffixes):
- """explode combines tagset and suffixes in all ways.
-
- Args:
- tagset: Original suffixes.
- suffixes: Suffixes to combine before and after.
-
- Returns:
- The set of possible combinations.
- """
- result = [t for t in tagset]
- result += [s for s in suffixes]
- for t in tagset:
- result += [t + s for s in suffixes]
- result += [s + t for s in suffixes]
- return result
-
-archs = [
- "_386",
- "_aarch64",
- "_amd64",
- "_arm",
- "_arm64",
- "_mips",
- "_mips64",
- "_mips64le",
- "_mipsle",
- "_ppc64",
- "_ppc64le",
- "_riscv64",
- "_s390x",
- "_sparc64",
- "_x86",
-]
-
-oses = [
- "_linux",
-]
-
-generic = [
- "_impl",
- "_race",
- "_norace",
- "_unsafe",
- "_opts",
-]
-
-# State explosion? Sure. This is approximately:
-# len(archs) * (1 + 2 * len(oses)) * (1 + 2 * len(generic))
-#
-# This evaluates to 495 at the time of writing. So it's a lot of different
-# combinations, but not so much that it will cause issues. We can probably add
-# quite a few more variants before this becomes a genuine problem.
-go_suffixes = explode(explode(archs, oses), generic)
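The comment above gives an approximate count; the following minimal Go sketch (not part of the deleted sources, names chosen for illustration) mirrors the explode() logic and prints the exact number of generated suffixes:

    package main

    import "fmt"

    // explode combines tagset and suffixes in all ways, as in tags.bzl.
    func explode(tagset, suffixes []string) []string {
        result := append([]string{}, tagset...)
        result = append(result, suffixes...)
        for _, t := range tagset {
            for _, s := range suffixes {
                result = append(result, t+s, s+t)
            }
        }
        return result
    }

    func main() {
        archs := []string{"_386", "_aarch64", "_amd64", "_arm", "_arm64", "_mips", "_mips64",
            "_mips64le", "_mipsle", "_ppc64", "_ppc64le", "_riscv64", "_s390x", "_sparc64", "_x86"}
        oses := []string{"_linux"}
        generic := []string{"_impl", "_race", "_norace", "_unsafe", "_opts"}
        // Exact count of go_suffixes produced by the double explode.
        fmt.Println(len(explode(explode(archs, oses), generic)))
    }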
diff --git a/tools/bigquery/BUILD b/tools/bigquery/BUILD
deleted file mode 100644
index 1cea9e1c9..000000000
--- a/tools/bigquery/BUILD
+++ /dev/null
@@ -1,16 +0,0 @@
-load("//tools:defs.bzl", "go_library")
-
-package(licenses = ["notice"])
-
-go_library(
- name = "bigquery",
- testonly = 1,
- srcs = ["bigquery.go"],
- visibility = [
- "//:sandbox",
- ],
- deps = [
- "@com_google_cloud_go_bigquery//:go_default_library",
- "@org_golang_google_api//option:go_default_library",
- ],
-)
diff --git a/tools/bigquery/bigquery.go b/tools/bigquery/bigquery.go
deleted file mode 100644
index 34b270cc0..000000000
--- a/tools/bigquery/bigquery.go
+++ /dev/null
@@ -1,144 +0,0 @@
-// Copyright 2020 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Package bigquery defines a BigQuery schema for benchmarks.
-//
-// This package contains a schema for BigQuery and methods for publishing
-// benchmark data into tables.
-package bigquery
-
-import (
- "context"
- "fmt"
- "strings"
- "time"
-
- bq "cloud.google.com/go/bigquery"
- "google.golang.org/api/option"
-)
-
-// Suite is the top level structure for a benchmark run. BigQuery
-// will infer the schema from this.
-type Suite struct {
- Name string `bq:"name"`
- Conditions []*Condition `bq:"conditions"`
- Benchmarks []*Benchmark `bq:"benchmarks"`
- Official bool `bq:"official"`
- Timestamp time.Time `bq:"timestamp"`
-}
-
-// Benchmark represents an individual benchmark in a suite.
-type Benchmark struct {
- Name string `bq:"name"`
- Condition []*Condition `bq:"condition"`
- Metric []*Metric `bq:"metric"`
-}
-
-// Condition represents qualifiers for the benchmark or suite. For example:
-// Get_Pid/1/real_time would have Benchmark Name "Get_Pid" with "1"
-// and "real_time" parameters as conditions. Suite conditions include
-// information such as the CL number and platform name.
-type Condition struct {
- Name string `bq:"name"`
- Value string `bq:"value"`
-}
-
-// Metric holds the actual metric data and unit information for this benchmark.
-type Metric struct {
- Name string `bq:"name"`
- Unit string `bq:"unit"`
- Sample float64 `bq:"sample"`
-}
-
-// InitBigQuery initializes a BigQuery dataset/table in the project. If the dataset/table already exists, it is not duplicated.
-func InitBigQuery(ctx context.Context, projectID, datasetID, tableID string, opts []option.ClientOption) error {
- client, err := bq.NewClient(ctx, projectID, opts...)
- if err != nil {
- return fmt.Errorf("failed to initialize client on project %s: %v", projectID, err)
- }
- defer client.Close()
-
- dataset := client.Dataset(datasetID)
- if err := dataset.Create(ctx, nil); err != nil && !checkDuplicateError(err) {
- return fmt.Errorf("failed to create dataset: %s: %v", datasetID, err)
- }
-
- table := dataset.Table(tableID)
- schema, err := bq.InferSchema(Suite{})
- if err != nil {
- return fmt.Errorf("failed to infer schema: %v", err)
- }
-
- if err := table.Create(ctx, &bq.TableMetadata{Schema: schema}); err != nil && !checkDuplicateError(err) {
- return fmt.Errorf("failed to create table: %s: %v", tableID, err)
- }
- return nil
-}
-
-// AddCondition adds a condition to an existing Benchmark.
-func (bm *Benchmark) AddCondition(name, value string) {
- bm.Condition = append(bm.Condition, &Condition{
- Name: name,
- Value: value,
- })
-}
-
-// AddMetric adds a metric to an existing Benchmark.
-func (bm *Benchmark) AddMetric(metricName, unit string, sample float64) {
- m := &Metric{
- Name: metricName,
- Unit: unit,
- Sample: sample,
- }
- bm.Metric = append(bm.Metric, m)
-}
-
-// NewBenchmark initializes a new benchmark.
-func NewBenchmark(name string, iters int, official bool) *Benchmark {
- return &Benchmark{
- Name: name,
- Metric: make([]*Metric, 0),
- }
-}
-
-// NewSuite initializes a new Suite.
-func NewSuite(name string) *Suite {
- return &Suite{
- Name: name,
- Timestamp: time.Now().UTC(),
- Benchmarks: make([]*Benchmark, 0),
- Conditions: make([]*Condition, 0),
- }
-}
-
-// SendBenchmarks sends the slice of benchmarks to the BigQuery dataset/table.
-func SendBenchmarks(ctx context.Context, suite *Suite, projectID, datasetID, tableID string, opts []option.ClientOption) error {
- client, err := bq.NewClient(ctx, projectID, opts...)
- if err != nil {
- return fmt.Errorf("failed to initialize client on project: %s: %v", projectID, err)
- }
- defer client.Close()
-
- uploader := client.Dataset(datasetID).Table(tableID).Uploader()
- if err = uploader.Put(ctx, suite); err != nil {
- return fmt.Errorf("failed to upload benchmarks %s to project %s, table %s.%s: %v", suite.Name, projectID, datasetID, tableID, err)
- }
-
- return nil
-}
-
-// checkDuplicateError returns true if err is a BigQuery 409 "Already Exists"
-// error, which is returned for duplicate tables and datasets.
-func checkDuplicateError(err error) bool {
- return strings.Contains(err.Error(), "googleapi: Error 409: Already Exists")
-}
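For reference, a minimal Go sketch of how the bigquery helpers above were meant to be used; the project, dataset, and table identifiers here are hypothetical:

    package main

    import (
        "context"
        "log"

        "gvisor.dev/gvisor/tools/bigquery"
    )

    func main() {
        ctx := context.Background()
        // Hypothetical identifiers; substitute a real GCP project/dataset/table.
        const project, dataset, table = "my-project", "benchmarks", "results"

        suite := bigquery.NewSuite("iperf")
        bm := bigquery.NewBenchmark("BenchmarkIperf", 1, true)
        bm.AddCondition("platform", "ptrace")
        bm.AddMetric("bandwidth", "bytes/sec", 1.2e9)
        suite.Benchmarks = append(suite.Benchmarks, bm)

        // Create the dataset/table if needed, then upload the suite.
        if err := bigquery.InitBigQuery(ctx, project, dataset, table, nil); err != nil {
            log.Fatalf("init: %v", err)
        }
        if err := bigquery.SendBenchmarks(ctx, suite, project, dataset, table, nil); err != nil {
            log.Fatalf("send: %v", err)
        }
    }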
diff --git a/tools/checkescape/BUILD b/tools/checkescape/BUILD
deleted file mode 100644
index 8956be621..000000000
--- a/tools/checkescape/BUILD
+++ /dev/null
@@ -1,15 +0,0 @@
-load("//tools:defs.bzl", "go_library")
-
-package(licenses = ["notice"])
-
-go_library(
- name = "checkescape",
- srcs = ["checkescape.go"],
- nogo = False,
- visibility = ["//tools/nogo:__subpackages__"],
- deps = [
- "@org_golang_x_tools//go/analysis:go_tool_library",
- "@org_golang_x_tools//go/analysis/passes/buildssa:go_tool_library",
- "@org_golang_x_tools//go/ssa:go_tool_library",
- ],
-)
diff --git a/tools/checkescape/checkescape.go b/tools/checkescape/checkescape.go
deleted file mode 100644
index e5a7e23c7..000000000
--- a/tools/checkescape/checkescape.go
+++ /dev/null
@@ -1,918 +0,0 @@
-// Copyright 2020 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Package checkescape allows recursive escape analysis for hot paths.
-//
-// The analysis tracks multiple types of escapes, in two categories. First,
-// 'hard' escapes are explicit allocations. Second, 'soft' escapes are
-// interface dispatches or dynamic function dispatches; these don't necessarily
-// escape but they *may* escape. The analysis is capable of making assertions
-// recursively: soft escapes cannot be analyzed in this way, and therefore
-// count as escapes for recursive purposes.
-//
-// The different types of escapes are as follows, with the category in
-// parentheses:
-//
-// heap: A direct allocation is made on the heap (hard).
-// builtin: A call is made to a built-in allocation function (hard).
-// stack: A stack split as part of a function preamble (soft).
-// interface: A call is made via an interface which *may* escape (soft).
-// dynamic: A dynamic function is dispatched which *may* escape (soft).
-//
-// To use the package, annotate a function-level comment with either the
-// line "// +checkescape" or "// +checkescape:OPTION[,OPTION]". In the second
-// case, the OPTION field is either a type above, or one of:
-//
-// local: Escape analysis is limited to local hard escapes only.
-// all: All the escapes are included.
-// hard: All hard escapes are included.
-//
-// If the "// +checkescape" annotation is provided, this is equivalent to
-// providing the local and hard options.
-//
-// Some examples of this syntax are:
-//
-// +checkescape:all - Analyzes for all escapes in this function and all calls.
-// +checkescape:local - Analyzes only for default local hard escapes.
-// +checkescape:heap - Only analyzes for heap escapes.
-// +checkescape:interface,dynamic - Only checks for dynamic calls and interface calls.
-// +checkescape - Does the same as +checkescape:local,hard.
-//
-// Note that all of the above can be inverted by using +mustescape. The
-// +checkescape keyword will ensure failure if the class of escape occurs,
-// whereas +mustescape will fail if the given class of escape does not occur.
-//
-// Local exemptions can be made by a comment of the form "// escapes: reason."
-// This must appear on the line of the escape and applies to callers of the
-// function as well (for non-local escape analysis).
-package checkescape
-
-import (
- "bufio"
- "bytes"
- "flag"
- "fmt"
- "go/ast"
- "go/token"
- "go/types"
- "io"
- "log"
- "os"
- "os/exec"
- "path/filepath"
- "strings"
-
- "golang.org/x/tools/go/analysis"
- "golang.org/x/tools/go/analysis/passes/buildssa"
- "golang.org/x/tools/go/ssa"
-)
-
-const (
- // magic is the magic annotation.
- magic = "// +checkescape"
-
- // magicParams is the magic annotation with specific parameters.
- magicParams = magic + ":"
-
- // testMagic is the test magic annotation (parameters required).
- testMagic = "// +mustescape:"
-
- // exempt is the exemption annotation.
- exempt = "// escapes"
-)
-
-var (
- // Binary is the binary under analysis.
- //
- // See Reader, below.
- binary = flag.String("binary", "", "binary under analysis")
-
- // Reader is the input stream.
- //
- // This may be set instead of Binary.
- Reader io.Reader
-
- // objdumpTool is the tool used to dump a binary.
- objdumpTool = flag.String("objdump_tool", "", "tool used to dump a binary")
-)
-
-// EscapeReason is an escape reason.
-//
-// This is a simple enum.
-type EscapeReason int
-
-const (
- allocation EscapeReason = iota
- builtin
- interfaceInvoke
- dynamicCall
- stackSplit
- unknownPackage
- reasonCount // Count for below.
-)
-
-// String returns the string for the EscapeReason.
-//
-// Note that this also implicitly defines the reverse string -> EscapeReason
-// mapping, which is the word before the colon (computed below).
-func (e EscapeReason) String() string {
- switch e {
- case interfaceInvoke:
- return "interface: call to potentially allocating function"
- case unknownPackage:
- return "unknown: no package information available"
- case allocation:
- return "heap: explicit allocation"
- case builtin:
- return "builtin: call to potentially allocating builtin"
- case dynamicCall:
- return "dynamic: call to potentially allocating function"
- case stackSplit:
- return "stack: possible split on function entry"
- default:
- panic(fmt.Sprintf("unknown reason: %d", e))
- }
-}
-
-var hardReasons = []EscapeReason{
- allocation,
- builtin,
-}
-
-var softReasons = []EscapeReason{
- interfaceInvoke,
- unknownPackage,
- dynamicCall,
- stackSplit,
-}
-
-var allReasons = append(hardReasons, softReasons...)
-
-var escapeTypes = func() map[string]EscapeReason {
- result := make(map[string]EscapeReason)
- for _, r := range allReasons {
- parts := strings.Split(r.String(), ":")
- result[parts[0]] = r // Key before ':'.
- }
- return result
-}()
-
-// escapingBuiltins are builtins known to escape.
-//
-// These are lowered at an earlier stage of compilation to explicit function
-// calls, but are not available for recursive analysis.
-var escapingBuiltins = []string{
- "append",
- "makemap",
- "newobject",
- "mallocgc",
-}
-
-// packageEscapeFacts is the set of all functions in a package, and whether or
-// not they recursively pass escape analysis.
-//
-// All the type names for receivers are encoded in the full key. The key
-// represents the fully qualified package and type name used at link time.
-//
-// Note that each Escapes object is a summary. Local findings may be reported
-// using more detailed information.
-type packageEscapeFacts struct {
- Funcs map[string]Escapes
-}
-
-// AFact implements analysis.Fact.AFact.
-func (*packageEscapeFacts) AFact() {}
-
-// Analyzer includes specific results.
-var Analyzer = &analysis.Analyzer{
- Name: "checkescape",
- Doc: "escape analysis checks based on +checkescape annotations",
- Run: runSelectEscapes,
- Requires: []*analysis.Analyzer{buildssa.Analyzer},
- FactTypes: []analysis.Fact{(*packageEscapeFacts)(nil)},
-}
-
-// EscapeAnalyzer includes all local escape results.
-var EscapeAnalyzer = &analysis.Analyzer{
- Name: "checkescape",
- Doc: "complete local escape analysis results (requires Analyzer facts)",
- Run: runAllEscapes,
- Requires: []*analysis.Analyzer{buildssa.Analyzer},
-}
-
-// LinePosition is a low-resolution token.Position.
-//
-// This is used to match against possible exemptions placed in the source.
-type LinePosition struct {
- Filename string
- Line int
-}
-
-// String implements fmt.Stringer.String.
-func (e LinePosition) String() string {
- return fmt.Sprintf("%s:%d", e.Filename, e.Line)
-}
-
-// Simplified returns the simplified name.
-func (e LinePosition) Simplified() string {
- return fmt.Sprintf("%s:%d", filepath.Base(e.Filename), e.Line)
-}
-
-// CallSite is a single call site.
-//
-// These can be chained.
-type CallSite struct {
- LocalPos token.Pos
- Resolved LinePosition
-}
-
-// IsValid indicates whether the CallSite is valid or not.
-func (cs *CallSite) IsValid() bool {
- return cs.LocalPos.IsValid()
-}
-
-// Escapes is a collection of escapes.
-//
-// We record at most one escape for each reason, but record the number of
-// escapes that were omitted.
-//
-// This object should be used to summarize all escapes for a single line (local
-// analysis) or a single function (package facts).
-//
-// All fields are exported for gob.
-type Escapes struct {
- CallSites [reasonCount][]CallSite
- Details [reasonCount]string
- Omitted [reasonCount]int
-}
-
-// add is called by Add and MergeWithCall.
-func (es *Escapes) add(r EscapeReason, detail string, omitted int, callSites ...CallSite) {
- if es.CallSites[r] != nil {
- // We will either be replacing the current escape or dropping
- // the added one. Either way, we increment omitted by the
- // appropriate amount.
- es.Omitted[r]++
- // If the callSites in the other is only a single element, then
- // we will universally favor this. This provides the cleanest
- // set of escapes to summarize, and more importantly: if there
- if len(es.CallSites) == 1 || len(callSites) != 1 {
- return
- }
- }
- es.Details[r] = detail
- es.CallSites[r] = callSites
- es.Omitted[r] += omitted
-}
-
-// Add adds a single escape.
-func (es *Escapes) Add(r EscapeReason, detail string, callSites ...CallSite) {
- es.add(r, detail, 0, callSites...)
-}
-
-// IsEmpty returns true iff this Escapes is empty.
-func (es *Escapes) IsEmpty() bool {
- for _, cs := range es.CallSites {
- if cs != nil {
- return false
- }
- }
- return true
-}
-
-// Filter filters out all escapes except those matching the given reasons.
-//
-// If local is set, then non-local escapes will also be filtered.
-func (es *Escapes) Filter(reasons []EscapeReason, local bool) {
-FilterReasons:
- for r := EscapeReason(0); r < reasonCount; r++ {
- for i := 0; i < len(reasons); i++ {
- if r == reasons[i] {
- continue FilterReasons
- }
- }
- // Zap this reason.
- es.CallSites[r] = nil
- es.Details[r] = ""
- es.Omitted[r] = 0
- }
- if !local {
- return
- }
- for r := EscapeReason(0); r < reasonCount; r++ {
- // Does this meet our local requirement?
- if len(es.CallSites[r]) > 1 {
- es.CallSites[r] = nil
- es.Details[r] = ""
- es.Omitted[r] = 0
- }
- }
-}
-
-// MergeWithCall merges these escapes with another.
-//
-// If callSite is nil, no call is added.
-func (es *Escapes) MergeWithCall(other Escapes, callSite CallSite) {
- for r := EscapeReason(0); r < reasonCount; r++ {
- if other.CallSites[r] != nil {
- // Construct our new call chain.
- newCallSites := other.CallSites[r]
- if callSite.IsValid() {
- newCallSites = append([]CallSite{callSite}, newCallSites...)
- }
- // Add (potentially replacing) the underlying escape.
- es.add(r, other.Details[r], other.Omitted[r], newCallSites...)
- }
- }
-}
-
-// Reportf will call Reportf for each class of escapes.
-func (es *Escapes) Reportf(pass *analysis.Pass) {
- var b bytes.Buffer // Reused for all escapes.
- for r := EscapeReason(0); r < reasonCount; r++ {
- if es.CallSites[r] == nil {
- continue
- }
- b.Reset()
- fmt.Fprintf(&b, "%s ", r.String())
- if es.Omitted[r] > 0 {
- fmt.Fprintf(&b, "(%d omitted) ", es.Omitted[r])
- }
- for _, cs := range es.CallSites[r][1:] {
- fmt.Fprintf(&b, "→ %s ", cs.Resolved.String())
- }
- fmt.Fprintf(&b, "→ %s", es.Details[r])
- pass.Reportf(es.CallSites[r][0].LocalPos, b.String())
- }
-}
-
-// MergeAll merges a sequence of escapes.
-func MergeAll(others []Escapes) (es Escapes) {
- for _, other := range others {
- es.MergeWithCall(other, CallSite{})
- }
- return
-}
-
-// loadObjdump reads the objdump output.
-//
-// This records whether there is a call to any function for every source line. It
-// is used only to remove false positives for escape analysis: the allocation call
-// will have been elided if escape analysis was able to keep the object on the stack.
-//
-// Note that the map uses <basename.go>:<line> because that is all that is
-// provided in the objdump format. Since this is all local, it is sufficient.
-func loadObjdump() (map[string][]string, error) {
- var (
- args []string
- stdin io.Reader
- )
- if *binary != "" {
- args = append(args, *binary)
- } else if Reader != nil {
- stdin = Reader
- } else {
- // We have no input stream or binary.
- return nil, fmt.Errorf("no binary or reader provided")
- }
-
- // Construct our command.
- cmd := exec.Command(*objdumpTool, args...)
- cmd.Stdin = stdin
- cmd.Stderr = os.Stderr
- out, err := cmd.StdoutPipe()
- if err != nil {
- return nil, err
- }
- if err := cmd.Start(); err != nil {
- return nil, err
- }
-
- // Identify calls by address or name. Note that this is also
- // constructed dynamically below, as we encounter the addresses.
- // This is because some of the functions (duffzero) may have
- // jump targets in the middle of the function itself.
- funcsAllowed := map[string]struct{}{
- "runtime.duffzero": struct{}{},
- "runtime.duffcopy": struct{}{},
- "runtime.racefuncenter": struct{}{},
- "runtime.gcWriteBarrier": struct{}{},
- "runtime.retpolineAX": struct{}{},
- "runtime.retpolineBP": struct{}{},
- "runtime.retpolineBX": struct{}{},
- "runtime.retpolineCX": struct{}{},
- "runtime.retpolineDI": struct{}{},
- "runtime.retpolineDX": struct{}{},
- "runtime.retpolineR10": struct{}{},
- "runtime.retpolineR11": struct{}{},
- "runtime.retpolineR12": struct{}{},
- "runtime.retpolineR13": struct{}{},
- "runtime.retpolineR14": struct{}{},
- "runtime.retpolineR15": struct{}{},
- "runtime.retpolineR8": struct{}{},
- "runtime.retpolineR9": struct{}{},
- "runtime.retpolineSI": struct{}{},
- "runtime.stackcheck": struct{}{},
- "runtime.settls": struct{}{},
- }
- addrsAllowed := make(map[string]struct{})
-
- // Build the map.
- nextFunc := "" // For funcsAllowed.
- m := make(map[string][]string)
- r := bufio.NewReader(out)
-NextLine:
- for {
- line, err := r.ReadString('\n')
- if err != nil && err != io.EOF {
- return nil, err
- }
- fields := strings.Fields(line)
-
- // Is this an "allowed" function definition?
- if len(fields) >= 2 && fields[0] == "TEXT" {
- nextFunc = strings.TrimSuffix(fields[1], "(SB)")
- if _, ok := funcsAllowed[nextFunc]; !ok {
- nextFunc = "" // Don't record addresses.
- }
- }
- if nextFunc != "" && len(fields) > 2 {
- // Save the given address (in hex form, as it appears).
- addrsAllowed[fields[1]] = struct{}{}
- }
-
- // We recognize lines corresponding to actual code (not the
- // symbol name or other metadata) and annotate them if they
- // correspond to an explicit CALL instruction. We assume that
- // the lack of a CALL for a given line is evidence that escape
- // analysis has eliminated an allocation.
- //
- // Lines look like this (including the first space):
- // gohacks_unsafe.go:33 0xa39 488b442408 MOVQ 0x8(SP), AX
- if len(fields) >= 5 && line[0] == ' ' {
- if !strings.Contains(fields[3], "CALL") {
- continue
- }
- site := fields[0]
- target := strings.TrimSuffix(fields[4], "(SB)")
-
- // Ignore strings containing allowed functions.
- if _, ok := funcsAllowed[target]; ok {
- continue
- }
- if _, ok := addrsAllowed[target]; ok {
- continue
- }
- if len(fields) > 5 {
- // This may be a future relocation. Some
- // objdump versions describe this differently.
- // If it contains any of the functions allowed
- // above as a string, we let it go.
- softTarget := strings.Join(fields[5:], " ")
- for name := range funcsAllowed {
- if strings.Contains(softTarget, name) {
- continue NextLine
- }
- }
- }
-
- // Does this exist already?
- existing, ok := m[site]
- if !ok {
- existing = make([]string, 0, 1)
- }
- for _, other := range existing {
- if target == other {
- continue NextLine
- }
- }
- existing = append(existing, target)
- m[site] = existing // Update.
- }
- if err == io.EOF {
- break
- }
- }
-
- // Zap any accidental false positives.
- final := make(map[string][]string)
- for site, calls := range m {
- filteredCalls := make([]string, 0, len(calls))
- for _, call := range calls {
- if _, ok := addrsAllowed[call]; ok {
- continue // Omit this call.
- }
- filteredCalls = append(filteredCalls, call)
- }
- final[site] = filteredCalls
- }
-
- // Wait for the dump to finish.
- if err := cmd.Wait(); err != nil {
- return nil, err
- }
-
- return final, nil
-}
-
-// poser is a type that implements Pos.
-type poser interface {
- Pos() token.Pos
-}
-
-// runSelectEscapes runs with only select escapes.
-func runSelectEscapes(pass *analysis.Pass) (interface{}, error) {
- return run(pass, false)
-}
-
-// runAllEscapes runs with all escapes included.
-func runAllEscapes(pass *analysis.Pass) (interface{}, error) {
- return run(pass, true)
-}
-
-// findReasons extracts reasons from the function.
-func findReasons(pass *analysis.Pass, fdecl *ast.FuncDecl) ([]EscapeReason, bool, map[EscapeReason]bool) {
- // Is there a comment?
- if fdecl.Doc == nil {
- return nil, false, nil
- }
- var (
- reasons []EscapeReason
- local bool
- testReasons = make(map[EscapeReason]bool) // reason -> local?
- )
- // Scan all lines.
- found := false
- for _, c := range fdecl.Doc.List {
- // Does the comment contain a +checkescape line?
- if !strings.HasPrefix(c.Text, magic) && !strings.HasPrefix(c.Text, testMagic) {
- continue
- }
- if c.Text == magic {
- // Default: hard reasons, local only.
- reasons = hardReasons
- local = true
- } else if strings.HasPrefix(c.Text, magicParams) {
- // Extract specific reasons.
- types := strings.Split(c.Text[len(magicParams):], ",")
- found = true // For below.
- for i := 0; i < len(types); i++ {
- if types[i] == "local" {
- // Limit search to local escapes.
- local = true
- } else if types[i] == "all" {
- // Append all reasons.
- reasons = append(reasons, allReasons...)
- } else if types[i] == "hard" {
- // Append all hard reasons.
- reasons = append(reasons, hardReasons...)
- } else {
- r, ok := escapeTypes[types[i]]
- if !ok {
- // This is not a valid escape reason.
- pass.Reportf(fdecl.Pos(), "unknown reason: %v", types[i])
- continue
- }
- reasons = append(reasons, r)
- }
- }
- } else if strings.HasPrefix(c.Text, testMagic) {
- types := strings.Split(c.Text[len(testMagic):], ",")
- local := false
- for i := 0; i < len(types); i++ {
- if types[i] == "local" {
- local = true
- } else {
- r, ok := escapeTypes[types[i]]
- if !ok {
- // This is not a valid escape reason.
- pass.Reportf(fdecl.Pos(), "unknown reason: %v", types[i])
- continue
- }
- if v, ok := testReasons[r]; ok && v {
- // Already registered as local.
- continue
- }
- testReasons[r] = local
- }
- }
- }
- }
- if len(reasons) == 0 && found {
- // A magic annotation was provided, but no reasons.
- pass.Reportf(fdecl.Pos(), "no reasons provided")
- }
- return reasons, local, testReasons
-}
-
-// run performs the analysis.
-func run(pass *analysis.Pass, localEscapes bool) (interface{}, error) {
- calls, err := loadObjdump()
- if err != nil {
- // Note that if this analysis fails, then we don't actually
- // fail the analyzer itself. We simply report every possible
- // escape. In most cases this will work just fine.
- log.Printf("WARNING: unable to load objdump: %v", err)
- }
- allEscapes := make(map[string][]Escapes)
- mergedEscapes := make(map[string]Escapes)
- linePosition := func(inst, parent poser) LinePosition {
- p := pass.Fset.Position(inst.Pos())
- if (p.Filename == "" || p.Line == 0) && parent != nil {
- p = pass.Fset.Position(parent.Pos())
- }
- return LinePosition{
- Filename: p.Filename,
- Line: p.Line,
- }
- }
- callSite := func(inst ssa.Instruction) CallSite {
- return CallSite{
- LocalPos: inst.Pos(),
- Resolved: linePosition(inst, inst.Parent()),
- }
- }
- hasCall := func(inst poser) (string, bool) {
- p := linePosition(inst, nil)
- if calls == nil {
- // See above: we don't have access to the binary
- // itself, so need to include every possible call.
- return "(possible)", true
- }
- s, ok := calls[p.Simplified()]
- if !ok {
- return "", false
- }
- // Join all calls together.
- return strings.Join(s, " or "), true
- }
- state := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA)
-
- // Build the exception list.
- exemptions := make(map[LinePosition]string)
- for _, f := range pass.Files {
- for _, cg := range f.Comments {
- for _, c := range cg.List {
- p := pass.Fset.Position(c.Slash)
- if strings.HasPrefix(strings.ToLower(c.Text), exempt) {
- exemptions[LinePosition{
- Filename: p.Filename,
- Line: p.Line,
- }] = c.Text[len(exempt):]
- }
- }
- }
- }
-
- var loadFunc func(*ssa.Function) Escapes // Used below.
- analyzeInstruction := func(inst ssa.Instruction) (es Escapes) {
- cs := callSite(inst)
- if _, ok := exemptions[cs.Resolved]; ok {
- return // No escape.
- }
- switch x := inst.(type) {
- case *ssa.Call:
- if x.Call.IsInvoke() {
- // This is an interface dispatch. There is no
- // way to know if this is actually escaping or
- // not, since we don't know the underlying
- // type.
- call, _ := hasCall(inst)
- es.Add(interfaceInvoke, call, cs)
- return
- }
- switch x := x.Call.Value.(type) {
- case *ssa.Function:
- if x.Pkg == nil {
- // Can't resolve the package.
- es.Add(unknownPackage, "no package", cs)
- return
- }
-
- // Is this a local function? If yes, call the
- // function to load the local function. The
- // local escapes are the escapes found in the
- // local function.
- if x.Pkg.Pkg == pass.Pkg {
- es.MergeWithCall(loadFunc(x), cs)
- return
- }
-
- // Recursively collect information from
- // the other analyzers.
- var imp packageEscapeFacts
- if !pass.ImportPackageFact(x.Pkg.Pkg, &imp) {
- // Unable to import the dependency; we must
- // declare these as escaping.
- es.Add(unknownPackage, "no analysis", cs)
- return
- }
-
- // The escapes of this instruction are the
- // escapes of the called function directly.
- // Note that this may record many escapes.
- es.MergeWithCall(imp.Funcs[x.RelString(x.Pkg.Pkg)], cs)
- return
- case *ssa.Builtin:
- // Ignore elided escapes.
- if _, has := hasCall(inst); !has {
- return
- }
-
- // Check if the builtin is escaping.
- for _, name := range escapingBuiltins {
- if x.Name() == name {
- es.Add(builtin, name, cs)
- return
- }
- }
- default:
- // All dynamic calls are counted as soft
- // escapes. They are similar to interface
- // dispatches. We cannot actually look up what
- // this refers to using static analysis alone.
- call, _ := hasCall(inst)
- es.Add(dynamicCall, call, cs)
- }
- case *ssa.Alloc:
- // Ignore non-heap allocations.
- if !x.Heap {
- return
- }
-
- // Ignore elided escapes.
- call, has := hasCall(inst)
- if !has {
- return
- }
-
- // This is a real heap allocation.
- es.Add(allocation, call, cs)
- case *ssa.MakeMap:
- es.Add(builtin, "makemap", cs)
- case *ssa.MakeSlice:
- es.Add(builtin, "makeslice", cs)
- case *ssa.MakeClosure:
- es.Add(builtin, "makeclosure", cs)
- case *ssa.MakeChan:
- es.Add(builtin, "makechan", cs)
- }
- return
- }
-
- var analyzeBasicBlock func(*ssa.BasicBlock) []Escapes // Recursive.
- analyzeBasicBlock = func(block *ssa.BasicBlock) (rval []Escapes) {
- for _, inst := range block.Instrs {
- if es := analyzeInstruction(inst); !es.IsEmpty() {
- rval = append(rval, es)
- }
- }
- return
- }
-
- loadFunc = func(fn *ssa.Function) Escapes {
- // Is this already available?
- name := fn.RelString(pass.Pkg)
- if es, ok := mergedEscapes[name]; ok {
- return es
- }
-
- // In the case of a true cycle, we assume that the current
- // function itself has no escapes.
- //
- // When evaluating the function again, the proper escapes will
- // be filled in here.
- allEscapes[name] = nil
- mergedEscapes[name] = Escapes{}
-
- // Perform the basic analysis.
- var es []Escapes
- if fn.Recover != nil {
- es = append(es, analyzeBasicBlock(fn.Recover)...)
- }
- for _, block := range fn.Blocks {
- es = append(es, analyzeBasicBlock(block)...)
- }
-
- // Check for a stack split.
- if call, has := hasCall(fn); has {
- var ss Escapes
- ss.Add(stackSplit, call, CallSite{
- LocalPos: fn.Pos(),
- Resolved: linePosition(fn, fn.Parent()),
- })
- es = append(es, ss)
- }
-
- // Save the result and return.
- //
- // Note that we merge the result when saving to the facts. It
- // doesn't really matter the specific escapes, as long as we
- // have recorded all the appropriate classes of escapes.
- summary := MergeAll(es)
- allEscapes[name] = es
- mergedEscapes[name] = summary
- return summary
- }
-
- // Complete all local functions.
- for _, fn := range state.SrcFuncs {
- loadFunc(fn)
- }
-
- if !localEscapes {
- // Export all findings for future packages. We only do this in
- // non-local escapes mode, and expect to run this analysis
- // after the SelectAnalysis.
- pass.ExportPackageFact(&packageEscapeFacts{
- Funcs: mergedEscapes,
- })
- }
-
- // Scan all functions for violations.
- for _, f := range pass.Files {
- // Scan all declarations.
- for _, decl := range f.Decls {
- // Function declaration?
- fdecl, ok := decl.(*ast.FuncDecl)
- if !ok {
- continue
- }
- var (
- reasons []EscapeReason
- local bool
- testReasons map[EscapeReason]bool
- )
- if localEscapes {
- // Find all hard escapes.
- reasons = hardReasons
- } else {
- // Find all declared reasons.
- reasons, local, testReasons = findReasons(pass, fdecl)
- }
-
- // Scan for matches.
- fn := pass.TypesInfo.Defs[fdecl.Name].(*types.Func)
- fv := state.Pkg.Prog.FuncValue(fn)
- if fv == nil {
- continue
- }
- name := fv.RelString(pass.Pkg)
- all, allOk := allEscapes[name]
- merged, mergedOk := mergedEscapes[name]
- if !allOk || !mergedOk {
- pass.Reportf(fdecl.Pos(), "internal error: function %s not found.", name)
- continue
- }
-
- // Filter reasons and report.
- //
- // For the findings, we use all escapes.
- for _, es := range all {
- es.Filter(reasons, local)
- es.Reportf(pass)
- }
-
- // Scan for test (required) matches.
- //
- // For tests we need only the merged escapes.
- testReasonsFound := make(map[EscapeReason]bool)
- for r := EscapeReason(0); r < reasonCount; r++ {
- if merged.CallSites[r] == nil {
- continue
- }
- // Is this local?
- wantLocal, ok := testReasons[r]
- isLocal := len(merged.CallSites[r]) == 1
- testReasonsFound[r] = isLocal
- if !ok {
- continue
- }
- if isLocal == wantLocal {
- delete(testReasons, r)
- }
- }
- for reason, local := range testReasons {
- // We didn't find the escapes we wanted.
-				pass.Reportf(fdecl.Pos(), "testescapes not found: reason=%s, local=%t", reason, local)
- }
- if len(testReasons) > 0 {
- // Report for debugging.
- merged.Reportf(pass)
- }
- }
- }
-
- return nil, nil
-}
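
For reference, a condensed sketch of how these annotations are written in practice; the function bodies and names are illustrative only, and the test packages below exercise the full matrix of reasons:

    package example // hypothetical package annotated for checkescape

    // Type is a placeholder struct used by the annotated functions below.
    type Type struct {
        A uint64
    }

    // +checkescape:all,hard
    //go:nosplit
    func mustNotEscape(t *Type) {
        _ = t.A // The analyzer verifies that no escape of any class occurs here.
    }

    // +mustescape:local,heap
    //go:noinline
    func mustEscape() *Type {
        var t Type
        return &t // The analyzer requires a local heap allocation here.
    }
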
diff --git a/tools/checkescape/test1/BUILD b/tools/checkescape/test1/BUILD
deleted file mode 100644
index 783403247..000000000
--- a/tools/checkescape/test1/BUILD
+++ /dev/null
@@ -1,9 +0,0 @@
-load("//tools:defs.bzl", "go_library")
-
-package(licenses = ["notice"])
-
-go_library(
- name = "test1",
- srcs = ["test1.go"],
- visibility = ["//tools/checkescape/test2:__pkg__"],
-)
diff --git a/tools/checkescape/test1/test1.go b/tools/checkescape/test1/test1.go
deleted file mode 100644
index 27991649f..000000000
--- a/tools/checkescape/test1/test1.go
+++ /dev/null
@@ -1,181 +0,0 @@
-// Copyright 2019 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Package test1 is a test package.
-package test1
-
-import (
- "fmt"
-)
-
-// Interface is a generic interface.
-type Interface interface {
- Foo()
-}
-
-// Type is a concrete implementation of Interface.
-type Type struct {
- A uint64
- B uint64
-}
-
-// Foo implements Interface.Foo.
-//go:nosplit
-func (t Type) Foo() {
- fmt.Printf("%v", t) // Never executed.
-}
-
-// +checkescape:all,hard
-//go:nosplit
-func InterfaceFunction(i Interface) {
- // Do nothing; exported for tests.
-}
-
-// +checkescape:all,hard
-//go:nosplit
-func TypeFunction(t *Type) {
-}
-
-// +mustescape:local,builtin
-//go:noinline
-//go:nosplit
-func BuiltinMap(x int) map[string]bool {
- return make(map[string]bool)
-}
-
-// +mustescape:builtin
-//go:noinline
-//go:nosplit
-func builtinMapRec(x int) map[string]bool {
- return BuiltinMap(x)
-}
-
-// +mustescape:local,builtin
-//go:noinline
-//go:nosplit
-func BuiltinClosure(x int) func() {
- return func() {
- fmt.Printf("%v", x)
- }
-}
-
-// +mustescape:builtin
-//go:noinline
-//go:nosplit
-func builtinClosureRec(x int) func() {
- return BuiltinClosure(x)
-}
-
-// +mustescape:local,builtin
-//go:noinline
-//go:nosplit
-func BuiltinMakeSlice(x int) []byte {
- return make([]byte, x)
-}
-
-// +mustescape:builtin
-//go:noinline
-//go:nosplit
-func builtinMakeSliceRec(x int) []byte {
- return BuiltinMakeSlice(x)
-}
-
-// +mustescape:local,builtin
-//go:noinline
-//go:nosplit
-func BuiltinAppend(x []byte) []byte {
- return append(x, 0)
-}
-
-// +mustescape:builtin
-//go:noinline
-//go:nosplit
-func builtinAppendRec() []byte {
- return BuiltinAppend(nil)
-}
-
-// +mustescape:local,builtin
-//go:noinline
-//go:nosplit
-func BuiltinChan() chan int {
- return make(chan int)
-}
-
-// +mustescape:builtin
-//go:noinline
-//go:nosplit
-func builtinChanRec() chan int {
- return BuiltinChan()
-}
-
-// +mustescape:local,heap
-//go:noinline
-//go:nosplit
-func Heap() *Type {
- var t Type
- return &t
-}
-
-// +mustescape:heap
-//go:noinline
-//go:nosplit
-func heapRec() *Type {
- return Heap()
-}
-
-// +mustescape:local,interface
-//go:noinline
-//go:nosplit
-func Dispatch(i Interface) {
- i.Foo()
-}
-
-// +mustescape:interface
-//go:noinline
-//go:nosplit
-func dispatchRec(i Interface) {
- Dispatch(i)
-}
-
-// +mustescape:local,dynamic
-//go:noinline
-//go:nosplit
-func Dynamic(f func()) {
- f()
-}
-
-// +mustescape:dynamic
-//go:noinline
-//go:nosplit
-func dynamicRec(f func()) {
- Dynamic(f)
-}
-
-//go:noinline
-//go:nosplit
-func internalFunc() {
-}
-
-// +mustescape:local,stack
-//go:noinline
-func Split() {
- internalFunc()
-}
-
-// +mustescape:stack
-//go:noinline
-//go:nosplit
-func splitRec() {
- Split()
-}
diff --git a/tools/checkescape/test2/BUILD b/tools/checkescape/test2/BUILD
deleted file mode 100644
index 5a11e4b43..000000000
--- a/tools/checkescape/test2/BUILD
+++ /dev/null
@@ -1,9 +0,0 @@
-load("//tools:defs.bzl", "go_library")
-
-package(licenses = ["notice"])
-
-go_library(
- name = "test2",
- srcs = ["test2.go"],
- deps = ["//tools/checkescape/test1"],
-)
diff --git a/tools/checkescape/test2/test2.go b/tools/checkescape/test2/test2.go
deleted file mode 100644
index 067d5a1f4..000000000
--- a/tools/checkescape/test2/test2.go
+++ /dev/null
@@ -1,89 +0,0 @@
-// Copyright 2019 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Package test2 is a test package that imports test1.
-package test2
-
-import (
- "gvisor.dev/gvisor/tools/checkescape/test1"
-)
-
-// +checkescape:all
-//go:nosplit
-func interfaceFunctionCrossPkg() {
- var i test1.Interface
- test1.InterfaceFunction(i)
-}
-
-// +checkescape:all
-//go:nosplit
-func typeFunctionCrossPkg() {
- var t test1.Type
- test1.TypeFunction(&t)
-}
-
-// +mustescape:builtin
-//go:noinline
-func builtinMapCrossPkg(x int) map[string]bool {
- return test1.BuiltinMap(x)
-}
-
-// +mustescape:builtin
-//go:noinline
-func builtinClosureCrossPkg(x int) func() {
- return test1.BuiltinClosure(x)
-}
-
-// +mustescape:builtin
-//go:noinline
-func builtinMakeSliceCrossPkg(x int) []byte {
- return test1.BuiltinMakeSlice(x)
-}
-
-// +mustescape:builtin
-//go:noinline
-func builtinAppendCrossPkg() []byte {
- return test1.BuiltinAppend(nil)
-}
-
-// +mustescape:builtin
-//go:noinline
-func builtinChanCrossPkg() chan int {
- return test1.BuiltinChan()
-}
-
-// +mustescape:heap
-//go:noinline
-func heapCrossPkg() *test1.Type {
- return test1.Heap()
-}
-
-// +mustescape:interface
-//go:noinline
-func dispatchCrossPkg(i test1.Interface) {
- test1.Dispatch(i)
-}
-
-// +mustescape:dynamic
-//go:noinline
-func dynamicCrossPkg(f func()) {
- test1.Dynamic(f)
-}
-
-// +mustescape:stack
-//go:noinline
-//go:nosplit
-func splitCrossPkg() {
- test1.Split()
-}
diff --git a/tools/checkunsafe/BUILD b/tools/checkunsafe/BUILD
deleted file mode 100644
index 0c264151b..000000000
--- a/tools/checkunsafe/BUILD
+++ /dev/null
@@ -1,13 +0,0 @@
-load("//tools:defs.bzl", "go_library")
-
-package(licenses = ["notice"])
-
-go_library(
- name = "checkunsafe",
- srcs = ["check_unsafe.go"],
- nogo = False,
- visibility = ["//tools/nogo:__subpackages__"],
- deps = [
- "@org_golang_x_tools//go/analysis:go_tool_library",
- ],
-)
diff --git a/tools/checkunsafe/check_unsafe.go b/tools/checkunsafe/check_unsafe.go
deleted file mode 100644
index 4ccd7cc5a..000000000
--- a/tools/checkunsafe/check_unsafe.go
+++ /dev/null
@@ -1,56 +0,0 @@
-// Copyright 2019 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Package checkunsafe allows unsafe imports only in files named appropriately.
-package checkunsafe
-
-import (
- "fmt"
- "path"
- "strconv"
- "strings"
-
- "golang.org/x/tools/go/analysis"
-)
-
-// Analyzer defines the entrypoint.
-var Analyzer = &analysis.Analyzer{
- Name: "checkunsafe",
- Doc: "allows unsafe use only in specified files",
- Run: run,
-}
-
-func run(pass *analysis.Pass) (interface{}, error) {
- for _, f := range pass.Files {
- for _, imp := range f.Imports {
- // Is this an unsafe import?
- pkg, err := strconv.Unquote(imp.Path.Value)
- if err != nil || pkg != "unsafe" {
- continue
- }
-
- // Extract the filename.
- filename := pass.Fset.File(imp.Pos()).Name()
-
- // Allow files named _unsafe.go or _test.go to opt out.
- if strings.HasSuffix(filename, "_unsafe.go") || strings.HasSuffix(filename, "_test.go") {
- continue
- }
-
- // Throw the error.
- pass.Reportf(imp.Pos(), fmt.Sprintf("package unsafe imported by %s; must end with _unsafe.go", path.Base(filename)))
- }
- }
- return nil, nil
-}
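
To illustrate the convention this analyzer enforces, here is a minimal sketch (hypothetical package and file names): a file that imports unsafe must have a name ending in _unsafe.go (or _test.go), otherwise the import is reported.

    // buffer_unsafe.go -- the file suffix is what satisfies checkunsafe.
    package buffer

    import "unsafe"

    // pointerOf returns the address of p as an unsafe.Pointer; keeping this
    // helper in a *_unsafe.go file confines unsafe usage to clearly named files.
    func pointerOf(p *byte) unsafe.Pointer {
        return unsafe.Pointer(p)
    }
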
diff --git a/tools/defs.bzl b/tools/defs.bzl
deleted file mode 100644
index d75e40863..000000000
--- a/tools/defs.bzl
+++ /dev/null
@@ -1,316 +0,0 @@
-"""Wrappers for common build rules.
-
-These wrappers apply common BUILD configurations (e.g., proto_library
-automagically creating cc_ and go_ proto targets) and act as a single point of
-change for Google-internal and bazel-compatible rules.
-"""
-
-load("//tools/go_stateify:defs.bzl", "go_stateify")
-load("//tools/go_marshal:defs.bzl", "go_marshal", "marshal_deps", "marshal_test_deps")
-load("//tools/nogo:defs.bzl", "nogo_test")
-load("//tools/bazeldefs:defs.bzl", _build_test = "build_test", _bzl_library = "bzl_library", _coreutil = "coreutil", _default_installer = "default_installer", _default_net_util = "default_net_util", _loopback = "loopback", _proto_library = "proto_library", _rbe_platform = "rbe_platform", _rbe_toolchain = "rbe_toolchain", _select_arch = "select_arch", _select_system = "select_system", _short_path = "short_path")
-load("//tools/bazeldefs:cc.bzl", _cc_binary = "cc_binary", _cc_flags_supplier = "cc_flags_supplier", _cc_grpc_library = "cc_grpc_library", _cc_library = "cc_library", _cc_proto_library = "cc_proto_library", _cc_test = "cc_test", _cc_toolchain = "cc_toolchain", _gbenchmark = "gbenchmark", _grpcpp = "grpcpp", _gtest = "gtest", _vdso_linker_option = "vdso_linker_option")
-load("//tools/bazeldefs:go.bzl", _gazelle = "gazelle", _go_binary = "go_binary", _go_embed_data = "go_embed_data", _go_grpc_and_proto_libraries = "go_grpc_and_proto_libraries", _go_library = "go_library", _go_path = "go_path", _go_proto_library = "go_proto_library", _go_test = "go_test", _select_goarch = "select_goarch", _select_goos = "select_goos")
-load("//tools/bazeldefs:pkg.bzl", _pkg_deb = "pkg_deb", _pkg_tar = "pkg_tar")
-load("//tools/bazeldefs:platforms.bzl", _default_platform = "default_platform", _platforms = "platforms")
-load("//tools/bazeldefs:tags.bzl", "go_suffixes")
-
-# Core rules.
-build_test = _build_test
-bzl_library = _bzl_library
-default_installer = _default_installer
-default_net_util = _default_net_util
-loopback = _loopback
-select_arch = _select_arch
-select_system = _select_system
-short_path = _short_path
-rbe_platform = _rbe_platform
-rbe_toolchain = _rbe_toolchain
-coreutil = _coreutil
-
-# C++ rules.
-cc_binary = _cc_binary
-cc_flags_supplier = _cc_flags_supplier
-cc_grpc_library = _cc_grpc_library
-cc_library = _cc_library
-cc_test = _cc_test
-cc_toolchain = _cc_toolchain
-gbenchmark = _gbenchmark
-gtest = _gtest
-grpcpp = _grpcpp
-vdso_linker_option = _vdso_linker_option
-
-# Go rules.
-gazelle = _gazelle
-go_embed_data = _go_embed_data
-go_path = _go_path
-select_goos = _select_goos
-select_goarch = _select_goarch
-
-# Packaging rules.
-pkg_deb = _pkg_deb
-pkg_tar = _pkg_tar
-
-# Platform options.
-default_platform = _default_platform
-platforms = _platforms
-
-def go_binary(name, nogo = True, pure = False, static = False, x_defs = None, **kwargs):
- """Wraps the standard go_binary.
-
- Args:
- name: the rule name.
- nogo: enable nogo analysis.
- pure: build a pure Go (no CGo) binary.
- static: build a static binary.
- x_defs: additional linker definitions.
- **kwargs: standard go_binary arguments.
- """
- _go_binary(
- name = name,
- pure = pure,
- static = static,
- x_defs = x_defs,
- **kwargs
- )
- if nogo:
-        # Note that the nogo rule applies only to go_library and go_test
-        # targets, so we construct a library from the binary sources.
- # This is done because the binary may not be in a form that objdump
- # supports (i.e. a pure Go binary).
- _go_library(
- name = name + "_nogo_library",
- srcs = kwargs.get("srcs", []),
- deps = kwargs.get("deps", []),
- testonly = 1,
- )
- nogo_test(
- name = name + "_nogo",
- config = "//:nogo_config",
- srcs = kwargs.get("srcs", []),
- deps = [":" + name + "_nogo_library"],
- tags = ["nogo"],
- )
-
-def calculate_sets(srcs):
- """Calculates special Go sets for templates.
-
- Args:
- srcs: the full set of Go sources.
-
- Returns:
- A dictionary of the form:
-
- "": [src1.go, src2.go]
- "suffix": [src3suffix.go, src4suffix.go]
-
- Note that suffix will typically start with '_'.
- """
- result = dict()
- for file in srcs:
- if not file.endswith(".go"):
- continue
- target = ""
- for suffix in go_suffixes:
- if file.endswith(suffix + ".go"):
- target = suffix
- if not target in result:
- result[target] = [file]
- else:
- result[target].append(file)
- return result
-
-def go_imports(name, src, out):
- """Simplify a single Go source file by eliminating unused imports."""
- native.genrule(
- name = name,
- srcs = [src],
- outs = [out],
- tools = ["@org_golang_x_tools//cmd/goimports:goimports"],
- cmd = ("$(location @org_golang_x_tools//cmd/goimports:goimports) $(SRCS) > $@"),
- )
-
-def go_library(name, srcs, deps = [], imports = [], stateify = True, marshal = False, marshal_debug = False, nogo = True, **kwargs):
- """Wraps the standard go_library and does stateification and marshalling.
-
-    The recommended way is to use this rule with mostly the same configuration as the native
- go_library rule.
-
- These definitions provide additional flags (stateify, marshal) that can be used
- with the generators to automatically supplement the library code.
-
- load("//tools:defs.bzl", "go_library")
-
- go_library(
- name = "foo",
- srcs = ["foo.go"],
- )
-
- Args:
- name: the rule name.
- srcs: the library sources.
- deps: the library dependencies.
- imports: imports required for stateify.
-      stateify: whether stateify is enabled (default: true).
- marshal: whether marshal is enabled (default: false).
-      marshal_debug: whether the gomarshal tool emits debugging output (default: false).
- nogo: enable nogo analysis.
- **kwargs: standard go_library arguments.
- """
- all_srcs = srcs
- all_deps = deps
- dirname, _, _ = native.package_name().rpartition("/")
- full_pkg = dirname + "/" + name
- if stateify:
- # Only do stateification for non-state packages without manual autogen.
- # First, we need to segregate the input files via the special suffixes,
- # and calculate the final output set.
- state_sets = calculate_sets(srcs)
- for (suffix, src_subset) in state_sets.items():
- go_stateify(
- name = name + suffix + "_state_autogen_with_imports",
- srcs = src_subset,
- imports = imports,
- package = full_pkg,
- out = name + suffix + "_state_autogen_with_imports.go",
- )
- go_imports(
- name = name + suffix + "_state_autogen",
- src = name + suffix + "_state_autogen_with_imports.go",
- out = name + suffix + "_state_autogen.go",
- )
- all_srcs = all_srcs + [
- name + suffix + "_state_autogen.go"
- for suffix in state_sets.keys()
- ]
- if "//pkg/state" not in all_deps:
- all_deps = all_deps + ["//pkg/state"]
-
- if marshal:
- # See above.
- marshal_sets = calculate_sets(srcs)
- for (suffix, src_subset) in marshal_sets.items():
- go_marshal(
- name = name + suffix + "_abi_autogen",
- srcs = src_subset,
- debug = select({
- "//tools/go_marshal:marshal_config_verbose": True,
- "//conditions:default": marshal_debug,
- }),
- imports = imports,
- package = name,
- )
- extra_deps = [
- dep
- for dep in marshal_deps
- if not dep in all_deps
- ]
- all_deps = all_deps + extra_deps
- all_srcs = all_srcs + [
- name + suffix + "_abi_autogen_unsafe.go"
- for suffix in marshal_sets.keys()
- ]
-
- _go_library(
- name = name,
- srcs = all_srcs,
- deps = all_deps,
- **kwargs
- )
- if nogo:
- nogo_test(
- name = name + "_nogo",
- config = "//:nogo_config",
- srcs = all_srcs,
- deps = [":" + name],
- tags = ["nogo"],
- )
-
- if marshal:
- # Ignore importpath for go_test.
- kwargs.pop("importpath", None)
-
- # See above.
- marshal_sets = calculate_sets(srcs)
- for (suffix, _) in marshal_sets.items():
- _go_test(
- name = name + suffix + "_abi_autogen_test",
- srcs = [
- name + suffix + "_abi_autogen_test.go",
- name + suffix + "_abi_autogen_unconditional_test.go",
- ],
- library = ":" + name,
- deps = marshal_test_deps,
- **kwargs
- )
-
-def go_test(name, nogo = True, **kwargs):
- """Wraps the standard go_test.
-
- Args:
- name: the rule name.
- nogo: enable nogo analysis.
- **kwargs: standard go_test arguments.
- """
- _go_test(
- name = name,
- **kwargs
- )
- if nogo:
- nogo_test(
- name = name + "_nogo",
- config = "//:nogo_config",
- srcs = kwargs.get("srcs", []),
- deps = [":" + name],
- tags = ["nogo"],
- )
-
-def proto_library(name, srcs, deps = None, has_services = 0, **kwargs):
- """Wraps the standard proto_library.
-
- Given a proto_library named "foo", this produces up to five different
- targets:
- - foo_proto: proto_library rule.
- - foo_go_proto: go_proto_library rule.
- - foo_cc_proto: cc_proto_library rule.
- - foo_go_grpc_proto: go_grpc_library rule.
- - foo_cc_grpc_proto: cc_grpc_library rule.
-
- Args:
- name: the name to which _proto, _go_proto, etc, will be appended.
- srcs: the proto sources.
- deps: for the proto library and the go_proto_library.
- has_services: 1 to build gRPC code, otherwise 0.
- **kwargs: standard proto_library arguments.
- """
- _proto_library(
- name = name + "_proto",
- srcs = srcs,
- deps = deps,
- has_services = has_services,
- **kwargs
- )
- if has_services:
- _go_grpc_and_proto_libraries(
- name = name,
- deps = deps,
- **kwargs
- )
- else:
- _go_proto_library(
- name = name,
- deps = deps,
- **kwargs
- )
- _cc_proto_library(
- name = name + "_cc_proto",
- deps = [":" + name + "_proto"],
- **kwargs
- )
- if has_services:
- _cc_grpc_library(
- name = name + "_cc_grpc_proto",
- srcs = [":" + name + "_proto"],
- deps = [":" + name + "_cc_proto"],
- **kwargs
- )
diff --git a/tools/github/BUILD b/tools/github/BUILD
deleted file mode 100644
index aad088d13..000000000
--- a/tools/github/BUILD
+++ /dev/null
@@ -1,15 +0,0 @@
-load("//tools:defs.bzl", "go_binary")
-
-package(licenses = ["notice"])
-
-go_binary(
- name = "github",
- srcs = ["main.go"],
- nogo = False,
- deps = [
- "//tools/github/nogo",
- "//tools/github/reviver",
- "@com_github_google_go_github_v28//github:go_default_library",
- "@org_golang_x_oauth2//:go_default_library",
- ],
-)
diff --git a/tools/github/main.go b/tools/github/main.go
deleted file mode 100644
index 681003eef..000000000
--- a/tools/github/main.go
+++ /dev/null
@@ -1,186 +0,0 @@
-// Copyright 2019 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Binary github is the entry point for GitHub utilities.
-package main
-
-import (
- "context"
- "flag"
- "fmt"
- "io/ioutil"
- "log"
- "os"
- "os/exec"
- "strings"
-
- "github.com/google/go-github/github"
- "golang.org/x/oauth2"
- "gvisor.dev/gvisor/tools/github/nogo"
- "gvisor.dev/gvisor/tools/github/reviver"
-)
-
-var (
- owner string
- repo string
- tokenFile string
- paths stringList
- commit string
- dryRun bool
-)
-
-type stringList []string
-
-func (s *stringList) String() string {
- return strings.Join(*s, ",")
-}
-
-func (s *stringList) Set(value string) error {
- *s = append(*s, value)
- return nil
-}
-
-// Keep the options simple for now. Supports only a single path and repo.
-func init() {
- flag.StringVar(&owner, "owner", "", "GitHub project org/owner (required, except nogo dry-run)")
- flag.StringVar(&repo, "repo", "", "GitHub repo (required, except nogo dry-run)")
-	flag.StringVar(&tokenFile, "oauth-token-file", "", "file containing the GitHub token (otherwise the GITHUB_TOKEN environment variable is used)")
- flag.Var(&paths, "path", "path(s) to scan (required for revive and nogo)")
-	flag.StringVar(&commit, "commit", "", "commit to associate results with (required for nogo, except dry-run)")
- flag.BoolVar(&dryRun, "dry-run", false, "just print changes to be made")
-}
-
-func filterPaths(paths []string) (existing []string) {
- for _, path := range paths {
- if _, err := os.Stat(path); err != nil {
- log.Printf("WARNING: skipping %v: %v", path, err)
- continue
- }
- existing = append(existing, path)
- }
- return
-}
-
-func main() {
- // Set defaults from the environment.
- repository := os.Getenv("GITHUB_REPOSITORY")
- if parts := strings.SplitN(repository, "/", 2); len(parts) == 2 {
- owner = parts[0]
- repo = parts[1]
- }
-
- // Parse flags.
- flag.Usage = func() {
- fmt.Fprintf(flag.CommandLine.Output(), "usage: %s [options] <command>\n", os.Args[0])
- fmt.Fprintf(flag.CommandLine.Output(), "commands: revive, nogo\n")
- flag.PrintDefaults()
- }
- flag.Parse()
- args := flag.Args()
-	if len(args) != 1 {
-		// Guard the slice below: with no command at all, args[1:] would panic.
-		if len(args) == 0 {
-			fmt.Fprintf(flag.CommandLine.Output(), "missing command\n")
-		} else {
-			fmt.Fprintf(flag.CommandLine.Output(), "extra arguments: %s\n", strings.Join(args[1:], ", "))
-		}
-		flag.Usage()
-		os.Exit(1)
-	}
-
- // Check for mandatory parameters.
- command := args[0]
- if len(owner) == 0 && (command != "nogo" || !dryRun) {
- fmt.Fprintln(flag.CommandLine.Output(), "missing --owner option.")
- flag.Usage()
- os.Exit(1)
- }
- if len(repo) == 0 && (command != "nogo" || !dryRun) {
- fmt.Fprintln(flag.CommandLine.Output(), "missing --repo option.")
- flag.Usage()
- os.Exit(1)
- }
- filteredPaths := filterPaths(paths)
- if len(filteredPaths) == 0 {
- fmt.Fprintln(flag.CommandLine.Output(), "no valid --path options provided.")
- flag.Usage()
- os.Exit(1)
- }
-
- // The access token may be passed as a file so it doesn't show up in
- // command line arguments. It also may be provided through the
-	// environment to facilitate use through GitHub's CI system.
- token := os.Getenv("GITHUB_TOKEN")
- if len(tokenFile) != 0 {
- bytes, err := ioutil.ReadFile(tokenFile)
- if err != nil {
- fmt.Println(err.Error())
- os.Exit(1)
- }
-		token = strings.TrimSpace(string(bytes))
- }
- var client *github.Client
- if len(token) == 0 {
- // Client is unauthenticated.
- client = github.NewClient(nil)
- } else {
- // Using the above token.
- ts := oauth2.StaticTokenSource(
- &oauth2.Token{AccessToken: token},
- )
- tc := oauth2.NewClient(context.Background(), ts)
- client = github.NewClient(tc)
- }
-
- switch command {
- case "revive":
- // Load existing GitHub bugs.
- bugger, err := reviver.NewGitHubBugger(client, owner, repo, dryRun)
- if err != nil {
- fmt.Fprintf(os.Stderr, "Error getting github issues: %v\n", err)
- os.Exit(1)
- }
- // Scan the provided path.
- rev := reviver.New(filteredPaths, []reviver.Bugger{bugger})
- if errs := rev.Run(); len(errs) > 0 {
- fmt.Fprintf(os.Stderr, "Encountered %d errors:\n", len(errs))
- for _, err := range errs {
- fmt.Fprintf(os.Stderr, "\t%v\n", err)
- }
- os.Exit(1)
- }
- case "nogo":
- // Did we get a commit? Try to extract one.
- if len(commit) == 0 && !dryRun {
- cmd := exec.Command("git", "rev-parse", "HEAD")
- revBytes, err := cmd.Output()
- if err != nil {
- fmt.Fprintf(flag.CommandLine.Output(), "missing --commit option, unable to infer: %v\n", err)
- flag.Usage()
- os.Exit(1)
- }
- commit = strings.TrimSpace(string(revBytes))
- }
- // Scan all findings.
- poster := nogo.NewFindingsPoster(client, owner, repo, commit, dryRun)
- if err := poster.Walk(filteredPaths); err != nil {
- fmt.Fprintln(os.Stderr, "Error finding nogo findings:", err)
- os.Exit(1)
- }
- // Post to GitHub.
- if err := poster.Post(); err != nil {
- fmt.Fprintln(os.Stderr, "Error posting nogo findings:", err)
- }
- default:
- // Not a known command.
- fmt.Fprintf(flag.CommandLine.Output(), "unknown command: %s\n", command)
- flag.Usage()
- os.Exit(1)
- }
-}
diff --git a/tools/github/nogo/BUILD b/tools/github/nogo/BUILD
deleted file mode 100644
index 19b7eec4d..000000000
--- a/tools/github/nogo/BUILD
+++ /dev/null
@@ -1,16 +0,0 @@
-load("//tools:defs.bzl", "go_library")
-
-package(licenses = ["notice"])
-
-go_library(
- name = "nogo",
- srcs = ["nogo.go"],
- nogo = False,
- visibility = [
- "//tools/github:__subpackages__",
- ],
- deps = [
- "//tools/nogo",
- "@com_github_google_go_github_v28//github:go_default_library",
- ],
-)
diff --git a/tools/github/nogo/nogo.go b/tools/github/nogo/nogo.go
deleted file mode 100644
index 27ab1b8eb..000000000
--- a/tools/github/nogo/nogo.go
+++ /dev/null
@@ -1,132 +0,0 @@
-// Copyright 2019 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Package nogo provides nogo-related utilities.
-package nogo
-
-import (
- "context"
- "fmt"
- "os"
- "path/filepath"
- "strings"
- "time"
-
- "github.com/google/go-github/github"
- "gvisor.dev/gvisor/tools/nogo"
-)
-
-// FindingsPoster is a simple wrapper around the GitHub api.
-type FindingsPoster struct {
- owner string
- repo string
- commit string
- dryRun bool
- startTime time.Time
-
- findings map[nogo.Finding]struct{}
- client *github.Client
-}
-
-// NewFindingsPoster returns an object that can post findings.
-func NewFindingsPoster(client *github.Client, owner, repo, commit string, dryRun bool) *FindingsPoster {
- return &FindingsPoster{
- owner: owner,
- repo: repo,
- commit: commit,
- dryRun: dryRun,
- startTime: time.Now(),
- findings: make(map[nogo.Finding]struct{}),
- client: client,
- }
-}
-
-// Walk walks the given path tree for findings files.
-func (p *FindingsPoster) Walk(paths []string) error {
- for _, path := range paths {
- if err := filepath.Walk(path, func(filename string, info os.FileInfo, err error) error {
- if err != nil {
- return err
- }
- // Skip any directories or files not ending in .findings.
- if !strings.HasSuffix(filename, ".findings") || info.IsDir() {
- return nil
- }
- findings, err := nogo.ExtractFindingsFromFile(filename)
- if err != nil {
- return err
- }
- // Add all findings to the list. We use a map to ensure
- // that each finding is unique.
- for _, finding := range findings {
- p.findings[finding] = struct{}{}
- }
- return nil
- }); err != nil {
- return err
- }
- }
- return nil
-}
-
-// Post posts all results to the GitHub API as a check run.
-func (p *FindingsPoster) Post() error {
- // Just show results?
- if p.dryRun {
-		for finding := range p.findings {
- // Pretty print, so that this is useful for debugging.
- fmt.Printf("%s: (%s+%d) %s\n", finding.Category, finding.Position.Filename, finding.Position.Line, finding.Message)
- }
- return nil
- }
-
- // Construct the message.
- title := "nogo"
- count := len(p.findings)
- status := "completed"
- conclusion := "success"
- if count > 0 {
- conclusion = "failure" // Contains errors.
- }
- summary := fmt.Sprintf("%d findings.", count)
- opts := github.CreateCheckRunOptions{
- Name: title,
- HeadSHA: p.commit,
- Status: &status,
- Conclusion: &conclusion,
- StartedAt: &github.Timestamp{p.startTime},
- CompletedAt: &github.Timestamp{time.Now()},
- Output: &github.CheckRunOutput{
- Title: &title,
- Summary: &summary,
- AnnotationsCount: &count,
- },
- }
- annotationLevel := "failure" // Always.
-	for finding := range p.findings {
- title := string(finding.Category)
- opts.Output.Annotations = append(opts.Output.Annotations, &github.CheckRunAnnotation{
- Path: &finding.Position.Filename,
- StartLine: &finding.Position.Line,
- EndLine: &finding.Position.Line,
- Message: &finding.Message,
- Title: &title,
- AnnotationLevel: &annotationLevel,
- })
- }
-
- // Post to GitHub.
- _, _, err := p.client.Checks.CreateCheckRun(context.Background(), p.owner, p.repo, opts)
- return err
-}
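
A minimal usage sketch for the poster above, mirroring how tools/github/main.go (earlier in this diff) drives it; the package name, findings directory, and error handling are placeholders:

    package ghpost // hypothetical helper package

    import (
        "log"

        "github.com/google/go-github/github"
        "gvisor.dev/gvisor/tools/github/nogo"
    )

    // postFindings walks findings files under a build output tree and uploads
    // a check run for the given commit.
    func postFindings(client *github.Client, owner, repo, commit string) {
        poster := nogo.NewFindingsPoster(client, owner, repo, commit, false /* dryRun */)
        if err := poster.Walk([]string{"bazel-out"}); err != nil {
            log.Fatalf("walking findings: %v", err)
        }
        if err := poster.Post(); err != nil {
            log.Fatalf("posting findings: %v", err)
        }
    }
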
diff --git a/tools/github/reviver/BUILD b/tools/github/reviver/BUILD
deleted file mode 100644
index 7d78480a7..000000000
--- a/tools/github/reviver/BUILD
+++ /dev/null
@@ -1,27 +0,0 @@
-load("//tools:defs.bzl", "go_library", "go_test")
-
-package(licenses = ["notice"])
-
-go_library(
- name = "reviver",
- srcs = [
- "github.go",
- "reviver.go",
- ],
- nogo = False,
- visibility = [
- "//tools/github:__subpackages__",
- ],
- deps = ["@com_github_google_go_github_v28//github:go_default_library"],
-)
-
-go_test(
- name = "reviver_test",
- size = "small",
- srcs = [
- "github_test.go",
- "reviver_test.go",
- ],
- library = ":reviver",
- nogo = False,
-)
diff --git a/tools/github/reviver/github.go b/tools/github/reviver/github.go
deleted file mode 100644
index c4b624f2a..000000000
--- a/tools/github/reviver/github.go
+++ /dev/null
@@ -1,173 +0,0 @@
-// Copyright 2019 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package reviver
-
-import (
- "context"
- "fmt"
- "strconv"
- "strings"
- "time"
-
- "github.com/google/go-github/github"
-)
-
-// GitHubBugger implements the Bugger interface for GitHub issues.
-type GitHubBugger struct {
- owner string
- repo string
- dryRun bool
-
- client *github.Client
- issues map[int]*github.Issue
-}
-
-// NewGitHubBugger creates a new GitHubBugger.
-func NewGitHubBugger(client *github.Client, owner, repo string, dryRun bool) (*GitHubBugger, error) {
- b := &GitHubBugger{
- owner: owner,
- repo: repo,
- dryRun: dryRun,
- issues: map[int]*github.Issue{},
- client: client,
- }
- if err := b.load(); err != nil {
- return nil, err
- }
- return b, nil
-}
-
-func (b *GitHubBugger) load() error {
- err := processAllPages(func(listOpts github.ListOptions) (*github.Response, error) {
- opts := &github.IssueListByRepoOptions{State: "open", ListOptions: listOpts}
- tmps, resp, err := b.client.Issues.ListByRepo(context.Background(), b.owner, b.repo, opts)
- if err != nil {
- return resp, err
- }
- for _, issue := range tmps {
- b.issues[issue.GetNumber()] = issue
- }
- return resp, nil
- })
- if err != nil {
- return err
- }
-
- fmt.Printf("Loaded %d issues from github.com/%s/%s\n", len(b.issues), b.owner, b.repo)
- return nil
-}
-
-// Activate implements Bugger.Activate.
-func (b *GitHubBugger) Activate(todo *Todo) (bool, error) {
- id, err := parseIssueNo(todo.Issue)
- if err != nil {
- return true, err
- }
- if id <= 0 {
- return false, nil
- }
-
- // Check against active issues cache.
- if _, ok := b.issues[id]; ok {
- fmt.Printf("%q is active: OK\n", todo.Issue)
- return true, nil
- }
-
- fmt.Printf("%q is not active: reopening issue %d\n", todo.Issue, id)
-
- // Format comment with TODO locations and search link.
- comment := strings.Builder{}
- fmt.Fprintln(&comment, "There are TODOs still referencing this issue:")
- for _, l := range todo.Locations {
-		fmt.Fprintf(&comment,
-			"1. [%s:%d](https://github.com/%s/%s/blob/HEAD/%s#L%d): %s\n",
-			l.File, l.Line, b.owner, b.repo, l.File, l.Line, l.Comment)
- }
- fmt.Fprintf(&comment,
- "\n\nSearch [TODO](https://github.com/%s/%s/search?q=%%22%s%%22)", b.owner, b.repo, todo.Issue)
-
- if b.dryRun {
- fmt.Printf("[dry-run: skipping change to issue %d]\n%s\n=======================\n", id, comment.String())
- return true, nil
- }
-
- ctx := context.Background()
- req := &github.IssueRequest{State: github.String("open")}
- _, _, err = b.client.Issues.Edit(ctx, b.owner, b.repo, id, req)
- if err != nil {
- return true, fmt.Errorf("failed to reactivate issue %d: %v", id, err)
- }
-
- cmt := &github.IssueComment{
- Body: github.String(comment.String()),
- Reactions: &github.Reactions{Confused: github.Int(1)},
- }
- if _, _, err := b.client.Issues.CreateComment(ctx, b.owner, b.repo, id, cmt); err != nil {
- return true, fmt.Errorf("failed to add comment to issue %d: %v", id, err)
- }
-
- return true, nil
-}
-
-var issuePrefixes = []string{
- "gvisor.dev/issue/",
- "gvisor.dev/issues/",
-}
-
-// parseIssueNo parses the issue number out of the issue url.
-//
-// 0 is returned if url does not correspond to an issue.
-func parseIssueNo(url string) (int, error) {
-	// First check whether we can handle this TODO's URL.
- var idStr string
- for _, p := range issuePrefixes {
- if str := strings.TrimPrefix(url, p); len(str) < len(url) {
- idStr = str
- break
- }
- }
- if len(idStr) == 0 {
- return 0, nil
- }
-
- id, err := strconv.ParseInt(strings.TrimRight(idStr, "/"), 10, 64)
- if err != nil {
- return 0, err
- }
- return int(id), nil
-}
-
-func processAllPages(fn func(github.ListOptions) (*github.Response, error)) error {
- opts := github.ListOptions{PerPage: 1000}
- for {
- resp, err := fn(opts)
- if err != nil {
- if rateErr, ok := err.(*github.RateLimitError); ok {
- duration := rateErr.Rate.Reset.Sub(time.Now())
- if duration > 5*time.Minute {
- return fmt.Errorf("Rate limited for too long: %v", duration)
- }
- fmt.Printf("Rate limited, sleeping for: %v\n", duration)
- time.Sleep(duration)
- continue
- }
- return err
- }
- if resp.NextPage == 0 {
- return nil
- }
- opts.Page = resp.NextPage
- }
-}
diff --git a/tools/github/reviver/github_test.go b/tools/github/reviver/github_test.go
deleted file mode 100644
index 5df7e3624..000000000
--- a/tools/github/reviver/github_test.go
+++ /dev/null
@@ -1,55 +0,0 @@
-// Copyright 2020 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package reviver
-
-import (
- "testing"
-)
-
-func TestParseIssueNo(t *testing.T) {
- testCases := []struct {
- issue string
- expectErr bool
- expected int
- }{
- {
- issue: "gvisor.dev/issue/123",
- expected: 123,
- },
- {
- issue: "gvisor.dev/issue/123/",
- expected: 123,
- },
- {
- issue: "not a url",
- expected: 0,
- },
- {
- issue: "gvisor.dev/issue//",
- expectErr: true,
- },
- }
-
- for _, tc := range testCases {
- t.Run(tc.issue, func(t *testing.T) {
- id, err := parseIssueNo(tc.issue)
- if err != nil && !tc.expectErr {
- t.Errorf("got error: %v", err)
- } else if tc.expected != id {
- t.Errorf("got: %v, want: %v", id, tc.expected)
- }
- })
- }
-}
diff --git a/tools/github/reviver/reviver.go b/tools/github/reviver/reviver.go
deleted file mode 100644
index 2af7f0d59..000000000
--- a/tools/github/reviver/reviver.go
+++ /dev/null
@@ -1,192 +0,0 @@
-// Copyright 2019 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Package reviver scans the code looking for TODOs and passes them to registered
-// Buggers to ensure they point to active issues.
-package reviver
-
-import (
- "bufio"
- "fmt"
- "io/ioutil"
- "os"
- "path/filepath"
- "regexp"
- "sync"
-)
-
-// regexTodo matches a TODO or FIXME comment.
-var regexTodo = regexp.MustCompile(`(\/\/|#)\s*(TODO|FIXME)\(([a-zA-Z0-9.\/]+)\):\s*(.+)`)
-
-// Bugger interface is called for every TODO found in the code. If it can handle
-// the TODO, it must return true. If it returns false, the next Bugger is
-// called. If no Bugger handles the TODO, it's dropped on the floor.
-type Bugger interface {
- Activate(todo *Todo) (bool, error)
-}
-
-// Location saves the location where the TODO was found.
-type Location struct {
- Comment string
- File string
- Line uint
-}
-
-// Todo represents a unique TODO. There can be several TODOs pointing to the
-// same issue in the code. They are all grouped together.
-type Todo struct {
- Issue string
- Locations []Location
-}
-
-// Reviver scans the given paths for TODOs and calls Buggers to handle them.
-type Reviver struct {
- paths []string
- buggers []Bugger
-
- mu sync.Mutex
- todos map[string]*Todo
- errs []error
-}
-
-// New creates a new Reviver.
-func New(paths []string, buggers []Bugger) *Reviver {
- return &Reviver{
- paths: paths,
- buggers: buggers,
- todos: map[string]*Todo{},
- }
-}
-
-// Run scans all paths and processes the TODOs found. It returns all errors
-// encountered during processing; it doesn't stop on errors.
-func (r *Reviver) Run() []error {
- // Process each directory in parallel.
- wg := sync.WaitGroup{}
- for _, path := range r.paths {
- wg.Add(1)
- go func(path string) {
- defer wg.Done()
- r.processPath(path, &wg)
- }(path)
- }
-
- wg.Wait()
-
- r.mu.Lock()
- defer r.mu.Unlock()
-
- fmt.Printf("Processing %d TODOs (%d errors)...\n", len(r.todos), len(r.errs))
- dropped := 0
- for _, todo := range r.todos {
- ok, err := r.processTodo(todo)
- if err != nil {
- r.errs = append(r.errs, err)
- }
- if !ok {
- dropped++
- }
- }
- fmt.Printf("Processed %d TODOs, %d were skipped (%d errors)\n", len(r.todos)-dropped, dropped, len(r.errs))
-
- return r.errs
-}
-
-func (r *Reviver) processPath(path string, wg *sync.WaitGroup) {
- fmt.Printf("Processing dir %q\n", path)
- fis, err := ioutil.ReadDir(path)
- if err != nil {
- r.addErr(fmt.Errorf("error processing dir %q: %v", path, err))
- return
- }
-
- for _, fi := range fis {
- childPath := filepath.Join(path, fi.Name())
- switch {
- case fi.Mode().IsDir():
- wg.Add(1)
- go func() {
- defer wg.Done()
- r.processPath(childPath, wg)
- }()
-
- case fi.Mode().IsRegular():
- file, err := os.Open(childPath)
- if err != nil {
- r.addErr(err)
- continue
- }
-
- scanner := bufio.NewScanner(file)
- lineno := uint(0)
- for scanner.Scan() {
- lineno++
- line := scanner.Text()
- if todo := r.processLine(line, childPath, lineno); todo != nil {
- r.addTodo(todo)
- }
-			}
-			// Release the file handle before processing the next entry.
-			file.Close()
-		}
- }
-}
-
-func (r *Reviver) processLine(line, path string, lineno uint) *Todo {
- matches := regexTodo.FindStringSubmatch(line)
- if matches == nil {
- return nil
- }
- if len(matches) != 5 {
- panic(fmt.Sprintf("regex returned wrong matches for %q: %v", line, matches))
- }
- return &Todo{
- Issue: matches[3],
- Locations: []Location{
- {
- File: path,
- Line: lineno,
- Comment: matches[4],
- },
- },
- }
-}
-
-func (r *Reviver) addTodo(newTodo *Todo) {
- r.mu.Lock()
- defer r.mu.Unlock()
-
- if todo := r.todos[newTodo.Issue]; todo == nil {
- r.todos[newTodo.Issue] = newTodo
- } else {
- todo.Locations = append(todo.Locations, newTodo.Locations...)
- }
-}
-
-func (r *Reviver) addErr(err error) {
- r.mu.Lock()
- defer r.mu.Unlock()
- r.errs = append(r.errs, err)
-}
-
-func (r *Reviver) processTodo(todo *Todo) (bool, error) {
- for _, bugger := range r.buggers {
- ok, err := bugger.Activate(todo)
- if err != nil {
- return false, err
- }
- if ok {
- return true, nil
- }
- }
- return false, nil
-}
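
A minimal sketch of plugging into this package (a hypothetical Bugger that only logs): implement Activate, hand the Bugger to New, and call Run.

    package main // hypothetical example, not part of the tool

    import (
        "fmt"

        "gvisor.dev/gvisor/tools/github/reviver"
    )

    // logBugger accepts every TODO and just prints where it was found.
    type logBugger struct{}

    func (logBugger) Activate(todo *reviver.Todo) (bool, error) {
        for _, loc := range todo.Locations {
            fmt.Printf("%s:%d: TODO(%s): %s\n", loc.File, loc.Line, todo.Issue, loc.Comment)
        }
        return true, nil // Handled; no further Buggers are consulted.
    }

    func main() {
        rev := reviver.New([]string{"."}, []reviver.Bugger{logBugger{}})
        for _, err := range rev.Run() {
            fmt.Println("error:", err)
        }
    }
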
diff --git a/tools/github/reviver/reviver_test.go b/tools/github/reviver/reviver_test.go
deleted file mode 100644
index 851306c9d..000000000
--- a/tools/github/reviver/reviver_test.go
+++ /dev/null
@@ -1,97 +0,0 @@
-// Copyright 2019 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package reviver
-
-import (
- "testing"
-)
-
-func TestProcessLine(t *testing.T) {
- for _, tc := range []struct {
- line string
- want *Todo
- }{
- {
- line: "// TODO(foobar.com/issue/123): comment, bla. blabla.",
- want: &Todo{
- Issue: "foobar.com/issue/123",
- Locations: []Location{
- {Comment: "comment, bla. blabla."},
- },
- },
- },
- {
- line: "// TODO(foobar.com/issues/123): comment, bla. blabla.",
- want: &Todo{
- Issue: "foobar.com/issues/123",
- Locations: []Location{
- {Comment: "comment, bla. blabla."},
- },
- },
- },
- {
- line: "// FIXME(b/123): internal bug",
- want: &Todo{
- Issue: "b/123",
- Locations: []Location{
- {Comment: "internal bug"},
- },
- },
- },
- {
- line: "TODO(issue): not todo",
- },
- {
- line: "FIXME(issue): not todo",
- },
- {
- line: "// TODO (issue): not todo",
- },
- {
- line: "// TODO(issue) not todo",
- },
- {
- line: "// todo(issue): not todo",
- },
- {
- line: "// TODO(issue):",
- },
- } {
- t.Logf("Testing: %s", tc.line)
- r := Reviver{}
- got := r.processLine(tc.line, "test", 0)
- if got == nil {
- if tc.want != nil {
- t.Errorf("failed to process line, want: %+v", tc.want)
- }
- } else {
- if tc.want == nil {
-				t.Errorf("expected no match, got: %+v", got)
- continue
- }
- if got.Issue != tc.want.Issue {
- t.Errorf("wrong issue, got: %v, want: %v", got.Issue, tc.want.Issue)
- }
- if len(got.Locations) != len(tc.want.Locations) {
- t.Errorf("wrong number of locations, got: %v, want: %v, locations: %+v", len(got.Locations), len(tc.want.Locations), got.Locations)
- }
- for i, wantLoc := range tc.want.Locations {
- if got.Locations[i].Comment != wantLoc.Comment {
- t.Errorf("wrong comment, got: %v, want: %v", got.Locations[i].Comment, wantLoc.Comment)
- }
- }
- }
- }
-}
diff --git a/tools/go_branch.sh b/tools/go_branch.sh
deleted file mode 100755
index 71d036b12..000000000
--- a/tools/go_branch.sh
+++ /dev/null
@@ -1,132 +0,0 @@
-#!/bin/bash
-
-# Copyright 2019 The gVisor Authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -xeou pipefail
-
-# Discover the package name from the go.mod file.
-declare module origpwd othersrc
-module=$(grep -E "^module" go.mod | cut -d' ' -f2)
-origpwd=$(pwd)
-othersrc=("go.mod" "go.sum" "AUTHORS" "LICENSE")
-readonly module origpwd othersrc
-
-
-# Check that gopath has been built.
-declare gopath_dir
-gopath_dir="$(pwd)/bazel-bin/gopath/src/${module}"
-readonly gopath_dir
-if ! [[ -d "${gopath_dir}" ]]; then
- echo "No gopath directory found; build the :gopath target." >&2
- exit 1
-fi
-
-# Create a temporary working directory, and ensure that this directory and all
-# subdirectories are cleaned up upon exit.
-declare tmp_dir
-tmp_dir=$(mktemp -d)
-readonly tmp_dir
-finish() {
- cd # Leave tmp_dir.
- rm -rf "${tmp_dir}"
-}
-trap finish EXIT
-
-# Record the current working commit.
-declare head
-head=$(git describe --always)
-readonly head
-
-# We expect to have an existing go branch that we will use as the basis for this
-# commit. That branch may be empty, but it must exist. We search for this branch
-# using the local branch, the "origin" branch, and other remotes, in order.
-git fetch --all
-declare go_branch
-go_branch=$( \
- git show-ref --hash refs/heads/go || \
- git show-ref --hash refs/remotes/origin/go || \
- git show-ref --hash go | head -n 1 \
-)
-readonly go_branch
-
-# Clone the current repository to the temporary directory, and check out the
-# current go_branch directory. We move to the new repository for convenience.
-declare repo_orig
-repo_orig="$(pwd)"
-readonly repo_orig
-declare -r repo_new="${tmp_dir}/repository"
-git clone . "${repo_new}"
-cd "${repo_new}"
-
-# Setup the repository and checkout the branch.
-git config user.email "gvisor-bot@google.com"
-git config user.name "gVisor bot"
-git fetch origin "${go_branch}"
-git checkout -b go "${go_branch}"
-
-# Start working on a merge commit that combines the previous history with the
-# current history. Note that we don't actually want any changes yet.
-#
-# N.B. The git behavior changed at some point and the relevant flag was added
-# to allow for override, so try the old behavior first, then pass the flag.
-git merge --no-commit --strategy ours "${head}" || \
- git merge --allow-unrelated-histories --no-commit --strategy ours "${head}"
-
-# Normalize the permissions on the old branch. Note that they should be
-# normalized if constructed by this tool, but we do so before the rsync.
-find . -type f -exec chmod 0644 {} \;
-find . -type d -exec chmod 0755 {} \;
-
-# Sync the entire gopath_dir.
-rsync --recursive --verbose --delete --exclude .git -L "${gopath_dir}/" .
-
-# Add additional files.
-for file in "${othersrc[@]}"; do
- cp "${origpwd}"/"${file}" .
-done
-
-# Construct a new README.md.
-cat > README.md <<EOF
-# gVisor
-
-This branch is a synthetic branch, containing only Go sources, that is
-compatible with standard Go tools. See the master branch for authoritative
-sources and tests.
-EOF
-
-# There are a few solitary files that can get left behind due to the way bazel
-# constructs the gopath target. Note that we don't find all Go files here
-# because they may correspond to unused templates, etc.
-declare -ar binaries=( "runsc" "shim/v1" "shim/v2" "webhook" )
-for target in "${binaries[@]}"; do
- mkdir -p "${target}"
- cp "${repo_orig}/${target}"/*.go "${target}/"
-done
-
-# Normalize all permissions. The way bazel constructs the :gopath tree may leave
-# some strange permissions on files. We don't have anything in this tree that
-# should be execution, only the Go source files, README.md, and ${othersrc}.
-find . -type f -exec chmod 0644 {} \;
-find . -type d -exec chmod 0755 {} \;
-
-# Update the current working set and commit.
-# If the current working commit has already been committed to the remote go
-# branch, then we have nothing to commit here. So allow empty commit. This can
-# occur when this script is run in parallel (via pull_request and push events)
-# and the push workflow finishes before the pull_request workflow can run this.
-git add . && git commit --allow-empty -m "Merge ${head} (automated)"
-
-# Push the branch back to the original repository.
-git remote add orig "${repo_orig}" && git push -f orig go:go
diff --git a/tools/go_generics/BUILD b/tools/go_generics/BUILD
deleted file mode 100644
index 807c08ead..000000000
--- a/tools/go_generics/BUILD
+++ /dev/null
@@ -1,20 +0,0 @@
-load("//tools:defs.bzl", "bzl_library", "go_binary")
-
-package(licenses = ["notice"])
-
-go_binary(
- name = "go_generics",
- srcs = [
- "generics.go",
- "imports.go",
- "remove.go",
- ],
- visibility = ["//:sandbox"],
- deps = ["//tools/go_generics/globals"],
-)
-
-bzl_library(
- name = "defs_bzl",
- srcs = ["defs.bzl"],
- visibility = ["//visibility:private"],
-)
diff --git a/tools/go_generics/defs.bzl b/tools/go_generics/defs.bzl
deleted file mode 100644
index ad97208a8..000000000
--- a/tools/go_generics/defs.bzl
+++ /dev/null
@@ -1,127 +0,0 @@
-"""Generics support via go_generics.
-
-A Go template is similar to a go library, except that it has certain types that
-can be replaced before usage. For example, one could define a templatized List
-struct, whose elements are of type T, then instantiate that template for
-T=segment, where "segment" is the concrete type.
-"""
-
-TemplateInfo = provider(
- "Information about a go_generics template.",
- fields = {
- "unsafe": "whether the template requires unsafe code",
- "types": "required types",
- "opt_types": "optional types",
- "consts": "required consts",
- "opt_consts": "optional consts",
- "deps": "package dependencies",
- "template": "merged template source file",
- },
-)
-
-def _go_template_impl(ctx):
- srcs = ctx.files.srcs
- template = ctx.actions.declare_file(ctx.label.name + "_template.go")
- args = ["-o=%s" % template.path] + [f.path for f in srcs]
-
- ctx.actions.run(
- inputs = srcs,
- outputs = [template],
- mnemonic = "GoGenericsTemplate",
- progress_message = "Building Go template %s" % ctx.label,
- arguments = args,
- executable = ctx.executable._tool,
- )
-
- return [TemplateInfo(
- types = ctx.attr.types,
- opt_types = ctx.attr.opt_types,
- consts = ctx.attr.consts,
- opt_consts = ctx.attr.opt_consts,
- deps = ctx.attr.deps,
- template = template,
- )]
-
-go_template = rule(
- implementation = _go_template_impl,
- attrs = {
- "srcs": attr.label_list(doc = "the list of source files that comprise the template", mandatory = True, allow_files = True),
- "deps": attr.label_list(doc = "the standard dependency list", allow_files = True, cfg = "target"),
- "types": attr.string_list(doc = "the list of generic types in the template that are required to be specified"),
- "opt_types": attr.string_list(doc = "the list of generic types in the template that can but aren't required to be specified"),
- "consts": attr.string_list(doc = "the list of constants in the template that are required to be specified"),
- "opt_consts": attr.string_list(doc = "the list of constants in the template that can but aren't required to be specified"),
- "_tool": attr.label(executable = True, cfg = "host", default = Label("//tools/go_generics/go_merge")),
- },
-)
-
-def _go_template_instance_impl(ctx):
- info = ctx.attr.template[TemplateInfo]
- output = ctx.outputs.out
-
- # Check that all required types are defined.
- for t in info.types:
- if t not in ctx.attr.types:
- fail("Missing value for type %s in %s" % (t, ctx.attr.template.label))
-
- # Check that all defined types are expected by the template.
- for t in ctx.attr.types:
- if (t not in info.types) and (t not in info.opt_types):
-            fail("Type %s is not a parameter to %s" % (t, ctx.attr.template.label))
-
- # Check that all required consts are defined.
- for t in info.consts:
- if t not in ctx.attr.consts:
- fail("Missing value for constant %s in %s" % (t, ctx.attr.template.label))
-
- # Check that all defined consts are expected by the template.
- for t in ctx.attr.consts:
- if (t not in info.consts) and (t not in info.opt_consts):
-            fail("Const %s is not a parameter to %s" % (t, ctx.attr.template.label))
-
- # Build the argument list.
- args = ["-i=%s" % info.template.path, "-o=%s" % output.path]
- if ctx.attr.package:
- args.append("-p=%s" % ctx.attr.package)
-
- if len(ctx.attr.prefix) > 0:
- args.append("-prefix=%s" % ctx.attr.prefix)
-
- if len(ctx.attr.suffix) > 0:
- args.append("-suffix=%s" % ctx.attr.suffix)
-
- args += [("-t=%s=%s" % (p[0], p[1])) for p in ctx.attr.types.items()]
- args += [("-c=%s=%s" % (p[0], p[1])) for p in ctx.attr.consts.items()]
- args += [("-import=%s=%s" % (p[0], p[1])) for p in ctx.attr.imports.items()]
-
- if ctx.attr.anon:
- args.append("-anon")
-
- ctx.actions.run(
- inputs = [info.template],
- outputs = [output],
- mnemonic = "GoGenericsInstance",
- progress_message = "Building Go template instance %s" % ctx.label,
- arguments = args,
- executable = ctx.executable._tool,
- )
-
- return [DefaultInfo(
- files = depset([output]),
- )]
-
-go_template_instance = rule(
- implementation = _go_template_instance_impl,
- attrs = {
- "template": attr.label(doc = "the label of the template to be instantiated", mandatory = True),
- "prefix": attr.string(doc = "a prefix to be added to globals in the template"),
- "suffix": attr.string(doc = "a suffix to be added to globals in the template"),
- "types": attr.string_dict(doc = "the map from generic type names to concrete ones"),
- "consts": attr.string_dict(doc = "the map from constant names to their values"),
- "imports": attr.string_dict(doc = "the map from imports used in types/consts to their import paths"),
-        "anon": attr.bool(doc = "whether anonymous fields should be processed", mandatory = False, default = False),
- "package": attr.string(doc = "the package for the generated source file", mandatory = False),
- "out": attr.output(doc = "output file", mandatory = True),
- "_tool": attr.label(executable = True, cfg = "host", default = Label("//tools/go_generics")),
- },
-)
diff --git a/tools/go_generics/generics.go b/tools/go_generics/generics.go
deleted file mode 100644
index 0860ca9db..000000000
--- a/tools/go_generics/generics.go
+++ /dev/null
@@ -1,286 +0,0 @@
-// Copyright 2018 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// go_generics reads a Go source file and writes a new version of that file with
-// a few transformations applied to it. Namely:
-//
-// 1. Global types can be explicitly renamed with the -t option. For example,
-// if -t=A=B is passed in, all references to A will be replaced with
-// references to B; a function declaration like:
-//
-// func f(arg *A)
-//
-// would be renamed to:
-//
-// func f(arg *B)
-//
-// 2. Global type definitions and their method sets will be removed when they're
-// being renamed with -t. For example, if -t=A=B is passed in, the following
-// definition and methods that existed in the input file wouldn't exist at
-// all in the output file:
-//
-// type A struct{}
-//
-// func (*A) f() {}
-//
-// 3. All global types, variables, constants and functions (not methods) are
-// prefixed and suffixed based on the option -prefix and -suffix arguments.
-// For example, if -suffix=A is passed in, the following globals:
-//
-// func f()
-// type t struct{}
-//
-// would be renamed to:
-//
-// func fA()
-// type tA struct{}
-//
-// Some special tags are also modified. For example:
-//
-// "state:.(t)"
-//
-// would become:
-//
-// "state:.(tA)"
-//
-// 4. The package is renamed to the value passed via the -p argument.
-// 5. Values of constants can be modified with the -c argument.
-//
-// Note that not just the top-level declarations are renamed, all references to
-// them are also properly renamed as well, taking into account visibility rules
-// and shadowing. For example, if -suffix=A is passed in, the following:
-//
-// var b = 100
-//
-// func f() {
-// g(b)
-// b := 0
-// g(b)
-// }
-//
-// Would be replaced with:
-//
-// var bA = 100
-//
-// func f() {
-// g(bA)
-// b := 0
-// g(b)
-// }
-//
-// Note that the second call to g() kept "b" as an argument because it refers to
-// the local variable "b".
-//
-// Note that go_generics can handle anonymous fields with renamed types if
-// -anon is passed in; however, it does not perform strict checking on parameter
-// types that share the same name as the global type and will therefore rename
-// them as well.
-//
-// You can see an example in the tools/go_generics/generics_tests/interface test.
-package main
-
-import (
- "bytes"
- "flag"
- "fmt"
- "go/ast"
- "go/format"
- "go/parser"
- "go/token"
- "io/ioutil"
- "os"
- "regexp"
- "strings"
-
- "gvisor.dev/gvisor/tools/go_generics/globals"
-)
-
-var (
- input = flag.String("i", "", "input `file`")
- output = flag.String("o", "", "output `file`")
- suffix = flag.String("suffix", "", "`suffix` to add to each global symbol")
- prefix = flag.String("prefix", "", "`prefix` to add to each global symbol")
- packageName = flag.String("p", "main", "output package `name`")
- printAST = flag.Bool("ast", false, "prints the AST")
- processAnon = flag.Bool("anon", false, "process anonymous fields")
- types = make(mapValue)
- consts = make(mapValue)
- imports = make(mapValue)
-)
-
-// mapValue implements flag.Value. We use a mapValue flag instead of a regular
-// string flag when we want to allow more than one instance of the flag. For
-// example, we allow several "-t A=B" arguments, and will rename them all.
-type mapValue map[string]string
-
-func (m mapValue) String() string {
- var b bytes.Buffer
- first := true
- for k, v := range m {
- if !first {
- b.WriteRune(',')
- } else {
- first = false
- }
- b.WriteString(k)
- b.WriteRune('=')
- b.WriteString(v)
- }
- return b.String()
-}
-
-func (m mapValue) Set(s string) error {
- sep := strings.Index(s, "=")
- if sep == -1 {
- return fmt.Errorf("missing '=' from '%s'", s)
- }
-
- m[s[:sep]] = s[sep+1:]
-
- return nil
-}
-
-// stateTagRegexp matches against the 'typed' state tags.
-var stateTagRegexp = regexp.MustCompile(`^(.*[^a-z0-9_])state:"\.\(([^\)]*)\)"(.*)$`)
-
-var identifierRegexp = regexp.MustCompile(`^(.*[^a-zA-Z_])([a-zA-Z_][a-zA-Z0-9_]*)(.*)$`)
-
-func main() {
- flag.Usage = func() {
- fmt.Fprintf(os.Stderr, "Usage: %s [options]\n", os.Args[0])
- flag.PrintDefaults()
- }
-
- flag.Var(types, "t", "rename type A to B when `A=B` is passed in. Multiple such mappings are allowed.")
- flag.Var(consts, "c", "reassign constant A to value B when `A=B` is passed in. Multiple such mappings are allowed.")
- flag.Var(imports, "import", "specifies the import libraries to use when types are not local. `name=path` specifies that 'name', used in types as name.type, refers to the package living in 'path'.")
- flag.Parse()
-
- if *input == "" || *output == "" {
- flag.Usage()
- os.Exit(1)
- }
-
- // Parse the input file.
- fset := token.NewFileSet()
- f, err := parser.ParseFile(fset, *input, nil, parser.ParseComments|parser.DeclarationErrors|parser.SpuriousErrors)
- if err != nil {
- fmt.Fprintf(os.Stderr, "%v\n", err)
- os.Exit(1)
- }
-
- // Print the AST if requested.
- if *printAST {
- ast.Print(fset, f)
- }
-
- cmap := ast.NewCommentMap(fset, f, f.Comments)
-
- // Update imports based on what's used in types and consts.
- maps := []mapValue{types, consts}
- importDecl, err := updateImports(maps, imports)
- if err != nil {
- fmt.Fprintf(os.Stderr, "%v\n", err)
- os.Exit(1)
- }
- types = maps[0]
- consts = maps[1]
-
- // Reassign all specified constants.
- for _, decl := range f.Decls {
- d, ok := decl.(*ast.GenDecl)
- if !ok || d.Tok != token.CONST {
- continue
- }
-
- for _, gs := range d.Specs {
- s := gs.(*ast.ValueSpec)
- for i, id := range s.Names {
- if n, ok := consts[id.Name]; ok {
- s.Values[i] = &ast.BasicLit{Value: n}
- }
- }
- }
- }
-
- // Go through all globals and their uses in the AST and rename the types
- // with explicitly provided names, and rename all types, variables,
- // consts and functions with the provided prefix and suffix.
- globals.Visit(fset, f, func(ident *ast.Ident, kind globals.SymKind) {
- if n, ok := types[ident.Name]; ok && kind == globals.KindType {
- ident.Name = n
- } else {
- switch kind {
- case globals.KindType, globals.KindVar, globals.KindConst, globals.KindFunction:
- if ident.Name != "_" {
- ident.Name = *prefix + ident.Name + *suffix
- }
- case globals.KindTag:
- // Modify the state tag appropriately.
- if m := stateTagRegexp.FindStringSubmatch(ident.Name); m != nil {
- if t := identifierRegexp.FindStringSubmatch(m[2]); t != nil {
- typeName := *prefix + t[2] + *suffix
- if n, ok := types[t[2]]; ok {
- typeName = n
- }
- ident.Name = m[1] + `state:".(` + t[1] + typeName + t[3] + `)"` + m[3]
- }
- }
- }
- }
- }, *processAnon)
-
- // Remove the definition of all types that are being remapped.
- set := make(typeSet)
- for _, v := range types {
- set[v] = struct{}{}
- }
- removeTypes(set, f)
-
- // Add the new imports, if any, to the top.
- if importDecl != nil {
- newDecls := make([]ast.Decl, 0, len(f.Decls)+1)
- newDecls = append(newDecls, importDecl)
- newDecls = append(newDecls, f.Decls...)
- f.Decls = newDecls
- }
-
- // Update comments to remove the ones potentially associated with the
- // types that we removed.
- f.Comments = cmap.Filter(f).Comments()
-
- // If there are file (package) comments, delete them.
- if f.Doc != nil {
- for i, cg := range f.Comments {
- if cg == f.Doc {
- f.Comments = append(f.Comments[:i], f.Comments[i+1:]...)
- break
- }
- }
- }
-
- // Write the output file.
- f.Name.Name = *packageName
-
- var buf bytes.Buffer
- if err := format.Node(&buf, fset, f); err != nil {
- fmt.Fprintf(os.Stderr, "%v\n", err)
- os.Exit(1)
- }
-
- if err := ioutil.WriteFile(*output, buf.Bytes(), 0644); err != nil {
- fmt.Fprintf(os.Stderr, "%v\n", err)
- os.Exit(1)
- }
-}
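
As a standalone sketch of the repeated -t/-c/-import flag handling removed above (the flag-set name and sample arguments here are made up for illustration):

    package main

    import (
        "flag"
        "fmt"
        "strings"
    )

    // mapValue mirrors the deleted flag type: repeated "key=value" flags
    // accumulate into a single map.
    type mapValue map[string]string

    func (m mapValue) String() string { return fmt.Sprint(map[string]string(m)) }

    func (m mapValue) Set(s string) error {
        sep := strings.Index(s, "=")
        if sep == -1 {
            return fmt.Errorf("missing '=' from '%s'", s)
        }
        m[s[:sep]] = s[sep+1:]
        return nil
    }

    func main() {
        types := make(mapValue)
        fs := flag.NewFlagSet("example", flag.ExitOnError)
        fs.Var(types, "t", "rename type A to B when A=B is passed in")
        // Simulate passing "-t A=B -t C=D" on the command line.
        fs.Parse([]string{"-t", "A=B", "-t", "C=D"})
        fmt.Println(types) // map[A:B C:D]
    }
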
diff --git a/tools/go_generics/globals/BUILD b/tools/go_generics/globals/BUILD
deleted file mode 100644
index 38caa3ce7..000000000
--- a/tools/go_generics/globals/BUILD
+++ /dev/null
@@ -1,13 +0,0 @@
-load("//tools:defs.bzl", "go_library")
-
-package(licenses = ["notice"])
-
-go_library(
- name = "globals",
- srcs = [
- "globals_visitor.go",
- "scope.go",
- ],
- stateify = False,
- visibility = ["//tools/go_generics:__pkg__"],
-)
diff --git a/tools/go_generics/globals/globals_visitor.go b/tools/go_generics/globals/globals_visitor.go
deleted file mode 100644
index 883f21ebe..000000000
--- a/tools/go_generics/globals/globals_visitor.go
+++ /dev/null
@@ -1,597 +0,0 @@
-// Copyright 2018 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Package globals provides an AST visitor that calls the visit function for all
-// global identifiers.
-package globals
-
-import (
- "fmt"
-
- "go/ast"
- "go/token"
- "path/filepath"
- "strconv"
-)
-
-// globalsVisitor holds the state used while traversing the nodes of a file in
-// search of globals.
-//
-// The visitor does two passes on the global declarations: the first one adds
-// all globals to the global scope (since Go allows references to globals that
-// haven't been declared yet), and the second one calls f() for the definition
-// and uses of globals found in the first pass.
-//
-// The implementation correctly handles cases when globals are aliased by
-// locals; in such cases, f() is not called.
-type globalsVisitor struct {
- // file is the file whose nodes are being visited.
- file *ast.File
-
- // fset is the file set the file being visited belongs to.
- fset *token.FileSet
-
- // f is the visit function to be called when a global symbol is reached.
- f func(*ast.Ident, SymKind)
-
- // scope is the current scope as nodes are visited.
- scope *scope
-
- // processAnon indicates whether we should process anonymous struct fields.
- // It does not perform strict checking on parameter types that share the same name
- // as the global type and therefore will rename them as well.
- processAnon bool
-}
-
-// unexpected is called when an unexpected node appears in the AST. It dumps
-// the location of the associated token and panics because this should only
-// happen when there is a bug in the traversal code.
-func (v *globalsVisitor) unexpected(p token.Pos) {
- panic(fmt.Sprintf("Unable to parse at %v", v.fset.Position(p)))
-}
-
-// pushScope creates a new scope and pushes it to the top of the scope stack.
-func (v *globalsVisitor) pushScope() {
- v.scope = newScope(v.scope)
-}
-
-// popScope removes the scope created by the last call to pushScope.
-func (v *globalsVisitor) popScope() {
- v.scope = v.scope.outer
-}
-
-// visitType is called when an expression is known to be a type, for example,
-// on the first argument of make(). It visits all child nodes and reports
-// any globals.
-func (v *globalsVisitor) visitType(ge ast.Expr) {
- switch e := ge.(type) {
- case *ast.Ident:
- if s := v.scope.deepLookup(e.Name); s != nil && s.scope.isGlobal() {
- v.f(e, s.kind)
- }
-
- case *ast.SelectorExpr:
- id := GetIdent(e.X)
- if id == nil {
- v.unexpected(e.X.Pos())
- }
-
- case *ast.StarExpr:
- v.visitType(e.X)
- case *ast.ParenExpr:
- v.visitType(e.X)
- case *ast.ChanType:
- v.visitType(e.Value)
- case *ast.Ellipsis:
- v.visitType(e.Elt)
- case *ast.ArrayType:
- v.visitExpr(e.Len)
- v.visitType(e.Elt)
- case *ast.MapType:
- v.visitType(e.Key)
- v.visitType(e.Value)
- case *ast.StructType:
- v.visitFields(e.Fields, KindUnknown)
- case *ast.FuncType:
- v.visitFields(e.Params, KindUnknown)
- v.visitFields(e.Results, KindUnknown)
- case *ast.InterfaceType:
- v.visitFields(e.Methods, KindUnknown)
- default:
- v.unexpected(ge.Pos())
- }
-}
-
-// visitFields visits all fields, and adds symbols if kind isn't KindUnknown.
-func (v *globalsVisitor) visitFields(l *ast.FieldList, kind SymKind) {
- if l == nil {
- return
- }
-
- for _, f := range l.List {
- if kind != KindUnknown {
- for _, n := range f.Names {
- v.scope.add(n.Name, kind, n.Pos())
- }
- }
- v.visitType(f.Type)
- if f.Tag != nil {
- tag := ast.NewIdent(f.Tag.Value)
- v.f(tag, KindTag)
- // Replace the tag if updated.
- if tag.Name != f.Tag.Value {
- f.Tag.Value = tag.Name
- }
- }
- }
-}
-
-// visitGenDecl is called when a generic declaration is encountered, for example,
-// on variable, constant and type declarations. It adds all newly defined
-// symbols to the current scope and reports them if the current scope is the
-// global one.
-func (v *globalsVisitor) visitGenDecl(d *ast.GenDecl) {
- switch d.Tok {
- case token.IMPORT:
- case token.TYPE:
- for _, gs := range d.Specs {
- s := gs.(*ast.TypeSpec)
- v.scope.add(s.Name.Name, KindType, s.Name.Pos())
- if v.scope.isGlobal() {
- v.f(s.Name, KindType)
- }
- v.visitType(s.Type)
- }
- case token.CONST, token.VAR:
- kind := KindConst
- if d.Tok == token.VAR {
- kind = KindVar
- }
-
- for _, gs := range d.Specs {
- s := gs.(*ast.ValueSpec)
- if s.Type != nil {
- v.visitType(s.Type)
- }
-
- for _, e := range s.Values {
- v.visitExpr(e)
- }
-
- for _, n := range s.Names {
- if v.scope.isGlobal() {
- v.f(n, kind)
- }
- v.scope.add(n.Name, kind, n.Pos())
- }
- }
- default:
- v.unexpected(d.Pos())
- }
-}
-
-// isViableType determines if the given expression is a viable type expression,
-// that is, if it could be interpreted as a type, for example, sync.Mutex,
-// myType, func(int)int, as opposed to -1, 2 * 2, a + b, etc.
-func (v *globalsVisitor) isViableType(expr ast.Expr) bool {
- switch e := expr.(type) {
- case *ast.Ident:
- // This covers the plain identifier case. When we see it, we
- // have to check if it resolves to a type; if the symbol is not
- // known, we'll claim it's viable as a type.
- s := v.scope.deepLookup(e.Name)
- return s == nil || s.kind == KindType
-
- case *ast.ChanType, *ast.ArrayType, *ast.MapType, *ast.StructType, *ast.FuncType, *ast.InterfaceType, *ast.Ellipsis:
- // This covers the following cases:
- // 1. ChanType:
- // chan T
- // <-chan T
- // chan<- T
- // 2. ArrayType:
- // [Expr]T
- // 3. MapType:
- // map[T]U
- // 4. StructType:
- // struct { Fields }
- // 5. FuncType:
- // func(Fields)Returns
- // 6. Interface:
- // interface { Fields }
- // 7. Ellipsis:
- // ...T
- return true
-
- case *ast.SelectorExpr:
- // The only case in which an expression involving a selector can
- // be a type is if it has the following form X.T, where X is an
- // import, and T is a type exported by X.
- //
- // There's no way to know whether T is a type because we don't
- // parse imports. So we just claim that this is a viable type;
- // it doesn't affect the general result because we don't visit
- // imported symbols.
- id := GetIdent(e.X)
- if id == nil {
- return false
- }
-
- s := v.scope.deepLookup(id.Name)
- return s != nil && s.kind == KindImport
-
- case *ast.StarExpr:
- // This covers the *T case. The expression is a viable type if
- // T is.
- return v.isViableType(e.X)
-
- case *ast.ParenExpr:
- // This covers the (T) case. The expression is a viable type if
- // T is.
- return v.isViableType(e.X)
-
- default:
- return false
- }
-}
-
-// visitCallExpr visits a "call expression" which can be either a
-// function/method call (e.g., f(), pkg.f(), obj.f(), etc.) call or a type
-// conversion (e.g., int32(1), (*sync.Mutex)(ptr), etc.).
-func (v *globalsVisitor) visitCallExpr(e *ast.CallExpr) {
- if v.isViableType(e.Fun) {
- v.visitType(e.Fun)
- } else {
- v.visitExpr(e.Fun)
- }
-
- // If the function being called is new or make, the first argument is
- // a type, so it needs to be visited as such.
- first := 0
- if id := GetIdent(e.Fun); id != nil && (id.Name == "make" || id.Name == "new") {
- if len(e.Args) > 0 {
- v.visitType(e.Args[0])
- }
- first = 1
- }
-
- for i := first; i < len(e.Args); i++ {
- v.visitExpr(e.Args[i])
- }
-}
-
-// visitExpr visits all nodes of an expression, and reports any globals that it
-// finds.
-func (v *globalsVisitor) visitExpr(ge ast.Expr) {
- switch e := ge.(type) {
- case nil:
- case *ast.Ident:
- if s := v.scope.deepLookup(e.Name); s != nil && s.scope.isGlobal() {
- v.f(e, s.kind)
- }
-
- case *ast.BasicLit:
- case *ast.CompositeLit:
- v.visitType(e.Type)
- for _, ne := range e.Elts {
- v.visitExpr(ne)
- }
- case *ast.FuncLit:
- v.pushScope()
- v.visitFields(e.Type.Params, KindParameter)
- v.visitFields(e.Type.Results, KindResult)
- v.visitBlockStmt(e.Body)
- v.popScope()
-
- case *ast.BinaryExpr:
- v.visitExpr(e.X)
- v.visitExpr(e.Y)
-
- case *ast.CallExpr:
- v.visitCallExpr(e)
-
- case *ast.IndexExpr:
- v.visitExpr(e.X)
- v.visitExpr(e.Index)
-
- case *ast.KeyValueExpr:
- v.visitExpr(e.Value)
-
- case *ast.ParenExpr:
- v.visitExpr(e.X)
-
- case *ast.SelectorExpr:
- v.visitExpr(e.X)
- if v.processAnon {
- v.visitExpr(e.Sel)
- }
-
- case *ast.SliceExpr:
- v.visitExpr(e.X)
- v.visitExpr(e.Low)
- v.visitExpr(e.High)
- v.visitExpr(e.Max)
-
- case *ast.StarExpr:
- v.visitExpr(e.X)
-
- case *ast.TypeAssertExpr:
- v.visitExpr(e.X)
- if e.Type != nil {
- v.visitType(e.Type)
- }
-
- case *ast.UnaryExpr:
- v.visitExpr(e.X)
-
- default:
- v.unexpected(ge.Pos())
- }
-}
-
-// GetIdent returns the identifier associated with the given expression by
-// removing parentheses if needed.
-func GetIdent(expr ast.Expr) *ast.Ident {
- switch e := expr.(type) {
- case *ast.Ident:
- return e
- case *ast.ParenExpr:
- return GetIdent(e.X)
- default:
- return nil
- }
-}
-
-// visitStmt visits all nodes of a statement, and reports any globals that it
-// finds. It also adds newly defined/declared symbols to the current scope.
-func (v *globalsVisitor) visitStmt(gs ast.Stmt) {
- switch s := gs.(type) {
- case nil, *ast.BranchStmt, *ast.EmptyStmt:
- case *ast.AssignStmt:
- for _, e := range s.Rhs {
- v.visitExpr(e)
- }
-
- // We visit the LHS after the RHS because the symbols we'll
- // potentially add to the table aren't meant to be visible to
- // the RHS.
- for _, e := range s.Lhs {
- if s.Tok == token.DEFINE {
- if n := GetIdent(e); n != nil {
- v.scope.add(n.Name, KindVar, n.Pos())
- }
- }
- v.visitExpr(e)
- }
-
- case *ast.BlockStmt:
- v.visitBlockStmt(s)
-
- case *ast.DeclStmt:
- v.visitGenDecl(s.Decl.(*ast.GenDecl))
-
- case *ast.DeferStmt:
- v.visitCallExpr(s.Call)
-
- case *ast.ExprStmt:
- v.visitExpr(s.X)
-
- case *ast.ForStmt:
- v.pushScope()
- v.visitStmt(s.Init)
- v.visitExpr(s.Cond)
- v.visitStmt(s.Post)
- v.visitBlockStmt(s.Body)
- v.popScope()
-
- case *ast.GoStmt:
- v.visitCallExpr(s.Call)
-
- case *ast.IfStmt:
- v.pushScope()
- v.visitStmt(s.Init)
- v.visitExpr(s.Cond)
- v.visitBlockStmt(s.Body)
- v.visitStmt(s.Else)
- v.popScope()
-
- case *ast.IncDecStmt:
- v.visitExpr(s.X)
-
- case *ast.LabeledStmt:
- v.visitStmt(s.Stmt)
-
- case *ast.RangeStmt:
- v.pushScope()
- v.visitExpr(s.X)
- if s.Tok == token.DEFINE {
- if n := GetIdent(s.Key); n != nil {
- v.scope.add(n.Name, KindVar, n.Pos())
- }
-
- if n := GetIdent(s.Value); n != nil {
- v.scope.add(n.Name, KindVar, n.Pos())
- }
- }
- v.visitExpr(s.Key)
- v.visitExpr(s.Value)
- v.visitBlockStmt(s.Body)
- v.popScope()
-
- case *ast.ReturnStmt:
- for _, r := range s.Results {
- v.visitExpr(r)
- }
-
- case *ast.SelectStmt:
- for _, ns := range s.Body.List {
- c := ns.(*ast.CommClause)
-
- v.pushScope()
- v.visitStmt(c.Comm)
- for _, bs := range c.Body {
- v.visitStmt(bs)
- }
- v.popScope()
- }
-
- case *ast.SendStmt:
- v.visitExpr(s.Chan)
- v.visitExpr(s.Value)
-
- case *ast.SwitchStmt:
- v.pushScope()
- v.visitStmt(s.Init)
- v.visitExpr(s.Tag)
- for _, ns := range s.Body.List {
- c := ns.(*ast.CaseClause)
- v.pushScope()
- for _, ce := range c.List {
- v.visitExpr(ce)
- }
- for _, bs := range c.Body {
- v.visitStmt(bs)
- }
- v.popScope()
- }
- v.popScope()
-
- case *ast.TypeSwitchStmt:
- v.pushScope()
- v.visitStmt(s.Init)
- v.visitStmt(s.Assign)
- for _, ns := range s.Body.List {
- c := ns.(*ast.CaseClause)
- v.pushScope()
- for _, ce := range c.List {
- v.visitType(ce)
- }
- for _, bs := range c.Body {
- v.visitStmt(bs)
- }
- v.popScope()
- }
- v.popScope()
-
- default:
- v.unexpected(gs.Pos())
- }
-}
-
-// visitBlockStmt visits all statements in the block, adding symbols to a newly
-// created scope.
-func (v *globalsVisitor) visitBlockStmt(s *ast.BlockStmt) {
- v.pushScope()
- for _, c := range s.List {
- v.visitStmt(c)
- }
- v.popScope()
-}
-
-// visitFuncDecl is called when a function or method declaration is encountered.
-// it creates a new scope for the function [optional] receiver, parameters and
-// results, and visits all children nodes.
-func (v *globalsVisitor) visitFuncDecl(d *ast.FuncDecl) {
- // We don't report methods.
- if d.Recv == nil {
- v.f(d.Name, KindFunction)
- }
-
- v.pushScope()
- v.visitFields(d.Recv, KindReceiver)
- v.visitFields(d.Type.Params, KindParameter)
- v.visitFields(d.Type.Results, KindResult)
- if d.Body != nil {
- v.visitBlockStmt(d.Body)
- }
- v.popScope()
-}
-
-// globalsFromGenDecl is called in the first pass, and adds symbols to the global scope.
-func (v *globalsVisitor) globalsFromGenDecl(d *ast.GenDecl) {
- switch d.Tok {
- case token.IMPORT:
- for _, gs := range d.Specs {
- s := gs.(*ast.ImportSpec)
- if s.Name == nil {
- str, _ := strconv.Unquote(s.Path.Value)
- v.scope.add(filepath.Base(str), KindImport, s.Path.Pos())
- } else if s.Name.Name != "_" {
- v.scope.add(s.Name.Name, KindImport, s.Name.Pos())
- }
- }
- case token.TYPE:
- for _, gs := range d.Specs {
- s := gs.(*ast.TypeSpec)
- v.scope.add(s.Name.Name, KindType, s.Name.Pos())
- }
- case token.CONST, token.VAR:
- kind := KindConst
- if d.Tok == token.VAR {
- kind = KindVar
- }
-
- for _, s := range d.Specs {
- for _, n := range s.(*ast.ValueSpec).Names {
- v.scope.add(n.Name, kind, n.Pos())
- }
- }
- default:
- v.unexpected(d.Pos())
- }
-}
-
-// visit implements the visiting of globals. It performs the two passes
-// described in the globalsVisitor struct documentation.
-func (v *globalsVisitor) visit() {
- // Gather all symbols in the global scope. This excludes methods.
- v.pushScope()
- for _, gd := range v.file.Decls {
- switch d := gd.(type) {
- case *ast.GenDecl:
- v.globalsFromGenDecl(d)
- case *ast.FuncDecl:
- if d.Recv == nil {
- v.scope.add(d.Name.Name, KindFunction, d.Name.Pos())
- }
- default:
- v.unexpected(gd.Pos())
- }
- }
-
- // Go through the contents of the declarations.
- for _, gd := range v.file.Decls {
- switch d := gd.(type) {
- case *ast.GenDecl:
- v.visitGenDecl(d)
- case *ast.FuncDecl:
- v.visitFuncDecl(d)
- }
- }
-}
-
-// Visit traverses the provided AST and calls f() for each identifier that
-// refers to global names. The global name must be defined in the file itself.
-//
-// The function f() is allowed to modify the identifier, for example, to rename
-// uses of global references.
-func Visit(fset *token.FileSet, file *ast.File, f func(*ast.Ident, SymKind), processAnon bool) {
- v := globalsVisitor{
- fset: fset,
- file: file,
- f: f,
- processAnon: processAnon,
- }
-
- v.visit()
-}
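
A rough sketch of how the removed generics.go drove this visitor, written against the pre-removal globals.Visit API (the sample source and the "Impl" suffix are illustrative assumptions):

    package main

    import (
        "go/ast"
        "go/format"
        "go/parser"
        "go/token"
        "os"

        "gvisor.dev/gvisor/tools/go_generics/globals"
    )

    const src = `package p

    var counter int

    func bump() { counter++ }
    `

    func main() {
        fset := token.NewFileSet()
        f, err := parser.ParseFile(fset, "src.go", src, parser.ParseComments)
        if err != nil {
            panic(err)
        }
        // Append an "Impl" suffix to every global definition and use, which is
        // roughly what the deleted generics.go did with -suffix.
        globals.Visit(fset, f, func(ident *ast.Ident, kind globals.SymKind) {
            switch kind {
            case globals.KindType, globals.KindVar, globals.KindConst, globals.KindFunction:
                ident.Name += "Impl"
            }
        }, false)
        format.Node(os.Stdout, fset, f) // prints the file with counterImpl/bumpImpl
    }
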
diff --git a/tools/go_generics/globals/scope.go b/tools/go_generics/globals/scope.go
deleted file mode 100644
index eec93534b..000000000
--- a/tools/go_generics/globals/scope.go
+++ /dev/null
@@ -1,84 +0,0 @@
-// Copyright 2018 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package globals
-
-import (
- "go/token"
-)
-
-// SymKind specifies the kind of a global symbol. For example, a variable, const,
-// function, etc.
-type SymKind int
-
-// Constants for different kinds of symbols.
-const (
- KindUnknown SymKind = iota
- KindImport
- KindType
- KindVar
- KindConst
- KindFunction
- KindReceiver
- KindParameter
- KindResult
- KindTag
-)
-
-type symbol struct {
- kind SymKind
- pos token.Pos
- scope *scope
-}
-
-type scope struct {
- outer *scope
- syms map[string]*symbol
-}
-
-func newScope(outer *scope) *scope {
- return &scope{
- outer: outer,
- syms: make(map[string]*symbol),
- }
-}
-
-func (s *scope) isGlobal() bool {
- return s.outer == nil
-}
-
-func (s *scope) lookup(n string) *symbol {
- return s.syms[n]
-}
-
-func (s *scope) deepLookup(n string) *symbol {
- for x := s; x != nil; x = x.outer {
- if sym := x.lookup(n); sym != nil {
- return sym
- }
- }
- return nil
-}
-
-func (s *scope) add(name string, kind SymKind, pos token.Pos) {
- if s.syms[name] != nil {
- return
- }
-
- s.syms[name] = &symbol{
- kind: kind,
- pos: pos,
- scope: s,
- }
-}
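
A self-contained sketch of the scope chain above, with the unexported types copied and trimmed for illustration; deepLookup walks outward through enclosing scopes, which is how a shadowing local is told apart from the global it hides:

    package main

    import "fmt"

    type symbol struct{ scope *scope }

    type scope struct {
        outer *scope
        syms  map[string]*symbol
    }

    func newScope(outer *scope) *scope {
        return &scope{outer: outer, syms: make(map[string]*symbol)}
    }

    func (s *scope) isGlobal() bool { return s.outer == nil }

    func (s *scope) add(name string) {
        if s.syms[name] == nil {
            s.syms[name] = &symbol{scope: s}
        }
    }

    func (s *scope) deepLookup(n string) *symbol {
        for x := s; x != nil; x = x.outer {
            if sym := x.syms[n]; sym != nil {
                return sym
            }
        }
        return nil
    }

    func main() {
        global := newScope(nil)
        global.add("b")

        local := newScope(global)
        fmt.Println(local.deepLookup("b").scope.isGlobal()) // true: "b" resolves to the global

        local.add("b") // b := 0 shadows the global
        fmt.Println(local.deepLookup("b").scope.isGlobal()) // false: "b" now resolves to the local
    }
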
diff --git a/tools/go_generics/go_merge/BUILD b/tools/go_generics/go_merge/BUILD
deleted file mode 100644
index 2fd5a200d..000000000
--- a/tools/go_generics/go_merge/BUILD
+++ /dev/null
@@ -1,9 +0,0 @@
-load("//tools:defs.bzl", "go_binary")
-
-package(licenses = ["notice"])
-
-go_binary(
- name = "go_merge",
- srcs = ["main.go"],
- visibility = ["//:sandbox"],
-)
diff --git a/tools/go_generics/go_merge/main.go b/tools/go_generics/go_merge/main.go
deleted file mode 100644
index e0345500f..000000000
--- a/tools/go_generics/go_merge/main.go
+++ /dev/null
@@ -1,141 +0,0 @@
-// Copyright 2018 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package main
-
-import (
- "bytes"
- "flag"
- "fmt"
- "go/ast"
- "go/format"
- "go/parser"
- "go/token"
- "io/ioutil"
- "os"
- "path/filepath"
- "strconv"
-)
-
-var (
- output = flag.String("o", "", "output `file`")
-)
-
-func fatalf(s string, args ...interface{}) {
- fmt.Fprintf(os.Stderr, s, args...)
- os.Exit(1)
-}
-
-func main() {
- flag.Usage = func() {
- fmt.Fprintf(os.Stderr, "Usage: %s [options] <input1> [<input2> ...]\n", os.Args[0])
- flag.PrintDefaults()
- }
-
- flag.Parse()
- if *output == "" || len(flag.Args()) == 0 {
- flag.Usage()
- os.Exit(1)
- }
-
- // Load all files.
- files := make(map[string]*ast.File)
- fset := token.NewFileSet()
- var name string
- for _, fname := range flag.Args() {
- f, err := parser.ParseFile(fset, fname, nil, parser.ParseComments|parser.DeclarationErrors|parser.SpuriousErrors)
- if err != nil {
- fatalf("%v\n", err)
- }
-
- files[fname] = f
- if name == "" {
- name = f.Name.Name
- } else if name != f.Name.Name {
- fatalf("Expected '%s' for package name instead of '%s'.\n", name, f.Name.Name)
- }
- }
-
- // Merge all files into one.
- pkg := &ast.Package{
- Name: name,
- Files: files,
- }
- f := ast.MergePackageFiles(pkg, ast.FilterUnassociatedComments|ast.FilterFuncDuplicates|ast.FilterImportDuplicates)
-
- // Create a new declaration slice with all imports at the top, merging any
- // redundant imports.
- imports := make(map[string]*ast.ImportSpec)
- var importNames []string // Keep imports in the original order to get deterministic output.
- var anonImports []*ast.ImportSpec
- for _, d := range f.Decls {
- if g, ok := d.(*ast.GenDecl); ok && g.Tok == token.IMPORT {
- for _, s := range g.Specs {
- i := s.(*ast.ImportSpec)
- p, _ := strconv.Unquote(i.Path.Value)
- var n string
- if i.Name == nil {
- n = filepath.Base(p)
- } else {
- n = i.Name.Name
- }
- if n == "_" {
- anonImports = append(anonImports, i)
- } else {
- if i2, ok := imports[n]; ok {
- if first, second := i.Path.Value, i2.Path.Value; first != second {
- fatalf("Conflicting paths for import name '%s': '%s' vs. '%s'\n", n, first, second)
- }
- } else {
- imports[n] = i
- importNames = append(importNames, n)
- }
- }
- }
- }
- }
- newDecls := make([]ast.Decl, 0, len(f.Decls))
- if l := len(imports) + len(anonImports); l > 0 {
- // Non-NoPos Lparen is needed for Go to recognize more than one spec in
- // ast.GenDecl.Specs.
- d := &ast.GenDecl{
- Tok: token.IMPORT,
- Lparen: token.NoPos + 1,
- Specs: make([]ast.Spec, 0, l),
- }
- for _, i := range importNames {
- d.Specs = append(d.Specs, imports[i])
- }
- for _, i := range anonImports {
- d.Specs = append(d.Specs, i)
- }
- newDecls = append(newDecls, d)
- }
- for _, d := range f.Decls {
- if g, ok := d.(*ast.GenDecl); !ok || g.Tok != token.IMPORT {
- newDecls = append(newDecls, d)
- }
- }
- f.Decls = newDecls
-
- // Write the output file.
- var buf bytes.Buffer
- if err := format.Node(&buf, fset, f); err != nil {
- fatalf("%v\n", err)
- }
-
- if err := ioutil.WriteFile(*output, buf.Bytes(), 0644); err != nil {
- fatalf("%v\n", err)
- }
-}
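
The core of the removed tool is the standard-library ast.MergePackageFiles call; a minimal sketch of that step with made-up file contents (the import-block rewriting layered on top is omitted):

    package main

    import (
        "go/ast"
        "go/format"
        "go/parser"
        "go/token"
        "os"
    )

    const src1 = `package p

    func A() int { return 1 }
    `

    const src2 = `package p

    func B() int { return 2 }
    `

    func main() {
        fset := token.NewFileSet()
        f1, _ := parser.ParseFile(fset, "a.go", src1, parser.ParseComments)
        f2, _ := parser.ParseFile(fset, "b.go", src2, parser.ParseComments)

        pkg := &ast.Package{
            Name:  "p",
            Files: map[string]*ast.File{"a.go": f1, "b.go": f2},
        }
        merged := ast.MergePackageFiles(pkg, ast.FilterUnassociatedComments|ast.FilterFuncDuplicates|ast.FilterImportDuplicates)
        format.Node(os.Stdout, fset, merged) // a single file containing both A and B
    }
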
diff --git a/tools/go_generics/imports.go b/tools/go_generics/imports.go
deleted file mode 100644
index 90d3aa1e0..000000000
--- a/tools/go_generics/imports.go
+++ /dev/null
@@ -1,158 +0,0 @@
-// Copyright 2018 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package main
-
-import (
- "bytes"
- "fmt"
- "go/ast"
- "go/format"
- "go/parser"
- "go/token"
- "sort"
- "strconv"
-
- "gvisor.dev/gvisor/tools/go_generics/globals"
-)
-
-type importedPackage struct {
- newName string
- path string
-}
-
-// updateImportIdent modifies the given import identifier with the new name
-// stored in the used map. If the identifier doesn't exist in the used map yet,
-// a new name is generated and inserted into the map.
-func updateImportIdent(orig string, imports mapValue, id *ast.Ident, used map[string]*importedPackage) error {
- importName := id.Name
-
- // If the name is already in the table, just use the new name.
- m := used[importName]
- if m != nil {
- id.Name = m.newName
- return nil
- }
-
- // Create a new entry in the used map.
- path := imports[importName]
- if path == "" {
- return fmt.Errorf("Unknown path to package '%s', used in '%s'", importName, orig)
- }
-
- m = &importedPackage{
- newName: fmt.Sprintf("__generics_imported%d", len(used)),
- path: strconv.Quote(path),
- }
- used[importName] = m
-
- id.Name = m.newName
-
- return nil
-}
-
-// convertExpression creates a new string that is a copy of the input one with
-// all import references renamed to the names in the "used" map. If the
-// referenced import isn't in "used" yet, a new one is created based on the path
-// in "imports" and stored in "used". For example, if string s is
-// "math.MaxUint32-math.MaxUint16+10", it would be converted to
-// "x.MaxUint32-x.MathUint16+10", where x is a generated name.
-func convertExpression(s string, imports mapValue, used map[string]*importedPackage) (string, error) {
- // Parse the expression in the input string.
- expr, err := parser.ParseExpr(s)
- if err != nil {
- return "", fmt.Errorf("Unable to parse \"%s\": %v", s, err)
- }
-
- // Go through the AST and update references.
- var retErr error
- ast.Inspect(expr, func(n ast.Node) bool {
- switch x := n.(type) {
- case *ast.SelectorExpr:
- if id := globals.GetIdent(x.X); id != nil {
- if err := updateImportIdent(s, imports, id, used); err != nil {
- retErr = err
- }
- return false
- }
- }
- return true
- })
- if retErr != nil {
- return "", retErr
- }
-
- // Convert the modified AST back to a string.
- fset := token.NewFileSet()
- var buf bytes.Buffer
- if err := format.Node(&buf, fset, expr); err != nil {
- return "", err
- }
-
- return string(buf.Bytes()), nil
-}
-
-// updateImports replaces all maps in the input slice with copies where the
-// mapped values have had all references to imported packages renamed to
-// generated names. It also returns an import declaration for all the renamed
-// import packages.
-//
-// For example, if the input maps contain A=math.B and C=math.D, the updated
-// maps will instead contain A=__generics_imported0.B and
-// C=__generics_imported0.D, and the 'import __generics_imported0 "math"' would
-// be returned as the import declaration.
-func updateImports(maps []mapValue, imports mapValue) (ast.Decl, error) {
- importsUsed := make(map[string]*importedPackage)
-
- // Update all maps.
- for i, m := range maps {
- newMap := make(mapValue)
- for n, e := range m {
- updated, err := convertExpression(e, imports, importsUsed)
- if err != nil {
- return nil, err
- }
-
- newMap[n] = updated
- }
- maps[i] = newMap
- }
-
- // Nothing else to do if no imports are used in the expressions.
- if len(importsUsed) == 0 {
- return nil, nil
- }
- var names []string
- for n := range importsUsed {
- names = append(names, n)
- }
- // Sort the new imports for deterministic build outputs.
- sort.Strings(names)
-
- // Create spec array for each new import.
- specs := make([]ast.Spec, 0, len(importsUsed))
- for _, n := range names {
- i := importsUsed[n]
- specs = append(specs, &ast.ImportSpec{
- Name: &ast.Ident{Name: i.newName},
- Path: &ast.BasicLit{Value: i.path},
- })
- }
-
- return &ast.GenDecl{
- Tok: token.IMPORT,
- Specs: specs,
- Lparen: token.NoPos + 1,
- }, nil
-}
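
A stripped-down sketch of what convertExpression did, using the same go/parser, ast.Inspect, and go/format calls on a hard-coded expression; the rename table stands in for the generated __generics_importedN names:

    package main

    import (
        "bytes"
        "fmt"
        "go/ast"
        "go/format"
        "go/parser"
        "go/token"
    )

    func main() {
        expr, err := parser.ParseExpr("math.MaxUint32 - math.MaxUint16 + 10")
        if err != nil {
            panic(err)
        }

        // Rewrite package qualifiers to generated import names, as the deleted
        // convertExpression did for the -t/-c expressions.
        renames := map[string]string{"math": "__generics_imported0"}
        ast.Inspect(expr, func(n ast.Node) bool {
            if sel, ok := n.(*ast.SelectorExpr); ok {
                if id, ok := sel.X.(*ast.Ident); ok {
                    if newName, ok := renames[id.Name]; ok {
                        id.Name = newName
                    }
                    return false
                }
            }
            return true
        })

        var buf bytes.Buffer
        if err := format.Node(&buf, token.NewFileSet(), expr); err != nil {
            panic(err)
        }
        fmt.Println(buf.String()) // __generics_imported0.MaxUint32 - __generics_imported0.MaxUint16 + 10
    }
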
diff --git a/tools/go_generics/remove.go b/tools/go_generics/remove.go
deleted file mode 100644
index 568a6bbd3..000000000
--- a/tools/go_generics/remove.go
+++ /dev/null
@@ -1,105 +0,0 @@
-// Copyright 2018 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package main
-
-import (
- "go/ast"
- "go/token"
-)
-
-type typeSet map[string]struct{}
-
-// isTypeOrPointerToType determines if the given AST expression represents a
-// type or a pointer to a type that exists in the provided type set.
-func isTypeOrPointerToType(set typeSet, expr ast.Expr, starCount int) bool {
- switch e := expr.(type) {
- case *ast.Ident:
- _, ok := set[e.Name]
- return ok
- case *ast.StarExpr:
- if starCount > 1 {
- return false
- }
- return isTypeOrPointerToType(set, e.X, starCount+1)
- case *ast.ParenExpr:
- return isTypeOrPointerToType(set, e.X, starCount)
- default:
- return false
- }
-}
-
-// isMethodOf determines if the given function declaration is a method of one
-// of the types in the provided type set. To do that, it checks if the function
-// has a receiver and that its type is either T or *T, where T is a type that
-// exists in the set. This is per the spec:
-//
-// That parameter section must declare a single parameter, the receiver. Its
-// type must be of the form T or *T (possibly using parentheses) where T is a
-// type name. The type denoted by T is called the receiver base type; it must
-// not be a pointer or interface type and it must be declared in the same
-// package as the method.
-func isMethodOf(set typeSet, f *ast.FuncDecl) bool {
- // If the function doesn't have exactly one receiver, then it's
- // definitely not a method.
- if f.Recv == nil || len(f.Recv.List) != 1 {
- return false
- }
-
- return isTypeOrPointerToType(set, f.Recv.List[0].Type, 0)
-}
-
-// removeTypeDefinitions removes the definition of all types contained in the
-// provided type set.
-func removeTypeDefinitions(set typeSet, d *ast.GenDecl) {
- if d.Tok != token.TYPE {
- return
- }
-
- i := 0
- for _, gs := range d.Specs {
- s := gs.(*ast.TypeSpec)
- if _, ok := set[s.Name.Name]; !ok {
- d.Specs[i] = gs
- i++
- }
- }
-
- d.Specs = d.Specs[:i]
-}
-
-// removeTypes removes from the AST the definition of all types and their
-// method sets that are contained in the provided type set.
-func removeTypes(set typeSet, f *ast.File) {
- // Go through the top-level declarations.
- i := 0
- for _, decl := range f.Decls {
- keep := true
- switch d := decl.(type) {
- case *ast.GenDecl:
- countBefore := len(d.Specs)
- removeTypeDefinitions(set, d)
- keep = countBefore == 0 || len(d.Specs) > 0
- case *ast.FuncDecl:
- keep = !isMethodOf(set, d)
- }
-
- if keep {
- f.Decls[i] = decl
- i++
- }
- }
-
- f.Decls = f.Decls[:i]
-}
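
A sketch of the receiver check that isMethodOf performed, on a tiny made-up source file; like the removed isTypeOrPointerToType, it unwraps at most one '*' and any parentheses before comparing the base type name:

    package main

    import (
        "fmt"
        "go/ast"
        "go/parser"
        "go/token"
    )

    const src = `package p

    type A struct{}

    func (*A) M() {}

    func F() {}
    `

    // receiverBase unwraps at most one '*' and any parentheses, the same shape
    // check the deleted isTypeOrPointerToType performed for receivers.
    func receiverBase(e ast.Expr) string {
        switch x := e.(type) {
        case *ast.Ident:
            return x.Name
        case *ast.StarExpr:
            if id, ok := x.X.(*ast.Ident); ok {
                return id.Name
            }
        case *ast.ParenExpr:
            return receiverBase(x.X)
        }
        return ""
    }

    func main() {
        fset := token.NewFileSet()
        f, err := parser.ParseFile(fset, "src.go", src, 0)
        if err != nil {
            panic(err)
        }
        for _, d := range f.Decls {
            if fd, ok := d.(*ast.FuncDecl); ok {
                isMethodOfA := fd.Recv != nil && len(fd.Recv.List) == 1 &&
                    receiverBase(fd.Recv.List[0].Type) == "A"
                fmt.Printf("%s: method of A = %v\n", fd.Name.Name, isMethodOfA)
            }
        }
    }
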
diff --git a/tools/go_generics/rules_tests/BUILD b/tools/go_generics/rules_tests/BUILD
deleted file mode 100644
index 8a329dfc6..000000000
--- a/tools/go_generics/rules_tests/BUILD
+++ /dev/null
@@ -1,43 +0,0 @@
-load("//tools:defs.bzl", "go_test")
-load("//tools/go_generics:defs.bzl", "go_template", "go_template_instance")
-
-package(licenses = ["notice"])
-
-go_template_instance(
- name = "instance",
- out = "instance_test.go",
- consts = {
- "n": "20",
- "m": "\"test\"",
- "o": "math.MaxUint64",
- },
- imports = {
- "math": "math",
- },
- package = "template_test",
- template = ":test_template",
- types = {
- "t": "int",
- },
-)
-
-go_template(
- name = "test_template",
- srcs = [
- "template.go",
- ],
- opt_consts = [
- "n",
- "m",
- "o",
- ],
- opt_types = ["t"],
-)
-
-go_test(
- name = "template_test",
- srcs = [
- "instance_test.go",
- "template_test.go",
- ],
-)
diff --git a/tools/go_generics/rules_tests/template.go b/tools/go_generics/rules_tests/template.go
deleted file mode 100644
index aace61da1..000000000
--- a/tools/go_generics/rules_tests/template.go
+++ /dev/null
@@ -1,42 +0,0 @@
-// Copyright 2018 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package template
-
-type t float
-
-const (
- n t = 10.1
- m = "abc"
- o = 0
-)
-
-func max(a, b t) t {
- if a > b {
- return a
- }
- return b
-}
-
-func add(a t) t {
- return a + n
-}
-
-func getName() string {
- return m
-}
-
-func getMax() uint64 {
- return o
-}
diff --git a/tools/go_generics/rules_tests/template_test.go b/tools/go_generics/rules_tests/template_test.go
deleted file mode 100644
index b2a3446ef..000000000
--- a/tools/go_generics/rules_tests/template_test.go
+++ /dev/null
@@ -1,48 +0,0 @@
-// Copyright 2018 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package template_test
-
-import (
- "math"
- "testing"
-)
-
-func TestMax(t *testing.T) {
- var a int = max(10, 20)
- if a != 20 {
- t.Errorf("Bad result of max, got %v, want %v", a, 20)
- }
-}
-
-func TestIntConst(t *testing.T) {
- var a int = add(10)
- if a != 30 {
- t.Errorf("Bad result of add, got %v, want %v", a, 30)
- }
-}
-
-func TestStrConst(t *testing.T) {
- v := getName()
- if v != "test" {
- t.Errorf("Bad name, got %v, want %v", v, "test")
- }
-}
-
-func TestImport(t *testing.T) {
- v := getMax()
- if v != math.MaxUint64 {
- t.Errorf("Bad max value, got %v, want %v", v, uint64(math.MaxUint64))
- }
-}
diff --git a/tools/go_generics/tests/BUILD b/tools/go_generics/tests/BUILD
deleted file mode 100644
index 7547a6b53..000000000
--- a/tools/go_generics/tests/BUILD
+++ /dev/null
@@ -1,7 +0,0 @@
-load("//tools:defs.bzl", "bzl_library")
-
-bzl_library(
- name = "defs_bzl",
- srcs = ["defs.bzl"],
- visibility = ["//visibility:private"],
-)
diff --git a/tools/go_generics/tests/all_stmts/BUILD b/tools/go_generics/tests/all_stmts/BUILD
deleted file mode 100644
index a4a7c775a..000000000
--- a/tools/go_generics/tests/all_stmts/BUILD
+++ /dev/null
@@ -1,16 +0,0 @@
-load("//tools/go_generics/tests:defs.bzl", "go_generics_test")
-
-go_generics_test(
- name = "all_stmts",
- inputs = ["input.go"],
- output = "output.go",
- types = {
- "T": "Q",
- },
-)
-
-# @unused
-glaze_ignore = [
- "input.go",
- "output.go",
-]
diff --git a/tools/go_generics/tests/all_stmts/input.go b/tools/go_generics/tests/all_stmts/input.go
deleted file mode 100644
index 4791d1ff1..000000000
--- a/tools/go_generics/tests/all_stmts/input.go
+++ /dev/null
@@ -1,290 +0,0 @@
-// Copyright 2018 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package tests
-
-import (
- "sync"
-)
-
-type T int
-
-func h(T) {
-}
-
-type s struct {
- a, b int
- c []int
-}
-
-func g(T) *s {
- return &s{}
-}
-
-func f() (T, []int) {
- // Branch.
- goto T
- goto R
-
- // Labeled.
-T:
- _ = T(0)
-
- // Empty.
-R:
- ;
-
- // Assignment with definition.
- a, b, c := T(1), T(2), T(3)
- _, _, _ = a, b, c
-
- // Assignment without definition.
- g(T(0)).a, g(T(1)).b, c = int(T(1)), int(T(2)), T(3)
- _, _, _ = a, b, c
-
- // Block.
- {
- var T T
- T = 0
- _ = T
- }
-
- // Declarations.
- type Type T
- const Const T = 10
- var g1 func(T, int, ...T) (int, T)
- var v T
- var w = T(0)
- {
- var T struct {
- f []T
- }
- _ = T
- }
-
- // Defer.
- defer g1(T(0), 1)
-
- // Expression.
- h(v + w + T(1))
-
- // For statements.
- for i := T(0); i < T(10); i++ {
- var T func(int) T
- v := T(0)
- _ = v
- }
-
- for {
- var T func(int) T
- v := T(0)
- _ = v
- }
-
- // Go.
- go g1(T(0), 1)
-
- // If statements.
- if a != T(1) {
- var T func(int) T
- v := T(0)
- _ = v
- }
-
- if a := T(0); a != T(1) {
- var T func(int) T
- v := T(0)
- _ = v
- }
-
- if a := T(0); a != T(1) {
- var T func(int) T
- v := T(0)
- _ = v
- } else if b := T(0); b != T(1) {
- var T func(int) T
- v := T(0)
- _ = v
- } else if T := T(0); T != 1 {
- T++
- } else {
- T--
- }
-
- if a := T(0); a != T(1) {
- var T func(int) T
- v := T(0)
- _ = v
- } else {
- var T func(int) T
- v := T(0)
- _ = v
- }
-
- // Inc/Dec statements.
- (*(*T)(nil))++
- (*(*T)(nil))--
-
- // Range statements.
- for g(T(0)).a, g(T(1)).b = range g(T(10)).c {
- var d T
- _ = d
- }
-
- for T, b := range g(T(10)).c {
- _ = T
- _ = b
- }
-
- // Select statement.
- {
- var fch func(T) chan int
-
- select {
- case <-fch(T(30)):
- var T T
- T = 0
- _ = T
- default:
- var T T
- T = 0
- _ = T
- case T := <-fch(T(30)):
- T = 0
- _ = T
- case g(T(0)).a = <-fch(T(30)):
- var T T
- T = 0
- _ = T
- case fch(T(30)) <- int(T(0)):
- var T T
- T = 0
- _ = T
- }
- }
-
- // Send statements.
- {
- var ch chan T
- var fch func(T) chan int
-
- ch <- T(0)
- fch(T(1)) <- g(T(10)).a
- }
-
- // Switch statements.
- {
- var a T
- var b int
- switch {
- case a == T(0):
- var T T
- T = 0
- _ = T
- case a < T(0), b < g(T(10)).a:
- var T T
- T = 0
- _ = T
- default:
- var T T
- T = 0
- _ = T
- }
- }
-
- switch T(g(T(10)).a) {
- case T(0):
- var T T
- T = 0
- _ = T
- case T(1), T(g(T(10)).a):
- var T T
- T = 0
- _ = T
- default:
- var T T
- T = 0
- _ = T
- }
-
- switch b := g(T(10)); T(b.a) + T(10) {
- case T(0):
- var T T
- T = 0
- _ = T
- case T(1), T(g(T(10)).a):
- var T T
- T = 0
- _ = T
- default:
- var T T
- T = 0
- _ = T
- }
-
- // Type switch statements.
- {
- var interfaceFunc func(T) interface{}
-
- switch interfaceFunc(T(0)).(type) {
- case *T, T, int:
- var T T
- T = 0
- _ = T
- case sync.Mutex, **T:
- var T T
- T = 0
- _ = T
- default:
- var T T
- T = 0
- _ = T
- }
-
- switch x := interfaceFunc(T(0)).(type) {
- case *T, T, int:
- var T T
- T = 0
- _ = T
- _ = x
- case sync.Mutex, **T:
- var T T
- T = 0
- _ = T
- default:
- var T T
- T = 0
- _ = T
- }
-
- switch t := T(0); x := interfaceFunc(T(0) + t).(type) {
- case *T, T, int:
- var T T
- T = 0
- _ = T
- _ = x
- case sync.Mutex, **T:
- var T T
- T = 0
- _ = T
- default:
- var T T
- T = 0
- _ = T
- }
- }
-
- // Return statement.
- return T(10), g(T(11)).c
-}
diff --git a/tools/go_generics/tests/all_stmts/output.go b/tools/go_generics/tests/all_stmts/output.go
deleted file mode 100644
index a53d84535..000000000
--- a/tools/go_generics/tests/all_stmts/output.go
+++ /dev/null
@@ -1,288 +0,0 @@
-// Copyright 2018 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package main
-
-import (
- "sync"
-)
-
-func h(Q) {
-}
-
-type s struct {
- a, b int
- c []int
-}
-
-func g(Q) *s {
- return &s{}
-}
-
-func f() (Q, []int) {
- // Branch.
- goto T
- goto R
-
- // Labeled.
-T:
- _ = Q(0)
-
- // Empty.
-R:
- ;
-
- // Assignment with definition.
- a, b, c := Q(1), Q(2), Q(3)
- _, _, _ = a, b, c
-
- // Assignment without definition.
- g(Q(0)).a, g(Q(1)).b, c = int(Q(1)), int(Q(2)), Q(3)
- _, _, _ = a, b, c
-
- // Block.
- {
- var T Q
- T = 0
- _ = T
- }
-
- // Declarations.
- type Type Q
- const Const Q = 10
- var g1 func(Q, int, ...Q) (int, Q)
- var v Q
- var w = Q(0)
- {
- var T struct {
- f []Q
- }
- _ = T
- }
-
- // Defer.
- defer g1(Q(0), 1)
-
- // Expression.
- h(v + w + Q(1))
-
- // For statements.
- for i := Q(0); i < Q(10); i++ {
- var T func(int) Q
- v := T(0)
- _ = v
- }
-
- for {
- var T func(int) Q
- v := T(0)
- _ = v
- }
-
- // Go.
- go g1(Q(0), 1)
-
- // If statements.
- if a != Q(1) {
- var T func(int) Q
- v := T(0)
- _ = v
- }
-
- if a := Q(0); a != Q(1) {
- var T func(int) Q
- v := T(0)
- _ = v
- }
-
- if a := Q(0); a != Q(1) {
- var T func(int) Q
- v := T(0)
- _ = v
- } else if b := Q(0); b != Q(1) {
- var T func(int) Q
- v := T(0)
- _ = v
- } else if T := Q(0); T != 1 {
- T++
- } else {
- T--
- }
-
- if a := Q(0); a != Q(1) {
- var T func(int) Q
- v := T(0)
- _ = v
- } else {
- var T func(int) Q
- v := T(0)
- _ = v
- }
-
- // Inc/Dec statements.
- (*(*Q)(nil))++
- (*(*Q)(nil))--
-
- // Range statements.
- for g(Q(0)).a, g(Q(1)).b = range g(Q(10)).c {
- var d Q
- _ = d
- }
-
- for T, b := range g(Q(10)).c {
- _ = T
- _ = b
- }
-
- // Select statement.
- {
- var fch func(Q) chan int
-
- select {
- case <-fch(Q(30)):
- var T Q
- T = 0
- _ = T
- default:
- var T Q
- T = 0
- _ = T
- case T := <-fch(Q(30)):
- T = 0
- _ = T
- case g(Q(0)).a = <-fch(Q(30)):
- var T Q
- T = 0
- _ = T
- case fch(Q(30)) <- int(Q(0)):
- var T Q
- T = 0
- _ = T
- }
- }
-
- // Send statements.
- {
- var ch chan Q
- var fch func(Q) chan int
-
- ch <- Q(0)
- fch(Q(1)) <- g(Q(10)).a
- }
-
- // Switch statements.
- {
- var a Q
- var b int
- switch {
- case a == Q(0):
- var T Q
- T = 0
- _ = T
- case a < Q(0), b < g(Q(10)).a:
- var T Q
- T = 0
- _ = T
- default:
- var T Q
- T = 0
- _ = T
- }
- }
-
- switch Q(g(Q(10)).a) {
- case Q(0):
- var T Q
- T = 0
- _ = T
- case Q(1), Q(g(Q(10)).a):
- var T Q
- T = 0
- _ = T
- default:
- var T Q
- T = 0
- _ = T
- }
-
- switch b := g(Q(10)); Q(b.a) + Q(10) {
- case Q(0):
- var T Q
- T = 0
- _ = T
- case Q(1), Q(g(Q(10)).a):
- var T Q
- T = 0
- _ = T
- default:
- var T Q
- T = 0
- _ = T
- }
-
- // Type switch statements.
- {
- var interfaceFunc func(Q) interface{}
-
- switch interfaceFunc(Q(0)).(type) {
- case *Q, Q, int:
- var T Q
- T = 0
- _ = T
- case sync.Mutex, **Q:
- var T Q
- T = 0
- _ = T
- default:
- var T Q
- T = 0
- _ = T
- }
-
- switch x := interfaceFunc(Q(0)).(type) {
- case *Q, Q, int:
- var T Q
- T = 0
- _ = T
- _ = x
- case sync.Mutex, **Q:
- var T Q
- T = 0
- _ = T
- default:
- var T Q
- T = 0
- _ = T
- }
-
- switch t := Q(0); x := interfaceFunc(Q(0) + t).(type) {
- case *Q, Q, int:
- var T Q
- T = 0
- _ = T
- _ = x
- case sync.Mutex, **Q:
- var T Q
- T = 0
- _ = T
- default:
- var T Q
- T = 0
- _ = T
- }
- }
-
- // Return statement.
- return Q(10), g(Q(11)).c
-}
diff --git a/tools/go_generics/tests/all_types/BUILD b/tools/go_generics/tests/all_types/BUILD
deleted file mode 100644
index 60b1fd314..000000000
--- a/tools/go_generics/tests/all_types/BUILD
+++ /dev/null
@@ -1,16 +0,0 @@
-load("//tools/go_generics/tests:defs.bzl", "go_generics_test")
-
-go_generics_test(
- name = "all_types",
- inputs = ["input.go"],
- output = "output.go",
- types = {
- "T": "Q",
- },
-)
-
-# @unused
-glaze_ignore = [
- "input.go",
- "output.go",
-]
diff --git a/tools/go_generics/tests/all_types/input.go b/tools/go_generics/tests/all_types/input.go
deleted file mode 100644
index 6f85bbb69..000000000
--- a/tools/go_generics/tests/all_types/input.go
+++ /dev/null
@@ -1,45 +0,0 @@
-// Copyright 2018 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package tests
-
-import (
- "./lib"
-)
-
-type T int
-
-type newType struct {
- a T
- b lib.T
- c *T
- d (T)
- e chan T
- f <-chan T
- g chan<- T
- h []T
- i [10]T
- j map[T]T
- k func(T, T) (T, T)
- l interface {
- f(T)
- }
- m struct {
- T
- a T
- }
-}
-
-func f(...T) {
-}
diff --git a/tools/go_generics/tests/all_types/lib/lib.go b/tools/go_generics/tests/all_types/lib/lib.go
deleted file mode 100644
index 988786496..000000000
--- a/tools/go_generics/tests/all_types/lib/lib.go
+++ /dev/null
@@ -1,17 +0,0 @@
-// Copyright 2018 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package lib
-
-type T int32
diff --git a/tools/go_generics/tests/all_types/output.go b/tools/go_generics/tests/all_types/output.go
deleted file mode 100644
index c0bbebfe7..000000000
--- a/tools/go_generics/tests/all_types/output.go
+++ /dev/null
@@ -1,43 +0,0 @@
-// Copyright 2018 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package main
-
-import (
- "./lib"
-)
-
-type newType struct {
- a Q
- b lib.T
- c *Q
- d (Q)
- e chan Q
- f <-chan Q
- g chan<- Q
- h []Q
- i [10]Q
- j map[Q]Q
- k func(Q, Q) (Q, Q)
- l interface {
- f(Q)
- }
- m struct {
- Q
- a Q
- }
-}
-
-func f(...Q) {
-}
diff --git a/tools/go_generics/tests/anon/BUILD b/tools/go_generics/tests/anon/BUILD
deleted file mode 100644
index ef24f4b25..000000000
--- a/tools/go_generics/tests/anon/BUILD
+++ /dev/null
@@ -1,18 +0,0 @@
-load("//tools/go_generics/tests:defs.bzl", "go_generics_test")
-
-go_generics_test(
- name = "anon",
- anon = True,
- inputs = ["input.go"],
- output = "output.go",
- suffix = "New",
- types = {
- "T": "Q",
- },
-)
-
-# @unused
-glaze_ignore = [
- "input.go",
- "output.go",
-]
diff --git a/tools/go_generics/tests/anon/input.go b/tools/go_generics/tests/anon/input.go
deleted file mode 100644
index 44086d522..000000000
--- a/tools/go_generics/tests/anon/input.go
+++ /dev/null
@@ -1,46 +0,0 @@
-// Copyright 2019 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package tests
-
-type T interface {
- Apply(T) T
-}
-
-type Foo struct {
- T
- Bar map[string]T `json:"bar,omitempty"`
-}
-
-type Baz struct {
- T someTypeNotT
-}
-
-func (f Foo) GetBar(name string) T {
- b, ok := f.Bar[name]
- if ok {
- b = f.Apply(b)
- } else {
- b = f.T
- }
- return b
-}
-
-func foobar() {
- a := Baz{}
- a.T = 0 // should not be renamed, this is a limitation
-
- b := otherpkg.UnrelatedType{}
- b.T = 0 // should not be renamed, this is a limitation
-}
diff --git a/tools/go_generics/tests/anon/output.go b/tools/go_generics/tests/anon/output.go
deleted file mode 100644
index 7fa791853..000000000
--- a/tools/go_generics/tests/anon/output.go
+++ /dev/null
@@ -1,42 +0,0 @@
-// Copyright 2019 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package main
-
-type FooNew struct {
- Q
- Bar map[string]Q `json:"bar,omitempty"`
-}
-
-type BazNew struct {
- T someTypeNotT
-}
-
-func (f FooNew) GetBar(name string) Q {
- b, ok := f.Bar[name]
- if ok {
- b = f.Apply(b)
- } else {
- b = f.Q
- }
- return b
-}
-
-func foobarNew() {
- a := BazNew{}
- a.Q = 0
-
- b := otherpkg.UnrelatedType{}
- b.Q = 0
-}
diff --git a/tools/go_generics/tests/consts/BUILD b/tools/go_generics/tests/consts/BUILD
deleted file mode 100644
index fd7caccad..000000000
--- a/tools/go_generics/tests/consts/BUILD
+++ /dev/null
@@ -1,23 +0,0 @@
-load("//tools/go_generics/tests:defs.bzl", "go_generics_test")
-
-go_generics_test(
- name = "consts",
- consts = {
- "c1": "20",
- "z": "600",
- "v": "3.3",
- "s": "\"def\"",
- "A": "20",
- "C": "100",
- "S": "\"def\"",
- "T": "\"ABC\"",
- },
- inputs = ["input.go"],
- output = "output.go",
-)
-
-# @unused
-glaze_ignore = [
- "input.go",
- "output.go",
-]
diff --git a/tools/go_generics/tests/consts/input.go b/tools/go_generics/tests/consts/input.go
deleted file mode 100644
index 04b95fcc6..000000000
--- a/tools/go_generics/tests/consts/input.go
+++ /dev/null
@@ -1,26 +0,0 @@
-// Copyright 2018 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package tests
-
-const c1 = 10
-const x, y, z = 100, 200, 300
-const v float32 = 1.0 + 2.0
-const s = "abc"
-const (
- A = 10
- B, C, D = 10, 20, 30
- S = "abc"
- T, U, V string = "abc", "def", "ghi"
-)
diff --git a/tools/go_generics/tests/consts/output.go b/tools/go_generics/tests/consts/output.go
deleted file mode 100644
index 18d316cc9..000000000
--- a/tools/go_generics/tests/consts/output.go
+++ /dev/null
@@ -1,26 +0,0 @@
-// Copyright 2018 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package main
-
-const c1 = 20
-const x, y, z = 100, 200, 600
-const v float32 = 3.3
-const s = "def"
-const (
- A = 20
- B, C, D = 10, 100, 30
- S = "def"
- T, U, V string = "ABC", "def", "ghi"
-)
diff --git a/tools/go_generics/tests/defs.bzl b/tools/go_generics/tests/defs.bzl
deleted file mode 100644
index 6277c3947..000000000
--- a/tools/go_generics/tests/defs.bzl
+++ /dev/null
@@ -1,67 +0,0 @@
-"""Generics tests."""
-
-load("//tools/go_generics:defs.bzl", "go_template", "go_template_instance")
-
-def _go_generics_test_impl(ctx):
- runner = ctx.actions.declare_file(ctx.label.name)
- runner_content = "\n".join([
- "#!/bin/bash",
- "exec diff --ignore-blank-lines --ignore-matching-lines=^[[:space:]]*// %s %s" % (
- ctx.files.template_output[0].short_path,
- ctx.files.expected_output[0].short_path,
- ),
- "",
- ])
- ctx.actions.write(runner, runner_content, is_executable = True)
- return [DefaultInfo(
- executable = runner,
- runfiles = ctx.runfiles(
- files = ctx.files.template_output + ctx.files.expected_output,
- collect_default = True,
- collect_data = True,
- ),
- )]
-
-_go_generics_test = rule(
- implementation = _go_generics_test_impl,
- attrs = {
- "template_output": attr.label(mandatory = True, allow_single_file = True),
- "expected_output": attr.label(mandatory = True, allow_single_file = True),
- },
- test = True,
-)
-
-def go_generics_test(name, inputs, output, types = None, consts = None, **kwargs):
- """Instantiates a generics test.
-
- Args:
- name: the name of the test.
- inputs: all the input files.
- output: the expected output file.
- types: the template types (dictionary).
- consts: the template consts (dictionary).
- **kwargs: additional arguments for the template_instance.
- """
- if types == None:
- types = dict()
- if consts == None:
- consts = dict()
- go_template(
- name = name + "_template",
- srcs = inputs,
- types = types.keys(),
- consts = consts.keys(),
- )
- go_template_instance(
- name = name + "_output",
- template = ":" + name + "_template",
- out = name + "_output.go",
- types = types,
- consts = consts,
- **kwargs
- )
- _go_generics_test(
- name = name + "_test",
- template_output = name + "_output.go",
- expected_output = output,
- )
diff --git a/tools/go_generics/tests/imports/BUILD b/tools/go_generics/tests/imports/BUILD
deleted file mode 100644
index a86223d41..000000000
--- a/tools/go_generics/tests/imports/BUILD
+++ /dev/null
@@ -1,24 +0,0 @@
-load("//tools/go_generics/tests:defs.bzl", "go_generics_test")
-
-go_generics_test(
- name = "imports",
- consts = {
- "n": "math.Uint32",
- "m": "math.Uint64",
- },
- imports = {
- "sync": "sync",
- "math": "mymathpath",
- },
- inputs = ["input.go"],
- output = "output.go",
- types = {
- "T": "sync.Mutex",
- },
-)
-
-# @unused
-glaze_ignore = [
- "input.go",
- "output.go",
-]
diff --git a/tools/go_generics/tests/imports/input.go b/tools/go_generics/tests/imports/input.go
deleted file mode 100644
index 0f032c2a1..000000000
--- a/tools/go_generics/tests/imports/input.go
+++ /dev/null
@@ -1,24 +0,0 @@
-// Copyright 2018 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package tests
-
-type T int
-
-var global T
-
-const (
- m = 0
- n = 0
-)
diff --git a/tools/go_generics/tests/imports/output.go b/tools/go_generics/tests/imports/output.go
deleted file mode 100644
index 2488ca58c..000000000
--- a/tools/go_generics/tests/imports/output.go
+++ /dev/null
@@ -1,27 +0,0 @@
-// Copyright 2018 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package main
-
-import (
- __generics_imported1 "mymathpath"
- __generics_imported0 "sync"
-)
-
-var global __generics_imported0.Mutex
-
-const (
- m = __generics_imported1.Uint64
- n = __generics_imported1.Uint32
-)
diff --git a/tools/go_generics/tests/remove_typedef/BUILD b/tools/go_generics/tests/remove_typedef/BUILD
deleted file mode 100644
index 46457cec6..000000000
--- a/tools/go_generics/tests/remove_typedef/BUILD
+++ /dev/null
@@ -1,16 +0,0 @@
-load("//tools/go_generics/tests:defs.bzl", "go_generics_test")
-
-go_generics_test(
- name = "remove_typedef",
- inputs = ["input.go"],
- output = "output.go",
- types = {
- "T": "U",
- },
-)
-
-# @unused
-glaze_ignore = [
- "input.go",
- "output.go",
-]
diff --git a/tools/go_generics/tests/remove_typedef/input.go b/tools/go_generics/tests/remove_typedef/input.go
deleted file mode 100644
index cf632bae7..000000000
--- a/tools/go_generics/tests/remove_typedef/input.go
+++ /dev/null
@@ -1,37 +0,0 @@
-// Copyright 2018 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package tests
-
-func f(T) Q {
- return Q{}
-}
-
-type T struct{}
-
-type Q struct{}
-
-func (*T) f() {
-}
-
-func (T) g() {
-}
-
-func (*Q) f(T) T {
- return T{}
-}
-
-func (*Q) g(T) *T {
- return nil
-}
diff --git a/tools/go_generics/tests/remove_typedef/output.go b/tools/go_generics/tests/remove_typedef/output.go
deleted file mode 100644
index d44fd8e1c..000000000
--- a/tools/go_generics/tests/remove_typedef/output.go
+++ /dev/null
@@ -1,29 +0,0 @@
-// Copyright 2018 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package main
-
-func f(U) Q {
- return Q{}
-}
-
-type Q struct{}
-
-func (*Q) f(U) U {
- return U{}
-}
-
-func (*Q) g(U) *U {
- return nil
-}
diff --git a/tools/go_generics/tests/simple/BUILD b/tools/go_generics/tests/simple/BUILD
deleted file mode 100644
index 4b9265ea4..000000000
--- a/tools/go_generics/tests/simple/BUILD
+++ /dev/null
@@ -1,17 +0,0 @@
-load("//tools/go_generics/tests:defs.bzl", "go_generics_test")
-
-go_generics_test(
- name = "simple",
- inputs = ["input.go"],
- output = "output.go",
- suffix = "New",
- types = {
- "T": "Q",
- },
-)
-
-# @unused
-glaze_ignore = [
- "input.go",
- "output.go",
-]
diff --git a/tools/go_generics/tests/simple/input.go b/tools/go_generics/tests/simple/input.go
deleted file mode 100644
index 2a917f16c..000000000
--- a/tools/go_generics/tests/simple/input.go
+++ /dev/null
@@ -1,45 +0,0 @@
-// Copyright 2018 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package tests
-
-type T int
-
-var global T
-
-func f(_ T, a int) {
-}
-
-func g(a T, b int) {
- var c T
- _ = c
-
- d := (*T)(nil)
- _ = d
-}
-
-type R struct {
- T
- a *T
-}
-
-var (
- Z *T = (*T)(nil)
-)
-
-const (
- X T = (T)(0)
-)
-
-type Y T
diff --git a/tools/go_generics/tests/simple/output.go b/tools/go_generics/tests/simple/output.go
deleted file mode 100644
index 6bfa0b25b..000000000
--- a/tools/go_generics/tests/simple/output.go
+++ /dev/null
@@ -1,43 +0,0 @@
-// Copyright 2018 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package main
-
-var globalNew Q
-
-func fNew(_ Q, a int) {
-}
-
-func gNew(a Q, b int) {
- var c Q
- _ = c
-
- d := (*Q)(nil)
- _ = d
-}
-
-type RNew struct {
- Q
- a *Q
-}
-
-var (
- ZNew *Q = (*Q)(nil)
-)
-
-const (
- XNew Q = (Q)(0)
-)
-
-type YNew Q
diff --git a/tools/go_marshal/BUILD b/tools/go_marshal/BUILD
deleted file mode 100644
index f79defea7..000000000
--- a/tools/go_marshal/BUILD
+++ /dev/null
@@ -1,25 +0,0 @@
-load("//tools:defs.bzl", "bzl_library", "go_binary")
-
-licenses(["notice"])
-
-go_binary(
- name = "go_marshal",
- srcs = ["main.go"],
- visibility = [
- "//:sandbox",
- ],
- deps = [
- "//tools/go_marshal/gomarshal",
- ],
-)
-
-config_setting(
- name = "marshal_config_verbose",
- values = {"define": "gomarshal=verbose"},
-)
-
-bzl_library(
- name = "defs_bzl",
- srcs = ["defs.bzl"],
- visibility = ["//visibility:private"],
-)
diff --git a/tools/go_marshal/README.md b/tools/go_marshal/README.md
deleted file mode 100644
index d8045c295..000000000
--- a/tools/go_marshal/README.md
+++ /dev/null
@@ -1,130 +0,0 @@
-This package implements the go_marshal utility.
-
-# Overview
-
-`go_marshal` is a code generation utility similar to `go_stateify` for
-marshalling go data structures to and from memory.
-
-`go_marshal` attempts to improve on `binary.Write` and the sentry's
-`binary.Marshal` by moving the expensive use of reflection from runtime to
-compile-time.
-
-`go_marshal` automatically generates implementations of the
-`marshal.Marshallable` interface. Data structures that require custom
-serialization can be accommodated through a manual implementation of this
-interface.
-
-Data structures can be flagged for code generation by adding a struct-level
-comment `// +marshal`. For additional details and options, see the documentation
-for the `marshal.Marshallable` interface.
-
-# Usage
-
-See `defs.bzl`: a new rule is provided, `go_marshal`.
-
-Under the hood, the `go_marshal` rule generates a file that will appear in a Go
-target; the output file must be listed explicitly in that target's srcs. For
-example:
-
-```
-load("<PKGPATH>/gvisor/tools/go_marshal:defs.bzl", "go_marshal")
-
-go_marshal(
- name = "foo_abi",
- srcs = ["foo.go"],
- out = "foo_abi.go",
- package = "foo",
-)
-
-go_library(
- name = "foo",
- srcs = [
- "foo.go",
- "foo_abi.go",
- ],
- ...
-)
-```
-
-As part of the interface generation, `go_marshal` also generates some tests for
-sanity checking the struct definitions for potential alignment issues, and a
-simple round-trip test through Marshal/Unmarshal to verify the implementation.
-These tests use reflection to verify properties of the ABI struct, and should be
-considered part of the generated interfaces (but are too expensive to execute at
-runtime). Ensure these tests run at some point.
-
-# Restrictions
-
-Not all valid go type definitions can be used with `go_marshal`. `go_marshal` is
-intended for ABI structs, which have these additional restrictions:
-
-- At the moment, `go_marshal` only supports struct declarations.
-
-- Structs are marshalled as packed types. This means no implicit padding is
- inserted between fields shorter than the platform register size. For
- alignment, manually insert padding fields.
-
-- Structs used with `go_marshal` must have a compile-time static size. This
- means no dynamically sized fields like slices or strings. Use statically
- sized arrays (byte arrays for strings) instead.
-
-- No pointer, channel, map, or function pointer fields, and no fields that are
- arrays of these types. These don't make sense in an ABI data structure.
-
-- We could support opaque pointers as `uintptr`, but this is currently not
- implemented. Implementing this would require handling the architecture
- dependent native pointer size.
-
-- Fields must either be a primitive integer type (`byte`,
- `[u]int{8,16,32,64}`), or of a type that implements `marshal.Marshallable`.
-
-- `int` and `uint` fields are not allowed. Use an explicitly-sized numeric
- type.
-
-- `float*` fields are currently not supported, but could be if necessary.
-
-# Appendix
-
-## Working with Non-Packed Structs
-
-ABI structs must generally be packed types, meaning they should have no implicit
-padding between short fields. However, if a field is tagged
-`marshal:"unaligned"`, `go_marshal` will fall back to a safer but slower
-mechanism to deal with potentially unaligned fields.
-
-Note that the non-packed property is inherited by any other struct that embeds
-this struct, since the `go_marshal` tool currently can't reason about alignments
-for embedded structs that are not aligned.
-
-Because of this, it's generally best to avoid using `marshal:"unaligned"` and
-insert explicit padding fields instead.
-
-## Modifying the `go_marshal` Tool
-
-The following are some guidelines for modifying the `go_marshal` tool:
-
-- The `go_marshal` tool currently does a single pass over all types requesting
- code generation, in arbitrary order. This means the generated code can't
- directly obtain information about embedded marshallable types at
- compile-time. One way to work around this restriction is to add a new
- Marshallable interface method providing this piece of information, and
- calling it from the generated code. Use this sparingly, as we want to rely
- on compile-time information as much as possible for performance.
-
-- No runtime reflection in the code generated for the marshallable interface.
- The entire point of the tool is to avoid runtime reflection. The generated
- tests may use reflection.
-
-## Debugging
-
-To enable debugging output from the go-marshal tool, use one of the following
-options, depending on how go-marshal is being invoked:
-
-- Pass `--define gomarshal=verbose` to the bazel command. Note that this can
- generate a lot of output depending on what's being compiled, as this will
- enable debugging for all packages built by the command.
-
-- Set `marshal_debug = True` on the top-level `go_library` BUILD rule.
-
-- Set `debug = True` on the `go_marshal` BUILD rule.
-
-- Pass `-debug` to the go-marshal tool invocation.
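
As a concrete illustration of the flagging mechanism described in this README, here is a minimal sketch of a struct annotated for code generation (the package, type and field names are hypothetical):

```
// Package defs is a hypothetical package containing ABI definitions.
package defs

// Stat is a hypothetical fixed-size ABI struct. The struct-level "+marshal"
// comment asks go_marshal to generate a marshal.Marshallable implementation.
//
// +marshal
type Stat struct {
	Dev  uint64
	Ino  uint64
	Mode uint32
	_    uint32 // Explicit padding; go_marshal expects packed layouts.
	Size int64
}
```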
diff --git a/tools/go_marshal/analysis/BUILD b/tools/go_marshal/analysis/BUILD
deleted file mode 100644
index c2a4d45c4..000000000
--- a/tools/go_marshal/analysis/BUILD
+++ /dev/null
@@ -1,12 +0,0 @@
-load("//tools:defs.bzl", "go_library")
-
-licenses(["notice"])
-
-go_library(
- name = "analysis",
- testonly = 1,
- srcs = ["analysis_unsafe.go"],
- visibility = [
- "//:sandbox",
- ],
-)
diff --git a/tools/go_marshal/analysis/analysis_unsafe.go b/tools/go_marshal/analysis/analysis_unsafe.go
deleted file mode 100644
index cd55cf5cb..000000000
--- a/tools/go_marshal/analysis/analysis_unsafe.go
+++ /dev/null
@@ -1,179 +0,0 @@
-// Copyright 2019 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Package analysis implements common functionality used by generated
-// go_marshal tests.
-package analysis
-
-// All functions in this package are unsafe and are not intended for general
-// consumption. They contain sharp edge cases and the caller is responsible for
-// ensuring none of them are hit. Callers must be careful to pass in only sane
-// arguments. Failure to do so may cause panics at best and arbitrary memory
-// corruption at worst.
-//
-// Never use outside of tests.
-
-import (
- "fmt"
- "math/rand"
- "reflect"
- "testing"
- "unsafe"
-)
-
-// RandomizeValue assigns random value(s) to an arbitrary type. This is intended
-// for use with ABI structs from go_marshal, meaning the typical restrictions
-// apply (fixed-size types, no pointers, maps, channels, etc), and should only
-// be used on zeroed values to avoid overwriting pointers to active go objects.
-//
-// Internally, we populate the type with random data by doing an unsafe cast to
-// access the underlying memory of the type and filling it as if it were a byte
-// slice. This almost gets us what we want, but padding fields named "_" are
-// normally not accessible, so we walk the type and recursively zero all "_"
-// fields.
-//
-// Precondition: x must be a pointer. x must not contain any valid
-// pointers to active go objects (pointer fields aren't allowed in ABI
-// structs anyways), or we'd be violating the go runtime contract and
-// the GC may malfunction.
-func RandomizeValue(x interface{}) {
- v := reflect.Indirect(reflect.ValueOf(x))
- if !v.CanSet() {
- panic("RandomizeType() called with an unaddressable value. You probably need to pass a pointer to the argument")
- }
-
- // Cast the underlying memory for the type into a byte slice.
- var b []byte
- hdr := (*reflect.SliceHeader)(unsafe.Pointer(&b))
- // Note: v.UnsafeAddr panics if x is passed by value. x should be a pointer.
- hdr.Data = v.UnsafeAddr()
- hdr.Len = int(v.Type().Size())
- hdr.Cap = hdr.Len
-
- // Fill the byte slice with random data, which in effect fills the type with
- // random values.
- n, err := rand.Read(b)
- if err != nil || n != len(b) {
- panic("unreachable")
- }
-
- // Normally, padding fields are not accessible, so zero them out.
- reflectZeroPaddingFields(v.Type(), b, false)
-}
-
-// reflectZeroPaddingFields assigns zero values to padding fields for the value
-// of type r, represented by the memory in data. Padding fields are defined as
-// fields with the name "_". If zero is true, the immediate value itself is
-// zeroed. In addition, the type is recursively scanned for padding fields in
-// inner types.
-//
-// This is used for zeroing padding fields after calling RandomizeValue.
-func reflectZeroPaddingFields(r reflect.Type, data []byte, zero bool) {
- if zero {
- for i := range data {
- data[i] = 0
- }
- }
- switch r.Kind() {
- case reflect.Int8, reflect.Uint8, reflect.Int16, reflect.Uint16, reflect.Int32, reflect.Uint32, reflect.Int64, reflect.Uint64:
- // These types are explicitly allowed in an ABI type, but we don't need
- // to recurse further as they're scalar types.
- case reflect.Struct:
- for i, numFields := 0, r.NumField(); i < numFields; i++ {
- f := r.Field(i)
- off := f.Offset
- len := f.Type.Size()
- window := data[off : off+len]
- reflectZeroPaddingFields(f.Type, window, f.Name == "_")
- }
- case reflect.Array:
- eLen := int(r.Elem().Size())
- if int(r.Size()) != eLen*r.Len() {
- panic("Array has unexpected size?")
- }
- for i, n := 0, r.Len(); i < n; i++ {
- reflectZeroPaddingFields(r.Elem(), data[i*eLen:(i+1)*eLen], false)
- }
- default:
- panic(fmt.Sprintf("Type %v not allowed in ABI struct", r.Kind()))
-
- }
-}
-
-// AlignmentCheck ensures the definition of the type represented by typ doesn't
-// cause the go compiler to emit implicit padding between elements of the type
-// (i.e. fields in a struct).
-//
-// AlignmentCheck doesn't explicitly recurse for embedded structs because any
-// struct present in an ABI struct must also be Marshallable, and therefore
-// they're aligned by definition (or their alignment check would have failed).
-func AlignmentCheck(t *testing.T, typ reflect.Type) (ok bool, delta uint64) {
- switch typ.Kind() {
- case reflect.Int8, reflect.Uint8, reflect.Int16, reflect.Uint16, reflect.Int32, reflect.Uint32, reflect.Int64, reflect.Uint64:
- // Primitive types are always considered well aligned. Primitive types
- // that are fields in structs are checked independently, this branch
- // exists to handle recursive calls to AlignmentCheck.
- case reflect.Struct:
- xOff := 0
- nextXOff := 0
- skipNext := false
- for i, numFields := 0, typ.NumField(); i < numFields; i++ {
- xOff = nextXOff
- f := typ.Field(i)
- fmt.Printf("Checking alignment of %s.%s @ %d [+%d]...\n", typ.Name(), f.Name, f.Offset, f.Type.Size())
- nextXOff = int(f.Offset + f.Type.Size())
-
- if f.Name == "_" {
- // Padding fields need not be aligned.
- fmt.Printf("Padding field of type %v\n", f.Type)
- continue
- }
-
- if tag, ok := f.Tag.Lookup("marshal"); ok && tag == "unaligned" {
- skipNext = true
- continue
- }
-
- if skipNext {
- skipNext = false
- fmt.Printf("Skipping alignment check for field %s.%s explicitly marked as unaligned.\n", typ.Name(), f.Name)
- continue
- }
-
- if xOff != int(f.Offset) {
- implicitPad := int(f.Offset) - xOff
- t.Fatalf("Suspect offset for field %s.%s, detected an implicit %d byte padding from offset %d to %d; either add %d bytes of explicit padding before this field or tag it as `marshal:\"unaligned\"`.", typ.Name(), f.Name, implicitPad, xOff, f.Offset, implicitPad)
- }
- }
-
- // Ensure structs end on a byte explicitly defined by the type.
- if typ.NumField() > 0 && nextXOff != int(typ.Size()) {
- implicitPad := int(typ.Size()) - nextXOff
- f := typ.Field(typ.NumField() - 1) // Final field
- if tag, ok := f.Tag.Lookup("marshal"); ok && tag == "unaligned" {
- // Final field explicitly marked unaligned.
- break
- }
- t.Fatalf("Suspect offset for field %s.%s at the end of %s, detected an implicit %d byte padding from offset %d to %d at the end of the struct; either add %d bytes of explict padding at end of the struct or tag the final field %s as `marshal:\"unaligned\"`.",
- typ.Name(), f.Name, typ.Name(), implicitPad, nextXOff, typ.Size(), implicitPad, f.Name)
- }
- case reflect.Array:
- // Independent arrays are also always considered well aligned. We only
- // need to worry about their alignment when they're embedded in structs,
- // which we handle above.
- default:
- t.Fatalf("Unsupported type in ABI struct while checking for field alignment for type: %v", typ.Kind())
- }
- return true, uint64(typ.Size())
-}
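
A rough sketch of how the helpers above are intended to be used by the generated tests (the `example` type below is hypothetical, and this is not the exact code go_marshal emits):

```
package analysis_test

import (
	"reflect"
	"testing"

	"gvisor.dev/gvisor/tools/go_marshal/analysis"
)

// example is a hypothetical packed ABI struct with no implicit padding.
type example struct {
	A uint32
	B uint32
	C uint64
}

func TestExampleLayout(t *testing.T) {
	var x example
	// Fill the value with random data; padding fields named "_" are zeroed.
	analysis.RandomizeValue(&x)
	// Fail the test if the compiler inserted implicit padding between fields.
	analysis.AlignmentCheck(t, reflect.TypeOf(x))
}
```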
diff --git a/tools/go_marshal/defs.bzl b/tools/go_marshal/defs.bzl
deleted file mode 100644
index f44f83eab..000000000
--- a/tools/go_marshal/defs.bzl
+++ /dev/null
@@ -1,68 +0,0 @@
-"""Marshal is a tool for generating marshalling interfaces for Go types."""
-
-def _go_marshal_impl(ctx):
- """Execute the go_marshal tool."""
- output = ctx.outputs.lib
- output_test = ctx.outputs.test
- output_test_unconditional = ctx.outputs.test_unconditional
-
- # Run the marshal command.
- args = ["-output=%s" % output.path]
- args.append("-pkg=%s" % ctx.attr.package)
- args.append("-output_test=%s" % output_test.path)
- args.append("-output_test_unconditional=%s" % output_test_unconditional.path)
-
- if ctx.attr.debug:
- args += ["-debug"]
-
- args += ["--"]
- for src in ctx.attr.srcs:
- args += [f.path for f in src.files.to_list()]
- ctx.actions.run(
- inputs = ctx.files.srcs,
- outputs = [output, output_test, output_test_unconditional],
- mnemonic = "GoMarshal",
- progress_message = "go_marshal: %s" % ctx.label,
- arguments = args,
- executable = ctx.executable._tool,
- )
-
-# Generates marshalling interface implementations from a set of Go files.
-#
-# Args:
-#   name: the name of the rule.
-#   srcs: the input source files. These files should include all structs in the
-#         package that need to be marshalled.
-#   imports: an optional list of extra, non-aliased, Go-style absolute import
-#            paths.
-#   package: the package name for the input sources.
-#
-# The generated output (%{name}_unsafe.go) must not conflict with any other
-# files and must be added to the srcs of the relevant go_library.
-go_marshal = rule(
- implementation = _go_marshal_impl,
- attrs = {
- "srcs": attr.label_list(mandatory = True, allow_files = True),
- "imports": attr.string_list(mandatory = False),
- "package": attr.string(mandatory = True),
- "debug": attr.bool(doc = "enable debugging output from the go_marshal tool"),
- "_tool": attr.label(executable = True, cfg = "host", default = Label("//tools/go_marshal:go_marshal")),
- },
- outputs = {
- "lib": "%{name}_unsafe.go",
- "test": "%{name}_test.go",
- "test_unconditional": "%{name}_unconditional_test.go",
- },
-)
-
-# marshal_deps are the dependencies required by generated code.
-marshal_deps = [
- "//pkg/gohacks",
- "//pkg/safecopy",
- "//pkg/usermem",
- "//pkg/marshal",
-]
-
-# marshal_test_deps are required by test targets.
-marshal_test_deps = [
- "//tools/go_marshal/analysis",
-]
diff --git a/tools/go_marshal/gomarshal/BUILD b/tools/go_marshal/gomarshal/BUILD
deleted file mode 100644
index 44cb33ae4..000000000
--- a/tools/go_marshal/gomarshal/BUILD
+++ /dev/null
@@ -1,21 +0,0 @@
-load("//tools:defs.bzl", "go_library")
-
-licenses(["notice"])
-
-go_library(
- name = "gomarshal",
- srcs = [
- "generator.go",
- "generator_interfaces.go",
- "generator_interfaces_array_newtype.go",
- "generator_interfaces_primitive_newtype.go",
- "generator_interfaces_struct.go",
- "generator_tests.go",
- "util.go",
- ],
- stateify = False,
- visibility = [
- "//:sandbox",
- ],
- deps = ["//tools/tags"],
-)
diff --git a/tools/go_marshal/gomarshal/generator.go b/tools/go_marshal/gomarshal/generator.go
deleted file mode 100644
index 4a53d25be..000000000
--- a/tools/go_marshal/gomarshal/generator.go
+++ /dev/null
@@ -1,526 +0,0 @@
-// Copyright 2019 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Package gomarshal implements the go_marshal code generator. See README.md.
-package gomarshal
-
-import (
- "bytes"
- "fmt"
- "go/ast"
- "go/parser"
- "go/token"
- "os"
- "sort"
- "strings"
-
- "gvisor.dev/gvisor/tools/tags"
-)
-
-// List of identifiers we use in generated code that may conflict with a
-// similarly-named source identifier. Abort gracefully when we see these to
-// avoid potentially confusing compilation failures in generated code.
-//
-// This only applies to import aliases at the moment. All other identifiers
-// are qualified by a receiver argument, since they're struct fields.
-//
-// All receivers are single letters, so we don't allow import aliases to be a
-// single letter.
-var badIdents = []string{
- "addr", "blk", "buf", "cc", "dst", "dsts", "count", "err", "hdr", "idx",
- "inner", "length", "limit", "ptr", "size", "src", "srcs", "val",
- // All single-letter identifiers.
-}
-
-// Constructed from badIdents in init().
-var badIdentsMap map[string]struct{}
-
-func init() {
- badIdentsMap = make(map[string]struct{})
- for _, ident := range badIdents {
- badIdentsMap[ident] = struct{}{}
- }
-}
-
-// Generator drives code generation for a single invocation of the go_marshal
-// utility.
-//
-// The Generator holds arguments passed to the tool, and drives parsing,
-// processing and code generation for all types marked with +marshal declared
-// in the input files.
-//
-// See Generator.run() as the entry point.
-type Generator struct {
- // Paths to input go source files.
- inputs []string
- // Output file to write generated go source.
- output *os.File
- // Output file to write generated tests.
- outputTest *os.File
- // Output file to write unconditionally generated tests.
- outputTestUC *os.File
- // Package name for the generated file.
- pkg string
- // Set of extra packages to import in the generated file.
- imports *importTable
-}
-
-// NewGenerator creates a new code Generator.
-func NewGenerator(srcs []string, out, outTest, outTestUnconditional, pkg string, imports []string) (*Generator, error) {
- f, err := os.OpenFile(out, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0644)
- if err != nil {
- return nil, fmt.Errorf("couldn't open output file %q: %w", out, err)
- }
- fTest, err := os.OpenFile(outTest, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0644)
- if err != nil {
- return nil, fmt.Errorf("couldn't open test output file %q: %w", out, err)
- }
- fTestUC, err := os.OpenFile(outTestUnconditional, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0644)
- if err != nil {
- return nil, fmt.Errorf("couldn't open unconditional test output file %q: %w", out, err)
- }
- g := Generator{
- inputs: srcs,
- output: f,
- outputTest: fTest,
- outputTestUC: fTestUC,
- pkg: pkg,
- imports: newImportTable(),
- }
- for _, i := range imports {
- // All imports on the extra imports list are unconditionally marked as
- // used, so that they're always added to the generated code.
- g.imports.add(i).markUsed()
- }
-
- // The following imports may or may not be used by the generated code,
- // depending on what's required for the target types. Don't mark these as
- // used by default.
- g.imports.add("io")
- g.imports.add("reflect")
- g.imports.add("runtime")
- g.imports.add("unsafe")
- g.imports.add("gvisor.dev/gvisor/pkg/gohacks")
- g.imports.add("gvisor.dev/gvisor/pkg/safecopy")
- g.imports.add("gvisor.dev/gvisor/pkg/usermem")
- g.imports.add("gvisor.dev/gvisor/pkg/marshal")
-
- return &g, nil
-}
-
-// writeHeader writes the header for the generated source file. The header
-// includes the package name, package level comments and import statements.
-func (g *Generator) writeHeader() error {
- var b sourceBuffer
- b.emit("// Automatically generated marshal implementation. See tools/go_marshal.\n\n")
-
- // Emit build tags.
- if t := tags.Aggregate(g.inputs); len(t) > 0 {
- b.emit(strings.Join(t.Lines(), "\n"))
- b.emit("\n\n")
- }
-
- // Package header.
- b.emit("package %s\n\n", g.pkg)
- if err := b.write(g.output); err != nil {
- return err
- }
-
- return g.imports.write(g.output)
-}
-
-// writeTypeChecks writes a statement to force the compiler to perform a type
-// check for all Marshallable types referenced by the generated code.
-func (g *Generator) writeTypeChecks(ms map[string]struct{}) error {
- if len(ms) == 0 {
- return nil
- }
-
- msl := make([]string, 0, len(ms))
- for m := range ms {
- msl = append(msl, m)
- }
- sort.Strings(msl)
-
- var buf bytes.Buffer
- fmt.Fprint(&buf, "// Marshallable types used by this file.\n")
-
- for _, m := range msl {
- fmt.Fprintf(&buf, "var _ marshal.Marshallable = (*%s)(nil)\n", m)
- }
- fmt.Fprint(&buf, "\n")
-
- _, err := fmt.Fprint(g.output, buf.String())
- return err
-}
-
-// parse processes all input files passed to this generator and produces a set of
-// parsed go ASTs.
-func (g *Generator) parse() ([]*ast.File, []*token.FileSet, error) {
- debugf("go_marshal invoked with %d input files:\n", len(g.inputs))
- for _, path := range g.inputs {
- debugf(" %s\n", path)
- }
-
- files := make([]*ast.File, 0, len(g.inputs))
- fsets := make([]*token.FileSet, 0, len(g.inputs))
-
- for _, path := range g.inputs {
- fset := token.NewFileSet()
- f, err := parser.ParseFile(fset, path, nil, parser.ParseComments)
- if err != nil {
- // Not a valid input file?
- return nil, nil, fmt.Errorf("input %q can't be parsed: %w", path, err)
- }
-
- if debugEnabled() {
- debugf("AST for %q:\n", path)
- ast.Print(fset, f)
- }
-
- files = append(files, f)
- fsets = append(fsets, fset)
- }
-
- return files, fsets, nil
-}
-
-// sliceAPI carries information about the '+marshal slice' directive.
-type sliceAPI struct {
- // Comment node in the AST containing the +marshal tag.
- comment *ast.Comment
- // Identifier fragment to use when naming generated functions for the slice
- // API.
- ident string
- // Whether the generated functions should reference the newtype name, or the
- // inner type name. Only meaningful on newtype declarations on primitives.
- inner bool
-}
-
-// marshallableType carries information about a type marked with the '+marshal'
-// directive.
-type marshallableType struct {
- spec *ast.TypeSpec
- slice *sliceAPI
-}
-
-func newMarshallableType(fset *token.FileSet, tagLine *ast.Comment, spec *ast.TypeSpec) marshallableType {
- mt := marshallableType{
- spec: spec,
- slice: nil,
- }
-
- var unhandledTags []string
-
- for _, tag := range strings.Fields(strings.TrimPrefix(tagLine.Text, "// +marshal")) {
- if strings.HasPrefix(tag, "slice:") {
- tokens := strings.Split(tag, ":")
- if len(tokens) < 2 || len(tokens) > 3 {
- abortAt(fset.Position(tagLine.Slash), fmt.Sprintf("+marshal directive has invalid 'slice' clause. Expecting format 'slice:<IDENTIFIER>[:inner]', got '%v'", tag))
- }
- if len(tokens[1]) == 0 {
- abortAt(fset.Position(tagLine.Slash), "+marshal slice directive has empty identifier argument. Expecting '+marshal slice:identifier'")
- }
-
- sa := &sliceAPI{
- comment: tagLine,
- ident: tokens[1],
- }
- mt.slice = sa
-
- if len(tokens) == 3 {
- if tokens[2] != "inner" {
- abortAt(fset.Position(tagLine.Slash), "+marshal slice directive has an invalid argument. Expecting '+marshal slice:<IDENTIFIER>[:inner]'")
- }
- sa.inner = true
- }
-
- continue
- }
-
- unhandledTags = append(unhandledTags, tag)
- }
-
- if len(unhandledTags) > 0 {
- abortAt(fset.Position(tagLine.Slash), fmt.Sprintf("+marshal directive contained the following unknown clauses: %v", strings.Join(unhandledTags, " ")))
- }
-
- return mt
-}
-
-// collectMarshallableTypes walks the parsed AST and collects a list of type
-// declarations for which we need to generate the Marshallable interface.
-func (g *Generator) collectMarshallableTypes(a *ast.File, f *token.FileSet) []marshallableType {
- var types []marshallableType
- for _, decl := range a.Decls {
- gdecl, ok := decl.(*ast.GenDecl)
- // Type declaration?
- if !ok || gdecl.Tok != token.TYPE {
- debugfAt(f.Position(decl.Pos()), "Skipping declaration since it's not a type declaration.\n")
- continue
- }
- // Does it have a comment?
- if gdecl.Doc == nil {
- debugfAt(f.Position(gdecl.Pos()), "Skipping declaration since it doesn't have a comment.\n")
- continue
- }
- // Does the comment contain a "+marshal" line?
- marked := false
- var tagLine *ast.Comment
- for _, c := range gdecl.Doc.List {
- if strings.HasPrefix(c.Text, "// +marshal") {
- marked = true
- tagLine = c
- break
- }
- }
- if !marked {
- debugfAt(f.Position(gdecl.Pos()), "Skipping declaration since it doesn't have a comment containing +marshal line.\n")
- continue
- }
- for _, spec := range gdecl.Specs {
- // We already confirmed we're in a type declaration earlier, so this
- // cast will succeed.
- t := spec.(*ast.TypeSpec)
- switch t.Type.(type) {
- case *ast.StructType:
- debugfAt(f.Position(t.Pos()), "Collected marshallable struct %s.\n", t.Name.Name)
- case *ast.Ident: // Newtype on primitive.
- debugfAt(f.Position(t.Pos()), "Collected marshallable newtype on primitive %s.\n", t.Name.Name)
- case *ast.ArrayType: // Newtype on array.
- debugfAt(f.Position(t.Pos()), "Collected marshallable newtype on array %s.\n", t.Name.Name)
- default:
- // A user specifically requested marshalling on this type, but we
- // don't support it.
- abortAt(f.Position(t.Pos()), fmt.Sprintf("Marshalling codegen was requested on type '%s', but go-marshal doesn't support this kind of declaration.\n", t.Name))
- }
- types = append(types, newMarshallableType(f, tagLine, t))
-
- }
- }
- return types
-}
-
-// collectImports collects all imports from all input source files. Some of
-// these imports are copied to the generated output, if they're referenced by
-// the generated code.
-//
-// collectImports de-duplicates imports while building the list, and ensures
-// identifiers in the generated code don't conflict with any imported package
-// names.
-func (g *Generator) collectImports(a *ast.File, f *token.FileSet) map[string]importStmt {
- is := make(map[string]importStmt)
- for _, decl := range a.Decls {
- gdecl, ok := decl.(*ast.GenDecl)
- // Import statement?
- if !ok || gdecl.Tok != token.IMPORT {
- continue
- }
- for _, spec := range gdecl.Specs {
- i := g.imports.addFromSpec(spec.(*ast.ImportSpec), f)
- debugf("Collected import '%s' as '%s'\n", i.path, i.name)
-
- // Make sure we have an import that doesn't use any local names that
- // would conflict with identifiers in the generated code.
- if len(i.name) == 1 && i.name != "_" {
- abortAt(f.Position(spec.Pos()), fmt.Sprintf("Import has a single character local name '%s'; this may conflict with code generated by go_marshal, use a multi-character import alias", i.name))
- }
- if _, ok := badIdentsMap[i.name]; ok {
- abortAt(f.Position(spec.Pos()), fmt.Sprintf("Import name '%s' is likely to conflict with code generated by go_marshal, use a different import alias", i.name))
- }
- }
- }
- return is
-
-}
-
-func (g *Generator) generateOne(t marshallableType, fset *token.FileSet) *interfaceGenerator {
- i := newInterfaceGenerator(t.spec, fset)
- switch ty := t.spec.Type.(type) {
- case *ast.StructType:
- i.validateStruct(t.spec, ty)
- i.emitMarshallableForStruct(ty)
- if t.slice != nil {
- i.emitMarshallableSliceForStruct(ty, t.slice)
- }
- case *ast.Ident:
- i.validatePrimitiveNewtype(ty)
- i.emitMarshallableForPrimitiveNewtype(ty)
- if t.slice != nil {
- i.emitMarshallableSliceForPrimitiveNewtype(ty, t.slice)
- }
- case *ast.ArrayType:
- i.validateArrayNewtype(t.spec.Name, ty)
- // After validate, we can safely call arrayLen.
- i.emitMarshallableForArrayNewtype(t.spec.Name, ty, ty.Elt.(*ast.Ident))
- if t.slice != nil {
- abortAt(fset.Position(t.slice.comment.Slash), fmt.Sprintf("Array type marked as '+marshal slice:...', but this is not supported. Perhaps fold one of the dimensions?"))
- }
- default:
- // This should've been filtered out by collectMarshallableTypes.
- panic(fmt.Sprintf("Unexpected type %+v", ty))
- }
- return i
-}
-
-// generateOneTestSuite generates a test suite for the automatically generated
-// implementations type t.
-func (g *Generator) generateOneTestSuite(t marshallableType) *testGenerator {
- i := newTestGenerator(t.spec)
- i.emitTests(t.slice)
- return i
-}
-
-// Run is the entry point to code generation using g.
-//
-// Run parses all input source files specified in g and emits generated code.
-func (g *Generator) Run() error {
- // Parse our input source files into ASTs and token sets.
- asts, fsets, err := g.parse()
- if err != nil {
- return err
- }
-
- if len(asts) != len(fsets) {
- panic("ASTs and FileSets don't match")
- }
-
- // Map of imports in source files; key = local package name, value = import
- // path.
- is := make(map[string]importStmt)
- for i, a := range asts {
- // Collect all imports from the source files. We may need to copy some
- // of these to the generated code if they're referenced. This has to be
- // done before the loop below because we need to process all ASTs before
- // we start requesting imports to be copied one by one as we encounter
- // them in each generated source.
- for name, i := range g.collectImports(a, fsets[i]) {
- is[name] = i
- }
- }
-
- var impls []*interfaceGenerator
- var ts []*testGenerator
- // Set of Marshallable types referenced by generated code.
- ms := make(map[string]struct{})
- for i, a := range asts {
- // Collect type declarations marked for code generation and generate
- // Marshallable interfaces.
- for _, t := range g.collectMarshallableTypes(a, fsets[i]) {
- impl := g.generateOne(t, fsets[i])
- // Collect Marshallable types referenced by the generated code.
- for ref := range impl.ms {
- ms[ref] = struct{}{}
- }
- impls = append(impls, impl)
- // Collect imports referenced by the generated code and add them to
- // the list of imports we need to copy to the generated code.
- for name := range impl.is {
- if !g.imports.markUsed(name) {
- panic(fmt.Sprintf("Generated code for '%s' referenced a non-existent import with local name '%s'. Either go-marshal needs to add an import to the generated file, or a package in an input source file has a package name differ from the final component of its path, which go-marshal doesn't know how to detect; use an import alias to work around this limitation.", impl.typeName(), name))
- }
- }
- ts = append(ts, g.generateOneTestSuite(t))
- }
- }
-
- // Write output file header. These include things like package name and
- // import statements.
- if err := g.writeHeader(); err != nil {
- return err
- }
-
- // Write type checks for referenced marshallable types to output file.
- if err := g.writeTypeChecks(ms); err != nil {
- return err
- }
-
- // Write generated interfaces to output file.
- for _, i := range impls {
- if err := i.write(g.output); err != nil {
- return err
- }
- }
-
- // Write generated tests to test file.
- return g.writeTests(ts)
-}
-
-// writeTests outputs tests for the generated interface implementations to a go
-// source file.
-func (g *Generator) writeTests(ts []*testGenerator) error {
- var b sourceBuffer
-
- // Write the unconditional test file. This file is always compiled,
- // regardless of what build tags were specified on the original input
- // files. We use this file to guarantee we never end up with an empty test
- // file, as that causes the build to fail with "no tests/benchmarks/examples
- // found".
- //
- // There's no easy way to determine ahead of time if we'll end up with an
- // empty build file since build constraints can arbitrarily cause some of
- // the original types to be not defined. We also have no way to tell bazel
- // to omit the entire test suite since the output files are already defined
- // before go-marshal is called.
- b.emit("// Automatically generated marshal tests. See tools/go_marshal.\n\n")
- b.emit("package %s\n\n", g.pkg)
- b.emit("func Example() {\n")
- b.inIndent(func() {
- b.emit("// This example is intentionally empty, and ensures this package contains at\n")
- b.emit("// least one testable entity. go-marshal is forced to emit a test package if the\n")
- b.emit("// input package is marked marshallable, but emitting no testable entities \n")
- b.emit("// results in a build failure.\n")
- })
- b.emit("}\n")
- if err := b.write(g.outputTestUC); err != nil {
- return err
- }
-
- // Now generate the real test file that contains the real types we
- // processed. These need to be conditionally compiled according to the build
- // tags, as the original types may not be defined under all build
- // configurations.
-
- b.reset()
- b.emit("// Automatically generated marshal tests. See tools/go_marshal.\n\n")
-
- // Emit build tags.
- if t := tags.Aggregate(g.inputs); len(t) > 0 {
- b.emit(strings.Join(t.Lines(), "\n"))
- b.emit("\n\n")
- }
-
- b.emit("package %s\n\n", g.pkg)
- if err := b.write(g.outputTest); err != nil {
- return err
- }
-
- // Collect and write test import statements.
- imports := newImportTable()
- for _, t := range ts {
- imports.merge(t.imports)
- }
-
- if err := imports.write(g.outputTest); err != nil {
- return err
- }
-
- // Write test functions.
- for _, t := range ts {
- if err := t.write(g.outputTest); err != nil {
- return err
- }
- }
- return nil
-}
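
For orientation, a minimal sketch of how a driver binary might wire up the Generator above, using only the NewGenerator and Run signatures shown in this file (the actual tools/go_marshal/main.go is not part of this diff and may differ; the flag names mirror those passed by defs.bzl):

```
package main

import (
	"flag"
	"log"

	"gvisor.dev/gvisor/tools/go_marshal/gomarshal"
)

func main() {
	out := flag.String("output", "", "generated source output file")
	outTest := flag.String("output_test", "", "generated test output file")
	outTestUC := flag.String("output_test_unconditional", "", "unconditionally generated test output file")
	pkg := flag.String("pkg", "", "package name for the generated files")
	flag.Parse()

	// The remaining arguments are the input source files.
	g, err := gomarshal.NewGenerator(flag.Args(), *out, *outTest, *outTestUC, *pkg, nil)
	if err != nil {
		log.Fatalf("NewGenerator: %v", err)
	}
	if err := g.Run(); err != nil {
		log.Fatalf("Run: %v", err)
	}
}
```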
diff --git a/tools/go_marshal/gomarshal/generator_interfaces.go b/tools/go_marshal/gomarshal/generator_interfaces.go
deleted file mode 100644
index 36447b86b..000000000
--- a/tools/go_marshal/gomarshal/generator_interfaces.go
+++ /dev/null
@@ -1,276 +0,0 @@
-// Copyright 2019 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package gomarshal
-
-import (
- "fmt"
- "go/ast"
- "go/token"
- "strings"
-)
-
-// interfaceGenerator generates marshalling interfaces for a single type.
-//
-// interfaceGenerator is not thread-safe.
-type interfaceGenerator struct {
- sourceBuffer
-
- // The type we're serializing.
- t *ast.TypeSpec
-
- // Receiver argument for generated methods.
- r string
-
- // FileSet containing the tokens for the type we're processing.
- f *token.FileSet
-
- // is records external packages referenced by the generated implementation.
- is map[string]struct{}
-
- // ms records Marshallable types referenced by the generated implementation
- // of t's interfaces.
- ms map[string]struct{}
-
- // as records fields in t that are potentially not packed. The key is the
- // accessor for the field.
- as map[string]struct{}
-}
-
-// typeName returns the name of the type this g represents.
-func (g *interfaceGenerator) typeName() string {
- return g.t.Name.Name
-}
-
-// newInterfaceGenerator creates a new interface generator.
-func newInterfaceGenerator(t *ast.TypeSpec, fset *token.FileSet) *interfaceGenerator {
- g := &interfaceGenerator{
- t: t,
- r: receiverName(t),
- f: fset,
- is: make(map[string]struct{}),
- ms: make(map[string]struct{}),
- as: make(map[string]struct{}),
- }
- g.recordUsedMarshallable(g.typeName())
- return g
-}
-
-func (g *interfaceGenerator) recordUsedMarshallable(m string) {
- g.ms[m] = struct{}{}
-
-}
-
-func (g *interfaceGenerator) recordUsedImport(i string) {
- g.is[i] = struct{}{}
-}
-
-func (g *interfaceGenerator) recordPotentiallyNonPackedField(fieldName string) {
- g.as[fieldName] = struct{}{}
-}
-
-// abortAt aborts the go_marshal tool with the given error message, with a
-// reference position to the input source. Same as the package-level abortAt,
-// but uses g to resolve p to a position.
-func (g *interfaceGenerator) abortAt(p token.Pos, msg string) {
- abortAt(g.f.Position(p), msg)
-}
-
-// scalarSize returns the size of type identified by t. If t isn't a primitive
-// type, the size isn't known at code generation time, and must be resolved via
-// the marshal.Marshallable interface.
-func (g *interfaceGenerator) scalarSize(t *ast.Ident) (size int, unknownSize bool) {
- switch t.Name {
- case "int8", "uint8", "byte":
- return 1, false
- case "int16", "uint16":
- return 2, false
- case "int32", "uint32":
- return 4, false
- case "int64", "uint64":
- return 8, false
- default:
- return 0, true
- }
-}
-
-func (g *interfaceGenerator) shift(bufVar string, n int) {
- g.emit("%s = %s[%d:]\n", bufVar, bufVar, n)
-}
-
-func (g *interfaceGenerator) shiftDynamic(bufVar, name string) {
- g.emit("%s = %s[%s.SizeBytes():]\n", bufVar, bufVar, name)
-}
-
-// marshalScalar writes a single scalar to a byte slice.
-func (g *interfaceGenerator) marshalScalar(accessor, typ, bufVar string) {
- switch typ {
- case "int8", "uint8", "byte":
- g.emit("%s[0] = byte(%s)\n", bufVar, accessor)
- g.shift(bufVar, 1)
- case "int16", "uint16":
- g.recordUsedImport("usermem")
- g.emit("usermem.ByteOrder.PutUint16(%s[:2], uint16(%s))\n", bufVar, accessor)
- g.shift(bufVar, 2)
- case "int32", "uint32":
- g.recordUsedImport("usermem")
- g.emit("usermem.ByteOrder.PutUint32(%s[:4], uint32(%s))\n", bufVar, accessor)
- g.shift(bufVar, 4)
- case "int64", "uint64":
- g.recordUsedImport("usermem")
- g.emit("usermem.ByteOrder.PutUint64(%s[:8], uint64(%s))\n", bufVar, accessor)
- g.shift(bufVar, 8)
- default:
- g.emit("%s.MarshalBytes(%s[:%s.SizeBytes()])\n", accessor, bufVar, accessor)
- g.shiftDynamic(bufVar, accessor)
- }
-}
-
-// unmarshalScalar reads a single scalar from a byte slice.
-func (g *interfaceGenerator) unmarshalScalar(accessor, typ, bufVar string) {
- switch typ {
- case "byte":
- g.emit("%s = %s[0]\n", accessor, bufVar)
- g.shift(bufVar, 1)
- case "int8", "uint8":
- g.emit("%s = %s(%s[0])\n", accessor, typ, bufVar)
- g.shift(bufVar, 1)
- case "int16", "uint16":
- g.recordUsedImport("usermem")
- g.emit("%s = %s(usermem.ByteOrder.Uint16(%s[:2]))\n", accessor, typ, bufVar)
- g.shift(bufVar, 2)
- case "int32", "uint32":
- g.recordUsedImport("usermem")
- g.emit("%s = %s(usermem.ByteOrder.Uint32(%s[:4]))\n", accessor, typ, bufVar)
- g.shift(bufVar, 4)
- case "int64", "uint64":
- g.recordUsedImport("usermem")
- g.emit("%s = %s(usermem.ByteOrder.Uint64(%s[:8]))\n", accessor, typ, bufVar)
- g.shift(bufVar, 8)
- default:
- g.emit("%s.UnmarshalBytes(%s[:%s.SizeBytes()])\n", accessor, bufVar, accessor)
- g.shiftDynamic(bufVar, accessor)
- g.recordPotentiallyNonPackedField(accessor)
- }
-}
-
-// emitCastToByteSlice unsafely casts an arbitrary type's underlying memory to a
-// byte slice, bypassing escape analysis. The caller is responsible for ensuring
-// srcPtr lives until they're done with dstVar, as the runtime does not consider
-// dstVar dependent on srcPtr due to the escape analysis bypass.
-//
-// srcPtr must be a pointer.
-//
-// This function internally uses the identifier "hdr", and cannot be used
-// in a context where it is already bound.
-func (g *interfaceGenerator) emitCastToByteSlice(srcPtr, dstVar, lenExpr string) {
- g.recordUsedImport("gohacks")
- g.emit("// Construct a slice backed by dst's underlying memory.\n")
- g.emit("var %s []byte\n", dstVar)
- g.emit("hdr := (*reflect.SliceHeader)(unsafe.Pointer(&%s))\n", dstVar)
- g.emit("hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(%s)))\n", srcPtr)
- g.emit("hdr.Len = %s\n", lenExpr)
- g.emit("hdr.Cap = %s\n\n", lenExpr)
-}
-
-// emitCastSliceToByteSlice unsafely casts a slice with elements of an arbitrary type
-// to a byte slice. As part of the cast, the byte slice is made to look
-// independent of the src slice by bypassing escape analysis. This means the
-// byte slice can be used without causing the source to escape. The caller is
-// responsible for ensuring srcPtr lives until they're done with dstVar, as the
-// runtime no longer considers dstVar dependent on srcPtr and is free to GC it.
-//
-// srcPtr must be a pointer.
-//
-// This function internally uses the identifiers "ptr", "val" and "hdr",
-// and cannot be used in a context where these identifiers are already bound.
-func (g *interfaceGenerator) emitCastSliceToByteSlice(srcPtr, dstVar, lenExpr string) {
- g.emitNoEscapeSliceDataPointer(srcPtr, "val")
-
- g.emit("// Construct a slice backed by dst's underlying memory.\n")
- g.emit("var %s []byte\n", dstVar)
- g.emit("hdr := (*reflect.SliceHeader)(unsafe.Pointer(&%s))\n", dstVar)
- g.emit("hdr.Data = uintptr(val)\n")
- g.emit("hdr.Len = %s\n", lenExpr)
- g.emit("hdr.Cap = %s\n\n", lenExpr)
-}
-
-// emitNoEscapeSliceDataPointer unsafely casts a slice's data pointer to an
-// unsafe.Pointer, bypassing escape analysis. The caller is responsible for
-// ensuring srcPtr lives until they're done with dstVar, as the runtime no
-// longer considers dstVar dependent on srcPtr and is free to GC it.
-//
-// srcPtr must be a pointer.
-//
-// This function internally uses the identifier "ptr", and cannot be used in a
-// context where this identifier is already bound.
-func (g *interfaceGenerator) emitNoEscapeSliceDataPointer(srcPtr, dstVar string) {
- g.recordUsedImport("gohacks")
- g.emit("ptr := unsafe.Pointer(%s)\n", srcPtr)
- g.emit("%s := gohacks.Noescape(unsafe.Pointer((*reflect.SliceHeader)(ptr).Data))\n\n", dstVar)
-}
-
-func (g *interfaceGenerator) emitKeepAlive(ptrVar string) {
- g.emit("// Since we bypassed the compiler's escape analysis, indicate that %s\n", ptrVar)
- g.emit("// must live until the use above.\n")
- g.emit("runtime.KeepAlive(%s) // escapes: replaced by intrinsic.\n", ptrVar)
-}
-
-func (g *interfaceGenerator) expandBinaryExpr(b *strings.Builder, e *ast.BinaryExpr) {
- switch x := e.X.(type) {
- case *ast.BinaryExpr:
- // Recursively expand sub-expression.
- g.expandBinaryExpr(b, x)
- case *ast.Ident:
- fmt.Fprintf(b, "%s", x.Name)
- case *ast.BasicLit:
- fmt.Fprintf(b, "%s", x.Value)
- default:
- g.abortAt(e.Pos(), "Cannot convert binary expression to output code. Go-marshal currently only handles simple expressions of literals, constants and basic identifiers")
- }
-
- fmt.Fprintf(b, "%s", e.Op)
-
- switch y := e.Y.(type) {
- case *ast.BinaryExpr:
- // Recursively expand sub-expression.
- g.expandBinaryExpr(b, y)
- case *ast.Ident:
- fmt.Fprintf(b, "%s", y.Name)
- case *ast.BasicLit:
- fmt.Fprintf(b, "%s", y.Value)
- default:
- g.abortAt(e.Pos(), "Cannot convert binary expression to output code. Go-marshal currently only handles simple expressions of literals, constants and basic identifiers")
- }
-}
-
-// arrayLenExpr returns a string containing a valid golang expression
-// representing the length of array a. The returned expression should be treated
-// as a single value, and will be already parenthesized as required.
-func (g *interfaceGenerator) arrayLenExpr(a *ast.ArrayType) string {
- var b strings.Builder
-
- switch l := a.Len.(type) {
- case *ast.Ident:
- fmt.Fprintf(&b, "%s", l.Name)
- case *ast.BasicLit:
- fmt.Fprintf(&b, "%s", l.Value)
- case *ast.BinaryExpr:
- g.expandBinaryExpr(&b, l)
- return fmt.Sprintf("(%s)", b.String())
- default:
- g.abortAt(l.Pos(), "Cannot convert this array len expression to output code. Go-marshal currently only handles simple expressions of literals, constants and basic identifiers")
- }
- return b.String()
-}
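
To make the scalar helpers concrete, below is a hand-written approximation of the code marshalScalar and shift emit for two uint32 fields (the `point` type is hypothetical; real generator output also includes SizeBytes, UnmarshalBytes and the unsafe variants):

```
package example

import (
	"gvisor.dev/gvisor/pkg/usermem"
)

// point is a hypothetical packed struct with two uint32 fields.
type point struct {
	X uint32
	Y uint32
}

// MarshalBytes mirrors the generated pattern: write each field with
// usermem.ByteOrder, then shift dst past the bytes just written.
func (p *point) MarshalBytes(dst []byte) {
	usermem.ByteOrder.PutUint32(dst[:4], uint32(p.X))
	dst = dst[4:]
	usermem.ByteOrder.PutUint32(dst[:4], uint32(p.Y))
	dst = dst[4:] // Final shift kept to match the generated pattern.
}
```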
diff --git a/tools/go_marshal/gomarshal/generator_interfaces_array_newtype.go b/tools/go_marshal/gomarshal/generator_interfaces_array_newtype.go
deleted file mode 100644
index 7525b52da..000000000
--- a/tools/go_marshal/gomarshal/generator_interfaces_array_newtype.go
+++ /dev/null
@@ -1,146 +0,0 @@
-// Copyright 2020 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// This file contains the bits of the code generator specific to marshalling
-// newtypes on arrays.
-
-package gomarshal
-
-import (
- "fmt"
- "go/ast"
-)
-
-func (g *interfaceGenerator) validateArrayNewtype(n *ast.Ident, a *ast.ArrayType) {
- if a.Len == nil {
- g.abortAt(a.Pos(), fmt.Sprintf("Dynamically sized slice '%s' cannot be marshalled, arrays must be statically sized", n.Name))
- }
-
- if _, ok := a.Elt.(*ast.Ident); !ok {
- g.abortAt(a.Elt.Pos(), fmt.Sprintf("Marshalling not supported for arrays with %s elements, array elements must be primitive types", kindString(a.Elt)))
- }
-}
-
-func (g *interfaceGenerator) emitMarshallableForArrayNewtype(n *ast.Ident, a *ast.ArrayType, elt *ast.Ident) {
- g.recordUsedImport("io")
- g.recordUsedImport("marshal")
- g.recordUsedImport("reflect")
- g.recordUsedImport("runtime")
- g.recordUsedImport("safecopy")
- g.recordUsedImport("unsafe")
- g.recordUsedImport("usermem")
-
- lenExpr := g.arrayLenExpr(a)
-
- g.emit("// SizeBytes implements marshal.Marshallable.SizeBytes.\n")
- g.emit("//go:nosplit\n")
- g.emit("func (%s *%s) SizeBytes() int {\n", g.r, g.typeName())
- g.inIndent(func() {
- if size, dynamic := g.scalarSize(elt); !dynamic {
- g.emit("return %d * %s\n", size, lenExpr)
- } else {
- g.emit("return (*%s)(nil).SizeBytes() * %s\n", n.Name, lenExpr)
- }
- })
- g.emit("}\n\n")
-
- g.emit("// MarshalBytes implements marshal.Marshallable.MarshalBytes.\n")
- g.emit("func (%s *%s) MarshalBytes(dst []byte) {\n", g.r, g.typeName())
- g.inIndent(func() {
- g.emit("for idx := 0; idx < %s; idx++ {\n", lenExpr)
- g.inIndent(func() {
- g.marshalScalar(fmt.Sprintf("%s[idx]", g.r), elt.Name, "dst")
- })
- g.emit("}\n")
- })
- g.emit("}\n\n")
-
- g.emit("// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes.\n")
- g.emit("func (%s *%s) UnmarshalBytes(src []byte) {\n", g.r, g.typeName())
- g.inIndent(func() {
- g.emit("for idx := 0; idx < %s; idx++ {\n", lenExpr)
- g.inIndent(func() {
- g.unmarshalScalar(fmt.Sprintf("%s[idx]", g.r), elt.Name, "src")
- })
- g.emit("}\n")
- })
- g.emit("}\n\n")
-
- g.emit("// Packed implements marshal.Marshallable.Packed.\n")
- g.emit("//go:nosplit\n")
- g.emit("func (%s *%s) Packed() bool {\n", g.r, g.typeName())
- g.inIndent(func() {
- g.emit("// Array newtypes are always packed.\n")
- g.emit("return true\n")
- })
- g.emit("}\n\n")
-
- g.emit("// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.\n")
- g.emit("func (%s *%s) MarshalUnsafe(dst []byte) {\n", g.r, g.typeName())
- g.inIndent(func() {
- g.emit("safecopy.CopyIn(dst, unsafe.Pointer(%s))\n", g.r)
- })
- g.emit("}\n\n")
-
- g.emit("// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe.\n")
- g.emit("func (%s *%s) UnmarshalUnsafe(src []byte) {\n", g.r, g.typeName())
- g.inIndent(func() {
- g.emit("safecopy.CopyOut(unsafe.Pointer(%s), src)\n", g.r)
- })
- g.emit("}\n\n")
-
- g.emit("// CopyOutN implements marshal.Marshallable.CopyOutN.\n")
- g.emit("//go:nosplit\n")
- g.emit("func (%s *%s) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) {\n", g.r, g.typeName())
- g.inIndent(func() {
- g.emitCastToByteSlice(g.r, "buf", fmt.Sprintf("%s.SizeBytes()", g.r))
-
- g.emit("length, err := cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay.\n")
- g.emitKeepAlive(g.r)
- g.emit("return length, err\n")
- })
- g.emit("}\n\n")
-
- g.emit("// CopyOut implements marshal.Marshallable.CopyOut.\n")
- g.emit("//go:nosplit\n")
- g.emit("func (%s *%s) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) {\n", g.r, g.typeName())
- g.inIndent(func() {
- g.emit("return %s.CopyOutN(cc, addr, %s.SizeBytes())\n", g.r, g.r)
- })
- g.emit("}\n\n")
-
- g.emit("// CopyIn implements marshal.Marshallable.CopyIn.\n")
- g.emit("//go:nosplit\n")
- g.emit("func (%s *%s) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) {\n", g.r, g.typeName())
- g.inIndent(func() {
- g.emitCastToByteSlice(g.r, "buf", fmt.Sprintf("%s.SizeBytes()", g.r))
-
- g.emit("length, err := cc.CopyInBytes(addr, buf) // escapes: okay.\n")
- g.emitKeepAlive(g.r)
- g.emit("return length, err\n")
- })
- g.emit("}\n\n")
-
- g.emit("// WriteTo implements io.WriterTo.WriteTo.\n")
- g.emit("func (%s *%s) WriteTo(w io.Writer) (int64, error) {\n", g.r, g.typeName())
- g.inIndent(func() {
- g.emitCastToByteSlice(g.r, "buf", fmt.Sprintf("%s.SizeBytes()", g.r))
-
- g.emit("length, err := w.Write(buf)\n")
- g.emitKeepAlive(g.r)
- g.emit("return int64(length), err\n")
-
- })
- g.emit("}\n\n")
-}
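
Putting the emit calls above together, the methods generated for a hypothetical newtype such as `type Buf [8]byte` would look roughly like the sketch below. The type name is invented; the per-element MarshalBytes/UnmarshalBytes loop bodies come from marshalScalar, which lives elsewhere in this package, so they are omitted, and safecopy/unsafe are the imports recorded above.

    type Buf [8]byte // hypothetical array newtype

    // SizeBytes implements marshal.Marshallable.SizeBytes.
    //go:nosplit
    func (b *Buf) SizeBytes() int {
        return 1 * 8
    }

    // Packed implements marshal.Marshallable.Packed.
    //go:nosplit
    func (b *Buf) Packed() bool {
        // Array newtypes are always packed.
        return true
    }

    // MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.
    func (b *Buf) MarshalUnsafe(dst []byte) {
        safecopy.CopyIn(dst, unsafe.Pointer(b))
    }
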
diff --git a/tools/go_marshal/gomarshal/generator_interfaces_primitive_newtype.go b/tools/go_marshal/gomarshal/generator_interfaces_primitive_newtype.go
deleted file mode 100644
index 7edaf666c..000000000
--- a/tools/go_marshal/gomarshal/generator_interfaces_primitive_newtype.go
+++ /dev/null
@@ -1,289 +0,0 @@
-// Copyright 2020 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// This file contains the bits of the code generator specific to marshalling
-// newtypes on primitives.
-
-package gomarshal
-
-import (
- "fmt"
- "go/ast"
-)
-
-// marshalPrimitiveScalar writes a single primitive variable to a byte
-// slice.
-func (g *interfaceGenerator) marshalPrimitiveScalar(accessor, typ, bufVar string) {
- switch typ {
- case "int8", "uint8", "byte":
- g.emit("%s[0] = byte(*%s)\n", bufVar, accessor)
- case "int16", "uint16":
- g.recordUsedImport("usermem")
- g.emit("usermem.ByteOrder.PutUint16(%s[:2], uint16(*%s))\n", bufVar, accessor)
- case "int32", "uint32":
- g.recordUsedImport("usermem")
- g.emit("usermem.ByteOrder.PutUint32(%s[:4], uint32(*%s))\n", bufVar, accessor)
- case "int64", "uint64":
- g.recordUsedImport("usermem")
- g.emit("usermem.ByteOrder.PutUint64(%s[:8], uint64(*%s))\n", bufVar, accessor)
- default:
-		g.emit("// Explicitly cast to the underlying type before dispatching to\n")
- g.emit("// MarshalBytes, so we don't recursively call %s.MarshalBytes\n", accessor)
- g.emit("inner := (*%s)(%s)\n", typ, accessor)
- g.emit("inner.MarshalBytes(%s[:%s.SizeBytes()])\n", bufVar, accessor)
- }
-}
-
-// unmarshalPrimitiveScalar reads a single primitive variable from a byte slice.
-func (g *interfaceGenerator) unmarshalPrimitiveScalar(accessor, typ, bufVar, typeCast string) {
- switch typ {
- case "byte":
- g.emit("*%s = %s(%s[0])\n", accessor, typeCast, bufVar)
- case "int8", "uint8":
- g.emit("*%s = %s(%s(%s[0]))\n", accessor, typeCast, typ, bufVar)
- case "int16", "uint16":
- g.recordUsedImport("usermem")
- g.emit("*%s = %s(%s(usermem.ByteOrder.Uint16(%s[:2])))\n", accessor, typeCast, typ, bufVar)
- case "int32", "uint32":
- g.recordUsedImport("usermem")
- g.emit("*%s = %s(%s(usermem.ByteOrder.Uint32(%s[:4])))\n", accessor, typeCast, typ, bufVar)
- case "int64", "uint64":
- g.recordUsedImport("usermem")
- g.emit("*%s = %s(%s(usermem.ByteOrder.Uint64(%s[:8])))\n", accessor, typeCast, typ, bufVar)
- default:
-		g.emit("// Explicitly cast to the underlying type before dispatching to\n")
- g.emit("// UnmarshalBytes, so we don't recursively call %s.UnmarshalBytes\n", accessor)
- g.emit("inner := (*%s)(%s)\n", typ, accessor)
- g.emit("inner.UnmarshalBytes(%s[:%s.SizeBytes()])\n", bufVar, accessor)
- }
-}
-
-func (g *interfaceGenerator) validatePrimitiveNewtype(t *ast.Ident) {
- switch t.Name {
- case "int8", "uint8", "byte", "int16", "uint16", "int32", "uint32", "int64", "uint64":
-		// These are the only primitive types we allow. Below, we provide
- // suggestions for some disallowed types and reject them, then attempt
- // to marshal any remaining types by invoking the marshal.Marshallable
- // interface on them. If these types don't actually implement
- // marshal.Marshallable, compilation of the generated code will fail
- // with an appropriate error message.
- return
- case "int":
- g.abortAt(t.Pos(), "Type 'int' has ambiguous width, use int32 or int64")
- case "uint":
- g.abortAt(t.Pos(), "Type 'uint' has ambiguous width, use uint32 or uint64")
- case "string":
- g.abortAt(t.Pos(), "Type 'string' is dynamically-sized and cannot be marshalled, use a fixed size byte array '[...]byte' instead")
- default:
- debugfAt(g.f.Position(t.Pos()), fmt.Sprintf("Found derived type '%s', will attempt dispatch via marshal.Marshallable.\n", t.Name))
- }
-}
-
-// emitMarshallableForPrimitiveNewtype outputs code to implement the
-// marshal.Marshallable interface for a newtype on a primitive. Primitive
-// newtypes are always packed, so we can omit the various fallbacks required for
-// non-packed structs.
-func (g *interfaceGenerator) emitMarshallableForPrimitiveNewtype(nt *ast.Ident) {
- g.recordUsedImport("io")
- g.recordUsedImport("marshal")
- g.recordUsedImport("reflect")
- g.recordUsedImport("runtime")
- g.recordUsedImport("safecopy")
- g.recordUsedImport("unsafe")
- g.recordUsedImport("usermem")
-
- g.emit("// SizeBytes implements marshal.Marshallable.SizeBytes.\n")
- g.emit("//go:nosplit\n")
- g.emit("func (%s *%s) SizeBytes() int {\n", g.r, g.typeName())
- g.inIndent(func() {
- if size, dynamic := g.scalarSize(nt); !dynamic {
- g.emit("return %d\n", size)
- } else {
- g.emit("return (*%s)(nil).SizeBytes()\n", nt.Name)
- }
- })
- g.emit("}\n\n")
-
- g.emit("// MarshalBytes implements marshal.Marshallable.MarshalBytes.\n")
- g.emit("func (%s *%s) MarshalBytes(dst []byte) {\n", g.r, g.typeName())
- g.inIndent(func() {
- g.marshalPrimitiveScalar(g.r, nt.Name, "dst")
- })
- g.emit("}\n\n")
-
- g.emit("// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes.\n")
- g.emit("func (%s *%s) UnmarshalBytes(src []byte) {\n", g.r, g.typeName())
- g.inIndent(func() {
- g.unmarshalPrimitiveScalar(g.r, nt.Name, "src", g.typeName())
- })
- g.emit("}\n\n")
-
- g.emit("// Packed implements marshal.Marshallable.Packed.\n")
- g.emit("//go:nosplit\n")
- g.emit("func (%s *%s) Packed() bool {\n", g.r, g.typeName())
- g.inIndent(func() {
- g.emit("// Scalar newtypes are always packed.\n")
- g.emit("return true\n")
- })
- g.emit("}\n\n")
-
- g.emit("// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.\n")
- g.emit("func (%s *%s) MarshalUnsafe(dst []byte) {\n", g.r, g.typeName())
- g.inIndent(func() {
- g.emit("safecopy.CopyIn(dst, unsafe.Pointer(%s))\n", g.r)
- })
- g.emit("}\n\n")
-
- g.emit("// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe.\n")
- g.emit("func (%s *%s) UnmarshalUnsafe(src []byte) {\n", g.r, g.typeName())
- g.inIndent(func() {
- g.emit("safecopy.CopyOut(unsafe.Pointer(%s), src)\n", g.r)
- })
- g.emit("}\n\n")
-
- g.emit("// CopyOutN implements marshal.Marshallable.CopyOutN.\n")
- g.emit("//go:nosplit\n")
- g.emit("func (%s *%s) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) {\n", g.r, g.typeName())
- g.inIndent(func() {
- g.emitCastToByteSlice(g.r, "buf", fmt.Sprintf("%s.SizeBytes()", g.r))
-
- g.emit("length, err := cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay.\n")
- g.emitKeepAlive(g.r)
- g.emit("return length, err\n")
- })
- g.emit("}\n\n")
-
- g.emit("// CopyOut implements marshal.Marshallable.CopyOut.\n")
- g.emit("//go:nosplit\n")
- g.emit("func (%s *%s) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) {\n", g.r, g.typeName())
- g.inIndent(func() {
- g.emit("return %s.CopyOutN(cc, addr, %s.SizeBytes())\n", g.r, g.r)
- })
- g.emit("}\n\n")
-
- g.emit("// CopyIn implements marshal.Marshallable.CopyIn.\n")
- g.emit("//go:nosplit\n")
- g.emit("func (%s *%s) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) {\n", g.r, g.typeName())
- g.inIndent(func() {
- g.emitCastToByteSlice(g.r, "buf", fmt.Sprintf("%s.SizeBytes()", g.r))
-
- g.emit("length, err := cc.CopyInBytes(addr, buf) // escapes: okay.\n")
- g.emitKeepAlive(g.r)
- g.emit("return length, err\n")
- })
- g.emit("}\n\n")
-
- g.emit("// WriteTo implements io.WriterTo.WriteTo.\n")
- g.emit("func (%s *%s) WriteTo(w io.Writer) (int64, error) {\n", g.r, g.typeName())
- g.inIndent(func() {
- g.emitCastToByteSlice(g.r, "buf", fmt.Sprintf("%s.SizeBytes()", g.r))
-
- g.emit("length, err := w.Write(buf)\n")
- g.emitKeepAlive(g.r)
- g.emit("return int64(length), err\n")
-
- })
- g.emit("}\n\n")
-}
-
-func (g *interfaceGenerator) emitMarshallableSliceForPrimitiveNewtype(nt *ast.Ident, slice *sliceAPI) {
- g.recordUsedImport("marshal")
- g.recordUsedImport("usermem")
- g.recordUsedImport("reflect")
- g.recordUsedImport("runtime")
- g.recordUsedImport("unsafe")
-
- eltType := g.typeName()
- if slice.inner {
- eltType = nt.Name
- }
-
- g.emit("// Copy%sIn copies in a slice of %s objects from the task's memory.\n", slice.ident, eltType)
- g.emit("//go:nosplit\n")
- g.emit("func Copy%sIn(cc marshal.CopyContext, addr usermem.Addr, dst []%s) (int, error) {\n", slice.ident, eltType)
- g.inIndent(func() {
- g.emit("count := len(dst)\n")
- g.emit("if count == 0 {\n")
- g.inIndent(func() {
- g.emit("return 0, nil\n")
- })
- g.emit("}\n")
- g.emit("size := (*%s)(nil).SizeBytes()\n\n", g.typeName())
-
- g.emitCastSliceToByteSlice("&dst", "buf", "size * count")
-
- g.emit("length, err := cc.CopyInBytes(addr, buf) // escapes: okay.\n")
- g.emitKeepAlive("dst")
- g.emit("return length, err\n")
- })
- g.emit("}\n\n")
-
- g.emit("// Copy%sOut copies a slice of %s objects to the task's memory.\n", slice.ident, eltType)
- g.emit("//go:nosplit\n")
- g.emit("func Copy%sOut(cc marshal.CopyContext, addr usermem.Addr, src []%s) (int, error) {\n", slice.ident, eltType)
- g.inIndent(func() {
- g.emit("count := len(src)\n")
- g.emit("if count == 0 {\n")
- g.inIndent(func() {
- g.emit("return 0, nil\n")
- })
- g.emit("}\n")
- g.emit("size := (*%s)(nil).SizeBytes()\n\n", g.typeName())
-
- g.emitCastSliceToByteSlice("&src", "buf", "size * count")
-
- g.emit("length, err := cc.CopyOutBytes(addr, buf) // escapes: okay.\n")
- g.emitKeepAlive("src")
- g.emit("return length, err\n")
- })
- g.emit("}\n\n")
-
- g.emit("// MarshalUnsafe%s is like %s.MarshalUnsafe, but for a []%s.\n", slice.ident, g.typeName(), g.typeName())
- g.emit("func MarshalUnsafe%s(src []%s, dst []byte) (int, error) {\n", slice.ident, g.typeName())
- g.inIndent(func() {
- g.emit("count := len(src)\n")
- g.emit("if count == 0 {\n")
- g.inIndent(func() {
- g.emit("return 0, nil\n")
- })
- g.emit("}\n")
- g.emit("size := (*%s)(nil).SizeBytes()\n\n", g.typeName())
-
- g.emitNoEscapeSliceDataPointer("&src", "val")
-
- g.emit("length, err := safecopy.CopyIn(dst[:(size*count)], val)\n")
- g.emitKeepAlive("src")
- g.emit("return length, err\n")
- })
- g.emit("}\n\n")
-
- g.emit("// UnmarshalUnsafe%s is like %s.UnmarshalUnsafe, but for a []%s.\n", slice.ident, g.typeName(), g.typeName())
- g.emit("func UnmarshalUnsafe%s(dst []%s, src []byte) (int, error) {\n", slice.ident, g.typeName())
- g.inIndent(func() {
- g.emit("count := len(dst)\n")
- g.emit("if count == 0 {\n")
- g.inIndent(func() {
- g.emit("return 0, nil\n")
- })
- g.emit("}\n")
- g.emit("size := (*%s)(nil).SizeBytes()\n\n", g.typeName())
-
- g.emitNoEscapeSliceDataPointer("&dst", "val")
-
- g.emit("length, err := safecopy.CopyOut(val, src[:(size*count)])\n")
- g.emitKeepAlive("dst")
- g.emit("return length, err\n")
- })
- g.emit("}\n\n")
-}
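
For a primitive newtype, say a hypothetical `type UID uint32`, the scalar helpers above expand to byte-order calls, so the generated methods are roughly the following sketch (usermem is the import recorded above):

    type UID uint32 // hypothetical primitive newtype

    // SizeBytes implements marshal.Marshallable.SizeBytes.
    //go:nosplit
    func (u *UID) SizeBytes() int {
        return 4
    }

    // MarshalBytes implements marshal.Marshallable.MarshalBytes.
    func (u *UID) MarshalBytes(dst []byte) {
        usermem.ByteOrder.PutUint32(dst[:4], uint32(*u))
    }

    // UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes.
    func (u *UID) UnmarshalBytes(src []byte) {
        *u = UID(uint32(usermem.ByteOrder.Uint32(src[:4])))
    }
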
diff --git a/tools/go_marshal/gomarshal/generator_interfaces_struct.go b/tools/go_marshal/gomarshal/generator_interfaces_struct.go
deleted file mode 100644
index fe76d3785..000000000
--- a/tools/go_marshal/gomarshal/generator_interfaces_struct.go
+++ /dev/null
@@ -1,619 +0,0 @@
-// Copyright 2020 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// This file contains the bits of the code generator specific to marshalling
-// structs.
-
-package gomarshal
-
-import (
- "fmt"
- "go/ast"
- "sort"
- "strings"
-)
-
-func (g *interfaceGenerator) fieldAccessor(n *ast.Ident) string {
- return fmt.Sprintf("%s.%s", g.r, n.Name)
-}
-
-// areFieldsPackedExpression returns a go expression checking whether g.t's fields are
-// packed. Returns "", false if g.t has no fields that may be potentially
-// packed, otherwise returns <clause>, true, where <clause> is an expression
-// like "t.a.Packed() && t.b.Packed() && t.c.Packed()".
-func (g *interfaceGenerator) areFieldsPackedExpression() (string, bool) {
- if len(g.as) == 0 {
- return "", false
- }
-
- cs := make([]string, 0, len(g.as))
- for accessor, _ := range g.as {
- cs = append(cs, fmt.Sprintf("%s.Packed()", accessor))
- }
-	// Sort expressions for deterministic build outputs.
- sort.Strings(cs)
- return strings.Join(cs, " && "), true
-}
-
-// validateStruct ensures the type we're working with can be marshalled. These
-// checks are done ahead of time and in one place so we can make assumptions
-// later.
-func (g *interfaceGenerator) validateStruct(ts *ast.TypeSpec, st *ast.StructType) {
- forEachStructField(st, func(f *ast.Field) {
- fieldDispatcher{
- primitive: func(_, t *ast.Ident) {
- g.validatePrimitiveNewtype(t)
- },
- selector: func(_, _, _ *ast.Ident) {
- // No validation to perform on selector fields. However this
- // callback must still be provided.
- },
- array: func(n *ast.Ident, a *ast.ArrayType, _ *ast.Ident) {
- g.validateArrayNewtype(n, a)
- },
- unhandled: func(_ *ast.Ident) {
- g.abortAt(f.Pos(), fmt.Sprintf("Marshalling not supported for %s fields", kindString(f.Type)))
- },
- }.dispatch(f)
- })
-}
-
-func (g *interfaceGenerator) isStructPacked(st *ast.StructType) bool {
- packed := true
- forEachStructField(st, func(f *ast.Field) {
- if f.Tag != nil {
- if f.Tag.Value == "`marshal:\"unaligned\"`" {
- if packed {
- debugfAt(g.f.Position(g.t.Pos()),
- fmt.Sprintf("Marking type '%s' as not packed due to tag `marshal:\"unaligned\"`.\n", g.t.Name))
- packed = false
- }
- }
- }
- })
- return packed
-}
-
-func (g *interfaceGenerator) emitMarshallableForStruct(st *ast.StructType) {
- thisPacked := g.isStructPacked(st)
-
- g.emit("// SizeBytes implements marshal.Marshallable.SizeBytes.\n")
- g.emit("func (%s *%s) SizeBytes() int {\n", g.r, g.typeName())
- g.inIndent(func() {
- primitiveSize := 0
- var dynamicSizeTerms []string
-
- forEachStructField(st, fieldDispatcher{
- primitive: func(_, t *ast.Ident) {
- if size, dynamic := g.scalarSize(t); !dynamic {
- primitiveSize += size
- } else {
- g.recordUsedMarshallable(t.Name)
- dynamicSizeTerms = append(dynamicSizeTerms, fmt.Sprintf("(*%s)(nil).SizeBytes()", t.Name))
- }
- },
- selector: func(_, tX, tSel *ast.Ident) {
- tName := fmt.Sprintf("%s.%s", tX.Name, tSel.Name)
- g.recordUsedImport(tX.Name)
- g.recordUsedMarshallable(tName)
- dynamicSizeTerms = append(dynamicSizeTerms, fmt.Sprintf("(*%s)(nil).SizeBytes()", tName))
- },
- array: func(_ *ast.Ident, a *ast.ArrayType, t *ast.Ident) {
- lenExpr := g.arrayLenExpr(a)
- if size, dynamic := g.scalarSize(t); !dynamic {
- dynamicSizeTerms = append(dynamicSizeTerms, fmt.Sprintf("%d*%s", size, lenExpr))
- } else {
- g.recordUsedMarshallable(t.Name)
- dynamicSizeTerms = append(dynamicSizeTerms, fmt.Sprintf("(*%s)(nil).SizeBytes()*%s", t.Name, lenExpr))
- }
- },
- }.dispatch)
- g.emit("return %d", primitiveSize)
- if len(dynamicSizeTerms) > 0 {
- g.incIndent()
- }
- {
- for _, d := range dynamicSizeTerms {
- g.emitNoIndent(" +\n")
- g.emit(d)
- }
- }
- if len(dynamicSizeTerms) > 0 {
- g.decIndent()
- }
- })
- g.emit("\n}\n\n")
-
- g.emit("// MarshalBytes implements marshal.Marshallable.MarshalBytes.\n")
- g.emit("func (%s *%s) MarshalBytes(dst []byte) {\n", g.r, g.typeName())
- g.inIndent(func() {
- forEachStructField(st, fieldDispatcher{
- primitive: func(n, t *ast.Ident) {
- if n.Name == "_" {
- g.emit("// Padding: dst[:sizeof(%s)] ~= %s(0)\n", t.Name, t.Name)
- if len, dynamic := g.scalarSize(t); !dynamic {
- g.shift("dst", len)
- } else {
- // We can't use shiftDynamic here because we don't have
- // an instance of the dynamic type we can reference here
- // (since the version in this struct is anonymous). Use
- // a typed nil pointer to call SizeBytes() instead.
- g.emit("dst = dst[(*%s)(nil).SizeBytes():]\n", t.Name)
- }
- return
- }
- g.marshalScalar(g.fieldAccessor(n), t.Name, "dst")
- },
- selector: func(n, tX, tSel *ast.Ident) {
- if n.Name == "_" {
- g.emit("// Padding: dst[:sizeof(%s)] ~= %s(0)\n", tX.Name, tSel.Name)
- g.emit("dst = dst[(*%s.%s)(nil).SizeBytes():]\n", tX.Name, tSel.Name)
- return
- }
- g.marshalScalar(g.fieldAccessor(n), fmt.Sprintf("%s.%s", tX.Name, tSel.Name), "dst")
- },
- array: func(n *ast.Ident, a *ast.ArrayType, t *ast.Ident) {
- lenExpr := g.arrayLenExpr(a)
- if n.Name == "_" {
- g.emit("// Padding: dst[:sizeof(%s)*%s] ~= [%s]%s{0}\n", t.Name, lenExpr, lenExpr, t.Name)
- if size, dynamic := g.scalarSize(t); !dynamic {
- g.emit("dst = dst[%d*(%s):]\n", size, lenExpr)
- } else {
- // We can't use shiftDynamic here because we don't have
- // an instance of the dynamic type we can reference here
- // (since the version in this struct is anonymous). Use
- // a typed nil pointer to call SizeBytes() instead.
- g.emit("dst = dst[(*%s)(nil).SizeBytes()*(%s):]\n", t.Name, lenExpr)
- }
- return
- }
-
- g.emit("for idx := 0; idx < %s; idx++ {\n", lenExpr)
- g.inIndent(func() {
- g.marshalScalar(fmt.Sprintf("%s[idx]", g.fieldAccessor(n)), t.Name, "dst")
- })
- g.emit("}\n")
- },
- }.dispatch)
- })
- g.emit("}\n\n")
-
- g.emit("// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes.\n")
- g.emit("func (%s *%s) UnmarshalBytes(src []byte) {\n", g.r, g.typeName())
- g.inIndent(func() {
- forEachStructField(st, fieldDispatcher{
- primitive: func(n, t *ast.Ident) {
- if n.Name == "_" {
- g.emit("// Padding: var _ %s ~= src[:sizeof(%s)]\n", t.Name, t.Name)
- if len, dynamic := g.scalarSize(t); !dynamic {
- g.shift("src", len)
- } else {
- // We don't have an instance of the dynamic type we can
- // reference here (since the version in this struct is
- // anonymous). Use a typed nil pointer to call
- // SizeBytes() instead.
- g.shiftDynamic("src", fmt.Sprintf("(*%s)(nil)", t.Name))
- g.recordPotentiallyNonPackedField(fmt.Sprintf("(*%s)(nil)", t.Name))
- }
- return
- }
- g.unmarshalScalar(g.fieldAccessor(n), t.Name, "src")
- },
- selector: func(n, tX, tSel *ast.Ident) {
- if n.Name == "_" {
- g.emit("// Padding: %s ~= src[:sizeof(%s.%s)]\n", g.fieldAccessor(n), tX.Name, tSel.Name)
- g.emit("src = src[(*%s.%s)(nil).SizeBytes():]\n", tX.Name, tSel.Name)
- g.recordPotentiallyNonPackedField(fmt.Sprintf("(*%s.%s)(nil)", tX.Name, tSel.Name))
- return
- }
- g.unmarshalScalar(g.fieldAccessor(n), fmt.Sprintf("%s.%s", tX.Name, tSel.Name), "src")
- },
- array: func(n *ast.Ident, a *ast.ArrayType, t *ast.Ident) {
- lenExpr := g.arrayLenExpr(a)
- if n.Name == "_" {
- g.emit("// Padding: ~ copy([%s]%s(%s), src[:sizeof(%s)*%s])\n", lenExpr, t.Name, g.fieldAccessor(n), t.Name, lenExpr)
- if size, dynamic := g.scalarSize(t); !dynamic {
- g.emit("src = src[%d*(%s):]\n", size, lenExpr)
- } else {
- // We can't use shiftDynamic here because we don't have
-						// an instance of the dynamic type we can reference here
- // (since the version in this struct is anonymous). Use
- // a typed nil pointer to call SizeBytes() instead.
- g.emit("src = src[(*%s)(nil).SizeBytes()*(%s):]\n", t.Name, lenExpr)
- }
- return
- }
-
- g.emit("for idx := 0; idx < %s; idx++ {\n", lenExpr)
- g.inIndent(func() {
- g.unmarshalScalar(fmt.Sprintf("%s[idx]", g.fieldAccessor(n)), t.Name, "src")
- })
- g.emit("}\n")
- },
- }.dispatch)
- })
- g.emit("}\n\n")
-
- g.emit("// Packed implements marshal.Marshallable.Packed.\n")
- g.emit("//go:nosplit\n")
- g.emit("func (%s *%s) Packed() bool {\n", g.r, g.typeName())
- g.inIndent(func() {
- expr, fieldsMaybePacked := g.areFieldsPackedExpression()
- switch {
- case !thisPacked:
- g.emit("return false\n")
- case fieldsMaybePacked:
- g.emit("return %s\n", expr)
- default:
- g.emit("return true\n")
-
- }
- })
- g.emit("}\n\n")
-
- g.emit("// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.\n")
- g.emit("func (%s *%s) MarshalUnsafe(dst []byte) {\n", g.r, g.typeName())
- g.inIndent(func() {
- fallback := func() {
-			g.emit("// Type %s doesn't have a packed layout in memory, fall back to MarshalBytes.\n", g.typeName())
- g.emit("%s.MarshalBytes(dst)\n", g.r)
- }
- if thisPacked {
- g.recordUsedImport("safecopy")
- g.recordUsedImport("unsafe")
- if cond, ok := g.areFieldsPackedExpression(); ok {
- g.emit("if %s {\n", cond)
- g.inIndent(func() {
- g.emit("safecopy.CopyIn(dst, unsafe.Pointer(%s))\n", g.r)
- })
- g.emit("} else {\n")
- g.inIndent(fallback)
- g.emit("}\n")
- } else {
- g.emit("safecopy.CopyIn(dst, unsafe.Pointer(%s))\n", g.r)
- }
- } else {
- fallback()
- }
- })
- g.emit("}\n\n")
-
- g.emit("// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe.\n")
- g.emit("func (%s *%s) UnmarshalUnsafe(src []byte) {\n", g.r, g.typeName())
- g.inIndent(func() {
- fallback := func() {
-			g.emit("// Type %s doesn't have a packed layout in memory, fall back to UnmarshalBytes.\n", g.typeName())
- g.emit("%s.UnmarshalBytes(src)\n", g.r)
- }
- if thisPacked {
- g.recordUsedImport("safecopy")
- g.recordUsedImport("unsafe")
- if cond, ok := g.areFieldsPackedExpression(); ok {
- g.emit("if %s {\n", cond)
- g.inIndent(func() {
- g.emit("safecopy.CopyOut(unsafe.Pointer(%s), src)\n", g.r)
- })
- g.emit("} else {\n")
- g.inIndent(fallback)
- g.emit("}\n")
- } else {
- g.emit("safecopy.CopyOut(unsafe.Pointer(%s), src)\n", g.r)
- }
- } else {
- fallback()
- }
- })
- g.emit("}\n\n")
-
- g.emit("// CopyOutN implements marshal.Marshallable.CopyOutN.\n")
- g.emit("//go:nosplit\n")
- g.recordUsedImport("marshal")
- g.recordUsedImport("usermem")
- g.emit("func (%s *%s) CopyOutN(cc marshal.CopyContext, addr usermem.Addr, limit int) (int, error) {\n", g.r, g.typeName())
- g.inIndent(func() {
- fallback := func() {
- g.emit("// Type %s doesn't have a packed layout in memory, fall back to MarshalBytes.\n", g.typeName())
- g.emit("buf := cc.CopyScratchBuffer(%s.SizeBytes()) // escapes: okay.\n", g.r)
- g.emit("%s.MarshalBytes(buf) // escapes: fallback.\n", g.r)
- g.emit("return cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay.\n")
- }
- if thisPacked {
- g.recordUsedImport("reflect")
- g.recordUsedImport("runtime")
- g.recordUsedImport("unsafe")
- if cond, ok := g.areFieldsPackedExpression(); ok {
- g.emit("if !%s {\n", cond)
- g.inIndent(fallback)
- g.emit("}\n\n")
- }
- // Fast serialization.
- g.emitCastToByteSlice(g.r, "buf", fmt.Sprintf("%s.SizeBytes()", g.r))
-
- g.emit("length, err := cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay.\n")
- g.emitKeepAlive(g.r)
- g.emit("return length, err\n")
- } else {
- fallback()
- }
- })
- g.emit("}\n\n")
-
- g.emit("// CopyOut implements marshal.Marshallable.CopyOut.\n")
- g.emit("//go:nosplit\n")
- g.recordUsedImport("marshal")
- g.recordUsedImport("usermem")
- g.emit("func (%s *%s) CopyOut(cc marshal.CopyContext, addr usermem.Addr) (int, error) {\n", g.r, g.typeName())
- g.inIndent(func() {
- g.emit("return %s.CopyOutN(cc, addr, %s.SizeBytes())\n", g.r, g.r)
- })
- g.emit("}\n\n")
-
- g.emit("// CopyIn implements marshal.Marshallable.CopyIn.\n")
- g.emit("//go:nosplit\n")
- g.recordUsedImport("marshal")
- g.recordUsedImport("usermem")
- g.emit("func (%s *%s) CopyIn(cc marshal.CopyContext, addr usermem.Addr) (int, error) {\n", g.r, g.typeName())
- g.inIndent(func() {
- fallback := func() {
- g.emit("// Type %s doesn't have a packed layout in memory, fall back to UnmarshalBytes.\n", g.typeName())
- g.emit("buf := cc.CopyScratchBuffer(%s.SizeBytes()) // escapes: okay.\n", g.r)
- g.emit("length, err := cc.CopyInBytes(addr, buf) // escapes: okay.\n")
- g.emit("// Unmarshal unconditionally. If we had a short copy-in, this results in a\n")
- g.emit("// partially unmarshalled struct.\n")
- g.emit("%s.UnmarshalBytes(buf) // escapes: fallback.\n", g.r)
- g.emit("return length, err\n")
- }
- if thisPacked {
- g.recordUsedImport("reflect")
- g.recordUsedImport("runtime")
- g.recordUsedImport("unsafe")
- if cond, ok := g.areFieldsPackedExpression(); ok {
- g.emit("if !%s {\n", cond)
- g.inIndent(fallback)
- g.emit("}\n\n")
- }
- // Fast deserialization.
- g.emitCastToByteSlice(g.r, "buf", fmt.Sprintf("%s.SizeBytes()", g.r))
-
- g.emit("length, err := cc.CopyInBytes(addr, buf) // escapes: okay.\n")
- g.emitKeepAlive(g.r)
- g.emit("return length, err\n")
- } else {
- fallback()
- }
- })
- g.emit("}\n\n")
-
- g.emit("// WriteTo implements io.WriterTo.WriteTo.\n")
- g.recordUsedImport("io")
- g.emit("func (%s *%s) WriteTo(writer io.Writer) (int64, error) {\n", g.r, g.typeName())
- g.inIndent(func() {
- fallback := func() {
- g.emit("// Type %s doesn't have a packed layout in memory, fall back to MarshalBytes.\n", g.typeName())
- g.emit("buf := make([]byte, %s.SizeBytes())\n", g.r)
- g.emit("%s.MarshalBytes(buf)\n", g.r)
- g.emit("length, err := writer.Write(buf)\n")
- g.emit("return int64(length), err\n")
- }
- if thisPacked {
- g.recordUsedImport("reflect")
- g.recordUsedImport("runtime")
- g.recordUsedImport("unsafe")
- if cond, ok := g.areFieldsPackedExpression(); ok {
- g.emit("if !%s {\n", cond)
- g.inIndent(fallback)
- g.emit("}\n\n")
- }
- // Fast serialization.
- g.emitCastToByteSlice(g.r, "buf", fmt.Sprintf("%s.SizeBytes()", g.r))
-
- g.emit("length, err := writer.Write(buf)\n")
- g.emitKeepAlive(g.r)
- g.emit("return int64(length), err\n")
- } else {
- fallback()
- }
- })
- g.emit("}\n\n")
-}
-
-func (g *interfaceGenerator) emitMarshallableSliceForStruct(st *ast.StructType, slice *sliceAPI) {
- thisPacked := g.isStructPacked(st)
-
- if slice.inner {
- abortAt(g.f.Position(slice.comment.Slash), fmt.Sprintf("The ':inner' argument to '+marshal slice:%s:inner' is only applicable to newtypes on primitives. Remove it from this struct declaration.", slice.ident))
- }
-
- g.recordUsedImport("marshal")
- g.recordUsedImport("usermem")
-
- g.emit("// Copy%sIn copies in a slice of %s objects from the task's memory.\n", slice.ident, g.typeName())
- g.emit("func Copy%sIn(cc marshal.CopyContext, addr usermem.Addr, dst []%s) (int, error) {\n", slice.ident, g.typeName())
- g.inIndent(func() {
- g.emit("count := len(dst)\n")
- g.emit("if count == 0 {\n")
- g.inIndent(func() {
- g.emit("return 0, nil\n")
- })
- g.emit("}\n")
- g.emit("size := (*%s)(nil).SizeBytes()\n\n", g.typeName())
-
- fallback := func() {
- g.emit("// Type %s doesn't have a packed layout in memory, fall back to UnmarshalBytes.\n", g.typeName())
- g.emit("buf := cc.CopyScratchBuffer(size * count)\n")
- g.emit("length, err := cc.CopyInBytes(addr, buf)\n\n")
-
- g.emit("// Unmarshal as much as possible, even on error. First handle full objects.\n")
- g.emit("limit := length/size\n")
- g.emit("for idx := 0; idx < limit; idx++ {\n")
- g.inIndent(func() {
- g.emit("dst[idx].UnmarshalBytes(buf[size*idx:size*(idx+1)])\n")
- })
- g.emit("}\n\n")
-
- g.emit("// Handle any final partial object. buf is guaranteed to be long enough for the\n")
- g.emit("// final element, but may not contain valid data for the entire range. This may\n")
- g.emit("// result in unmarshalling zero values for some parts of the object.\n")
- g.emit("if length%size != 0 {\n")
- g.inIndent(func() {
- g.emit("idx := limit\n")
- g.emit("dst[idx].UnmarshalBytes(buf[size*idx:size*(idx+1)])\n")
- })
- g.emit("}\n\n")
-
- g.emit("return length, err\n")
- }
- if thisPacked {
- g.recordUsedImport("reflect")
- g.recordUsedImport("runtime")
- g.recordUsedImport("unsafe")
- if _, ok := g.areFieldsPackedExpression(); ok {
- g.emit("if !dst[0].Packed() {\n")
- g.inIndent(fallback)
- g.emit("}\n\n")
- }
- // Fast deserialization.
- g.emitCastSliceToByteSlice("&dst", "buf", "size * count")
-
- g.emit("length, err := cc.CopyInBytes(addr, buf)\n")
- g.emitKeepAlive("dst")
- g.emit("return length, err\n")
- } else {
- fallback()
- }
- })
- g.emit("}\n\n")
-
- g.emit("// Copy%sOut copies a slice of %s objects to the task's memory.\n", slice.ident, g.typeName())
- g.emit("func Copy%sOut(cc marshal.CopyContext, addr usermem.Addr, src []%s) (int, error) {\n", slice.ident, g.typeName())
- g.inIndent(func() {
- g.emit("count := len(src)\n")
- g.emit("if count == 0 {\n")
- g.inIndent(func() {
- g.emit("return 0, nil\n")
- })
- g.emit("}\n")
- g.emit("size := (*%s)(nil).SizeBytes()\n\n", g.typeName())
-
- fallback := func() {
- g.emit("// Type %s doesn't have a packed layout in memory, fall back to MarshalBytes.\n", g.typeName())
- g.emit("buf := cc.CopyScratchBuffer(size * count)\n")
- g.emit("for idx := 0; idx < count; idx++ {\n")
- g.inIndent(func() {
- g.emit("src[idx].MarshalBytes(buf[size*idx:size*(idx+1)])\n")
- })
- g.emit("}\n")
- g.emit("return cc.CopyOutBytes(addr, buf)\n")
- }
- if thisPacked {
- g.recordUsedImport("reflect")
- g.recordUsedImport("runtime")
- g.recordUsedImport("unsafe")
- if _, ok := g.areFieldsPackedExpression(); ok {
- g.emit("if !src[0].Packed() {\n")
- g.inIndent(fallback)
- g.emit("}\n\n")
- }
- // Fast serialization.
- g.emitCastSliceToByteSlice("&src", "buf", "size * count")
-
- g.emit("length, err := cc.CopyOutBytes(addr, buf)\n")
- g.emitKeepAlive("src")
- g.emit("return length, err\n")
- } else {
- fallback()
- }
- })
- g.emit("}\n\n")
-
- g.emit("// MarshalUnsafe%s is like %s.MarshalUnsafe, but for a []%s.\n", slice.ident, g.typeName(), g.typeName())
- g.emit("func MarshalUnsafe%s(src []%s, dst []byte) (int, error) {\n", slice.ident, g.typeName())
- g.inIndent(func() {
- g.emit("count := len(src)\n")
- g.emit("if count == 0 {\n")
- g.inIndent(func() {
- g.emit("return 0, nil\n")
- })
- g.emit("}\n")
- g.emit("size := (*%s)(nil).SizeBytes()\n\n", g.typeName())
-
- fallback := func() {
- g.emit("// Type %s doesn't have a packed layout in memory, fall back to MarshalBytes.\n", g.typeName())
- g.emit("for idx := 0; idx < count; idx++ {\n")
- g.inIndent(func() {
- g.emit("src[idx].MarshalBytes(dst[size*idx:(size)*(idx+1)])\n")
- })
- g.emit("}\n")
- g.emit("return size * count, nil\n")
- }
- if thisPacked {
- g.recordUsedImport("reflect")
- g.recordUsedImport("runtime")
- g.recordUsedImport("unsafe")
- if _, ok := g.areFieldsPackedExpression(); ok {
- g.emit("if !src[0].Packed() {\n")
- g.inIndent(fallback)
- g.emit("}\n\n")
- }
- g.emitNoEscapeSliceDataPointer("&src", "val")
-
- g.emit("length, err := safecopy.CopyIn(dst[:(size*count)], val)\n")
- g.emitKeepAlive("src")
- g.emit("return length, err\n")
- } else {
- fallback()
- }
- })
- g.emit("}\n\n")
-
- g.emit("// UnmarshalUnsafe%s is like %s.UnmarshalUnsafe, but for a []%s.\n", slice.ident, g.typeName(), g.typeName())
- g.emit("func UnmarshalUnsafe%s(dst []%s, src []byte) (int, error) {\n", slice.ident, g.typeName())
- g.inIndent(func() {
- g.emit("count := len(dst)\n")
- g.emit("if count == 0 {\n")
- g.inIndent(func() {
- g.emit("return 0, nil\n")
- })
- g.emit("}\n")
- g.emit("size := (*%s)(nil).SizeBytes()\n\n", g.typeName())
-
- fallback := func() {
- g.emit("// Type %s doesn't have a packed layout in memory, fall back to UnmarshalBytes.\n", g.typeName())
- g.emit("for idx := 0; idx < count; idx++ {\n")
- g.inIndent(func() {
- g.emit("dst[idx].UnmarshalBytes(src[size*idx:size*(idx+1)])\n")
- })
- g.emit("}\n")
- g.emit("return size * count, nil\n")
- }
- if thisPacked {
- g.recordUsedImport("reflect")
- g.recordUsedImport("runtime")
- g.recordUsedImport("unsafe")
- if _, ok := g.areFieldsPackedExpression(); ok {
- g.emit("if !dst[0].Packed() {\n")
- g.inIndent(fallback)
- g.emit("}\n\n")
- }
- g.emitNoEscapeSliceDataPointer("&dst", "val")
-
- g.emit("length, err := safecopy.CopyOut(val, src[:(size*count)])\n")
- g.emitKeepAlive("dst")
- g.emit("return length, err\n")
- } else {
- fallback()
- }
- })
- g.emit("}\n\n")
-}
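
The struct path differs from the newtype paths mainly in the packed/fallback split. For a hypothetical struct with one field whose layout might not be packed (Timespec here stands in for any other marshallable type), the generated Packed and MarshalUnsafe would read roughly as sketched below:

    type Header struct { // hypothetical struct
        Flags uint32
        Size  uint32
        Time  Timespec // assumed to be another marshallable type
    }

    // Packed implements marshal.Marshallable.Packed.
    //go:nosplit
    func (h *Header) Packed() bool {
        return h.Time.Packed()
    }

    // MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.
    func (h *Header) MarshalUnsafe(dst []byte) {
        if h.Time.Packed() {
            safecopy.CopyIn(dst, unsafe.Pointer(h))
        } else {
            // Type Header doesn't have a packed layout in memory, fall back to MarshalBytes.
            h.MarshalBytes(dst)
        }
    }
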
diff --git a/tools/go_marshal/gomarshal/generator_tests.go b/tools/go_marshal/gomarshal/generator_tests.go
deleted file mode 100644
index 631295373..000000000
--- a/tools/go_marshal/gomarshal/generator_tests.go
+++ /dev/null
@@ -1,233 +0,0 @@
-// Copyright 2019 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package gomarshal
-
-import (
- "fmt"
- "go/ast"
- "io"
- "strings"
-)
-
-var standardImports = []string{
- "bytes",
- "fmt",
- "reflect",
- "testing",
-
- "gvisor.dev/gvisor/tools/go_marshal/analysis",
-}
-
-var sliceAPIImports = []string{
- "encoding/binary",
- "gvisor.dev/gvisor/pkg/usermem",
-}
-
-type testGenerator struct {
- sourceBuffer
-
- // The type we're serializing.
- t *ast.TypeSpec
-
- // Receiver argument for generated methods.
- r string
-
- // Imports used by generated code.
- imports *importTable
-
- // Import statement for the package declaring the type we generated code
- // for. We need this to construct test instances for the type, since the
- // tests aren't written in the same package.
- decl *importStmt
-}
-
-func newTestGenerator(t *ast.TypeSpec) *testGenerator {
- g := &testGenerator{
- t: t,
- r: receiverName(t),
- imports: newImportTable(),
- }
-
- for _, i := range standardImports {
- g.imports.add(i).markUsed()
- }
- // These imports are used if a type requests the slice API. Don't
- // mark them as used by default.
- for _, i := range sliceAPIImports {
- g.imports.add(i)
- }
-
- return g
-}
-
-func (g *testGenerator) typeName() string {
- return g.t.Name.Name
-}
-
-func (g *testGenerator) testFuncName(base string) string {
- return fmt.Sprintf("%s%s", base, strings.Title(g.t.Name.Name))
-}
-
-func (g *testGenerator) inTestFunction(name string, body func()) {
- g.emit("func %s(t *testing.T) {\n", g.testFuncName(name))
- g.inIndent(body)
- g.emit("}\n\n")
-}
-
-func (g *testGenerator) emitTestNonZeroSize() {
- g.inTestFunction("TestSizeNonZero", func() {
- g.emit("var x %v\n", g.typeName())
- g.emit("if x.SizeBytes() == 0 {\n")
- g.inIndent(func() {
- g.emit("t.Fatal(\"Marshallable.SizeBytes() should not return zero\")\n")
- })
- g.emit("}\n")
- })
-}
-
-func (g *testGenerator) emitTestSuspectAlignment() {
- g.inTestFunction("TestSuspectAlignment", func() {
- g.emit("var x %v\n", g.typeName())
- g.emit("analysis.AlignmentCheck(t, reflect.TypeOf(x))\n")
- })
-}
-
-func (g *testGenerator) emitTestMarshalUnmarshalPreservesData() {
- g.inTestFunction("TestSafeMarshalUnmarshalPreservesData", func() {
- g.emit("var x, y, z, yUnsafe, zUnsafe %s\n", g.typeName())
- g.emit("analysis.RandomizeValue(&x)\n\n")
-
- g.emit("buf := make([]byte, x.SizeBytes())\n")
- g.emit("x.MarshalBytes(buf)\n")
- g.emit("bufUnsafe := make([]byte, x.SizeBytes())\n")
- g.emit("x.MarshalUnsafe(bufUnsafe)\n\n")
-
- g.emit("y.UnmarshalBytes(buf)\n")
- g.emit("if !reflect.DeepEqual(x, y) {\n")
- g.inIndent(func() {
- g.emit("t.Fatal(fmt.Sprintf(\"Data corrupted across MarshalBytes/UnmarshalBytes cycle:\\nBefore: %+v\\nAfter: %+v\\n\", x, y))\n")
- })
- g.emit("}\n")
- g.emit("yUnsafe.UnmarshalBytes(bufUnsafe)\n")
- g.emit("if !reflect.DeepEqual(x, yUnsafe) {\n")
- g.inIndent(func() {
- g.emit("t.Fatal(fmt.Sprintf(\"Data corrupted across MarshalUnsafe/UnmarshalBytes cycle:\\nBefore: %+v\\nAfter: %+v\\n\", x, yUnsafe))\n")
- })
- g.emit("}\n\n")
-
- g.emit("z.UnmarshalUnsafe(buf)\n")
- g.emit("if !reflect.DeepEqual(x, z) {\n")
- g.inIndent(func() {
- g.emit("t.Fatal(fmt.Sprintf(\"Data corrupted across MarshalBytes/UnmarshalUnsafe cycle:\\nBefore: %+v\\nAfter: %+v\\n\", x, z))\n")
- })
- g.emit("}\n")
- g.emit("zUnsafe.UnmarshalUnsafe(bufUnsafe)\n")
- g.emit("if !reflect.DeepEqual(x, zUnsafe) {\n")
- g.inIndent(func() {
- g.emit("t.Fatal(fmt.Sprintf(\"Data corrupted across MarshalUnsafe/UnmarshalUnsafe cycle:\\nBefore: %+v\\nAfter: %+v\\n\", x, zUnsafe))\n")
- })
- g.emit("}\n")
- })
-}
-
-func (g *testGenerator) emitTestMarshalUnmarshalSlicePreservesData(slice *sliceAPI) {
- for _, name := range []string{"binary", "usermem"} {
- if !g.imports.markUsed(name) {
- panic(fmt.Sprintf("Generated test for '%s' referenced a non-existent import with local name '%s'", g.typeName(), name))
- }
- }
-
- g.inTestFunction("TestSafeMarshalUnmarshalSlicePreservesData", func() {
- g.emit("var x, y, yUnsafe [8]%s\n", g.typeName())
- g.emit("analysis.RandomizeValue(&x)\n\n")
- g.emit("size := (*%s)(nil).SizeBytes() * len(x)\n", g.typeName())
- g.emit("buf := bytes.NewBuffer(make([]byte, size))\n")
- g.emit("buf.Reset()\n")
- g.emit("if err := binary.Write(buf, usermem.ByteOrder, x[:]); err != nil {\n")
- g.inIndent(func() {
- g.emit("t.Fatal(fmt.Sprintf(\"binary.Write failed: %v\", err))\n")
- })
- g.emit("}\n")
- g.emit("bufUnsafe := make([]byte, size)\n")
- g.emit("MarshalUnsafe%s(x[:], bufUnsafe)\n\n", slice.ident)
-
- g.emit("UnmarshalUnsafe%s(y[:], buf.Bytes())\n", slice.ident)
- g.emit("if !reflect.DeepEqual(x, y) {\n")
- g.inIndent(func() {
- g.emit("t.Fatal(fmt.Sprintf(\"Data corrupted across binary.Write/UnmarshalUnsafeSlice cycle:\\nBefore: %+v\\nAfter: %+v\\n\", x, y))\n")
- })
- g.emit("}\n")
- g.emit("UnmarshalUnsafe%s(yUnsafe[:], bufUnsafe)\n", slice.ident)
- g.emit("if !reflect.DeepEqual(x, yUnsafe) {\n")
- g.inIndent(func() {
- g.emit("t.Fatal(fmt.Sprintf(\"Data corrupted across MarshalUnsafeSlice/UnmarshalUnsafeSlice cycle:\\nBefore: %+v\\nAfter: %+v\\n\", x, yUnsafe))\n")
- })
- g.emit("}\n\n")
- })
-}
-
-func (g *testGenerator) emitTestWriteToUnmarshalPreservesData() {
- g.inTestFunction("TestWriteToUnmarshalPreservesData", func() {
- g.emit("var x, y, yUnsafe %s\n", g.typeName())
- g.emit("analysis.RandomizeValue(&x)\n\n")
-
- g.emit("var buf bytes.Buffer\n\n")
-
- g.emit("x.WriteTo(&buf)\n")
- g.emit("y.UnmarshalBytes(buf.Bytes())\n\n")
- g.emit("yUnsafe.UnmarshalUnsafe(buf.Bytes())\n\n")
-
- g.emit("if !reflect.DeepEqual(x, y) {\n")
- g.inIndent(func() {
- g.emit("t.Fatal(fmt.Sprintf(\"Data corrupted across WriteTo/UnmarshalBytes cycle:\\nBefore: %+v\\nAfter: %+v\\n\", x, y))\n")
- })
- g.emit("}\n")
- g.emit("if !reflect.DeepEqual(x, yUnsafe) {\n")
- g.inIndent(func() {
- g.emit("t.Fatal(fmt.Sprintf(\"Data corrupted across WriteTo/UnmarshalUnsafe cycle:\\nBefore: %+v\\nAfter: %+v\\n\", x, yUnsafe))\n")
- })
- g.emit("}\n")
- })
-}
-
-func (g *testGenerator) emitTestSizeBytesOnTypedNilPtr() {
- g.inTestFunction("TestSizeBytesOnTypedNilPtr", func() {
- g.emit("var x %s\n", g.typeName())
- g.emit("sizeFromConcrete := x.SizeBytes()\n")
- g.emit("sizeFromTypedNilPtr := (*%s)(nil).SizeBytes()\n\n", g.typeName())
-
- g.emit("if sizeFromTypedNilPtr != sizeFromConcrete {\n")
- g.inIndent(func() {
- g.emit("t.Fatalf(\"SizeBytes() on typed nil pointer (%v) doesn't match size returned by a concrete object (%v).\\n\", sizeFromTypedNilPtr, sizeFromConcrete)\n")
- })
- g.emit("}\n")
- })
-}
-
-func (g *testGenerator) emitTests(slice *sliceAPI) {
- g.emitTestNonZeroSize()
- g.emitTestSuspectAlignment()
- g.emitTestMarshalUnmarshalPreservesData()
- g.emitTestWriteToUnmarshalPreservesData()
- g.emitTestSizeBytesOnTypedNilPtr()
-
- if slice != nil {
- g.emitTestMarshalUnmarshalSlicePreservesData(slice)
- }
-}
-
-func (g *testGenerator) write(out io.Writer) error {
- return g.sourceBuffer.write(out)
-}
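
The test generator emits one test per property and type. For the hypothetical UID newtype used earlier, the first two emitted tests would read roughly:

    func TestSizeNonZeroUID(t *testing.T) {
        var x UID
        if x.SizeBytes() == 0 {
            t.Fatal("Marshallable.SizeBytes() should not return zero")
        }
    }

    func TestSuspectAlignmentUID(t *testing.T) {
        var x UID
        analysis.AlignmentCheck(t, reflect.TypeOf(x))
    }
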
diff --git a/tools/go_marshal/gomarshal/util.go b/tools/go_marshal/gomarshal/util.go
deleted file mode 100644
index 6a42691cd..000000000
--- a/tools/go_marshal/gomarshal/util.go
+++ /dev/null
@@ -1,503 +0,0 @@
-// Copyright 2019 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package gomarshal
-
-import (
- "bytes"
- "flag"
- "fmt"
- "go/ast"
- "go/token"
- "io"
- "os"
- "path"
- "reflect"
- "sort"
- "strings"
-)
-
-var debug = flag.Bool("debug", false, "enables debugging output")
-
-// receiverName returns an appropriate receiver name given a type spec.
-func receiverName(t *ast.TypeSpec) string {
- if len(t.Name.Name) < 1 {
- // Zero length type name?
- panic("unreachable")
- }
- return strings.ToLower(t.Name.Name[:1])
-}
-
-// kindString returns a user-friendly representation of an AST expr type.
-func kindString(e ast.Expr) string {
- switch e.(type) {
- case *ast.Ident:
- return "scalar"
- case *ast.ArrayType:
- return "array"
- case *ast.StructType:
- return "struct"
- case *ast.StarExpr:
- return "pointer"
- case *ast.FuncType:
- return "function"
- case *ast.InterfaceType:
- return "interface"
- case *ast.MapType:
- return "map"
- case *ast.ChanType:
- return "channel"
- default:
- return reflect.TypeOf(e).String()
- }
-}
-
-func forEachStructField(st *ast.StructType, fn func(f *ast.Field)) {
- for _, field := range st.Fields.List {
- fn(field)
- }
-}
-
-// fieldDispatcher is a collection of callbacks for handling different types of
-// fields in a struct declaration.
-type fieldDispatcher struct {
- primitive func(n, t *ast.Ident)
- selector func(n, tX, tSel *ast.Ident)
- array func(n *ast.Ident, a *ast.ArrayType, t *ast.Ident)
- unhandled func(n *ast.Ident)
-}
-
-// Precondition: All dispatch callbacks that will be invoked must be
-// provided.
-func (fd fieldDispatcher) dispatch(f *ast.Field) {
- // Each field declaration may actually be multiple declarations of the same
- // type. For example, consider:
- //
- // type Point struct {
- // x, y, z int
- // }
- //
- // We invoke the call-backs once per such instance.
-
- // Handle embedded fields. Embedded fields have no names, but can be
- // referenced by the type name.
- if len(f.Names) < 1 {
- switch v := f.Type.(type) {
- case *ast.Ident:
- fd.primitive(v, v)
- case *ast.SelectorExpr:
- fd.selector(v.Sel, v.X.(*ast.Ident), v.Sel)
- default:
-			// Note: Arrays can't be embedded, so that case is caught here.
- panic(fmt.Sprintf("Attempted to dispatch on embedded field of unsupported kind: %#v", f.Type))
- }
- return
- }
-
- // Non-embedded field.
- for _, name := range f.Names {
- switch v := f.Type.(type) {
- case *ast.Ident:
- fd.primitive(name, v)
- case *ast.SelectorExpr:
- fd.selector(name, v.X.(*ast.Ident), v.Sel)
- case *ast.ArrayType:
- switch t := v.Elt.(type) {
- case *ast.Ident:
- fd.array(name, v, t)
- default:
- // Should be handled with a better error message during validate.
- panic(fmt.Sprintf("Array element type is of unsupported kind. Expected *ast.Ident, got %v", t))
- }
- default:
- fd.unhandled(name)
- }
- }
-}
-
-// debugEnabled indicates whether debugging is enabled for gomarshal.
-func debugEnabled() bool {
- return *debug
-}
-
-// abort aborts the go_marshal tool with the given error message.
-func abort(msg string) {
- if !strings.HasSuffix(msg, "\n") {
- msg += "\n"
- }
- fmt.Print(msg)
- os.Exit(1)
-}
-
-// abortAt aborts the go_marshal tool with the given error message, with
-// a reference position to the input source.
-func abortAt(p token.Position, msg string) {
- abort(fmt.Sprintf("%v:\n %s\n", p, msg))
-}
-
-// debugf conditionally prints a debug message.
-func debugf(f string, a ...interface{}) {
- if debugEnabled() {
- fmt.Printf(f, a...)
- }
-}
-
-// debugfAt conditionally prints a debug message with a reference to a position
-// in the input source.
-func debugfAt(p token.Position, f string, a ...interface{}) {
- if debugEnabled() {
- fmt.Printf("%s:\n %s", p, fmt.Sprintf(f, a...))
- }
-}
-
-// emit generates a line of code in the output file.
-//
-// emit is a wrapper around writing a formatted string to the output
-// buffer. emit can be invoked in one of two ways:
-//
-// (1) emit("some string")
-// When emit is called with a single string argument, it is simply copied to
-// the output buffer without any further formatting.
-// (2) emit(fmtString, args...)
-// emit can also be invoked in a similar fashion to *Printf() functions,
-// where the first argument is a format string.
-//
-// Calling emit with a single argument that is not a string will result in a
-// panic, as the caller's intent is ambiguous.
-func emit(out io.Writer, indent int, a ...interface{}) {
- const spacesPerIndentLevel = 4
-
- if len(a) < 1 {
- panic("emit() called with no arguments")
- }
-
- if indent > 0 {
- if _, err := fmt.Fprint(out, strings.Repeat(" ", indent*spacesPerIndentLevel)); err != nil {
- // Writing to the emit output should not fail. Typically the output
-			// is a bytes.Buffer; writes to these never fail.
- panic(err)
- }
- }
-
- first, ok := a[0].(string)
- if !ok {
- // First argument must be either the string to emit (case 1 from
- // function-level comment), or a format string (case 2).
- panic(fmt.Sprintf("First argument to emit() is not a string: %+v", a[0]))
- }
-
- if len(a) == 1 {
- // Single string argument. Assume no formatting requested.
- if _, err := fmt.Fprint(out, first); err != nil {
- // Writing to out should not fail.
- panic(err)
- }
- return
-
- }
-
- // Formatting requested.
- if _, err := fmt.Fprintf(out, first, a[1:]...); err != nil {
- // Writing to out should not fail.
- panic(err)
- }
-}
-
-// sourceBuffer represents fragments of generated go source code.
-//
-// sourceBuffer provides a convenient way to build up go source fragments in
-// memory. May be safely zero-value initialized. Not thread-safe.
-type sourceBuffer struct {
- // Current indentation level.
- indent int
-
- // Memory buffer containing contents while they're being generated.
- b bytes.Buffer
-}
-
-func (b *sourceBuffer) reset() {
- b.indent = 0
- b.b.Reset()
-}
-
-func (b *sourceBuffer) incIndent() {
- b.indent++
-}
-
-func (b *sourceBuffer) decIndent() {
- if b.indent <= 0 {
- panic("decIndent() without matching incIndent()")
- }
- b.indent--
-}
-
-func (b *sourceBuffer) emit(a ...interface{}) {
- emit(&b.b, b.indent, a...)
-}
-
-func (b *sourceBuffer) emitNoIndent(a ...interface{}) {
- emit(&b.b, 0 /*indent*/, a...)
-}
-
-func (b *sourceBuffer) inIndent(body func()) {
- b.incIndent()
- body()
- b.decIndent()
-}
-
-func (b *sourceBuffer) write(out io.Writer) error {
- _, err := fmt.Fprint(out, b.b.String())
- return err
-}
-
-// Write implements io.Writer.Write.
-func (b *sourceBuffer) Write(buf []byte) (int, error) {
- return (b.b.Write(buf))
-}
-
-// importStmt represents a single import statement.
-type importStmt struct {
- // Local name of the imported package.
- name string
- // Import path.
- path string
- // Indicates whether the local name is an alias, or simply the final
- // component of the path.
- aliased bool
- // Indicates whether this import was referenced by generated code.
- used bool
- // AST node and file set representing the import statement, if any. These
- // are only non-nil if the import statement originates from an input source
- // file.
- spec *ast.ImportSpec
- fset *token.FileSet
-}
-
-func newImport(p string) *importStmt {
- name := path.Base(p)
- return &importStmt{
- name: name,
- path: p,
- aliased: false,
- }
-}
-
-func newImportFromSpec(spec *ast.ImportSpec, f *token.FileSet) *importStmt {
- p := spec.Path.Value[1 : len(spec.Path.Value)-1] // Strip the " quotes around path.
- name := path.Base(p)
- if name == "" || name == "/" || name == "." {
- panic(fmt.Sprintf("Couldn't process local package name for import at %s, (processed as %s)",
- f.Position(spec.Path.Pos()), name))
- }
- if spec.Name != nil {
- name = spec.Name.Name
- }
- return &importStmt{
- name: name,
- path: p,
- aliased: spec.Name != nil,
- spec: spec,
- fset: f,
- }
-}
-
-// String implements fmt.Stringer.String. This generates a string for the import
-// statement appropriate for writing directly to generated code.
-func (i *importStmt) String() string {
- if i.aliased {
- return fmt.Sprintf("%s %q", i.name, i.path)
- }
- return fmt.Sprintf("%q", i.path)
-}
-
-// debugString returns a debug string representing an import statement. This
-// representation is not valid golang code and is used for debugging output.
-func (i *importStmt) debugString() string {
- if i.spec != nil && i.fset != nil {
- return fmt.Sprintf("%s: %s", i.fset.Position(i.spec.Path.Pos()), i)
- }
- return fmt.Sprintf("(go-marshal import): %s", i)
-}
-
-func (i *importStmt) markUsed() {
- i.used = true
-}
-
-func (i *importStmt) equivalent(other *importStmt) bool {
- return i.name == other.name && i.path == other.path && i.aliased == other.aliased
-}
-
-// importTable represents a collection of importStmts.
-//
-// An importTable may contain multiple import statements referencing the same
-// local name. All import statements aliasing to the same local name are
-// technically ambiguous, as if such an import name is used in the generated
-// code, it's not clear which import statement it refers to. We ignore any
-// potential collisions until actually writing the import table to the generated
-// source file. See importTable.write.
-//
-// Given the following import statements across all the files comprising a
-// marshalled package:
-//
-// "sync"
-// "pkg/sync"
-// "pkg/sentry/kernel"
-// ktime "pkg/sentry/kernel/time"
-//
-// An importTable representing them would look like this:
-//
-// importTable {
-// is: map[string][]*importStmt {
-// "sync": []*importStmt{
-// importStmt{name:"sync", path:"sync", aliased:false}
-// importStmt{name:"sync", path:"pkg/sync", aliased:false}
-// },
-// "kernel": []*importStmt{importStmt{
-// name: "kernel",
-// path: "pkg/sentry/kernel",
-// aliased: false
-// }},
-// "ktime": []*importStmt{importStmt{
-// name: "ktime",
-// path: "pkg/sentry/kernel/time",
-// aliased: true,
-// }},
-// }
-// }
-//
-// Note that the local name "sync" is assigned to two different import
-// statements. This is possible if the import statements are from different
-// source files in the same package.
-//
-// Since go-marshal generates a single output file per package regardless of the
-// number of input files, if "sync" is referenced by any generated code, it's
-// unclear which import statement "sync" refers to. While it's theoretically
-// possible to resolve this by assigning a unique local alias to each instance
-// of the sync package, go-marshal currently aborts when it encounters such an
-// ambiguity.
-//
-// TODO(b/151478251): importTable considers the final component of an import
-// path to be the package name, but this is only a convention. The actual
-// package name is determined by the package statement in the source files for
-// the package.
-type importTable struct {
- // Map of imports and whether they should be copied to the output.
- is map[string][]*importStmt
-}
-
-func newImportTable() *importTable {
- return &importTable{
- is: make(map[string][]*importStmt),
- }
-}
-
-// Merges import statements from other into i.
-func (i *importTable) merge(other *importTable) {
- for name, ims := range other.is {
- i.is[name] = append(i.is[name], ims...)
- }
-}
-
-func (i *importTable) addStmt(s *importStmt) *importStmt {
- i.is[s.name] = append(i.is[s.name], s)
- return s
-}
-
-func (i *importTable) add(s string) *importStmt {
- n := newImport(s)
- return i.addStmt(n)
-}
-
-func (i *importTable) addFromSpec(spec *ast.ImportSpec, f *token.FileSet) *importStmt {
- return i.addStmt(newImportFromSpec(spec, f))
-}
-
-// Marks the import named n as used. If no such import is in the table, returns
-// false.
-func (i *importTable) markUsed(n string) bool {
- if ns, ok := i.is[n]; ok {
- for _, n := range ns {
- n.markUsed()
- }
- return true
- }
- return false
-}
-
-func (i *importTable) clear() {
- for _, is := range i.is {
- for _, i := range is {
- i.used = false
- }
- }
-}
-
-func (i *importTable) write(out io.Writer) error {
- if len(i.is) == 0 {
- // Nothing to import, we're done.
- return nil
- }
-
- imports := make([]string, 0, len(i.is))
- for name, is := range i.is {
- var lastUsed *importStmt
- var ambiguous bool
-
- for _, i := range is {
- if i.used {
- if lastUsed != nil {
- if !i.equivalent(lastUsed) {
- ambiguous = true
- }
- }
- lastUsed = i
- }
- }
-
- if ambiguous {
- // We have two or more import statements across the different source
- // files that share a local name, and at least one of these imports
-			// is used by the generated code. This ambiguity can't be resolved
-			// by go-marshal and requires user intervention. Dump a list of
- // the colliding import statements and let the user modify the input
- // files as appropriate.
- var b strings.Builder
- fmt.Fprintf(&b, "The imported name %q is used by one of the types marked for marshalling, and which import statement the code refers to is ambiguous. Perhaps give the imports unique local names?\n\n", name)
- fmt.Fprintf(&b, "The following %d import statements are ambiguous for the local name %q:\n", len(is), name)
-			// Note: len(is) is guaranteed to be at least 1, otherwise ambiguous
-			// can't be true. Therefore the slicing below is safe.
- for _, i := range is[:len(is)-1] {
- fmt.Fprintf(&b, " %v\n", i.debugString())
- }
- fmt.Fprintf(&b, " %v", is[len(is)-1].debugString())
- panic(b.String())
- }
-
- if lastUsed != nil {
- imports = append(imports, lastUsed.String())
- }
- }
- sort.Strings(imports)
-
- var b sourceBuffer
- b.emit("import (\n")
- b.incIndent()
- for _, i := range imports {
- b.emit("%s\n", i)
- }
- b.decIndent()
- b.emit(")\n\n")
-
- return b.write(out)
-}
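
The ambiguity the deleted importTable guarded against can be illustrated with a short sketch, written as if inside the gomarshal package and using only the helpers shown in the hunk above (the function name is invented; "bytes" is assumed to be imported by the enclosing file):

func demoAmbiguousImport() {
	it := newImportTable()
	it.add("sync")                       // Final path component "sync" becomes the local name.
	it.add("gvisor.dev/gvisor/pkg/sync") // Different path, same local name "sync".
	it.markUsed("sync")                  // Generated code references "sync".

	var out bytes.Buffer
	// write panics here: two used imports share the local name "sync" but are
	// not equivalent, which is exactly the case reported by the error above.
	_ = it.write(&out)
}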
diff --git a/tools/go_marshal/main.go b/tools/go_marshal/main.go
deleted file mode 100644
index 6e4a3e8c4..000000000
--- a/tools/go_marshal/main.go
+++ /dev/null
@@ -1,73 +0,0 @@
-// Copyright 2019 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// go_marshal is a code generation utility for automatically generating code to
-// marshal go data structures to memory.
-//
-// This binary is typically run as part of the build process, and is invoked by
-// the go_marshal bazel rule defined in defs.bzl.
-//
-// See README.md.
-package main
-
-import (
- "flag"
- "fmt"
- "os"
- "strings"
-
- "gvisor.dev/gvisor/tools/go_marshal/gomarshal"
-)
-
-var (
- pkg = flag.String("pkg", "", "output package")
- output = flag.String("output", "", "output file")
- outputTest = flag.String("output_test", "", "output file for tests")
- outputTestUnconditional = flag.String("output_test_unconditional", "", "output file for unconditional tests")
- imports = flag.String("imports", "", "comma-separated list of extra packages to import in generated code")
-)
-
-func main() {
- flag.Usage = func() {
- fmt.Fprintf(os.Stderr, "Usage: %s <input go src files>\n", os.Args[0])
- flag.PrintDefaults()
- }
- flag.Parse()
- if len(flag.Args()) == 0 {
- flag.Usage()
- os.Exit(1)
- }
-
- if *pkg == "" {
- flag.Usage()
- fmt.Fprint(os.Stderr, "Flag -pkg must be provided.\n")
- os.Exit(1)
- }
-
- var extraImports []string
- if len(*imports) > 0 {
- // Note: strings.Split(s, sep) returns s if sep doesn't exist in s. Thus
- // we check for an empty imports list to avoid emitting an empty string
- // as an import.
- extraImports = strings.Split(*imports, ",")
- }
- g, err := gomarshal.NewGenerator(flag.Args(), *output, *outputTest, *outputTestUnconditional, *pkg, extraImports)
- if err != nil {
- panic(err)
- }
-
- if err := g.Run(); err != nil {
- panic(err)
- }
-}
diff --git a/tools/go_marshal/test/BUILD b/tools/go_marshal/test/BUILD
deleted file mode 100644
index 4b27773c2..000000000
--- a/tools/go_marshal/test/BUILD
+++ /dev/null
@@ -1,44 +0,0 @@
-load("//tools:defs.bzl", "go_library", "go_test")
-
-licenses(["notice"])
-
-package_group(
- name = "gomarshal_test",
- packages = [
- "//tools/go_marshal/test/...",
- ],
-)
-
-go_test(
- name = "benchmark_test",
- srcs = ["benchmark_test.go"],
- deps = [
- ":test",
- "//pkg/binary",
- "//pkg/usermem",
- "//tools/go_marshal/analysis",
- ],
-)
-
-go_library(
- name = "test",
- testonly = 1,
- srcs = ["test.go"],
- marshal = True,
- visibility = ["//tools/go_marshal/test:__subpackages__"],
- deps = ["//tools/go_marshal/test/external"],
-)
-
-go_test(
- name = "marshal_test",
- size = "small",
- srcs = ["marshal_test.go"],
- deps = [
- ":test",
- "//pkg/marshal",
- "//pkg/syserror",
- "//pkg/usermem",
- "//tools/go_marshal/analysis",
- "@com_github_google_go_cmp//cmp:go_default_library",
- ],
-)
diff --git a/tools/go_marshal/test/benchmark_test.go b/tools/go_marshal/test/benchmark_test.go
deleted file mode 100644
index 224d308c7..000000000
--- a/tools/go_marshal/test/benchmark_test.go
+++ /dev/null
@@ -1,220 +0,0 @@
-// Copyright 2019 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package benchmark_test
-
-import (
- "bytes"
- encbin "encoding/binary"
- "fmt"
- "reflect"
- "testing"
-
- "gvisor.dev/gvisor/pkg/binary"
- "gvisor.dev/gvisor/pkg/usermem"
- "gvisor.dev/gvisor/tools/go_marshal/analysis"
- "gvisor.dev/gvisor/tools/go_marshal/test"
-)
-
-// Marshalling using the standard encoding/binary package.
-func BenchmarkEncodingBinary(b *testing.B) {
- var s1, s2 test.Stat
- analysis.RandomizeValue(&s1)
-
- size := encbin.Size(&s1)
-
- b.ResetTimer()
-
- for n := 0; n < b.N; n++ {
- buf := bytes.NewBuffer(make([]byte, size))
- buf.Reset()
- if err := encbin.Write(buf, usermem.ByteOrder, &s1); err != nil {
- b.Error("Write:", err)
- }
- if err := encbin.Read(buf, usermem.ByteOrder, &s2); err != nil {
- b.Error("Read:", err)
- }
- }
-
- b.StopTimer()
-
- // Sanity check, make sure the values were preserved.
- if !reflect.DeepEqual(s1, s2) {
- panic(fmt.Sprintf("Data corruption across marshal/unmarshal cycle:\nBefore: %+v\nAfter: %+v\n", s1, s2))
- }
-}
-
-// Marshalling using the sentry's binary.Marshal.
-func BenchmarkBinary(b *testing.B) {
- var s1, s2 test.Stat
- analysis.RandomizeValue(&s1)
-
- size := binary.Size(s1)
-
- b.ResetTimer()
-
- for n := 0; n < b.N; n++ {
- buf := make([]byte, 0, size)
- buf = binary.Marshal(buf, usermem.ByteOrder, &s1)
- binary.Unmarshal(buf, usermem.ByteOrder, &s2)
- }
-
- b.StopTimer()
-
- // Sanity check, make sure the values were preserved.
- if !reflect.DeepEqual(s1, s2) {
- panic(fmt.Sprintf("Data corruption across marshal/unmarshal cycle:\nBefore: %+v\nAfter: %+v\n", s1, s2))
- }
-}
-
-// Marshalling field-by-field with manually-written code.
-func BenchmarkMarshalManual(b *testing.B) {
- var s1, s2 test.Stat
- analysis.RandomizeValue(&s1)
-
- b.ResetTimer()
-
- for n := 0; n < b.N; n++ {
- buf := make([]byte, 0, s1.SizeBytes())
-
- // Marshal
- buf = binary.AppendUint64(buf, usermem.ByteOrder, s1.Dev)
- buf = binary.AppendUint64(buf, usermem.ByteOrder, s1.Ino)
- buf = binary.AppendUint64(buf, usermem.ByteOrder, s1.Nlink)
- buf = binary.AppendUint32(buf, usermem.ByteOrder, s1.Mode)
- buf = binary.AppendUint32(buf, usermem.ByteOrder, s1.UID)
- buf = binary.AppendUint32(buf, usermem.ByteOrder, s1.GID)
- buf = binary.AppendUint32(buf, usermem.ByteOrder, 0)
- buf = binary.AppendUint64(buf, usermem.ByteOrder, s1.Rdev)
- buf = binary.AppendUint64(buf, usermem.ByteOrder, uint64(s1.Size))
- buf = binary.AppendUint64(buf, usermem.ByteOrder, uint64(s1.Blksize))
- buf = binary.AppendUint64(buf, usermem.ByteOrder, uint64(s1.Blocks))
- buf = binary.AppendUint64(buf, usermem.ByteOrder, uint64(s1.ATime.Sec))
- buf = binary.AppendUint64(buf, usermem.ByteOrder, uint64(s1.ATime.Nsec))
- buf = binary.AppendUint64(buf, usermem.ByteOrder, uint64(s1.MTime.Sec))
- buf = binary.AppendUint64(buf, usermem.ByteOrder, uint64(s1.MTime.Nsec))
- buf = binary.AppendUint64(buf, usermem.ByteOrder, uint64(s1.CTime.Sec))
- buf = binary.AppendUint64(buf, usermem.ByteOrder, uint64(s1.CTime.Nsec))
-
- // Unmarshal
- s2.Dev = usermem.ByteOrder.Uint64(buf[0:8])
- s2.Ino = usermem.ByteOrder.Uint64(buf[8:16])
- s2.Nlink = usermem.ByteOrder.Uint64(buf[16:24])
- s2.Mode = usermem.ByteOrder.Uint32(buf[24:28])
- s2.UID = usermem.ByteOrder.Uint32(buf[28:32])
- s2.GID = usermem.ByteOrder.Uint32(buf[32:36])
- // Padding: buf[36:40]
- s2.Rdev = usermem.ByteOrder.Uint64(buf[40:48])
- s2.Size = int64(usermem.ByteOrder.Uint64(buf[48:56]))
- s2.Blksize = int64(usermem.ByteOrder.Uint64(buf[56:64]))
- s2.Blocks = int64(usermem.ByteOrder.Uint64(buf[64:72]))
- s2.ATime.Sec = int64(usermem.ByteOrder.Uint64(buf[72:80]))
- s2.ATime.Nsec = int64(usermem.ByteOrder.Uint64(buf[80:88]))
- s2.MTime.Sec = int64(usermem.ByteOrder.Uint64(buf[88:96]))
- s2.MTime.Nsec = int64(usermem.ByteOrder.Uint64(buf[96:104]))
- s2.CTime.Sec = int64(usermem.ByteOrder.Uint64(buf[104:112]))
- s2.CTime.Nsec = int64(usermem.ByteOrder.Uint64(buf[112:120]))
- }
-
- b.StopTimer()
-
- // Sanity check, make sure the values were preserved.
- if !reflect.DeepEqual(s1, s2) {
- panic(fmt.Sprintf("Data corruption across marshal/unmarshal cycle:\nBefore: %+v\nAfter: %+v\n", s1, s2))
- }
-}
-
-// Marshalling with the go_marshal safe API.
-func BenchmarkGoMarshalSafe(b *testing.B) {
- var s1, s2 test.Stat
- analysis.RandomizeValue(&s1)
-
- b.ResetTimer()
-
- for n := 0; n < b.N; n++ {
- buf := make([]byte, s1.SizeBytes())
- s1.MarshalBytes(buf)
- s2.UnmarshalBytes(buf)
- }
-
- b.StopTimer()
-
- // Sanity check, make sure the values were preserved.
- if !reflect.DeepEqual(s1, s2) {
- panic(fmt.Sprintf("Data corruption across marshal/unmarshal cycle:\nBefore: %+v\nAfter: %+v\n", s1, s2))
- }
-}
-
-// Marshalling with the go_marshal unsafe API.
-func BenchmarkGoMarshalUnsafe(b *testing.B) {
- var s1, s2 test.Stat
- analysis.RandomizeValue(&s1)
-
- b.ResetTimer()
-
- for n := 0; n < b.N; n++ {
- buf := make([]byte, s1.SizeBytes())
- s1.MarshalUnsafe(buf)
- s2.UnmarshalUnsafe(buf)
- }
-
- b.StopTimer()
-
- // Sanity check, make sure the values were preserved.
- if !reflect.DeepEqual(s1, s2) {
- panic(fmt.Sprintf("Data corruption across marshal/unmarshal cycle:\nBefore: %+v\nAfter: %+v\n", s1, s2))
- }
-}
-
-func BenchmarkBinarySlice(b *testing.B) {
- var s1, s2 [64]test.Stat
- analysis.RandomizeValue(&s1)
-
- size := binary.Size(s1)
-
- b.ResetTimer()
-
- for n := 0; n < b.N; n++ {
- buf := make([]byte, 0, size)
- buf = binary.Marshal(buf, usermem.ByteOrder, &s1)
- binary.Unmarshal(buf, usermem.ByteOrder, &s2)
- }
-
- b.StopTimer()
-
- // Sanity check, make sure the values were preserved.
- if !reflect.DeepEqual(s1, s2) {
- panic(fmt.Sprintf("Data corruption across marshal/unmarshal cycle:\nBefore: %+v\nAfter: %+v\n", s1, s2))
- }
-}
-
-func BenchmarkGoMarshalUnsafeSlice(b *testing.B) {
- var s1, s2 [64]test.Stat
- analysis.RandomizeValue(&s1)
-
- b.ResetTimer()
-
- for n := 0; n < b.N; n++ {
- buf := make([]byte, (*test.Stat)(nil).SizeBytes()*len(s1))
- test.MarshalUnsafeStatSlice(s1[:], buf)
- test.UnmarshalUnsafeStatSlice(s2[:], buf)
- }
-
- b.StopTimer()
-
- // Sanity check, make sure the values were preserved.
- if !reflect.DeepEqual(s1, s2) {
- panic(fmt.Sprintf("Data corruption across marshal/unmarshal cycle:\nBefore: %+v\nAfter: %+v\n", s1, s2))
- }
-}
diff --git a/tools/go_marshal/test/escape/BUILD b/tools/go_marshal/test/escape/BUILD
deleted file mode 100644
index 2981ef196..000000000
--- a/tools/go_marshal/test/escape/BUILD
+++ /dev/null
@@ -1,14 +0,0 @@
-load("//tools:defs.bzl", "go_library")
-
-licenses(["notice"])
-
-go_library(
- name = "escape",
- testonly = 1,
- srcs = ["escape.go"],
- deps = [
- "//pkg/marshal",
- "//pkg/usermem",
- "//tools/go_marshal/test",
- ],
-)
diff --git a/tools/go_marshal/test/escape/escape.go b/tools/go_marshal/test/escape/escape.go
deleted file mode 100644
index 7f62b0a2b..000000000
--- a/tools/go_marshal/test/escape/escape.go
+++ /dev/null
@@ -1,99 +0,0 @@
-// Copyright 2020 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package escape
-
-import (
- "gvisor.dev/gvisor/pkg/marshal"
- "gvisor.dev/gvisor/pkg/usermem"
- "gvisor.dev/gvisor/tools/go_marshal/test"
-)
-
-// dummyCopyContext implements marshal.CopyContext.
-type dummyCopyContext struct {
-}
-
-func (*dummyCopyContext) CopyScratchBuffer(size int) []byte {
- return make([]byte, size)
-}
-
-func (*dummyCopyContext) CopyOutBytes(addr usermem.Addr, b []byte) (int, error) {
- return len(b), nil
-}
-
-func (*dummyCopyContext) CopyInBytes(addr usermem.Addr, b []byte) (int, error) {
- return len(b), nil
-}
-
-func (t *dummyCopyContext) MarshalBytes(addr usermem.Addr, marshallable marshal.Marshallable) {
- buf := t.CopyScratchBuffer(marshallable.SizeBytes())
- marshallable.MarshalBytes(buf)
- t.CopyOutBytes(addr, buf)
-}
-
-func (t *dummyCopyContext) MarshalUnsafe(addr usermem.Addr, marshallable marshal.Marshallable) {
- buf := t.CopyScratchBuffer(marshallable.SizeBytes())
- marshallable.MarshalUnsafe(buf)
- t.CopyOutBytes(addr, buf)
-}
-
-// +checkescape:all
-//go:nosplit
-func doCopyIn(t *dummyCopyContext) {
- var stat test.Stat
- stat.CopyIn(t, usermem.Addr(0xf000ba12))
-}
-
-// +checkescape:all
-//go:nosplit
-func doCopyOut(t *dummyCopyContext) {
- var stat test.Stat
- stat.CopyOut(t, usermem.Addr(0xf000ba12))
-}
-
-// +mustescape:builtin
-// +mustescape:stack
-//go:nosplit
-func doMarshalBytesDirect(t *dummyCopyContext) {
- var stat test.Stat
- buf := t.CopyScratchBuffer(stat.SizeBytes())
- stat.MarshalBytes(buf)
- t.CopyOutBytes(usermem.Addr(0xf000ba12), buf)
-}
-
-// +mustescape:builtin
-// +mustescape:stack
-//go:nosplit
-func doMarshalUnsafeDirect(t *dummyCopyContext) {
- var stat test.Stat
- buf := t.CopyScratchBuffer(stat.SizeBytes())
- stat.MarshalUnsafe(buf)
- t.CopyOutBytes(usermem.Addr(0xf000ba12), buf)
-}
-
-// +mustescape:local,heap
-// +mustescape:stack
-//go:nosplit
-func doMarshalBytesViaMarshallable(t *dummyCopyContext) {
- var stat test.Stat
- t.MarshalBytes(usermem.Addr(0xf000ba12), &stat)
-}
-
-// +mustescape:local,heap
-// +mustescape:stack
-//go:nosplit
-func doMarshalUnsafeViaMarshallable(t *dummyCopyContext) {
- var stat test.Stat
- t.MarshalUnsafe(usermem.Addr(0xf000ba12), &stat)
-}
diff --git a/tools/go_marshal/test/external/BUILD b/tools/go_marshal/test/external/BUILD
deleted file mode 100644
index 0cf6da603..000000000
--- a/tools/go_marshal/test/external/BUILD
+++ /dev/null
@@ -1,11 +0,0 @@
-load("//tools:defs.bzl", "go_library")
-
-licenses(["notice"])
-
-go_library(
- name = "external",
- testonly = 1,
- srcs = ["external.go"],
- marshal = True,
- visibility = ["//tools/go_marshal/test:gomarshal_test"],
-)
diff --git a/tools/go_marshal/test/external/external.go b/tools/go_marshal/test/external/external.go
deleted file mode 100644
index 26fe8e0c8..000000000
--- a/tools/go_marshal/test/external/external.go
+++ /dev/null
@@ -1,31 +0,0 @@
-// Copyright 2019 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Package external defines types we can import for testing.
-package external
-
-// External is a public Marshallable type for use in testing.
-//
-// +marshal
-type External struct {
- j int64
-}
-
-// NotPacked is an unaligned Marshallable type for use in testing.
-//
-// +marshal
-type NotPacked struct {
- a int32
- b byte `marshal:"unaligned"`
-}
diff --git a/tools/go_marshal/test/marshal_test.go b/tools/go_marshal/test/marshal_test.go
deleted file mode 100644
index a00f9a684..000000000
--- a/tools/go_marshal/test/marshal_test.go
+++ /dev/null
@@ -1,515 +0,0 @@
-// Copyright 2020 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Package marshal_test contains manual tests for the marshal interface. These
-// are intended to test behaviour not covered by the automatically generated
-// tests.
-package marshal_test
-
-import (
- "bytes"
- "encoding/binary"
- "fmt"
- "reflect"
- "runtime"
- "testing"
- "unsafe"
-
- "github.com/google/go-cmp/cmp"
- "gvisor.dev/gvisor/pkg/marshal"
- "gvisor.dev/gvisor/pkg/syserror"
- "gvisor.dev/gvisor/pkg/usermem"
- "gvisor.dev/gvisor/tools/go_marshal/analysis"
- "gvisor.dev/gvisor/tools/go_marshal/test"
-)
-
-var simulatedErr error = syserror.EFAULT
-
-// mockCopyContext implements marshal.CopyContext.
-type mockCopyContext struct {
- taskMem usermem.BytesIO
-}
-
-// populate fills the task memory with the contents of val.
-func (t *mockCopyContext) populate(val interface{}) {
- var buf bytes.Buffer
- // Use binary.Write so we aren't testing go-marshal against its own
- // potentially buggy implementation.
- if err := binary.Write(&buf, usermem.ByteOrder, val); err != nil {
- panic(err)
- }
- t.taskMem.Bytes = buf.Bytes()
-}
-
-func (t *mockCopyContext) setLimit(n int) {
- if len(t.taskMem.Bytes) < n {
- grown := make([]byte, n)
- copy(grown, t.taskMem.Bytes)
- t.taskMem.Bytes = grown
- return
- }
- t.taskMem.Bytes = t.taskMem.Bytes[:n]
-}
-
-// CopyScratchBuffer implements marshal.CopyContext.CopyScratchBuffer.
-func (t *mockCopyContext) CopyScratchBuffer(size int) []byte {
- return make([]byte, size)
-}
-
-// CopyOutBytes implements marshal.CopyContext.CopyOutBytes. The implementation
-// completely ignores the target address and stores a copy of b in its
-// internal buffer, overwriting any previous contents.
-func (t *mockCopyContext) CopyOutBytes(_ usermem.Addr, b []byte) (int, error) {
- return t.taskMem.CopyOut(nil, 0, b, usermem.IOOpts{})
-}
-
-// CopyInBytes implements marshal.CopyContext.CopyInBytes. The implementation
-// completely ignores the source address and always fills b from the beginning of
-// its internal buffer.
-func (t *mockCopyContext) CopyInBytes(_ usermem.Addr, b []byte) (int, error) {
- return t.taskMem.CopyIn(nil, 0, b, usermem.IOOpts{})
-}
-
-// unsafeMemory returns the underlying memory for m. The returned slice is only
-// valid for the lifetime of m. The garbage collector isn't aware that the
-// returned slice is related to m, so the caller must ensure m lives long enough.
-func unsafeMemory(m marshal.Marshallable) []byte {
- if !m.Packed() {
- // We can't return a slice pointing to the underlying memory
- // since the layout isn't packed. Allocate a temporary buffer
- // and marshal instead.
- var buf bytes.Buffer
- if err := binary.Write(&buf, usermem.ByteOrder, m); err != nil {
- panic(err)
- }
- return buf.Bytes()
- }
-
- // reflect.ValueOf(m)
- // .Elem() // Unwrap interface to inner concrete object
- // .Addr() // Pointer value to object
- // .Pointer() // Actual address from the pointer value
- ptr := reflect.ValueOf(m).Elem().Addr().Pointer()
-
- size := m.SizeBytes()
-
- var mem []byte
- hdr := (*reflect.SliceHeader)(unsafe.Pointer(&mem))
- hdr.Data = ptr
- hdr.Len = size
- hdr.Cap = size
-
- return mem
-}
-
-// unsafeMemorySlice returns the underlying memory for m. The returned slice is
-// only valid for the lifetime of m. The garbage collector isn't aware that the
-// returned slice is related to m, so the caller must ensure m lives long enough.
-//
-// Precondition: m must be a slice.
-func unsafeMemorySlice(m interface{}, elt marshal.Marshallable) []byte {
- kind := reflect.TypeOf(m).Kind()
- if kind != reflect.Slice {
- panic("unsafeMemorySlice called on non-slice")
- }
-
- if !elt.Packed() {
- // We can't return a slice pointing to the underlying memory
- // since the layout isn't packed. Allocate a temporary buffer
- // and marshal instead.
- var buf bytes.Buffer
- if err := binary.Write(&buf, usermem.ByteOrder, m); err != nil {
- panic(err)
- }
- return buf.Bytes()
- }
-
- v := reflect.ValueOf(m)
- length := v.Len() * elt.SizeBytes()
-
- var mem []byte
- hdr := (*reflect.SliceHeader)(unsafe.Pointer(&mem))
- hdr.Data = v.Pointer() // This is a pointer to the first elem for slices.
- hdr.Len = length
- hdr.Cap = length
-
- return mem
-}
-
-func isZeroes(buf []byte) bool {
- for _, b := range buf {
- if b != 0 {
- return false
- }
- }
- return true
-}
-
-// compareMemory compares the first n bytes of two chunks of memory represented
-// by expected and actual.
-func compareMemory(t *testing.T, expected, actual []byte, n int) {
- t.Logf("Expected (%d): %v (%d) + (%d) %v\n", len(expected), expected[:n], n, len(expected)-n, expected[n:])
- t.Logf("Actual (%d): %v (%d) + (%d) %v\n", len(actual), actual[:n], n, len(actual)-n, actual[n:])
-
- if diff := cmp.Diff(expected[:n], actual[:n]); diff != "" {
- t.Errorf("Memory buffers don't match:\n--- expected only\n+++ actual only\n%v", diff)
- }
-}
-
-// limitedCopyIn populates task memory with src, then unmarshals task memory to
-// dst. The task signals an error at limit bytes during copy-in, which should
-// result in a truncated unmarshalling.
-func limitedCopyIn(t *testing.T, src, dst marshal.Marshallable, limit int) {
- var cc mockCopyContext
- cc.populate(src)
- cc.setLimit(limit)
-
- n, err := dst.CopyIn(&cc, usermem.Addr(0))
- if n != limit {
- t.Errorf("CopyIn copied unexpected number of bytes, expected %d, got %d", limit, n)
- }
- if err != simulatedErr {
- t.Errorf("CopyIn returned unexpected error, expected %v, got %v", simulatedErr, err)
- }
-
- expectedMem := unsafeMemory(src)
- defer runtime.KeepAlive(src)
- actualMem := unsafeMemory(dst)
- defer runtime.KeepAlive(dst)
-
- compareMemory(t, expectedMem, actualMem, n)
-
-	// The bytes after the first n should be zero in actual, since actual was
-	// zero-initialized, and CopyIn shouldn't have touched those bytes. However,
-	// we can only guarantee nothing was touched beyond the first n bytes if the
-	// layout is packed.
- if dst.Packed() && !isZeroes(actualMem[n:]) {
- t.Errorf("Expected the last %d bytes of copied in object to be zeroes, got %v\n", dst.SizeBytes()-n, actualMem)
- }
-}
-
-// limitedCopyOut marshals src to task memory. The task signals an error at
-// limit bytes during copy-out, which should result in a truncated marshalling.
-func limitedCopyOut(t *testing.T, src marshal.Marshallable, limit int) {
- var cc mockCopyContext
- cc.setLimit(limit)
-
- n, err := src.CopyOut(&cc, usermem.Addr(0))
- if n != limit {
- t.Errorf("CopyOut copied unexpected number of bytes, expected %d, got %d", limit, n)
- }
- if err != simulatedErr {
- t.Errorf("CopyOut returned unexpected error, expected %v, got %v", simulatedErr, err)
- }
-
- expectedMem := unsafeMemory(src)
- defer runtime.KeepAlive(src)
- actualMem := cc.taskMem.Bytes
-
- compareMemory(t, expectedMem, actualMem, n)
-}
-
-// copyOutN marshals src to task memory, requesting the marshalling to be
-// limited to limit bytes.
-func copyOutN(t *testing.T, src marshal.Marshallable, limit int) {
- var cc mockCopyContext
- cc.setLimit(limit)
-
- n, err := src.CopyOutN(&cc, usermem.Addr(0), limit)
- if err != nil {
- t.Errorf("CopyOut returned unexpected error: %v", err)
- }
- if n != limit {
- t.Errorf("CopyOut copied unexpected number of bytes, expected %d, got %d", limit, n)
- }
-
- expectedMem := unsafeMemory(src)
- defer runtime.KeepAlive(src)
- actualMem := cc.taskMem.Bytes
-
- t.Logf("Expected: %v + %v\n", expectedMem[:n], expectedMem[n:])
- t.Logf("Actual : %v + %v\n", actualMem[:n], actualMem[n:])
-
- compareMemory(t, expectedMem, actualMem, n)
-}
-
-// TestLimitedMarshalling verifies marshalling/unmarshalling succeeds when the
-// underlying copy in/out operations partially succeed.
-func TestLimitedMarshalling(t *testing.T) {
- types := []reflect.Type{
- // Packed types.
- reflect.TypeOf((*test.Type2)(nil)),
- reflect.TypeOf((*test.Type3)(nil)),
- reflect.TypeOf((*test.Timespec)(nil)),
- reflect.TypeOf((*test.Stat)(nil)),
- reflect.TypeOf((*test.InetAddr)(nil)),
- reflect.TypeOf((*test.SignalSet)(nil)),
- reflect.TypeOf((*test.SignalSetAlias)(nil)),
- // Non-packed types.
- reflect.TypeOf((*test.Type1)(nil)),
- reflect.TypeOf((*test.Type4)(nil)),
- reflect.TypeOf((*test.Type5)(nil)),
- reflect.TypeOf((*test.Type6)(nil)),
- reflect.TypeOf((*test.Type7)(nil)),
- reflect.TypeOf((*test.Type8)(nil)),
- }
-
- for _, tyPtr := range types {
- // Remove one level of pointer-indirection from the type. We get this
- // back when we pass the type to reflect.New.
- ty := tyPtr.Elem()
-
- // Partial copy-in.
- t.Run(fmt.Sprintf("PartialCopyIn_%v", ty), func(t *testing.T) {
- expected := reflect.New(ty).Interface().(marshal.Marshallable)
- actual := reflect.New(ty).Interface().(marshal.Marshallable)
- analysis.RandomizeValue(expected)
-
- limitedCopyIn(t, expected, actual, expected.SizeBytes()/2)
- })
-
- // Partial copy-out.
- t.Run(fmt.Sprintf("PartialCopyOut_%v", ty), func(t *testing.T) {
- expected := reflect.New(ty).Interface().(marshal.Marshallable)
- analysis.RandomizeValue(expected)
-
- limitedCopyOut(t, expected, expected.SizeBytes()/2)
- })
-
- // Explicitly request partial copy-out.
- t.Run(fmt.Sprintf("PartialCopyOutN_%v", ty), func(t *testing.T) {
- expected := reflect.New(ty).Interface().(marshal.Marshallable)
- analysis.RandomizeValue(expected)
-
- copyOutN(t, expected, expected.SizeBytes()/2)
- })
- }
-}
-
-// TestLimitedSliceMarshalling verifies marshalling/unmarshalling of slices of
-// marshallable types succeeds when the underlying copy in/out operations
-// partially succeed.
-func TestLimitedSliceMarshalling(t *testing.T) {
- types := []struct {
- arrayPtrType reflect.Type
- copySliceIn func(cc marshal.CopyContext, addr usermem.Addr, dstSlice interface{}) (int, error)
- copySliceOut func(cc marshal.CopyContext, addr usermem.Addr, srcSlice interface{}) (int, error)
- unsafeMemory func(arrPtr interface{}) []byte
- }{
- // Packed types.
- {
- reflect.TypeOf((*[20]test.Stat)(nil)),
- func(cc marshal.CopyContext, addr usermem.Addr, dst interface{}) (int, error) {
- slice := dst.(*[20]test.Stat)[:]
- return test.CopyStatSliceIn(cc, addr, slice)
- },
- func(cc marshal.CopyContext, addr usermem.Addr, src interface{}) (int, error) {
- slice := src.(*[20]test.Stat)[:]
- return test.CopyStatSliceOut(cc, addr, slice)
- },
- func(a interface{}) []byte {
- slice := a.(*[20]test.Stat)[:]
- return unsafeMemorySlice(slice, &slice[0])
- },
- },
- {
- reflect.TypeOf((*[1]test.Stat)(nil)),
- func(cc marshal.CopyContext, addr usermem.Addr, dst interface{}) (int, error) {
- slice := dst.(*[1]test.Stat)[:]
- return test.CopyStatSliceIn(cc, addr, slice)
- },
- func(cc marshal.CopyContext, addr usermem.Addr, src interface{}) (int, error) {
- slice := src.(*[1]test.Stat)[:]
- return test.CopyStatSliceOut(cc, addr, slice)
- },
- func(a interface{}) []byte {
- slice := a.(*[1]test.Stat)[:]
- return unsafeMemorySlice(slice, &slice[0])
- },
- },
- {
- reflect.TypeOf((*[5]test.SignalSetAlias)(nil)),
- func(cc marshal.CopyContext, addr usermem.Addr, dst interface{}) (int, error) {
- slice := dst.(*[5]test.SignalSetAlias)[:]
- return test.CopySignalSetAliasSliceIn(cc, addr, slice)
- },
- func(cc marshal.CopyContext, addr usermem.Addr, src interface{}) (int, error) {
- slice := src.(*[5]test.SignalSetAlias)[:]
- return test.CopySignalSetAliasSliceOut(cc, addr, slice)
- },
- func(a interface{}) []byte {
- slice := a.(*[5]test.SignalSetAlias)[:]
- return unsafeMemorySlice(slice, &slice[0])
- },
- },
- // Non-packed types.
- {
- reflect.TypeOf((*[20]test.Type1)(nil)),
- func(cc marshal.CopyContext, addr usermem.Addr, dst interface{}) (int, error) {
- slice := dst.(*[20]test.Type1)[:]
- return test.CopyType1SliceIn(cc, addr, slice)
- },
- func(cc marshal.CopyContext, addr usermem.Addr, src interface{}) (int, error) {
- slice := src.(*[20]test.Type1)[:]
- return test.CopyType1SliceOut(cc, addr, slice)
- },
- func(a interface{}) []byte {
- slice := a.(*[20]test.Type1)[:]
- return unsafeMemorySlice(slice, &slice[0])
- },
- },
- {
- reflect.TypeOf((*[1]test.Type1)(nil)),
- func(cc marshal.CopyContext, addr usermem.Addr, dst interface{}) (int, error) {
- slice := dst.(*[1]test.Type1)[:]
- return test.CopyType1SliceIn(cc, addr, slice)
- },
- func(cc marshal.CopyContext, addr usermem.Addr, src interface{}) (int, error) {
- slice := src.(*[1]test.Type1)[:]
- return test.CopyType1SliceOut(cc, addr, slice)
- },
- func(a interface{}) []byte {
- slice := a.(*[1]test.Type1)[:]
- return unsafeMemorySlice(slice, &slice[0])
- },
- },
- {
- reflect.TypeOf((*[7]test.Type8)(nil)),
- func(cc marshal.CopyContext, addr usermem.Addr, dst interface{}) (int, error) {
- slice := dst.(*[7]test.Type8)[:]
- return test.CopyType8SliceIn(cc, addr, slice)
- },
- func(cc marshal.CopyContext, addr usermem.Addr, src interface{}) (int, error) {
- slice := src.(*[7]test.Type8)[:]
- return test.CopyType8SliceOut(cc, addr, slice)
- },
- func(a interface{}) []byte {
- slice := a.(*[7]test.Type8)[:]
- return unsafeMemorySlice(slice, &slice[0])
- },
- },
- }
-
- for _, tt := range types {
- // The body of this loop is generic over the type tt.arrayPtrType, with
- // the help of reflection. To aid in readability, comments below show
- // the equivalent go code assuming
- // tt.arrayPtrType = typeof(*[20]test.Stat).
-
- // Equivalent:
- // var x *[20]test.Stat
- // arrayTy := reflect.TypeOf(*x)
- arrayTy := tt.arrayPtrType.Elem()
-
- // Partial copy-in of slices.
- t.Run(fmt.Sprintf("PartialCopySliceIn_%v", arrayTy), func(t *testing.T) {
- // Equivalent:
- // var x [20]test.Stat
- // length := len(x)
- length := arrayTy.Len()
- if length < 1 {
- panic("Test type can't be zero-length array")
- }
- // Equivalent:
- // elem := new(test.Stat).(marshal.Marshallable)
- elem := reflect.New(arrayTy.Elem()).Interface().(marshal.Marshallable)
-
- // Equivalent:
- // var expected, actual interface{}
- // expected = new([20]test.Stat)
- // actual = new([20]test.Stat)
- expected := reflect.New(arrayTy).Interface()
- actual := reflect.New(arrayTy).Interface()
-
- analysis.RandomizeValue(expected)
-
- limit := (length * elem.SizeBytes()) / 2
- // Also make sure the limit is partially inside one of the elements.
- limit += elem.SizeBytes() / 2
- analysis.RandomizeValue(expected)
-
- var cc mockCopyContext
- cc.populate(expected)
- cc.setLimit(limit)
-
- n, err := tt.copySliceIn(&cc, usermem.Addr(0), actual)
- if n != limit {
- t.Errorf("CopyIn copied unexpected number of bytes, expected %d, got %d", limit, n)
- }
- if n < length*elem.SizeBytes() && err != simulatedErr {
- t.Errorf("CopyIn returned unexpected error, expected %v, got %v", simulatedErr, err)
- }
-
- expectedMem := tt.unsafeMemory(expected)
- defer runtime.KeepAlive(expected)
- actualMem := tt.unsafeMemory(actual)
- defer runtime.KeepAlive(actual)
-
- compareMemory(t, expectedMem, actualMem, n)
-
-			// The bytes after the first n should be zero in actual, since actual
-			// was zero-initialized, and CopyIn shouldn't have touched those bytes.
-			// However, we can only guarantee nothing was touched beyond the first
-			// n bytes if the layout is packed.
- if elem.Packed() && !isZeroes(actualMem[n:]) {
- t.Errorf("Expected the last %d bytes of copied in object to be zeroes, got %v\n", (elem.SizeBytes()*length)-n, actualMem)
- }
- })
-
- // Partial copy-out of slices.
- t.Run(fmt.Sprintf("PartialCopySliceOut_%v", arrayTy), func(t *testing.T) {
- // Equivalent:
- // var x [20]test.Stat
- // length := len(x)
- length := arrayTy.Len()
- if length < 1 {
- panic("Test type can't be zero-length array")
- }
- // Equivalent:
- // elem := new(test.Stat).(marshal.Marshallable)
- elem := reflect.New(arrayTy.Elem()).Interface().(marshal.Marshallable)
-
- // Equivalent:
- // var expected, actual interface{}
- // expected = new([20]test.Stat)
- // actual = new([20]test.Stat)
- expected := reflect.New(arrayTy).Interface()
-
- analysis.RandomizeValue(expected)
-
- limit := (length * elem.SizeBytes()) / 2
- // Also make sure the limit is partially inside one of the elements.
- limit += elem.SizeBytes() / 2
- analysis.RandomizeValue(expected)
-
- var cc mockCopyContext
- cc.populate(expected)
- cc.setLimit(limit)
-
- n, err := tt.copySliceOut(&cc, usermem.Addr(0), expected)
- if n != limit {
-				t.Errorf("CopyOut copied unexpected number of bytes, expected %d, got %d", limit, n)
- }
- if n < length*elem.SizeBytes() && err != simulatedErr {
-				t.Errorf("CopyOut returned unexpected error, expected %v, got %v", simulatedErr, err)
- }
-
- expectedMem := tt.unsafeMemory(expected)
- defer runtime.KeepAlive(expected)
- actualMem := cc.taskMem.Bytes
-
- compareMemory(t, expectedMem, actualMem, n)
- })
- }
-}
diff --git a/tools/go_marshal/test/test.go b/tools/go_marshal/test/test.go
deleted file mode 100644
index d9e9f341b..000000000
--- a/tools/go_marshal/test/test.go
+++ /dev/null
@@ -1,200 +0,0 @@
-// Copyright 2019 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Package test contains data structures for testing the go_marshal tool.
-package test
-
-import (
-	// We're intentionally using a package name alias here, even though it's not
-	// necessary, to test the code generator's ability to handle package aliases.
- ex "gvisor.dev/gvisor/tools/go_marshal/test/external"
-)
-
-// Type1 is a test data type.
-//
-// +marshal slice:Type1Slice
-type Type1 struct {
- a Type2
- x, y int64 // Multiple field names.
- b byte `marshal:"unaligned"` // Short field.
- c uint64
- _ uint32 // Unnamed scalar field.
- _ [6]byte // Unnamed vector field, typical padding.
- _ [2]byte
- xs [8]int32
- as [10]Type2 `marshal:"unaligned"` // Array of Marshallable objects.
- ss Type3
-}
-
-// Type2 is a test data type.
-//
-// +marshal
-type Type2 struct {
- n int64
- c byte
- _ [7]byte
- m int64
- a int64
-}
-
-// Type3 is a test data type.
-//
-// +marshal
-type Type3 struct {
- s int64
- x ex.External // Type defined in another package.
-}
-
-// Type4 is a test data type.
-//
-// +marshal
-type Type4 struct {
- c byte
- x int64 `marshal:"unaligned"`
- d byte
- _ [7]byte
-}
-
-// Type5 is a test data type.
-//
-// +marshal
-type Type5 struct {
- n int64
- t Type4
- m int64
-}
-
-// Type6 is a test data type that ends mid-word.
-//
-// +marshal
-type Type6 struct {
- a int64
- b int64
- // If c isn't marked unaligned, analysis fails (as it should, since
- // the unsafe API corrupts Type7).
- c byte `marshal:"unaligned"`
-}
-
-// Type7 is a test data type that contains a child struct that ends
-// mid-word.
-// +marshal
-type Type7 struct {
- x Type6
- y int64
-}
-
-// Type8 is a test data type which contains an external non-packed field.
-//
-// +marshal slice:Type8Slice
-type Type8 struct {
- a int64
- np ex.NotPacked
- b int64
-}
-
-// Timespec represents struct timespec in <time.h>.
-//
-// +marshal
-type Timespec struct {
- Sec int64
- Nsec int64
-}
-
-// Stat represents struct stat.
-//
-// +marshal slice:StatSlice
-type Stat struct {
- Dev uint64
- Ino uint64
- Nlink uint64
- Mode uint32
- UID uint32
- GID uint32
- _ int32
- Rdev uint64
- Size int64
- Blksize int64
- Blocks int64
- ATime Timespec
- MTime Timespec
- CTime Timespec
- _ [3]int64
-}
-
-// InetAddr is an example marshallable newtype on an array.
-//
-// +marshal
-type InetAddr [4]byte
-
-// SignalSet is an example marshallable newtype on a primitive.
-//
-// +marshal slice:SignalSetSlice:inner
-type SignalSet uint64
-
-// SignalSetAlias is an example newtype on another marshallable type.
-//
-// +marshal slice:SignalSetAliasSlice
-type SignalSetAlias SignalSet
-
-const sizeA = 64
-const sizeB = 8
-
-// TestArray is a test data structure on an array with a constant length.
-//
-// +marshal
-type TestArray [sizeA]int32
-
-// TestArray2 is a newtype on an array with a simple arithmetic expression of
-// constants for the array length.
-//
-// +marshal
-type TestArray2 [sizeA * sizeB]int32
-
-// TestArray3 is a newtype on an array with a simple arithmetic expression of
-// mixed constants and literals for the array length.
-//
-// +marshal
-type TestArray3 [sizeA*sizeB + 12]int32
-
-// Type9 is a test data type containing an array with a non-literal length.
-//
-// +marshal
-type Type9 struct {
- x int64
- y [sizeA]int32
-}
-
-// Type10Embed is a test data type which is embedded into another type.
-//
-// +marshal
-type Type10Embed struct {
- x int64
-}
-
-// Type10 is a test data type which contains an embedded struct.
-//
-// +marshal
-type Type10 struct {
- Type10Embed
- y int64
-}
-
-// Type11 is a test data type which contains an embedded struct from an external
-// package.
-//
-// +marshal
-type Type11 struct {
- ex.External
- y int64
-}
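
For reference, the generated API these test types relied on is the one the deleted benchmark_test.go exercises. A minimal round-trip sketch using those same generated methods (the wrapper function and package name are illustrative only):

package example // hypothetical package for illustration

import "gvisor.dev/gvisor/tools/go_marshal/test"

func roundTripStat() {
	var src, dst test.Stat
	buf := make([]byte, src.SizeBytes()) // SizeBytes is generated for +marshal types.
	src.MarshalBytes(buf)                // Safe path: field-by-field encode.
	dst.UnmarshalBytes(buf)              // Safe path: field-by-field decode.
	// The unsafe variants (MarshalUnsafe/UnmarshalUnsafe) copy the packed
	// struct memory directly, as the deleted benchmarks above show.
}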
diff --git a/tools/go_stateify/BUILD b/tools/go_stateify/BUILD
deleted file mode 100644
index 913558b4e..000000000
--- a/tools/go_stateify/BUILD
+++ /dev/null
@@ -1,16 +0,0 @@
-load("//tools:defs.bzl", "bzl_library", "go_binary")
-
-package(licenses = ["notice"])
-
-go_binary(
- name = "stateify",
- srcs = ["main.go"],
- visibility = ["//:sandbox"],
- deps = ["//tools/tags"],
-)
-
-bzl_library(
- name = "defs_bzl",
- srcs = ["defs.bzl"],
- visibility = ["//visibility:private"],
-)
diff --git a/tools/go_stateify/defs.bzl b/tools/go_stateify/defs.bzl
deleted file mode 100644
index 6a5e666f0..000000000
--- a/tools/go_stateify/defs.bzl
+++ /dev/null
@@ -1,60 +0,0 @@
-"""Stateify is a tool for generating state wrappers for Go types."""
-
-def _go_stateify_impl(ctx):
- """Implementation for the stateify tool."""
- output = ctx.outputs.out
-
- # Run the stateify command.
- args = ["-output=%s" % output.path]
- args.append("-fullpkg=%s" % ctx.attr.package)
- if ctx.attr._statepkg:
- args.append("-statepkg=%s" % ctx.attr._statepkg)
- if ctx.attr.imports:
- args.append("-imports=%s" % ",".join(ctx.attr.imports))
- args.append("--")
- for src in ctx.attr.srcs:
- args += [f.path for f in src.files.to_list()]
- ctx.actions.run(
- inputs = ctx.files.srcs,
- outputs = [output],
- mnemonic = "GoStateify",
- progress_message = "Generating state library %s" % ctx.label,
- arguments = args,
- executable = ctx.executable._tool,
- )
-
-go_stateify = rule(
- implementation = _go_stateify_impl,
- doc = "Generates save and restore logic from a set of Go files.",
- attrs = {
- "srcs": attr.label_list(
- doc = """
-The input source files. These files should include all structs in the package
-that need to be saved.
-""",
- mandatory = True,
- allow_files = True,
- ),
- "imports": attr.string_list(
- doc = """
-An optional list of extra non-aliased, Go-style absolute import paths required
-for statified types.
-""",
- mandatory = False,
- ),
- "package": attr.string(
- doc = "The fully qualified package name for the input sources.",
- mandatory = True,
- ),
- "out": attr.output(
- doc = "Name of the generator output file.",
- mandatory = True,
- ),
- "_tool": attr.label(
- executable = True,
- cfg = "host",
- default = Label("//tools/go_stateify:stateify"),
- ),
- "_statepkg": attr.string(default = "gvisor.dev/gvisor/pkg/state"),
- },
-)
diff --git a/tools/go_stateify/main.go b/tools/go_stateify/main.go
deleted file mode 100644
index e1de12e25..000000000
--- a/tools/go_stateify/main.go
+++ /dev/null
@@ -1,470 +0,0 @@
-// Copyright 2018 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Stateify provides a simple way to generate Load/Save methods based on
-// existing types and struct tags.
-package main
-
-import (
- "flag"
- "fmt"
- "go/ast"
- "go/parser"
- "go/token"
- "os"
- "path/filepath"
- "reflect"
- "strings"
- "sync"
-
- "gvisor.dev/gvisor/tools/tags"
-)
-
-var (
- fullPkg = flag.String("fullpkg", "", "fully qualified output package")
- imports = flag.String("imports", "", "extra imports for the output file")
- output = flag.String("output", "", "output file")
- statePkg = flag.String("statepkg", "", "state import package; defaults to empty")
-)
-
-// resolveTypeName returns a qualified type name.
-func resolveTypeName(typ ast.Expr) (field string, qualified string) {
- for done := false; !done; {
- // Resolve star expressions.
- switch rs := typ.(type) {
- case *ast.StarExpr:
- qualified += "*"
- typ = rs.X
- case *ast.ArrayType:
- if rs.Len == nil {
- // Slice type declaration.
- qualified += "[]"
- } else {
- // Array type declaration.
- qualified += "[" + rs.Len.(*ast.BasicLit).Value + "]"
- }
- typ = rs.Elt
- default:
- // No more descent.
- done = true
- }
- }
-
- // Resolve a package selector.
- sel, ok := typ.(*ast.SelectorExpr)
- if ok {
- qualified = qualified + sel.X.(*ast.Ident).Name + "."
- typ = sel.Sel
- }
-
- // Figure out actual type name.
- field = typ.(*ast.Ident).Name
- qualified = qualified + field
- return
-}
-
-// extractStateTag pulls the relevant state tag.
-func extractStateTag(tag *ast.BasicLit) string {
- if tag == nil {
- return ""
- }
- if len(tag.Value) < 2 {
- return ""
- }
- return reflect.StructTag(tag.Value[1 : len(tag.Value)-1]).Get("state")
-}
-
-// scanFunctions is a set of functions passed to scanFields.
-type scanFunctions struct {
- zerovalue func(name string)
- normal func(name string)
- wait func(name string)
- value func(name, typName string)
-}
-
-// scanFields scans the fields of a struct.
-//
-// Each provided function will be applied to appropriately tagged fields, or
-// skipped if nil.
-//
-// Fields tagged nosave are skipped.
-func scanFields(ss *ast.StructType, prefix string, fn scanFunctions) {
- if ss.Fields.List == nil {
- // No fields.
- return
- }
-
- // Scan all fields.
- for _, field := range ss.Fields.List {
- // Calculate the name.
- name := ""
- if field.Names != nil {
- // It's a named field; override.
- name = field.Names[0].Name
- } else {
- // Anonymous types can't be embedded, so we don't need
- // to worry about providing a useful name here.
- name, _ = resolveTypeName(field.Type)
- }
-
- // Skip _ fields.
- if name == "_" {
- continue
- }
-
-		// Is this an anonymous struct? If yes, then continue the
- // recursion with the given prefix. We don't pay attention to
- // any tags on the top-level struct field.
- tag := extractStateTag(field.Tag)
- if anon, ok := field.Type.(*ast.StructType); ok && tag == "" {
- scanFields(anon, name+".", fn)
- continue
- }
-
- switch tag {
- case "zerovalue":
- if fn.zerovalue != nil {
- fn.zerovalue(name)
- }
-
- case "":
- if fn.normal != nil {
- fn.normal(name)
- }
-
- case "wait":
- if fn.wait != nil {
- fn.wait(name)
- }
-
- case "manual", "nosave", "ignore":
- // Do nothing.
-
- default:
- if strings.HasPrefix(tag, ".(") && strings.HasSuffix(tag, ")") {
- if fn.value != nil {
- fn.value(name, tag[2:len(tag)-1])
- }
- }
- }
- }
-}
-
-func camelCased(name string) string {
- return strings.ToUpper(name[:1]) + name[1:]
-}
-
-func main() {
- // Parse flags.
- flag.Usage = func() {
- fmt.Fprintf(os.Stderr, "Usage: %s [options]\n", os.Args[0])
- flag.PrintDefaults()
- }
- flag.Parse()
- if len(flag.Args()) == 0 {
- flag.Usage()
- os.Exit(1)
- }
- if *fullPkg == "" {
- fmt.Fprintf(os.Stderr, "Error: package required.")
- os.Exit(1)
- }
-
- // Open the output file.
- var (
- outputFile *os.File
- err error
- )
- if *output == "" || *output == "-" {
- outputFile = os.Stdout
- } else {
- outputFile, err = os.OpenFile(*output, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0644)
- if err != nil {
- fmt.Fprintf(os.Stderr, "Error opening output %q: %v", *output, err)
- }
- defer outputFile.Close()
- }
-
- // Set the statePrefix for below, depending on the import.
- statePrefix := ""
- if *statePkg != "" {
- parts := strings.Split(*statePkg, "/")
- statePrefix = parts[len(parts)-1] + "."
- }
-
- // initCalls is dumped at the end.
- var initCalls []string
-
- // Common closures.
- emitRegister := func(name string) {
- initCalls = append(initCalls, fmt.Sprintf("%sRegister((*%s)(nil))", statePrefix, name))
- }
-
- // Automated warning.
- fmt.Fprint(outputFile, "// automatically generated by stateify.\n\n")
-
- // Emit build tags.
- if t := tags.Aggregate(flag.Args()); len(t) > 0 {
- fmt.Fprintf(outputFile, "%s\n\n", strings.Join(t.Lines(), "\n"))
- }
-
- // Emit the package name.
- _, pkg := filepath.Split(*fullPkg)
- fmt.Fprintf(outputFile, "package %s\n\n", pkg)
-
- // Emit the imports lazily.
- var once sync.Once
- maybeEmitImports := func() {
- once.Do(func() {
- // Emit the imports.
- fmt.Fprint(outputFile, "import (\n")
- if *statePkg != "" {
- fmt.Fprintf(outputFile, " \"%s\"\n", *statePkg)
- }
- if *imports != "" {
- for _, i := range strings.Split(*imports, ",") {
- fmt.Fprintf(outputFile, " \"%s\"\n", i)
- }
- }
- fmt.Fprint(outputFile, ")\n\n")
- })
- }
-
- files := make([]*ast.File, 0, len(flag.Args()))
-
- // Parse the input files.
- for _, filename := range flag.Args() {
- // Parse the file.
- fset := token.NewFileSet()
- f, err := parser.ParseFile(fset, filename, nil, parser.ParseComments)
- if err != nil {
- // Not a valid input file?
- fmt.Fprintf(os.Stderr, "Input %q can't be parsed: %v\n", filename, err)
- os.Exit(1)
- }
-
- files = append(files, f)
- }
-
- type method struct {
- typeName string
- methodName string
- }
-
-	// Search for and add all methods to a set. We auto-detect several
-	// different methods (and insert them if we don't find them, in order
- // to ensure that expectations match reality).
- //
- // While we do this, figure out the right receiver name. If there are
- // multiple distinct receivers, then we will just pick the last one.
- simpleMethods := make(map[method]struct{})
- receiverNames := make(map[string]string)
- for _, f := range files {
- // Go over all functions.
- for _, decl := range f.Decls {
- d, ok := decl.(*ast.FuncDecl)
- if !ok {
- continue
- }
- if d.Recv == nil || len(d.Recv.List) != 1 {
- // Not a named method.
- continue
- }
-
- // Save the method and the receiver.
- name, _ := resolveTypeName(d.Recv.List[0].Type)
- simpleMethods[method{
- typeName: name,
- methodName: d.Name.Name,
- }] = struct{}{}
- if len(d.Recv.List[0].Names) > 0 {
- receiverNames[name] = d.Recv.List[0].Names[0].Name
- }
- }
- }
-
- for _, f := range files {
- // Go over all named types.
- for _, decl := range f.Decls {
- d, ok := decl.(*ast.GenDecl)
- if !ok || d.Tok != token.TYPE {
- continue
- }
-
- // Only generate code for types marked "// +stateify
-			// savable" in one of the preceding comment lines. If
- // the line is marked "// +stateify type" then only
- // generate type information and register the type.
- if d.Doc == nil {
- continue
- }
- var (
- generateTypeInfo = false
- generateSaverLoader = false
- )
- for _, l := range d.Doc.List {
- if l.Text == "// +stateify savable" {
- generateTypeInfo = true
- generateSaverLoader = true
- break
- }
- if l.Text == "// +stateify type" {
- generateTypeInfo = true
- }
- }
- if !generateTypeInfo && !generateSaverLoader {
- continue
- }
-
- for _, gs := range d.Specs {
- ts := gs.(*ast.TypeSpec)
- recv, ok := receiverNames[ts.Name.Name]
- if !ok {
- // Maybe no methods were defined?
- recv = strings.ToLower(ts.Name.Name[:1])
- }
- switch x := ts.Type.(type) {
- case *ast.StructType:
- maybeEmitImports()
-
- // Record the slot for each field.
- fieldCount := 0
- fields := make(map[string]int)
- emitField := func(name string) {
- fmt.Fprintf(outputFile, " \"%s\",\n", name)
- fields[name] = fieldCount
- fieldCount++
- }
- emitFieldValue := func(name string, _ string) {
- emitField(name)
- }
- emitLoadValue := func(name, typName string) {
- fmt.Fprintf(outputFile, " stateSourceObject.LoadValue(%d, new(%s), func(y interface{}) { %s.load%s(y.(%s)) })\n", fields[name], typName, recv, camelCased(name), typName)
- }
- emitLoad := func(name string) {
- fmt.Fprintf(outputFile, " stateSourceObject.Load(%d, &%s.%s)\n", fields[name], recv, name)
- }
- emitLoadWait := func(name string) {
- fmt.Fprintf(outputFile, " stateSourceObject.LoadWait(%d, &%s.%s)\n", fields[name], recv, name)
- }
- emitSaveValue := func(name, typName string) {
- fmt.Fprintf(outputFile, " var %sValue %s = %s.save%s()\n", name, typName, recv, camelCased(name))
- fmt.Fprintf(outputFile, " stateSinkObject.SaveValue(%d, %sValue)\n", fields[name], name)
- }
- emitSave := func(name string) {
- fmt.Fprintf(outputFile, " stateSinkObject.Save(%d, &%s.%s)\n", fields[name], recv, name)
- }
- emitZeroCheck := func(name string) {
- fmt.Fprintf(outputFile, " if !%sIsZeroValue(&%s.%s) { %sFailf(\"%s is %%#v, expected zero\", &%s.%s) }\n", statePrefix, recv, name, statePrefix, name, recv, name)
- }
-
- // Generate the type name method.
- fmt.Fprintf(outputFile, "func (%s *%s) StateTypeName() string {\n", recv, ts.Name.Name)
- fmt.Fprintf(outputFile, " return \"%s.%s\"\n", *fullPkg, ts.Name.Name)
- fmt.Fprintf(outputFile, "}\n\n")
-
- // Generate the fields method.
- fmt.Fprintf(outputFile, "func (%s *%s) StateFields() []string {\n", recv, ts.Name.Name)
- fmt.Fprintf(outputFile, " return []string{\n")
- scanFields(x, "", scanFunctions{
- normal: emitField,
- wait: emitField,
- value: emitFieldValue,
- })
- fmt.Fprintf(outputFile, " }\n")
- fmt.Fprintf(outputFile, "}\n\n")
-
- // Define beforeSave if a definition was not found. This prevents
- // the code from compiling if a custom beforeSave was defined in a
- // file not provided to this binary and prevents inherited methods
- // from being called multiple times by overriding them.
- if _, ok := simpleMethods[method{
- typeName: ts.Name.Name,
- methodName: "beforeSave",
- }]; !ok && generateSaverLoader {
- fmt.Fprintf(outputFile, "func (%s *%s) beforeSave() {}\n\n", recv, ts.Name.Name)
- }
-
- // Generate the save method.
- //
- // N.B. For historical reasons, we perform the value saves first,
- // and perform the value loads last. There should be no dependency
- // on this specific behavior, but the ability to specify slots
- // allows a manual implementation to be order-dependent.
- if generateSaverLoader {
- fmt.Fprintf(outputFile, "func (%s *%s) StateSave(stateSinkObject %sSink) {\n", recv, ts.Name.Name, statePrefix)
- fmt.Fprintf(outputFile, " %s.beforeSave()\n", recv)
- scanFields(x, "", scanFunctions{zerovalue: emitZeroCheck})
- scanFields(x, "", scanFunctions{value: emitSaveValue})
- scanFields(x, "", scanFunctions{normal: emitSave, wait: emitSave})
- fmt.Fprintf(outputFile, "}\n\n")
- }
-
- // Define afterLoad if a definition was not found. We do this for
- // the same reason that we do it for beforeSave.
- _, hasAfterLoad := simpleMethods[method{
- typeName: ts.Name.Name,
- methodName: "afterLoad",
- }]
- if !hasAfterLoad && generateSaverLoader {
- fmt.Fprintf(outputFile, "func (%s *%s) afterLoad() {}\n\n", recv, ts.Name.Name)
- }
-
- // Generate the load method.
- //
- // N.B. See the comment above for the save method.
- if generateSaverLoader {
- fmt.Fprintf(outputFile, "func (%s *%s) StateLoad(stateSourceObject %sSource) {\n", recv, ts.Name.Name, statePrefix)
- scanFields(x, "", scanFunctions{normal: emitLoad, wait: emitLoadWait})
- scanFields(x, "", scanFunctions{value: emitLoadValue})
- if hasAfterLoad {
- // The call to afterLoad is made conditionally, because when
- // AfterLoad is called, the object encodes a dependency on
- // referred objects (i.e. fields). This means that afterLoad
- // will not be called until the other afterLoads are called.
- fmt.Fprintf(outputFile, " stateSourceObject.AfterLoad(%s.afterLoad)\n", recv)
- }
- fmt.Fprintf(outputFile, "}\n\n")
- }
-
- // Add to our registration.
- emitRegister(ts.Name.Name)
-
- case *ast.Ident, *ast.SelectorExpr, *ast.ArrayType:
- maybeEmitImports()
-
- // Generate the info methods.
- fmt.Fprintf(outputFile, "func (%s *%s) StateTypeName() string {\n", recv, ts.Name.Name)
- fmt.Fprintf(outputFile, " return \"%s.%s\"\n", *fullPkg, ts.Name.Name)
- fmt.Fprintf(outputFile, "}\n\n")
- fmt.Fprintf(outputFile, "func (%s *%s) StateFields() []string {\n", recv, ts.Name.Name)
- fmt.Fprintf(outputFile, " return nil\n")
- fmt.Fprintf(outputFile, "}\n\n")
-
- // See above.
- emitRegister(ts.Name.Name)
- }
- }
- }
- }
-
- if len(initCalls) > 0 {
- // Emit the init() function.
- fmt.Fprintf(outputFile, "func init() {\n")
- for _, ic := range initCalls {
- fmt.Fprintf(outputFile, " %s\n", ic)
- }
- fmt.Fprintf(outputFile, "}\n")
- }
-}
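
As a reminder of the input format the deleted generator consumed, here is a hedged sketch of an annotated type. The type and field names are invented, but the comment directive and state tags are exactly the ones handled by scanFields and the emit helpers above:

// +stateify savable
type example struct {
	id      int64                     // Untagged: saved and loaded by slot.
	scratch int64 `state:"nosave"`    // Skipped by the generator.
	done    int64 `state:"zerovalue"` // Checked to be zero at save time.
	clock   int64 `state:".(int64)"`  // Saved via saveClock, loaded via loadClock.
}

// The ".(int64)" tag makes the generated StateSave call saveClock and the
// generated StateLoad restore the field through loadClock.
func (e *example) saveClock() int64  { return e.clock }
func (e *example) loadClock(v int64) { e.clock = v }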
diff --git a/tools/installers/BUILD b/tools/installers/BUILD
deleted file mode 100644
index 13d3cc5e0..000000000
--- a/tools/installers/BUILD
+++ /dev/null
@@ -1,41 +0,0 @@
-# Installers for use by the tools/vm_test rules.
-
-package(
- default_visibility = ["//:sandbox"],
- licenses = ["notice"],
-)
-
-sh_binary(
- name = "head",
- srcs = ["head.sh"],
- data = [
- "//runsc",
- ],
-)
-
-sh_binary(
- name = "images",
- srcs = ["images.sh"],
- data = [
- "//images",
- ],
-)
-
-sh_binary(
- name = "master",
- srcs = ["master.sh"],
-)
-
-sh_binary(
- name = "containerd",
- srcs = ["containerd.sh"],
-)
-
-sh_binary(
- name = "shim",
- srcs = ["shim.sh"],
- data = [
- "//shim/v1:gvisor-containerd-shim",
- "//shim/v2:containerd-shim-runsc-v1",
- ],
-)
diff --git a/tools/installers/containerd.sh b/tools/installers/containerd.sh
deleted file mode 100755
index 6b7bb261c..000000000
--- a/tools/installers/containerd.sh
+++ /dev/null
@@ -1,114 +0,0 @@
-#!/bin/bash
-
-# Copyright 2019 The gVisor Authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -xeo pipefail
-
-declare -r CONTAINERD_VERSION=${CONTAINERD_VERSION:-1.3.0}
-declare -r CONTAINERD_MAJOR="$(echo ${CONTAINERD_VERSION} | awk -F '.' '{ print $1; }')"
-declare -r CONTAINERD_MINOR="$(echo ${CONTAINERD_VERSION} | awk -F '.' '{ print $2; }')"
-
-# Default to an older version for crictl for containerd <= 1.2.
-if [[ "${CONTAINERD_MAJOR}" -eq 1 ]] && [[ "${CONTAINERD_MINOR}" -le 2 ]]; then
- declare -r CRITOOLS_VERSION=${CRITOOLS_VERSION:-1.13.0}
-else
- declare -r CRITOOLS_VERSION=${CRITOOLS_VERSION:-1.18.0}
-fi
-
-# Helper for Go packages below.
-install_helper() {
- PACKAGE="${1}"
- TAG="${2}"
-
- # Clone the repository.
- mkdir -p "${GOPATH}"/src/$(dirname "${PACKAGE}") && \
- git clone https://"${PACKAGE}" "${GOPATH}"/src/"${PACKAGE}"
-
- # Checkout and build the repository.
- (cd "${GOPATH}"/src/"${PACKAGE}" && \
- git checkout "${TAG}" && \
- make && \
- make install)
-}
-
-# Install dependencies for the crictl tests.
-while true; do
- if (apt-get update && apt-get install -y \
- btrfs-tools \
- libseccomp-dev); then
- break
- fi
- result=$?
- if [[ $result -ne 100 ]]; then
- exit $result
- fi
-done
-
-# Install containerd & cri-tools.
-declare -rx GOPATH=$(mktemp -d --tmpdir gopathXXXXX)
-install_helper github.com/containerd/containerd "v${CONTAINERD_VERSION}" "${GOPATH}"
-install_helper github.com/kubernetes-sigs/cri-tools "v${CRITOOLS_VERSION}" "${GOPATH}"
-
-# Configure containerd-shim.
-#
-# Note that for versions <= 1.1 the legacy shim must be installed in /usr/bin,
-# which should align with the installer script in head.sh (or master.sh).
-if [[ "${CONTAINERD_MAJOR}" -le 1 ]] && [[ "${CONTAINERD_MINOR}" -lt 2 ]]; then
- declare -r shim_config_path=/etc/containerd/gvisor-containerd-shim.toml
- mkdir -p $(dirname ${shim_config_path})
- cat > ${shim_config_path} <<-EOF
- runc_shim = "/usr/bin/containerd-shim"
-
-[runsc_config]
- debug = "true"
- debug-log = "/tmp/runsc-logs/"
- strace = "true"
- file-access = "shared"
-EOF
-fi
-
-# Configure CNI.
-(cd "${GOPATH}" && src/github.com/containerd/containerd/script/setup/install-cni)
-cat <<EOF | sudo tee /etc/cni/net.d/10-bridge.conf
-{
- "cniVersion": "0.3.1",
- "name": "bridge",
- "type": "bridge",
- "bridge": "cnio0",
- "isGateway": true,
- "ipMasq": true,
- "ipam": {
- "type": "host-local",
- "ranges": [
- [{"subnet": "10.200.0.0/24"}]
- ],
- "routes": [{"dst": "0.0.0.0/0"}]
- }
-}
-EOF
-cat <<EOF | sudo tee /etc/cni/net.d/99-loopback.conf
-{
- "cniVersion": "0.3.1",
- "type": "loopback"
-}
-EOF
-
-# Configure crictl.
-cat <<EOF | sudo tee /etc/crictl.yaml
-runtime-endpoint: unix:///run/containerd/containerd.sock
-EOF
-
-# Cleanup.
-rm -rf "${GOPATH}"
diff --git a/tools/installers/head.sh b/tools/installers/head.sh
deleted file mode 100755
index a613fcb5b..000000000
--- a/tools/installers/head.sh
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/bin/bash
-
-# Copyright 2019 The gVisor Authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Install our runtime.
-runfiles=.
-if [[ -d "$0.runfiles" ]]; then
- runfiles="$0.runfiles"
-fi
-$(find -L "${runfiles}" -executable -type f -name runsc) install
-
-# Restart docker.
-if service docker status 2>/dev/null; then
- service docker restart
-fi
diff --git a/tools/installers/images.sh b/tools/installers/images.sh
deleted file mode 100755
index 52e750f57..000000000
--- a/tools/installers/images.sh
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/bin/bash
-
-# Copyright 2020 The gVisor Authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -xeuo pipefail
-
-# Find the images directory.
-for images in $(find . -type d -name images); do
- if [[ -f "${images}"/Makefile ]]; then
- make -C "${images}" load-all-images
- fi
-done
diff --git a/tools/installers/master.sh b/tools/installers/master.sh
deleted file mode 100755
index 2c6001c6c..000000000
--- a/tools/installers/master.sh
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/bin/bash
-
-# Copyright 2019 The gVisor Authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Install runsc from the master branch.
-set -e
-
-curl -fsSL https://gvisor.dev/archive.key | sudo apt-key add -
-add-apt-repository "deb https://storage.googleapis.com/gvisor/releases release main"
-
-while true; do
- if (apt-get update && apt-get install -y runsc); then
- break
- fi
- result=$?
- if [[ $result -ne 100 ]]; then
- exit $result
- fi
-done
-
-runsc install
-service docker restart
diff --git a/tools/installers/shim.sh b/tools/installers/shim.sh
deleted file mode 100755
index 8153ce283..000000000
--- a/tools/installers/shim.sh
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/bin/bash
-
-# Copyright 2019 The gVisor Authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Install all the shims.
-#
-# Note that containerd looks in the directory of its own executable
-# in order to find the shim binary, so we check locations in order
-# of preference. The local containerd installer will install to
-# /usr/local, so we use that first.
-if [[ -x /usr/local/bin/containerd ]]; then
- containerd_install_dir=/usr/local/bin
-else
- containerd_install_dir=/usr/bin
-fi
-runfiles=.
-if [[ -d "$0.runfiles" ]]; then
- runfiles="$0.runfiles"
-fi
-find -L "${runfiles}" -executable -type f -name containerd-shim-runsc-v1 -exec cp -L {} "${containerd_install_dir}" \;
-find -L "${runfiles}" -executable -type f -name gvisor-containerd-shim -exec cp -L {} "${containerd_install_dir}" \;
diff --git a/tools/make_apt.sh b/tools/make_apt.sh
deleted file mode 100755
index 13c5edd76..000000000
--- a/tools/make_apt.sh
+++ /dev/null
@@ -1,144 +0,0 @@
-#!/bin/bash
-
-# Copyright 2018 The gVisor Authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-if [[ "$#" -le 3 ]]; then
- echo "usage: $0 <private-key> <suite> <root> <packages...>"
- exit 1
-fi
-declare -r private_key=$(readlink -e "$1"); shift
-declare -r suite="$1"; shift
-declare -r root="$1"; shift
-
-# Ensure that we have the correct packages installed.
-function apt_install() {
- while true; do
- sudo apt-get update &&
- sudo apt-get install -y "$@" &&
- true
- result="${?}"
- case $result in
- 0)
- break
- ;;
- 100)
- # 100 is the error code that apt-get returns.
- ;;
- *)
- exit $result
- ;;
- esac
- done
-}
-dpkg-sig --help >/dev/null 2>&1 || apt_install dpkg-sig
-apt-ftparchive --help >/dev/null 2>&1 || apt_install apt-utils
-xz --help >/dev/null 2>&1 || apt_install xz-utils
-
-# Verbose from this point.
-set -xeo pipefail
-
-# Create a directory for the release.
-declare -r release="${root}/dists/${suite}"
-mkdir -p "${release}"
-
-# Create a temporary keyring, and ensure it is cleaned up.
-# Using a separate homedir allows us to install apt repositories multiple times
-# using the same key; this works around a limitation in GnuPG pre-2.1.
-declare -r keyring=$(mktemp /tmp/keyringXXXXXX.gpg)
-declare -r homedir=$(mktemp -d /tmp/homedirXXXXXX)
-declare -r gpg_opts=("--no-default-keyring" "--secret-keyring" "${keyring}" "--homedir" "${homedir}")
-cleanup() {
- rm -rf "${keyring}" "${homedir}"
-}
-trap cleanup EXIT
-
-# We attempt the import twice because the first one will fail if the public key
-# is not found. This isn't actually a failure for us, because we don't require
-# the public key (this may be stored separately). The second import will succeed
-# because, in reality, the first import succeeded and it's a no-op.
-gpg "${gpg_opts[@]}" --import "${private_key}" || \
- gpg "${gpg_opts[@]}" --import "${private_key}"
-
-# Copy the packages into the root.
-for pkg in "$@"; do
- ext=${pkg##*.}
- name=$(basename "${pkg}" ".${ext}")
- arch=${name##*_}
- if [[ "${name}" == "${arch}" ]]; then
- continue # Not a regular package.
- fi
- if [[ "${pkg}" =~ ^.*\.deb$ ]]; then
- # Extract from the debian file.
- version=$(dpkg --info "${pkg}" | grep -E 'Version:' | cut -d':' -f2)
- elif [[ "${pkg}" =~ ^.*\.changes$ ]]; then
- # Extract from the changes file.
- version=$(grep -E 'Version:' "${pkg}" | cut -d':' -f2)
- else
- # Unsupported file type.
- echo "Unknown file type: ${pkg}"
- exit 1
- fi
-
- # The package may already exist, in which case we leave it alone.
- version=${version// /} # Trim whitespace.
- destdir="${root}/pool/${version}/binary-${arch}"
- target="${destdir}/${name}.${ext}"
- if [[ -f "${target}" ]]; then
- continue
- fi
-
- # Copy & sign the package.
- mkdir -p "${destdir}"
- cp -a "${pkg}" "${target}"
- chmod 0644 "${target}"
- if [[ "${ext}" == "deb" ]]; then
- # We use [*] here to expand the gpg_opts array into a single shell-word.
- dpkg-sig -g "${gpg_opts[*]}" --sign builder "${target}"
- fi
-done
-
-# Build the package list.
-declare arches=()
-for dir in "${root}"/pool/*/binary-*; do
- name=$(basename "${dir}")
- arch=${name##binary-}
- arches+=("${arch}")
- repo_packages="${release}"/main/"${name}"
- mkdir -p "${repo_packages}"
- (cd "${root}" && apt-ftparchive --arch "${arch}" packages pool > "${repo_packages}"/Packages)
- (cd "${repo_packages}" && cat Packages | gzip > Packages.gz)
- (cd "${repo_packages}" && cat Packages | xz > Packages.xz)
-done
-
-# Build the release list.
-cat > "${release}"/apt.conf <<EOF
-APT {
- FTPArchive {
- Release {
- Architectures "${arches[@]}";
- Suite "${suite}";
- Components "main";
- };
- };
-};
-EOF
-(cd "${release}" && apt-ftparchive -c=apt.conf release . > Release)
-rm "${release}"/apt.conf
-
-# Sign the release.
-declare -r digest_opts=("--digest-algo" "SHA512" "--cert-digest-algo" "SHA512")
-(cd "${release}" && rm -f Release.gpg InRelease)
-(cd "${release}" && gpg "${gpg_opts[@]}" --clearsign "${digest_opts[@]}" -o InRelease Release)
-(cd "${release}" && gpg "${gpg_opts[@]}" -abs "${digest_opts[@]}" -o Release.gpg Release)
diff --git a/tools/make_release.sh b/tools/make_release.sh
deleted file mode 100755
index 9137dd9bb..000000000
--- a/tools/make_release.sh
+++ /dev/null
@@ -1,81 +0,0 @@
-#!/bin/bash
-
-# Copyright 2018 The gVisor Authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-if [[ "$#" -le 2 ]]; then
- echo "usage: $0 <private-key> <root> <binaries & packages...>"
- echo "The environment variable NIGHTLY may be set to control"
- echo "whether the nightly packages are produced or not."
- exit 1
-fi
-
-set -xeo pipefail
-declare -r private_key="$1"; shift
-declare -r root="$1"; shift
-declare -a binaries
-declare -a pkgs
-
-# Collect binaries & packages.
-for arg in "$@"; do
- if [[ "${arg}" == *.deb ]] || [[ "${arg}" == *.changes ]]; then
- pkgs+=("${arg}")
- else
- binaries+=("${arg}")
- fi
-done
-
-# install_raw installs raw artifacts.
-install_raw() {
- mkdir -p "${root}/$1"
- for binary in "${binaries[@]}"; do
- # Copy the raw file & generate a sha512sum.
- name=$(basename "${binary}")
- cp -f "${binary}" "${root}/$1"
- (cd "${root}/$1" && sha512sum "${name}" > "${name}.sha512")
- done
-}
-
-# install_apt installs an apt repository.
-install_apt() {
- tools/make_apt.sh "${private_key}" "$1" "${root}" "${pkgs[@]}"
-}
-
-# If nightly, install only nightly artifacts.
-if [[ "${NIGHTLY:-false}" == "true" ]]; then
- # The "latest" directory and current date.
- stamp="$(date -Idate)"
- install_raw "nightly/latest"
- install_raw "nightly/${stamp}"
- install_apt "nightly"
-else
- # Is it a tagged release? Build that.
- tags="$(git tag --points-at HEAD 2>/dev/null || true)"
- if ! [[ -z "${tags}" ]]; then
- # Note that a given commit can match any number of tags. We have to iterate
- # through all possible tags and produce associated artifacts.
- for tag in ${tags}; do
- name=$(echo "${tag}" | cut -d'-' -f2)
- base=$(echo "${name}" | cut -d'.' -f1)
- install_raw "release/${name}"
- install_raw "release/latest"
- install_apt "release"
- install_apt "${base}"
- done
- else
- # Otherwise, assume it is a raw master commit.
- install_raw "master/latest"
- install_apt "master"
- fi
-fi
diff --git a/tools/nogo/BUILD b/tools/nogo/BUILD
deleted file mode 100644
index 12b8b597c..000000000
--- a/tools/nogo/BUILD
+++ /dev/null
@@ -1,74 +0,0 @@
-load("//tools:defs.bzl", "bzl_library", "go_library", "select_goarch", "select_goos")
-load("//tools/nogo:defs.bzl", "nogo_objdump_tool", "nogo_stdlib", "nogo_target")
-
-package(licenses = ["notice"])
-
-nogo_target(
- name = "target",
- goarch = select_goarch(),
- goos = select_goos(),
- visibility = ["//visibility:public"],
-)
-
-nogo_objdump_tool(
- name = "objdump_tool",
- visibility = ["//visibility:public"],
-)
-
-nogo_stdlib(
- name = "stdlib",
- visibility = ["//visibility:public"],
-)
-
-go_library(
- name = "nogo",
- srcs = [
- "analyzers.go",
- "build.go",
- "config.go",
- "findings.go",
- "nogo.go",
- ],
- nogo = False,
- visibility = ["//:sandbox"],
- deps = [
- "//tools/checkescape",
- "//tools/checkunsafe",
- "@co_honnef_go_tools//staticcheck:go_default_library",
- "@co_honnef_go_tools//stylecheck:go_default_library",
- "@org_golang_x_tools//go/analysis:go_tool_library",
- "@org_golang_x_tools//go/analysis/internal/facts:go_tool_library",
- "@org_golang_x_tools//go/analysis/passes/asmdecl:go_tool_library",
- "@org_golang_x_tools//go/analysis/passes/assign:go_tool_library",
- "@org_golang_x_tools//go/analysis/passes/atomic:go_tool_library",
- "@org_golang_x_tools//go/analysis/passes/bools:go_tool_library",
- "@org_golang_x_tools//go/analysis/passes/buildtag:go_tool_library",
- "@org_golang_x_tools//go/analysis/passes/cgocall:go_tool_library",
- "@org_golang_x_tools//go/analysis/passes/composite:go_tool_library",
- "@org_golang_x_tools//go/analysis/passes/copylock:go_tool_library",
- "@org_golang_x_tools//go/analysis/passes/errorsas:go_tool_library",
- "@org_golang_x_tools//go/analysis/passes/httpresponse:go_tool_library",
- "@org_golang_x_tools//go/analysis/passes/loopclosure:go_tool_library",
- "@org_golang_x_tools//go/analysis/passes/lostcancel:go_tool_library",
- "@org_golang_x_tools//go/analysis/passes/nilfunc:go_tool_library",
- "@org_golang_x_tools//go/analysis/passes/nilness:go_tool_library",
- "@org_golang_x_tools//go/analysis/passes/printf:go_tool_library",
- "@org_golang_x_tools//go/analysis/passes/shadow:go_tool_library",
- "@org_golang_x_tools//go/analysis/passes/shift:go_tool_library",
- "@org_golang_x_tools//go/analysis/passes/stdmethods:go_tool_library",
- "@org_golang_x_tools//go/analysis/passes/stringintconv:go_tool_library",
- "@org_golang_x_tools//go/analysis/passes/structtag:go_tool_library",
- "@org_golang_x_tools//go/analysis/passes/tests:go_tool_library",
- "@org_golang_x_tools//go/analysis/passes/unmarshal:go_tool_library",
- "@org_golang_x_tools//go/analysis/passes/unreachable:go_tool_library",
- "@org_golang_x_tools//go/analysis/passes/unsafeptr:go_tool_library",
- "@org_golang_x_tools//go/analysis/passes/unusedresult:go_tool_library",
- "@org_golang_x_tools//go/gcexportdata:go_tool_library",
- ],
-)
-
-bzl_library(
- name = "defs_bzl",
- srcs = ["defs.bzl"],
- visibility = ["//visibility:private"],
-)
diff --git a/tools/nogo/README.md b/tools/nogo/README.md
deleted file mode 100644
index 6e4db18de..000000000
--- a/tools/nogo/README.md
+++ /dev/null
@@ -1,31 +0,0 @@
-# Extended "nogo" analysis
-
-This package provides a build aspect that performs nogo analysis. This aspect is
-automatically injected into all relevant libraries when using the default
-`go_binary` and `go_library` rules.
-
-It exists for several reasons.
-
-* The default `nogo` provided by bazel does not support binary analysis. This
-  package allows us to analyze the generated binary in addition to running the
-  standard analyzers.
-
-* The configuration provided in this package is much richer than the standard
- `nogo` JSON blob. Specifically, it allows us to exclude specific structures
-  from the composite rules (such as the Ranges that are common to the set
- types).
-
-* The bazel version of `nogo` is run directly against the `go_library` and
- `go_binary` targets, meaning that any change to the configuration requires a
-  rebuild from scratch (which, for some reason, includes all C++ source files
-  in the process). Using an aspect is more efficient in this regard.
-
-* The checks supported by this package are exported as tests, which makes it
- easier to reason about and plumb into the build system.
-
-* For uninteresting reasons, it is impossible to integrate the default `nogo`
- analyzer provided by bazel with internal Google tooling. To provide a
- consistent experience, this package allows those systems to be unified.
-
-To use this package, import `nogo_test` from `defs.bzl` and add a single
-dependency which is a `go_binary` or `go_library` rule.
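For context, a minimal usage sketch of the removed rule follows. This assumes the rule is loaded directly from tools/nogo/defs.bzl (shown later in this change); the target and file names are illustrative, not taken from the tree:

    load("//tools/nogo:defs.bzl", "nogo_test")

    nogo_test(
        name = "lib_nogo_test",
        # Hypothetical nogo_config target supplying the yaml configuration.
        config = "//:nogo_config",
        # srcs only make the test depend directly on the analyzed files.
        srcs = glob(["*.go"]),
        # Exactly one go_library or go_binary dependency to analyze.
        deps = [":lib"],
    )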
diff --git a/tools/nogo/analyzers.go b/tools/nogo/analyzers.go
deleted file mode 100644
index b919bc2f8..000000000
--- a/tools/nogo/analyzers.go
+++ /dev/null
@@ -1,131 +0,0 @@
-// Copyright 2019 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package nogo
-
-import (
- "encoding/gob"
-
- "golang.org/x/tools/go/analysis"
- "golang.org/x/tools/go/analysis/passes/asmdecl"
- "golang.org/x/tools/go/analysis/passes/assign"
- "golang.org/x/tools/go/analysis/passes/atomic"
- "golang.org/x/tools/go/analysis/passes/bools"
- "golang.org/x/tools/go/analysis/passes/buildtag"
- "golang.org/x/tools/go/analysis/passes/cgocall"
- "golang.org/x/tools/go/analysis/passes/composite"
- "golang.org/x/tools/go/analysis/passes/copylock"
- "golang.org/x/tools/go/analysis/passes/errorsas"
- "golang.org/x/tools/go/analysis/passes/httpresponse"
- "golang.org/x/tools/go/analysis/passes/loopclosure"
- "golang.org/x/tools/go/analysis/passes/lostcancel"
- "golang.org/x/tools/go/analysis/passes/nilfunc"
- "golang.org/x/tools/go/analysis/passes/nilness"
- "golang.org/x/tools/go/analysis/passes/printf"
- "golang.org/x/tools/go/analysis/passes/shadow"
- "golang.org/x/tools/go/analysis/passes/shift"
- "golang.org/x/tools/go/analysis/passes/stdmethods"
- "golang.org/x/tools/go/analysis/passes/stringintconv"
- "golang.org/x/tools/go/analysis/passes/structtag"
- "golang.org/x/tools/go/analysis/passes/tests"
- "golang.org/x/tools/go/analysis/passes/unmarshal"
- "golang.org/x/tools/go/analysis/passes/unreachable"
- "golang.org/x/tools/go/analysis/passes/unsafeptr"
- "golang.org/x/tools/go/analysis/passes/unusedresult"
- "honnef.co/go/tools/staticcheck"
- "honnef.co/go/tools/stylecheck"
-
- "gvisor.dev/gvisor/tools/checkescape"
- "gvisor.dev/gvisor/tools/checkunsafe"
-)
-
-// AllAnalyzers is a list of all available analyzers.
-var AllAnalyzers = []*analysis.Analyzer{
- asmdecl.Analyzer,
- assign.Analyzer,
- atomic.Analyzer,
- bools.Analyzer,
- buildtag.Analyzer,
- cgocall.Analyzer,
- composite.Analyzer,
- copylock.Analyzer,
- errorsas.Analyzer,
- httpresponse.Analyzer,
- loopclosure.Analyzer,
- lostcancel.Analyzer,
- nilfunc.Analyzer,
- nilness.Analyzer,
- printf.Analyzer,
- shift.Analyzer,
- stdmethods.Analyzer,
- stringintconv.Analyzer,
- shadow.Analyzer,
- structtag.Analyzer,
- tests.Analyzer,
- unmarshal.Analyzer,
- unreachable.Analyzer,
- unsafeptr.Analyzer,
- unusedresult.Analyzer,
- checkescape.Analyzer,
- checkunsafe.Analyzer,
-}
-
-// EscapeAnalyzers is a list of escape-related analyzers.
-var EscapeAnalyzers = []*analysis.Analyzer{
- checkescape.EscapeAnalyzer,
-}
-
-func register(all []*analysis.Analyzer) {
- // Register all fact types.
- //
- // N.B. This needs to be done recursively, because there may be
- // analyzers in the Requires list that do not appear explicitly above.
- registered := make(map[*analysis.Analyzer]struct{})
- var registerOne func(*analysis.Analyzer)
- registerOne = func(a *analysis.Analyzer) {
- if _, ok := registered[a]; ok {
- return
- }
-
- // Register dependencies.
- for _, da := range a.Requires {
- registerOne(da)
- }
-
- // Register local facts.
- for _, f := range a.FactTypes {
- gob.Register(f)
- }
-
- registered[a] = struct{}{} // Done.
- }
- for _, a := range all {
- registerOne(a)
- }
-}
-
-func init() {
- // Add all staticcheck analyzers.
- for _, a := range staticcheck.Analyzers {
- AllAnalyzers = append(AllAnalyzers, a)
- }
- // Add all stylecheck analyzers.
- for _, a := range stylecheck.Analyzers {
- AllAnalyzers = append(AllAnalyzers, a)
- }
-
- // Register lists.
- register(AllAnalyzers)
- register(EscapeAnalyzers)
-}
diff --git a/tools/nogo/build.go b/tools/nogo/build.go
deleted file mode 100644
index d173cff1f..000000000
--- a/tools/nogo/build.go
+++ /dev/null
@@ -1,30 +0,0 @@
-// Copyright 2019 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package nogo
-
-import (
- "fmt"
- "io"
- "os"
-)
-
-// findStdPkg opens the bundled standard library package archive for the given path.
-func findStdPkg(GOOS, GOARCH, path string) (io.ReadCloser, error) {
- if path == "C" {
- // Cgo builds cannot be analyzed. Skip.
- return nil, ErrSkip
- }
- return os.Open(fmt.Sprintf("external/go_sdk/pkg/%s_%s/%s.a", GOOS, GOARCH, path))
-}
diff --git a/tools/nogo/check/BUILD b/tools/nogo/check/BUILD
deleted file mode 100644
index e18483a18..000000000
--- a/tools/nogo/check/BUILD
+++ /dev/null
@@ -1,11 +0,0 @@
-load("//tools:defs.bzl", "go_binary")
-
-package(licenses = ["notice"])
-
-go_binary(
- name = "check",
- srcs = ["main.go"],
- nogo = False,
- visibility = ["//visibility:public"],
- deps = ["//tools/nogo"],
-)
diff --git a/tools/nogo/check/main.go b/tools/nogo/check/main.go
deleted file mode 100644
index 69bdfe502..000000000
--- a/tools/nogo/check/main.go
+++ /dev/null
@@ -1,114 +0,0 @@
-// Copyright 2019 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Binary check is the nogo entrypoint.
-package main
-
-import (
- "encoding/json"
- "flag"
- "fmt"
- "io/ioutil"
- "log"
- "os"
-
- "gvisor.dev/gvisor/tools/nogo"
-)
-
-var (
- packageFile = flag.String("package", "", "package configuration file (in JSON format)")
- stdlibFile = flag.String("stdlib", "", "stdlib configuration file (in JSON format)")
- findingsOutput = flag.String("findings", "", "output file (or stdout, if not specified)")
- factsOutput = flag.String("facts", "", "output file for facts (optional)")
- escapesOutput = flag.String("escapes", "", "output file for escapes (optional)")
-)
-
-func loadConfig(file string, config interface{}) interface{} {
- // Load the configuration.
- f, err := os.Open(file)
- if err != nil {
- log.Fatalf("unable to open configuration %q: %v", file, err)
- }
- defer f.Close()
- dec := json.NewDecoder(f)
- dec.DisallowUnknownFields()
- if err := dec.Decode(config); err != nil {
- log.Fatalf("unable to decode configuration: %v", err)
- }
- return config
-}
-
-func main() {
- // Parse all flags.
- flag.Parse()
-
- var (
- findings []nogo.Finding
- factData []byte
- err error
- )
-
- // Check & load the configuration.
- if *packageFile != "" && *stdlibFile != "" {
- log.Fatalf("unable to perform stdlib and package analysis; provide only one!")
- }
-
- // Run the configuration.
- if *stdlibFile != "" {
- // Perform basic analysis.
- c := loadConfig(*stdlibFile, new(nogo.StdlibConfig)).(*nogo.StdlibConfig)
- findings, factData, err = nogo.CheckStdlib(c, nogo.AllAnalyzers)
-
- } else if *packageFile != "" {
- // Perform basic analysis.
- c := loadConfig(*packageFile, new(nogo.PackageConfig)).(*nogo.PackageConfig)
- findings, factData, err = nogo.CheckPackage(c, nogo.AllAnalyzers, nil)
-
- // Do we need to do escape analysis?
- if *escapesOutput != "" {
- escapes, _, err := nogo.CheckPackage(c, nogo.EscapeAnalyzers, nil)
- if err != nil {
- log.Fatalf("error performing escape analysis: %v", err)
- }
- if err := nogo.WriteFindingsToFile(escapes, *escapesOutput); err != nil {
- log.Fatalf("error writing escapes to %q: %v", *escapesOutput, err)
- }
- }
- } else {
- log.Fatalf("please provide at least one of package or stdlib!")
- }
-
- // Check that analysis was successful.
- if err != nil {
- log.Fatalf("error performing analysis: %v", err)
- }
-
- // Save facts.
- if *factsOutput != "" {
- if err := ioutil.WriteFile(*factsOutput, factData, 0644); err != nil {
- log.Fatalf("error saving findings to %q: %v", *factsOutput, err)
- }
- }
-
- // Write all findings.
- if *findingsOutput != "" {
- if err := nogo.WriteFindingsToFile(findings, *findingsOutput); err != nil {
- log.Fatalf("error writing findings to %q: %v", *findingsOutput, err)
- }
- } else {
- for _, finding := range findings {
- fmt.Fprintf(os.Stdout, "%s\n", finding.String())
- }
- }
-}
diff --git a/tools/nogo/config.go b/tools/nogo/config.go
deleted file mode 100644
index 2fea5b3e1..000000000
--- a/tools/nogo/config.go
+++ /dev/null
@@ -1,261 +0,0 @@
-// Copyright 2019 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package nogo
-
-import (
- "fmt"
- "regexp"
-)
-
-// GroupName is a named group.
-type GroupName string
-
-// AnalyzerName is a named analyzer.
-type AnalyzerName string
-
-// Group represents a named collection of files.
-type Group struct {
- // Name is the short name for the group.
- Name GroupName `yaml:"name"`
-
- // Regex matches all full paths in the group.
- Regex string `yaml:"regex"`
- regex *regexp.Regexp `yaml:"-"`
-
- // Default determines the default group behavior.
- //
- // If Default is true, all Analyzers are enabled for this
- // group. Otherwise, Analyzers must be individually enabled
- // by specifying a (possibly empty) ItemConfig for the group
- // in the AnalyzerConfig.
- Default bool `yaml:"default"`
-}
-
-func (g *Group) compile() error {
- r, err := regexp.Compile(g.Regex)
- if err != nil {
- return err
- }
- g.regex = r
- return nil
-}
-
-// ItemConfig is an (Analyzer,Group) configuration.
-type ItemConfig struct {
- // Exclude are analyzer exclusions.
- //
- // Exclude is a list of regular expressions. If the corresponding
- // Analyzer emits a Finding for which Finding.Position.String()
- // matches a regular expression in Exclude, the finding will not
- // be reported.
- Exclude []string `yaml:"exclude,omitempty"`
- exclude []*regexp.Regexp `yaml:"-"`
-
- // Suppress are analyzer suppressions.
- //
- // Suppress is a list of regular expressions. If the corresponding
- // Analyzer emits a Finding for which Finding.Message matches a regular
- // expression in Suppress, the finding will not be reported.
- Suppress []string `yaml:"suppress,omitempty"`
- suppress []*regexp.Regexp `yaml:"-"`
-}
-
-func compileRegexps(ss []string, rs *[]*regexp.Regexp) error {
- *rs = make([]*regexp.Regexp, 0, len(ss))
- for _, s := range ss {
- r, err := regexp.Compile(s)
- if err != nil {
- return err
- }
- *rs = append(*rs, r)
- }
- return nil
-}
-
-func (i *ItemConfig) compile() error {
- if i == nil {
- // This may be nil if nothing is included in the
- // item configuration. That's fine, there's nothing
- // to compile and nothing to exclude & suppress.
- return nil
- }
- if err := compileRegexps(i.Exclude, &i.exclude); err != nil {
- return fmt.Errorf("in exclude: %w", err)
- }
- if err := compileRegexps(i.Suppress, &i.suppress); err != nil {
- return fmt.Errorf("in suppress: %w", err)
- }
- return nil
-}
-
-func (i *ItemConfig) merge(other *ItemConfig) {
- i.Exclude = append(i.Exclude, other.Exclude...)
- i.Suppress = append(i.Suppress, other.Suppress...)
-}
-
-func (i *ItemConfig) shouldReport(fullPos, msg string) bool {
- if i == nil {
- // See above.
- return true
- }
- for _, r := range i.exclude {
- if r.MatchString(fullPos) {
- return false
- }
- }
- for _, r := range i.suppress {
- if r.MatchString(msg) {
- return false
- }
- }
- return true
-}
-
-// AnalyzerConfig is the configuration for a single analyzer.
-//
-// This map is keyed by individual Group names, to allow for different
-// configurations depending on what Group the file belongs to.
-type AnalyzerConfig map[GroupName]*ItemConfig
-
-func (a AnalyzerConfig) compile() error {
- for name, gc := range a {
- if err := gc.compile(); err != nil {
- return fmt.Errorf("invalid group %q: %v", name, err)
- }
- }
- return nil
-}
-
-func (a AnalyzerConfig) merge(other AnalyzerConfig) {
- // Merge all the groups.
- for name, gc := range other {
- old, ok := a[name]
- if !ok || old == nil {
- a[name] = gc // Not configured in a.
- continue
- }
- old.merge(gc)
- }
-}
-
-func (a AnalyzerConfig) shouldReport(groupConfig *Group, fullPos, msg string) bool {
- gc, ok := a[groupConfig.Name]
- if !ok {
- return groupConfig.Default
- }
-
- // Note that if a section appears for a particular group
- // for a particular analyzer, then it will now be enabled,
- // and the group default no longer applies.
- return gc.shouldReport(fullPos, msg)
-}
-
-// Config is a nogo configuration.
-type Config struct {
- // Groups defines a set of regular expressions that
- // are standard "prefixes", so that files can be grouped
- // and specific rules applied to individual groups.
- Groups []Group `yaml:"groups"`
-
- // Global is the global analyzer config.
- Global AnalyzerConfig `yaml:"global"`
-
- // Analyzers are individual analyzer configurations. The
- // key for each analyzer is the name of the analyzer. The
- // value is either a boolean (enable/disable), or a map to
- // the groups above.
- Analyzers map[AnalyzerName]AnalyzerConfig `yaml:"analyzers"`
-}
-
-// Merge merges two configurations.
-func (c *Config) Merge(other *Config) {
- // Merge all groups.
- for _, g := range other.Groups {
- // Is there a matching group? If yes, we just delete
- // it. This will preserve the order provided in the
- // overriding file, even if it differs.
- for i := 0; i < len(c.Groups); i++ {
- if g.Name == c.Groups[i].Name {
- copy(c.Groups[i:], c.Groups[i+1:])
- c.Groups = c.Groups[:len(c.Groups)-1]
- break
- }
- }
- c.Groups = append(c.Groups, g)
- }
-
- // Merge global configurations.
- c.Global.merge(other.Global)
-
- // Merge all analyzer configurations.
- for name, ac := range other.Analyzers {
- old, ok := c.Analyzers[name]
- if !ok {
- c.Analyzers[name] = ac // No analyzer in original config.
- continue
- }
- old.merge(ac)
- }
-}
-
-// Compile compiles a configuration to make it usable.
-func (c *Config) Compile() error {
- for i := 0; i < len(c.Groups); i++ {
- if err := c.Groups[i].compile(); err != nil {
- return fmt.Errorf("invalid group %q: %w", c.Groups[i].Name, err)
- }
- }
- if err := c.Global.compile(); err != nil {
- return fmt.Errorf("invalid global: %w", err)
- }
- for name, ac := range c.Analyzers {
- if err := ac.compile(); err != nil {
- return fmt.Errorf("invalid analyzer %q: %w", name, err)
- }
- }
- return nil
-}
-
-// ShouldReport returns true iff the finding should be reported according to the Config.
-func (c *Config) ShouldReport(finding Finding) bool {
- fullPos := finding.Position.String()
-
- // Find the matching group.
- var groupConfig *Group
- for i := 0; i < len(c.Groups); i++ {
- if c.Groups[i].regex.MatchString(fullPos) {
- groupConfig = &c.Groups[i]
- break
- }
- }
-
- // If there is no group matching this path, then
- // we default to accept the finding.
- if groupConfig == nil {
- return true
- }
-
- // Suppress via global rule?
- if !c.Global.shouldReport(groupConfig, fullPos, finding.Message) {
- return false
- }
-
- // Try the analyzer config.
- ac, ok := c.Analyzers[finding.Category]
- if !ok {
- return groupConfig.Default
- }
- return ac.shouldReport(groupConfig, fullPos, finding.Message)
-}
diff --git a/tools/nogo/defs.bzl b/tools/nogo/defs.bzl
deleted file mode 100644
index 161ea972e..000000000
--- a/tools/nogo/defs.bzl
+++ /dev/null
@@ -1,426 +0,0 @@
-"""Nogo rules."""
-
-load("//tools/bazeldefs:go.bzl", "go_context", "go_embed_libraries", "go_importpath", "go_rule")
-
-NogoConfigInfo = provider(
- "information about a nogo configuration",
- fields = {
- "srcs": "the collection of configuration files",
- },
-)
-
-def _nogo_config_impl(ctx):
- return [NogoConfigInfo(
- srcs = ctx.files.srcs,
- )]
-
-nogo_config = rule(
- implementation = _nogo_config_impl,
- attrs = {
- "srcs": attr.label_list(
- doc = "a list of yaml files (schema defined by tool/nogo/config.go).",
- allow_files = True,
- ),
- },
-)
-
-NogoTargetInfo = provider(
- "information about the Go target",
- fields = {
- "goarch": "the build architecture (GOARCH)",
- "goos": "the build OS target (GOOS)",
- },
-)
-
-def _nogo_target_impl(ctx):
- return [NogoTargetInfo(
- goarch = ctx.attr.goarch,
- goos = ctx.attr.goos,
- )]
-
-nogo_target = go_rule(
- rule,
- implementation = _nogo_target_impl,
- attrs = {
- "goarch": attr.string(
- doc = "the Go build architecture (propagated to other rules).",
- mandatory = True,
- ),
- "goos": attr.string(
- doc = "the Go OS target (propagated to other rules).",
- mandatory = True,
- ),
- },
-)
-
-def _nogo_objdump_tool_impl(ctx):
- # Construct the magic dump command.
- #
- # Note that in some cases, the input is being fed into the tool via stdin.
- # Unfortunately, the Go objdump tool expects to see a seekable file [1], so
- # we need the tool to handle this case by creating a temporary file.
- #
- # [1] https://github.com/golang/go/issues/41051
- nogo_target_info = ctx.attr._nogo_target[NogoTargetInfo]
- go_ctx = go_context(ctx, goos = nogo_target_info.goos, goarch = nogo_target_info.goarch)
- env_prefix = " ".join(["%s=%s" % (key, value) for (key, value) in go_ctx.env.items()])
- dumper = ctx.actions.declare_file(ctx.label.name)
- ctx.actions.write(dumper, "\n".join([
- "#!/bin/bash",
- "set -euo pipefail",
- "if [[ $# -eq 0 ]]; then",
- " T=$(mktemp -u -t libXXXXXX.a)",
- " cat /dev/stdin > ${T}",
- "else",
- " T=$1;",
- "fi",
- "%s %s tool objdump ${T}" % (
- env_prefix,
- go_ctx.go.path,
- ),
- "if [[ $# -eq 0 ]]; then",
- " rm -rf ${T}",
- "fi",
- "",
- ]), is_executable = True)
-
- # Include the full runfiles.
- return [DefaultInfo(
- runfiles = ctx.runfiles(files = go_ctx.runfiles.to_list()),
- executable = dumper,
- )]
-
-nogo_objdump_tool = go_rule(
- rule,
- implementation = _nogo_objdump_tool_impl,
- attrs = {
- "_nogo_target": attr.label(
- default = "//tools/nogo:target",
- cfg = "target",
- ),
- },
-)
-
-# NogoStdlibInfo is the set of standard library facts.
-NogoStdlibInfo = provider(
- "information for nogo analysis (standard library facts)",
- fields = {
- "facts": "serialized standard library facts",
- "raw_findings": "raw package findings (if relevant)",
- },
-)
-
-def _nogo_stdlib_impl(ctx):
- # Build the standard library facts.
- nogo_target_info = ctx.attr._nogo_target[NogoTargetInfo]
- go_ctx = go_context(ctx, goos = nogo_target_info.goos, goarch = nogo_target_info.goarch)
- facts = ctx.actions.declare_file(ctx.label.name + ".facts")
- raw_findings = ctx.actions.declare_file(ctx.label.name + ".raw_findings")
- config = struct(
- Srcs = [f.path for f in go_ctx.stdlib_srcs],
- GOOS = go_ctx.goos,
- GOARCH = go_ctx.goarch,
- Tags = go_ctx.tags,
- )
- config_file = ctx.actions.declare_file(ctx.label.name + ".cfg")
- ctx.actions.write(config_file, config.to_json())
- ctx.actions.run(
- inputs = [config_file] + go_ctx.stdlib_srcs,
- outputs = [facts, raw_findings],
- tools = depset(go_ctx.runfiles.to_list() + ctx.files._nogo_objdump_tool),
- executable = ctx.files._nogo_check[0],
- mnemonic = "NogoStandardLibraryAnalysis",
- progress_message = "Analyzing Go Standard Library",
- arguments = go_ctx.nogo_args + [
- "-objdump_tool=%s" % ctx.files._nogo_objdump_tool[0].path,
- "-stdlib=%s" % config_file.path,
- "-findings=%s" % raw_findings.path,
- "-facts=%s" % facts.path,
- ],
- )
-
- # Return the stdlib facts as output.
- return [NogoStdlibInfo(
- facts = facts,
- raw_findings = raw_findings,
- )]
-
-nogo_stdlib = go_rule(
- rule,
- implementation = _nogo_stdlib_impl,
- attrs = {
- "_nogo_check": attr.label(
- default = "//tools/nogo/check:check",
- cfg = "host",
- ),
- "_nogo_objdump_tool": attr.label(
- default = "//tools/nogo:objdump_tool",
- cfg = "host",
- ),
- "_nogo_target": attr.label(
- default = "//tools/nogo:target",
- cfg = "target",
- ),
- },
-)
-
-# NogoInfo is the serialized set of package facts for a nogo analysis.
-#
-# Each go_library rule will generate a corresponding nogo rule, which will run
-# with the source files as input. Note, however, that the individual nogo rules
-# are simply stubs that enter into the shadow dependency tree (the "aspect").
-NogoInfo = provider(
- "information for nogo analysis",
- fields = {
- "facts": "serialized package facts",
- "raw_findings": "raw package findings (if relevant)",
- "escapes": "escape-only findings (if relevant)",
- "importpath": "package import path",
- "binaries": "package binary files",
- "srcs": "srcs (for go_test support)",
- "deps": "deps (for go_test support)",
- },
-)
-
-def _nogo_aspect_impl(target, ctx):
- # If this is a nogo rule itself (and not the shadow of a go_library or
- # go_binary rule created by such a rule), then we simply return nothing.
- # All work is done in the shadow properties for go rules. For a proto
- # library, we simply skip the analysis portion but still need to return a
- # valid NogoInfo to reference the generated binary.
- if ctx.rule.kind in ("go_library", "go_tool_library", "go_binary", "go_test"):
- srcs = ctx.rule.files.srcs
- deps = ctx.rule.attr.deps
- elif ctx.rule.kind in ("go_proto_library", "go_wrap_cc"):
- srcs = []
- deps = ctx.rule.attr.deps
- else:
- return [NogoInfo()]
-
- # If we're using the "library" attribute, then we need to aggregate the
- # original library sources and dependencies into this target to perform
- # proper type analysis.
- for embed in go_embed_libraries(ctx.rule):
- info = embed[NogoInfo]
- if hasattr(info, "srcs"):
- srcs = srcs + info.srcs
- if hasattr(info, "deps"):
- deps = deps + info.deps
-
- # Start with all target files and srcs as input.
- inputs = target.files.to_list() + srcs
-
- # Generate a shell script that dumps the binary. Annoyingly, this seems
- # necessary as the context in which a run_shell command runs does not seem
- # to cleanly allow us to redirect stdout to the actual output file. Perhaps
- # I'm missing something here, but the intermediate script does work.
- binaries = target.files.to_list()
- objfiles = [f for f in binaries if f.path.endswith(".a")]
- if len(objfiles) > 0:
- # Prefer the .a files for go_library targets.
- target_objfile = objfiles[0]
- else:
- # Use the raw binary for go_binary and go_test targets.
- target_objfile = binaries[0]
- inputs.append(target_objfile)
-
- # Extract the importpath for this package.
- if ctx.rule.kind == "go_test":
- # If this is a test, then it will not be imported by anything else.
- # We can safely set the importpath to just "test". Note that this
- # is necessary if the library also imports the core library (in
- # addition to including the sources directly), which happens in
- # some complex cases (seccomp_victim).
- importpath = "test"
- else:
- importpath = go_importpath(target)
-
- # Collect all info from shadow dependencies.
- fact_map = dict()
- import_map = dict()
- all_raw_findings = []
- for dep in deps:
- # There will be no file attribute set for all transitive dependencies
- # that are not go_library or go_binary rules, such as proto rules.
- # This is handled by the ctx.rule.kind check above.
- info = dep[NogoInfo]
- if not hasattr(info, "facts"):
- continue
-
- # Configure where to find the binary & fact files. Note that this will
- # use .x and .a regardless of whether this is a go_binary rule, since
- # these dependencies must be go_library rules.
- x_files = [f.path for f in info.binaries if f.path.endswith(".x")]
- if not len(x_files):
- x_files = [f.path for f in info.binaries if f.path.endswith(".a")]
- import_map[info.importpath] = x_files[0]
- fact_map[info.importpath] = info.facts.path
-
- # Collect all findings; duplicates are resolved at the end.
- all_raw_findings.extend(info.raw_findings)
-
- # Ensure the above are available as inputs.
- inputs.append(info.facts)
- inputs += info.binaries
-
- # Add the standard library facts.
- stdlib_info = ctx.attr._nogo_stdlib[NogoStdlibInfo]
- stdlib_facts = stdlib_info.facts
- inputs.append(stdlib_facts)
-
- # The nogo tool operates on a configuration serialized in JSON format.
- nogo_target_info = ctx.attr._nogo_target[NogoTargetInfo]
- go_ctx = go_context(ctx, goos = nogo_target_info.goos, goarch = nogo_target_info.goarch)
- facts = ctx.actions.declare_file(target.label.name + ".facts")
- raw_findings = ctx.actions.declare_file(target.label.name + ".raw_findings")
- escapes = ctx.actions.declare_file(target.label.name + ".escapes")
- config = struct(
- ImportPath = importpath,
- GoFiles = [src.path for src in srcs if src.path.endswith(".go")],
- NonGoFiles = [src.path for src in srcs if not src.path.endswith(".go")],
- GOOS = go_ctx.goos,
- GOARCH = go_ctx.goarch,
- Tags = go_ctx.tags,
- FactMap = fact_map,
- ImportMap = import_map,
- StdlibFacts = stdlib_facts.path,
- )
- config_file = ctx.actions.declare_file(target.label.name + ".cfg")
- ctx.actions.write(config_file, config.to_json())
- inputs.append(config_file)
- ctx.actions.run(
- inputs = inputs,
- outputs = [facts, raw_findings, escapes],
- tools = depset(go_ctx.runfiles.to_list() + ctx.files._nogo_objdump_tool),
- executable = ctx.files._nogo_check[0],
- mnemonic = "NogoAnalysis",
- progress_message = "Analyzing %s" % target.label,
- arguments = go_ctx.nogo_args + [
- "-binary=%s" % target_objfile.path,
- "-objdump_tool=%s" % ctx.files._nogo_objdump_tool[0].path,
- "-package=%s" % config_file.path,
- "-findings=%s" % raw_findings.path,
- "-facts=%s" % facts.path,
- "-escapes=%s" % escapes.path,
- ],
- )
-
- # Flatten all findings from all dependencies.
- #
- # This is done because all the filtering must be done at the
- # top-level nogo_test to dynamically apply a configuration.
- # This does not actually add any additional work here, but
- # will simply propagate the full list of files.
- all_raw_findings = [stdlib_info.raw_findings] + depset(all_raw_findings).to_list() + [raw_findings]
-
- # Return the package facts as output.
- return [NogoInfo(
- facts = facts,
- raw_findings = all_raw_findings,
- escapes = escapes,
- importpath = importpath,
- binaries = binaries,
- srcs = srcs,
- deps = deps,
- )]
-
-nogo_aspect = go_rule(
- aspect,
- implementation = _nogo_aspect_impl,
- attr_aspects = [
- "deps",
- "library",
- "embed",
- ],
- attrs = {
- "_nogo_check": attr.label(
- default = "//tools/nogo/check:check",
- cfg = "host",
- ),
- "_nogo_stdlib": attr.label(
- default = "//tools/nogo:stdlib",
- cfg = "host",
- ),
- "_nogo_objdump_tool": attr.label(
- default = "//tools/nogo:objdump_tool",
- cfg = "host",
- ),
- "_nogo_target": attr.label(
- default = "//tools/nogo:target",
- cfg = "target",
- ),
- },
-)
-
-def _nogo_test_impl(ctx):
- """Check nogo findings."""
-
- # Ensure there's a single dependency.
- if len(ctx.attr.deps) != 1:
- fail("nogo_test requires exactly one dep.")
- raw_findings = ctx.attr.deps[0][NogoInfo].raw_findings
- escapes = ctx.attr.deps[0][NogoInfo].escapes
-
- # Build a step that applies the configuration.
- config_srcs = ctx.attr.config[NogoConfigInfo].srcs
- findings = ctx.actions.declare_file(ctx.label.name + ".findings")
- ctx.actions.run(
- inputs = raw_findings + ctx.files.srcs + config_srcs,
- outputs = [findings],
- tools = depset(ctx.files._filter),
- executable = ctx.files._filter[0],
- mnemonic = "GoStaticAnalysis",
- progress_message = "Generating %s" % ctx.label,
- arguments = ["-input=%s" % f.path for f in raw_findings] +
- ["-config=%s" % f.path for f in config_srcs] +
- ["-output=%s" % findings.path],
- )
-
- # Build a runner that checks the filtered facts.
- #
- # Note that this calls the filter binary without any configuration, so all
- # findings will be included. But this is expected, since we've already
- # filtered out everything that should not be included.
- runner = ctx.actions.declare_file(ctx.label.name)
- runner_content = [
- "#!/bin/bash",
- "exec %s -input=%s" % (ctx.files._filter[0].short_path, findings.short_path),
- "",
- ]
- ctx.actions.write(runner, "\n".join(runner_content), is_executable = True)
-
- return [DefaultInfo(
- # The runner just executes the filter again, on the
- # newly generated filtered findings. We still need
- # the filter tool as part of our runfiles, however.
- runfiles = ctx.runfiles(files = ctx.files._filter + [findings]),
- executable = runner,
- ), OutputGroupInfo(
- # Propagate the filtered findings, for consumption by
- # build tooling. Note that the build tooling typically
- # pays attention to the mnemonic above, so this must be
- # what is expected by the tooling.
- nogo_findings = depset([findings]),
- # Expose all escape analysis findings (see above).
- nogo_escapes = depset([escapes]),
- )]
-
-nogo_test = rule(
- implementation = _nogo_test_impl,
- attrs = {
- "config": attr.label(
- mandatory = True,
- doc = "A rule of kind nogo_config.",
- ),
- "deps": attr.label_list(
- aspects = [nogo_aspect],
- doc = "Exactly one Go dependency to be analyzed.",
- ),
- "srcs": attr.label_list(
- allow_files = True,
- doc = "Relevant src files. This is ignored except to make the nogo_test directly affected by the files.",
- ),
- "_filter": attr.label(default = "//tools/nogo/filter:filter"),
- },
- test = True,
-)
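As an illustrative sketch of how the rules above fit together (labels and yaml file names are assumptions for the example, not actual targets): a nogo_config rule collects the yaml files described by tools/nogo/config.go, and its label is passed as the mandatory config attribute of nogo_test:

    load("//tools/nogo:defs.bzl", "nogo_config", "nogo_test")

    nogo_config(
        name = "config",
        # Hypothetical yaml files following the schema in tools/nogo/config.go.
        srcs = ["global.yaml", "overrides.yaml"],
    )

    nogo_test(
        name = "nogo_findings_test",
        config = ":config",
        # Exactly one Go dependency to analyze, per the rule definition above.
        deps = [":some_go_library"],
    )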
diff --git a/tools/nogo/filter/BUILD b/tools/nogo/filter/BUILD
deleted file mode 100644
index e56a783e2..000000000
--- a/tools/nogo/filter/BUILD
+++ /dev/null
@@ -1,14 +0,0 @@
-load("//tools:defs.bzl", "go_binary")
-
-package(licenses = ["notice"])
-
-go_binary(
- name = "filter",
- srcs = ["main.go"],
- nogo = False,
- visibility = ["//visibility:public"],
- deps = [
- "//tools/nogo",
- "@in_gopkg_yaml_v2//:go_default_library",
- ],
-)
diff --git a/tools/nogo/filter/main.go b/tools/nogo/filter/main.go
deleted file mode 100644
index 9cf41b3b0..000000000
--- a/tools/nogo/filter/main.go
+++ /dev/null
@@ -1,131 +0,0 @@
-// Copyright 2019 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Binary filter is the nogo findings filter entrypoint.
-package main
-
-import (
- "flag"
- "fmt"
- "io/ioutil"
- "log"
- "os"
- "strings"
-
- yaml "gopkg.in/yaml.v2"
- "gvisor.dev/gvisor/tools/nogo"
-)
-
-type stringList []string
-
-func (s *stringList) String() string {
- return strings.Join(*s, ",")
-}
-
-func (s *stringList) Set(value string) error {
- *s = append(*s, value)
- return nil
-}
-
-var (
- inputFiles stringList
- configFiles stringList
- outputFile string
- showConfig bool
-)
-
-func init() {
- flag.Var(&inputFiles, "input", "findings input files")
- flag.StringVar(&outputFile, "output", "", "findings output file")
- flag.Var(&configFiles, "config", "findings configuration files")
- flag.BoolVar(&showConfig, "show-config", false, "dump configuration only")
-}
-
-func main() {
- flag.Parse()
-
- // Load all available findings.
- var findings []nogo.Finding
- for _, filename := range inputFiles {
- inputFindings, err := nogo.ExtractFindingsFromFile(filename)
- if err != nil {
- log.Fatalf("unable to extract findings from %s: %v", filename, err)
- }
- findings = append(findings, inputFindings...)
- }
-
- // Open and merge all configurations.
- config := &nogo.Config{
- Global: make(nogo.AnalyzerConfig),
- Analyzers: make(map[nogo.AnalyzerName]nogo.AnalyzerConfig),
- }
- for _, filename := range configFiles {
- content, err := ioutil.ReadFile(filename)
- if err != nil {
- log.Fatalf("unable to read %s: %v", filename, err)
- }
- var newConfig nogo.Config // For current file.
- if err := yaml.Unmarshal(content, &newConfig); err != nil {
- log.Fatalf("unable to decode %s: %v", filename, err)
- }
- config.Merge(&newConfig)
- if showConfig {
- bytes, err := yaml.Marshal(&newConfig)
- if err != nil {
- log.Fatalf("error marshalling config: %v", err)
- }
- mergedBytes, err := yaml.Marshal(config)
- if err != nil {
- log.Fatalf("error marshalling config: %v", err)
- }
- fmt.Fprintf(os.Stdout, "Loaded configuration from %s:\n%s\n", filename, string(bytes))
- fmt.Fprintf(os.Stdout, "Merged configuration:\n%s\n", string(mergedBytes))
- }
- }
- if err := config.Compile(); err != nil {
- log.Fatalf("error compiling config: %v", err)
- }
- if showConfig {
- os.Exit(0)
- }
-
- // Filter the findings (and aggregate by group).
- filteredFindings := make([]nogo.Finding, 0, len(findings))
- for _, finding := range findings {
- if ok := config.ShouldReport(finding); ok {
- filteredFindings = append(filteredFindings, finding)
- }
- }
-
- // Write the output (if required).
- //
- // If the outputFile is specified, then we exit here. Otherwise,
- // we continue to write to stdout and treat the run like a test.
- if outputFile != "" {
- if err := nogo.WriteFindingsToFile(filteredFindings, outputFile); err != nil {
- log.Fatalf("unable to write findings: %v", err)
- }
- return
- }
-
- // Treat the run as a test.
- if len(filteredFindings) == 0 {
- fmt.Fprintf(os.Stdout, "PASS\n")
- os.Exit(0)
- }
- for _, finding := range filteredFindings {
- fmt.Fprintf(os.Stdout, "%s\n", finding.String())
- }
- os.Exit(1)
-}
diff --git a/tools/nogo/findings.go b/tools/nogo/findings.go
deleted file mode 100644
index 5bd850269..000000000
--- a/tools/nogo/findings.go
+++ /dev/null
@@ -1,63 +0,0 @@
-// Copyright 2019 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package nogo
-
-import (
- "encoding/json"
- "fmt"
- "go/token"
- "io/ioutil"
-)
-
-// Finding is a single finding.
-type Finding struct {
- Category AnalyzerName
- Position token.Position
- Message string
-}
-
-// String implements fmt.Stringer.String.
-func (f *Finding) String() string {
- return fmt.Sprintf("%s: %s: %s", f.Category, f.Position.String(), f.Message)
-}
-
-// WriteFindingsToFile writes findings to a file.
-func WriteFindingsToFile(findings []Finding, filename string) error {
- content, err := WriteFindingsToBytes(findings)
- if err != nil {
- return err
- }
- return ioutil.WriteFile(filename, content, 0644)
-}
-
-// WriteFindingsToBytes serializes findings as bytes.
-func WriteFindingsToBytes(findings []Finding) ([]byte, error) {
- return json.Marshal(findings)
-}
-
-// ExtractFindingsFromFile loads findings from a file.
-func ExtractFindingsFromFile(filename string) ([]Finding, error) {
- content, err := ioutil.ReadFile(filename)
- if err != nil {
- return nil, err
- }
- return ExtractFindingsFromBytes(content)
-}
-
-// ExtractFindingsFromBytes loads findings from bytes.
-func ExtractFindingsFromBytes(content []byte) (findings []Finding, err error) {
- err = json.Unmarshal(content, &findings)
- return findings, err
-}
diff --git a/tools/nogo/nogo.go b/tools/nogo/nogo.go
deleted file mode 100644
index 779d4d6d8..000000000
--- a/tools/nogo/nogo.go
+++ /dev/null
@@ -1,473 +0,0 @@
-// Copyright 2019 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Package nogo implements binary analysis similar to bazel's nogo,
-// or the unitchecker package. It exists in order to provide additional
-// facilities for analysis, namely plumbing through the output from
-// dumping the generated binary (to analyze actual produced code).
-package nogo
-
-import (
- "encoding/json"
- "errors"
- "fmt"
- "go/ast"
- "go/build"
- "go/parser"
- "go/token"
- "go/types"
- "io"
- "io/ioutil"
- "log"
- "os"
- "path"
- "path/filepath"
- "reflect"
- "strings"
-
- "golang.org/x/tools/go/analysis"
- "golang.org/x/tools/go/analysis/internal/facts"
- "golang.org/x/tools/go/gcexportdata"
-
- // Special case: flags live here and change overall behavior.
- "gvisor.dev/gvisor/tools/checkescape"
-)
-
-// StdlibConfig is serialized as the configuration.
-//
-// This contains everything required for stdlib analysis.
-type StdlibConfig struct {
- Srcs []string
- GOOS string
- GOARCH string
- Tags []string
-}
-
-// PackageConfig is serialized as the configuration.
-//
-// This contains everything required for single package analysis.
-type PackageConfig struct {
- ImportPath string
- GoFiles []string
- NonGoFiles []string
- Tags []string
- GOOS string
- GOARCH string
- ImportMap map[string]string
- FactMap map[string]string
- StdlibFacts string
-}
-
-// loader is a fact-loader function.
-type loader func(string) ([]byte, error)
-
-// saver is a fact-saver function.
-type saver func([]byte) error
-
-// factLoader returns a function that loads facts.
-//
-// This resolves all standard library facts and imported package facts up
-// front. The returned loader function will never return an error, only
-// empty facts.
-//
-// This is done because all stdlib data is stored together, and we don't want
-// to load this data many times over.
-func (c *PackageConfig) factLoader() (loader, error) {
- allFacts := make(map[string][]byte)
- if c.StdlibFacts != "" {
- data, err := ioutil.ReadFile(c.StdlibFacts)
- if err != nil {
- return nil, fmt.Errorf("error loading stdlib facts from %q: %w", c.StdlibFacts, err)
- }
- var stdlibFacts map[string][]byte
- if err := json.Unmarshal(data, &stdlibFacts); err != nil {
- return nil, fmt.Errorf("error loading stdlib facts: %w", err)
- }
- for pkg, data := range stdlibFacts {
- allFacts[pkg] = data
- }
- }
- for pkg, file := range c.FactMap {
- data, err := ioutil.ReadFile(file)
- if err != nil {
- return nil, fmt.Errorf("error loading %q: %w", file, err)
- }
- allFacts[pkg] = data
- }
- return func(path string) ([]byte, error) {
- return allFacts[path], nil
- }, nil
-}
-
-// shouldInclude indicates whether the file should be included.
-//
-// NOTE: This does only basic parsing of tags.
-func (c *PackageConfig) shouldInclude(path string) (bool, error) {
- ctx := build.Default
- ctx.GOOS = c.GOOS
- ctx.GOARCH = c.GOARCH
- ctx.BuildTags = c.Tags
- return ctx.MatchFile(filepath.Dir(path), filepath.Base(path))
-}
-
-// importer is an implementation of go/types.Importer.
-//
-// This wraps a configuration, which provides the map of package names to
-// files, and the facts. Note that this importer implementation will always
-// pass when a given package is not available.
-type importer struct {
- *PackageConfig
- fset *token.FileSet
- cache map[string]*types.Package
- lastErr error
- callback func(string) error
-}
-
-// Import implements types.Importer.Import.
-func (i *importer) Import(path string) (*types.Package, error) {
- if path == "unsafe" {
- // Special case: go/types has pre-defined type information for
- // unsafe. We ensure that this package is correct, in case any
- // analyzers are specifically looking for this.
- return types.Unsafe, nil
- }
-
- // Call the internal callback. This is used to resolve loading order
- // for the standard library. See checkStdlib.
- if i.callback != nil {
- if err := i.callback(path); err != nil {
- i.lastErr = err
- return nil, err
- }
- }
-
- // Actually load the data.
- realPath, ok := i.ImportMap[path]
- var (
- rc io.ReadCloser
- err error
- )
- if !ok {
- // Not found in the import path. Attempt to find the package
- // via the standard library.
- rc, err = findStdPkg(i.GOOS, i.GOARCH, path)
- } else {
- // Open the file.
- rc, err = os.Open(realPath)
- }
- if err != nil {
- i.lastErr = err
- return nil, err
- }
- defer rc.Close()
-
- // Load all exported data.
- r, err := gcexportdata.NewReader(rc)
- if err != nil {
- return nil, err
- }
-
- return gcexportdata.Read(r, i.fset, i.cache, path)
-}
-
-// ErrSkip indicates the package should be skipped.
-var ErrSkip = errors.New("skipped")
-
-// CheckStdlib checks the standard library.
-//
-// This constructs a synthetic package configuration for each library in the
-// standard library sources, and calls CheckPackage repeatedly.
-//
-// Note that not all parts of the source are expected to build. We skip obvious
-// test files, and cmd files, which should not be dependencies.
-func CheckStdlib(config *StdlibConfig, analyzers []*analysis.Analyzer) (allFindings []Finding, facts []byte, err error) {
- if len(config.Srcs) == 0 {
- return nil, nil, nil
- }
-
- // Ensure all paths are normalized.
- for i := 0; i < len(config.Srcs); i++ {
- config.Srcs[i] = path.Clean(config.Srcs[i])
- }
-
- // Calculate the root source directory. This is always a directory
- // named 'src', of which we simply take the first we find. This is a
- // bit fragile, but works for all currently known Go source
- // configurations.
- //
- // Note that there may be extra files outside of the root source
- // directory; we simply ignore those.
- rootSrcPrefix := ""
- for _, file := range config.Srcs {
- const src = "/src/"
- i := strings.Index(file, src)
- if i == -1 {
- // Superfluous file.
- continue
- }
-
- // Index of first character after /src/.
- i += len(src)
- rootSrcPrefix = file[:i]
- break
- }
-
- // Aggregate all files by directory.
- packages := make(map[string]*PackageConfig)
- for _, file := range config.Srcs {
- if !strings.HasPrefix(file, rootSrcPrefix) {
-			// Superfluous file.
- continue
- }
-
- d := path.Dir(file)
- if len(rootSrcPrefix) >= len(d) {
- continue // Not a file.
- }
- pkg := d[len(rootSrcPrefix):]
- // Skip cmd packages and obvious test files: see above.
- if strings.HasPrefix(pkg, "cmd/") || strings.HasSuffix(file, "_test.go") {
- continue
- }
- c, ok := packages[pkg]
- if !ok {
- c = &PackageConfig{
- ImportPath: pkg,
- GOOS: config.GOOS,
- GOARCH: config.GOARCH,
- Tags: config.Tags,
- }
- packages[pkg] = c
- }
- // Add the files appropriately. Note that they will be further
- // filtered by architecture and build tags below, so this need
- // not be done immediately.
- if strings.HasSuffix(file, ".go") {
- c.GoFiles = append(c.GoFiles, file)
- } else {
- c.NonGoFiles = append(c.NonGoFiles, file)
- }
- }
-
- // Closure to check a single package.
- stdlibFacts := make(map[string][]byte)
- stdlibErrs := make(map[string]error)
- var checkOne func(pkg string) error // Recursive.
- checkOne = func(pkg string) error {
- // Is this already done?
- if _, ok := stdlibFacts[pkg]; ok {
- return nil
- }
- // Did this fail previously?
- if _, ok := stdlibErrs[pkg]; ok {
- return nil
- }
-
- // Lookup the configuration.
- config, ok := packages[pkg]
- if !ok {
- return nil // Not known.
- }
-
-		// Find the binary package, and provide it to objdump.
- rc, err := findStdPkg(config.GOOS, config.GOARCH, pkg)
- if err != nil {
- // If there's no binary for this package, it is likely
- // not built with the distribution. That's fine, we can
- // just skip analysis.
- stdlibErrs[pkg] = err
- return nil
- }
-
- // Provide the input.
- oldReader := checkescape.Reader
- checkescape.Reader = rc // For analysis.
- defer func() {
- rc.Close()
- checkescape.Reader = oldReader // Restore.
- }()
-
- // Run the analysis.
- findings, factData, err := CheckPackage(config, analyzers, checkOne)
- if err != nil {
- // If we can't analyze a package from the standard library,
- // then we skip it. It will simply not have any findings.
- stdlibErrs[pkg] = err
- return nil
- }
- stdlibFacts[pkg] = factData
- allFindings = append(allFindings, findings...)
- return nil
- }
-
- // Check all packages.
- //
- // Note that this may call checkOne recursively, so it's not guaranteed
- // to evaluate in the order provided here. We do ensure however, that
- // all packages are evaluated.
- for pkg := range packages {
- if err := checkOne(pkg); err != nil {
- return nil, nil, err
- }
- }
-
- // Sanity check.
- if len(stdlibFacts) == 0 {
- return nil, nil, fmt.Errorf("no stdlib facts found: misconfiguration?")
- }
-
-	// Write out all facts.
- factData, err := json.Marshal(stdlibFacts)
- if err != nil {
- return nil, nil, fmt.Errorf("error saving stdlib facts: %w", err)
- }
-
- // Write out all errors.
- for pkg, err := range stdlibErrs {
- log.Printf("WARNING: error while processing %v: %v", pkg, err)
- }
-
- // Return all findings.
- return allFindings, factData, nil
-}
-
-// CheckPackage runs all given analyzers.
-//
-// The implementation was adapted from [1], which was in turn adapted from [2].
-// This returns a list of matching analysis issues, or an error if the analysis
-// could not be completed.
-//
-// [1] bazelbuild/rules_go/tools/builders/nogo_main.go
-// [2] golang.org/x/tools/go/checker/internal/checker
-func CheckPackage(config *PackageConfig, analyzers []*analysis.Analyzer, importCallback func(string) error) (findings []Finding, factData []byte, err error) {
- imp := &importer{
- PackageConfig: config,
- fset: token.NewFileSet(),
- cache: make(map[string]*types.Package),
- callback: importCallback,
- }
-
- // Load all source files.
- var syntax []*ast.File
- for _, file := range config.GoFiles {
- include, err := config.shouldInclude(file)
- if err != nil {
- return nil, nil, fmt.Errorf("error evaluating file %q: %v", file, err)
- }
- if !include {
- continue
- }
- s, err := parser.ParseFile(imp.fset, file, nil, parser.ParseComments)
- if err != nil {
- return nil, nil, fmt.Errorf("error parsing file %q: %v", file, err)
- }
- syntax = append(syntax, s)
- }
-
- // Check type information.
- typesSizes := types.SizesFor("gc", config.GOARCH)
- typeConfig := types.Config{Importer: imp}
- typesInfo := &types.Info{
- Types: make(map[ast.Expr]types.TypeAndValue),
- Uses: make(map[*ast.Ident]types.Object),
- Defs: make(map[*ast.Ident]types.Object),
- Implicits: make(map[ast.Node]types.Object),
- Scopes: make(map[ast.Node]*types.Scope),
- Selections: make(map[*ast.SelectorExpr]*types.Selection),
- }
- types, err := typeConfig.Check(config.ImportPath, imp.fset, syntax, typesInfo)
- if err != nil && imp.lastErr != ErrSkip {
- return nil, nil, fmt.Errorf("error checking types: %w", err)
- }
-
- // Load all package facts.
- loader, err := config.factLoader()
- if err != nil {
- return nil, nil, fmt.Errorf("error loading facts: %w", err)
- }
- facts, err := facts.Decode(types, loader)
- if err != nil {
- return nil, nil, fmt.Errorf("error decoding facts: %w", err)
- }
-
- // Register fact types and establish dependencies between analyzers.
- // The visit closure will execute recursively, and populate results
-	// with all required analysis results.
- results := make(map[*analysis.Analyzer]interface{})
- var visit func(*analysis.Analyzer) error // For recursion.
- visit = func(a *analysis.Analyzer) error {
- if _, ok := results[a]; ok {
- return nil
- }
-
- // Run recursively for all dependencies.
- for _, req := range a.Requires {
- if err := visit(req); err != nil {
- return err
- }
- }
-
- // Run the analysis.
- factFilter := make(map[reflect.Type]bool)
- for _, f := range a.FactTypes {
- factFilter[reflect.TypeOf(f)] = true
- }
- p := &analysis.Pass{
- Analyzer: a,
- Fset: imp.fset,
- Files: syntax,
- Pkg: types,
- TypesInfo: typesInfo,
- ResultOf: results, // All results.
- Report: func(d analysis.Diagnostic) {
- findings = append(findings, Finding{
- Category: AnalyzerName(a.Name),
- Position: imp.fset.Position(d.Pos),
- Message: d.Message,
- })
- },
- ImportPackageFact: facts.ImportPackageFact,
- ExportPackageFact: facts.ExportPackageFact,
- ImportObjectFact: facts.ImportObjectFact,
- ExportObjectFact: facts.ExportObjectFact,
- AllPackageFacts: func() []analysis.PackageFact { return facts.AllPackageFacts(factFilter) },
- AllObjectFacts: func() []analysis.ObjectFact { return facts.AllObjectFacts(factFilter) },
- TypesSizes: typesSizes,
- }
- result, err := a.Run(p)
- if err != nil {
- return fmt.Errorf("error running analysis %s: %v", a, err)
- }
-
- // Sanity check & save the result.
- if got, want := reflect.TypeOf(result), a.ResultType; got != want {
- return fmt.Errorf("error: analyzer %s returned a result of type %v, but declared ResultType %v", a, got, want)
- }
- results[a] = result
- return nil // Success.
- }
-
- // Visit all analyzers recursively.
- for _, a := range analyzers {
- if imp.lastErr == ErrSkip {
- continue // No local analysis.
- }
- if err := visit(a); err != nil {
- return nil, nil, err // Already has context.
- }
- }
-
- // Return all findings.
- return findings, facts.Encode(), nil
-}
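The deleted CheckPackage entry point above can be driven directly from a small Go program. The following is a rough sketch under stated assumptions: the nogo import path, the example package name, and the source path are made up for illustration, and a real driver would also supply ImportMap, FactMap, and StdlibFacts plus the standard-library export data that the build rules normally plumb in.

```
package main

import (
	"fmt"
	"log"

	"golang.org/x/tools/go/analysis"
	"golang.org/x/tools/go/analysis/passes/printf"
	"gvisor.dev/gvisor/tools/nogo" // assumed import path for the deleted package
)

func main() {
	// Describe a single, hypothetical package to analyze.
	config := &nogo.PackageConfig{
		ImportPath: "example.com/mypkg",
		GoFiles:    []string{"/workspace/src/mypkg/mypkg.go"},
		GOOS:       "linux",
		GOARCH:     "amd64",
	}

	// Run one upstream analyzer; facts for imported packages would normally
	// be provided through FactMap and StdlibFacts.
	findings, _, err := nogo.CheckPackage(config, []*analysis.Analyzer{printf.Analyzer}, nil)
	if err != nil {
		log.Fatalf("analysis failed: %v", err)
	}
	for _, f := range findings {
		fmt.Println(f.String())
	}
}
```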
diff --git a/tools/parsers/BUILD b/tools/parsers/BUILD
deleted file mode 100644
index dab954e25..000000000
--- a/tools/parsers/BUILD
+++ /dev/null
@@ -1,41 +0,0 @@
-load("//tools:defs.bzl", "go_binary", "go_library", "go_test")
-
-package(licenses = ["notice"])
-
-go_test(
- name = "parsers_test",
- size = "small",
- srcs = ["go_parser_test.go"],
- library = ":parsers",
- nogo = False,
- deps = [
- "//tools/bigquery",
- "@com_github_google_go_cmp//cmp:go_default_library",
- ],
-)
-
-go_library(
- name = "parsers",
- testonly = 1,
- srcs = [
- "go_parser.go",
- ],
- nogo = False,
- visibility = ["//:sandbox"],
- deps = [
- "//test/benchmarks/tools",
- "//tools/bigquery",
- ],
-)
-
-go_binary(
- name = "parser",
- testonly = 1,
- srcs = ["parser_main.go"],
- nogo = False,
- deps = [
- ":parsers",
- "//runsc/flag",
- "//tools/bigquery",
- ],
-)
diff --git a/tools/parsers/go_parser.go b/tools/parsers/go_parser.go
deleted file mode 100644
index df4875e6a..000000000
--- a/tools/parsers/go_parser.go
+++ /dev/null
@@ -1,150 +0,0 @@
-// Copyright 2020 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Package parsers holds parsers to parse Benchmark test output.
-//
-// Parsers parse Benchmark test output and place it in BigQuery
-// structs for sending to BigQuery databases.
-package parsers
-
-import (
- "fmt"
- "strconv"
- "strings"
-
- "gvisor.dev/gvisor/test/benchmarks/tools"
- "gvisor.dev/gvisor/tools/bigquery"
-)
-
-// ParseOutput expects golang benchmark output and returns a struct formatted
-// for BigQuery.
-func ParseOutput(output string, name string, official bool) (*bigquery.Suite, error) {
- suite := bigquery.NewSuite(name)
- lines := strings.Split(output, "\n")
- for _, line := range lines {
- bm, err := parseLine(line, official)
- if err != nil {
- return nil, fmt.Errorf("failed to parse line '%s': %v", line, err)
- }
- if bm != nil {
- suite.Benchmarks = append(suite.Benchmarks, bm)
- }
- }
- return suite, nil
-}
-
-// parseLine handles parsing a benchmark line into a bigquery.Benchmark.
-//
-// Example: "BenchmarkRuby/server_threads.1-6 1 1397875880 ns/op 140 requests_per_second.QPS"
-//
-// This function will return the following benchmark:
-// *bigquery.Benchmark{
-// Name: BenchmarkRuby
-// []*bigquery.Condition{
-// {Name: GOMAXPROCS, 6}
-// {Name: server_threads, 1}
-// }
-// []*bigquery.Metric{
-// {Name: ns/op, Unit: ns/op, Sample: 1397875880}
-// {Name: requests_per_second, Unit: QPS, Sample: 140 }
-// }
-//}
-func parseLine(line string, official bool) (*bigquery.Benchmark, error) {
- fields := strings.Fields(line)
-
- // Check if this line is a Benchmark line. Otherwise ignore the line.
- if len(fields) < 2 || !strings.HasPrefix(fields[0], "Benchmark") {
- return nil, nil
- }
-
- iters, err := strconv.Atoi(fields[1])
- if err != nil {
- return nil, fmt.Errorf("expecting number of runs, got %s: %v", fields[1], err)
- }
-
- name, params, err := parseNameParams(fields[0])
- if err != nil {
- return nil, fmt.Errorf("parse name/params: %v", err)
- }
-
- bm := bigquery.NewBenchmark(name, iters, official)
- for _, p := range params {
- bm.AddCondition(p.Name, p.Value)
- }
-
- for i := 1; i < len(fields)/2; i++ {
- value := fields[2*i]
- metric := fields[2*i+1]
- if err := makeMetric(bm, value, metric); err != nil {
- return nil, fmt.Errorf("makeMetric on metric %q value: %s: %v", metric, value, err)
- }
- }
- return bm, nil
-}
-
-// parseNameParams parses the Name, GOMAXPROCS, and Params from the test.
-// Field here should be of the format TESTNAME/PARAMS-GOMAXPROCS.
-// Parameters will be separated by a "/" with individual params being
-// "name.value".
-func parseNameParams(field string) (string, []*tools.Parameter, error) {
- var params []*tools.Parameter
- // Remove GOMAXPROCS from end.
- maxIndex := strings.LastIndex(field, "-")
- if maxIndex < 0 {
- return "", nil, fmt.Errorf("GOMAXPROCS not found: %s", field)
- }
- maxProcs := field[maxIndex+1:]
- params = append(params, &tools.Parameter{
- Name: "GOMAXPROCS",
- Value: maxProcs,
- })
-
- remainder := field[0:maxIndex]
- index := strings.Index(remainder, "/")
- if index == -1 {
- return remainder, params, nil
- }
-
- name := remainder[0:index]
- p := remainder[index+1:]
-
- ps, err := tools.NameToParameters(p)
- if err != nil {
- return "", nil, fmt.Errorf("NameToParameters %s: %v", field, err)
- }
- params = append(params, ps...)
- return name, params, nil
-}
-
-// makeMetric parses metrics and adds them to the passed Benchmark.
-func makeMetric(bm *bigquery.Benchmark, value, metric string) error {
- switch metric {
- // Ignore most output from golang benchmarks.
- case "MB/s", "B/op", "allocs/op":
- return nil
- case "ns/op":
- val, err := strconv.ParseFloat(value, 64)
- if err != nil {
- return fmt.Errorf("ParseFloat %s: %v", value, err)
- }
- bm.AddMetric(metric /*metric name*/, metric /*unit*/, val /*sample*/)
- default:
- m, err := tools.ParseCustomMetric(value, metric)
- if err != nil {
- return fmt.Errorf("ParseCustomMetric %s: %v ", metric, err)
- }
- bm.AddMetric(m.Name, m.Unit, m.Sample)
- }
- return nil
-}
diff --git a/tools/parsers/go_parser_test.go b/tools/parsers/go_parser_test.go
deleted file mode 100644
index 0aa1152a2..000000000
--- a/tools/parsers/go_parser_test.go
+++ /dev/null
@@ -1,169 +0,0 @@
-// Copyright 2020 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package parsers
-
-import (
- "testing"
-
- "github.com/google/go-cmp/cmp"
- "gvisor.dev/gvisor/tools/bigquery"
-)
-
-func TestParseLine(t *testing.T) {
- testCases := []struct {
- name string
- data string
- want *bigquery.Benchmark
- }{
- {
- name: "Iperf",
- data: "BenchmarkIperf/Upload-6 1 11094914892 ns/op 4751711232 bandwidth.bytes_per_second",
- want: &bigquery.Benchmark{
- Name: "BenchmarkIperf",
- Condition: []*bigquery.Condition{
- {
- Name: "GOMAXPROCS",
- Value: "6",
- },
- {
- Name: "Upload",
- Value: "Upload",
- },
- },
- Metric: []*bigquery.Metric{
- {
- Name: "ns/op",
- Unit: "ns/op",
- Sample: 11094914892.0,
- },
- {
- Name: "bandwidth",
- Unit: "bytes_per_second",
- Sample: 4751711232.0,
- },
- },
- },
- },
- {
- name: "Ruby",
- data: "BenchmarkRuby/server_threads.1-6 1 1397875880 ns/op 0.00710 average_latency.s 140 requests_per_second.QPS",
- want: &bigquery.Benchmark{
- Name: "BenchmarkRuby",
- Condition: []*bigquery.Condition{
- {
- Name: "GOMAXPROCS",
- Value: "6",
- },
- {
- Name: "server_threads",
- Value: "1",
- },
- },
- Metric: []*bigquery.Metric{
- {
- Name: "ns/op",
- Unit: "ns/op",
- Sample: 1397875880.0,
- },
- {
- Name: "average_latency",
- Unit: "s",
- Sample: 0.00710,
- },
- {
- Name: "requests_per_second",
- Unit: "QPS",
- Sample: 140.0,
- },
- },
- },
- },
- }
-
- for _, tc := range testCases {
- t.Run(tc.name, func(t *testing.T) {
- got, err := parseLine(tc.data, false)
- if err != nil {
- t.Fatalf("parseLine failed with: %v", err)
- }
-
- if !cmp.Equal(tc.want, got, nil) {
- for _, c := range got.Condition {
- t.Logf("Cond: %+v", c)
- }
- for _, m := range got.Metric {
- t.Logf("Metric: %+v", m)
- }
- t.Fatalf("Compare failed want: %+v got: %+v", tc.want, got)
- }
- })
-
- }
-}
-
-func TestParseOutput(t *testing.T) {
- testCases := []struct {
- name string
- data string
- numBenchmarks int
- numMetrics int
- numConditions int
- }{
- {
- name: "Startup",
- data: `
- BenchmarkStartupEmpty
- BenchmarkStartupEmpty-6 2 766377884 ns/op 1 allocs/op
- BenchmarkStartupNode
- BenchmarkStartupNode-6 1 1752158409 ns/op 1 allocs/op
- `,
- numBenchmarks: 2,
- numMetrics: 1,
- numConditions: 1,
- },
- {
- name: "Ruby",
- data: `BenchmarkRuby
-BenchmarkRuby/server_threads.1
-BenchmarkRuby/server_threads.1-6 1 1397875880 ns/op 0.00710 average_latency.s 140 requests_per_second.QPS
-BenchmarkRuby/server_threads.5
-BenchmarkRuby/server_threads.5-6 1 1416003331 ns/op 0.00950 average_latency.s 465 requests_per_second.QPS`,
- numBenchmarks: 2,
- numMetrics: 3,
- numConditions: 2,
- },
- }
-
- for _, tc := range testCases {
- t.Run(tc.name, func(t *testing.T) {
- suite, err := ParseOutput(tc.data, "", false)
- if err != nil {
- t.Fatalf("parseOutput failed: %v", err)
- } else if len(suite.Benchmarks) != tc.numBenchmarks {
- t.Fatalf("NumBenchmarks failed want: %d got: %d %+v", tc.numBenchmarks, len(suite.Benchmarks), suite.Benchmarks)
- }
-
- for _, bm := range suite.Benchmarks {
- if len(bm.Metric) != tc.numMetrics {
- t.Fatalf("NumMetrics failed want: %d got: %d %+v", tc.numMetrics, len(bm.Metric), bm.Metric)
- }
-
- if len(bm.Condition) != tc.numConditions {
- t.Fatalf("NumConditions failed want: %d got: %d %+v", tc.numConditions, len(bm.Condition), bm.Condition)
- }
- }
- })
- }
-}
diff --git a/tools/parsers/parser_main.go b/tools/parsers/parser_main.go
deleted file mode 100644
index 6c6182464..000000000
--- a/tools/parsers/parser_main.go
+++ /dev/null
@@ -1,129 +0,0 @@
-// Copyright 2020 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Binary parser parses Benchmark data from golang benchmarks,
-// puts it into a Schema for BigQuery, and sends it to BigQuery.
-// parser will also initialize a table with the Benchmarks BigQuery schema.
-package main
-
-import (
- "context"
- "fmt"
- "io/ioutil"
- "os"
-
- "gvisor.dev/gvisor/runsc/flag"
- bq "gvisor.dev/gvisor/tools/bigquery"
- "gvisor.dev/gvisor/tools/parsers"
-)
-
-const (
- initString = "init"
- initDescription = "initializes a new table with benchmarks schema"
- parseString = "parse"
- parseDescription = "parses given benchmarks file and sends it to BigQuery table."
-)
-
-var (
- // The init command will create a new dataset/table in the given project and initialize
- // the table with the schema in //tools/bigquery/bigquery.go. If the table/dataset exists
- // or has been initialized, init has no effect and successfully returns.
- initCmd = flag.NewFlagSet(initString, flag.ContinueOnError)
- initProject = initCmd.String("project", "", "GCP project to send benchmarks.")
- initDataset = initCmd.String("dataset", "", "dataset to send benchmarks data.")
- initTable = initCmd.String("table", "", "table to send benchmarks data.")
-
- // The parse command parses benchmark data in `file` and sends it to the
- // requested table.
- parseCmd = flag.NewFlagSet(parseString, flag.ContinueOnError)
- file = parseCmd.String("file", "", "file to parse for benchmarks")
- name = parseCmd.String("suite_name", "", "name of the benchmark suite")
- clNumber = parseCmd.String("cl", "", "changelist number of this run")
- gitCommit = parseCmd.String("git_commit", "", "git commit sha for this run")
- parseProject = parseCmd.String("project", "", "GCP project to send benchmarks.")
- parseDataset = parseCmd.String("dataset", "", "dataset to send benchmarks data.")
- parseTable = parseCmd.String("table", "", "table to send benchmarks data.")
- official = parseCmd.Bool("official", false, "mark input data as official.")
-)
-
-// initBenchmarks initializes a dataset/table in a BigQuery project.
-func initBenchmarks(ctx context.Context) error {
- return bq.InitBigQuery(ctx, *initProject, *initDataset, *initTable, nil)
-}
-
-// parseBenchmarks parses the given file into the BigQuery schema,
-// adds some custom data for the commit, and sends the data to BigQuery.
-func parseBenchmarks(ctx context.Context) error {
- data, err := ioutil.ReadFile(*file)
- if err != nil {
- return fmt.Errorf("failed to read file: %v", err)
- }
- suite, err := parsers.ParseOutput(string(data), *name, *official)
- if err != nil {
-		return fmt.Errorf("failed to parse data: %v", err)
- }
- extraConditions := []*bq.Condition{
- {
- Name: "change_list",
- Value: *clNumber,
- },
- {
- Name: "commit",
- Value: *gitCommit,
- },
- }
-
- suite.Conditions = append(suite.Conditions, extraConditions...)
- return bq.SendBenchmarks(ctx, suite, *parseProject, *parseDataset, *parseTable, nil)
-}
-
-func main() {
- ctx := context.Background()
- switch {
- // the "init" command
- case len(os.Args) >= 2 && os.Args[1] == initString:
- if err := initCmd.Parse(os.Args[2:]); err != nil {
-			fmt.Fprintf(os.Stderr, "failed to parse flags: %v", err)
- os.Exit(1)
- }
- if err := initBenchmarks(ctx); err != nil {
- failure := "failed to initialize project: %s dataset: %s table: %s: %v"
- fmt.Fprintf(os.Stderr, failure, *parseProject, *parseDataset, *parseTable, err)
- os.Exit(1)
- }
- // the "parse" command.
- case len(os.Args) >= 2 && os.Args[1] == parseString:
- if err := parseCmd.Parse(os.Args[2:]); err != nil {
-			fmt.Fprintf(os.Stderr, "failed to parse flags: %v", err)
- os.Exit(1)
- }
- if err := parseBenchmarks(ctx); err != nil {
-			fmt.Fprintf(os.Stderr, "failed to parse benchmarks: %v", err)
- os.Exit(1)
- }
- default:
- printUsage()
- }
-}
-
-// printUsage prints the top level usage string.
-func printUsage() {
- usage := `Usage: parser <command> <flags> ...
-
-Available commands:
- %s %s
- %s %s
-`
- fmt.Fprintf(os.Stderr, usage, initCmd.Name(), initDescription, parseCmd.Name(), parseDescription)
-}
diff --git a/tools/rules_go.patch b/tools/rules_go.patch
deleted file mode 100644
index 5e1e87084..000000000
--- a/tools/rules_go.patch
+++ /dev/null
@@ -1,14 +0,0 @@
-diff --git a/go/private/rules/test.bzl b/go/private/rules/test.bzl
-index 17516ad7..76b6c68c 100644
---- a/go/private/rules/test.bzl
-+++ b/go/private/rules/test.bzl
-@@ -121,9 +121,6 @@ def _go_test_impl(ctx):
- )
-
- test_gc_linkopts = gc_linkopts(ctx)
-- if not go.mode.debug:
-- # Disable symbol table and DWARF generation for test binaries.
-- test_gc_linkopts.extend(["-s", "-w"])
-
- # Now compile the test binary itself
- test_library = GoLibrary(
diff --git a/tools/tag_release.sh b/tools/tag_release.sh
deleted file mode 100755
index 50378065e..000000000
--- a/tools/tag_release.sh
+++ /dev/null
@@ -1,84 +0,0 @@
-#!/bin/bash
-
-# Copyright 2019 The gVisor Authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# This script will optionally map a PiperOrigin-RevId to a given commit,
-# validate a provided release name, create a tag and push it. It must be
-# run manually when a release is created.
-
-set -xeuo pipefail
-
-# Check arguments.
-if [[ "$#" -ne 3 ]]; then
- echo "usage: $0 <commit|revid> <release.rc> <message-file>"
- exit 1
-fi
-
-declare -r target_commit="$1"
-declare -r release="$2"
-declare -r message_file="$3"
-
-if [[ -z "${target_commit}" ]]; then
- echo "error: <commit|revid> is empty."
-fi
-if [[ -z "${release}" ]]; then
- echo "error: <release.rc> is empty."
-fi
-if ! [[ -r "${message_file}" ]]; then
- echo "error: message file '${message_file}' is not readable."
- exit 1
-fi
-
-closest_commit() {
- while read line; do
- if [[ "$line" =~ ^"commit " ]]; then
- current_commit="${line#commit }"
- continue
- elif [[ "$line" =~ "PiperOrigin-RevId: " ]]; then
- revid="${line#PiperOrigin-RevId: }"
- [[ "${revid}" -le "$1" ]] && break
- fi
- done
- echo "${current_commit}"
-}
-
-# Is the passed identifier a sha commit?
-if ! git show "${target_commit}" &> /dev/null; then
- # Extract the commit given a piper ID.
- commit="$(set +o pipefail; \
- git log --first-parent | closest_commit "${target_commit}")"
- declare -r commit
-else
- declare -r commit="${target_commit}"
-fi
-if ! git show "${commit}" &> /dev/null; then
- echo "unknown commit: ${target_commit}"
- exit 1
-fi
-
-# Is the release name sane? Must be a date with patch/rc.
-if ! [[ "${release}" =~ ^20[0-9]{6}\.[0-9]+$ ]]; then
- declare -r expected="$(date +%Y%m%d.0)" # Use today's date.
- echo "unexpected release format: ${release}"
- echo " ... expected like ${expected}"
- exit 1
-fi
-
-# Tag the given commit (annotated, to record the committer). Note that the tag
-# here is applied as a force, in case the tag already exists and is the same.
-# The push will fail in this case (because it is not forced).
-declare -r tag="release-${release}"
-git tag -f -F "${message_file}" -a "${tag}" "${commit}" && \
- git push origin tag "${tag}"
diff --git a/tools/tags/BUILD b/tools/tags/BUILD
deleted file mode 100644
index 1c02e2c89..000000000
--- a/tools/tags/BUILD
+++ /dev/null
@@ -1,11 +0,0 @@
-load("//tools:defs.bzl", "go_library")
-
-package(licenses = ["notice"])
-
-go_library(
- name = "tags",
- srcs = ["tags.go"],
- marshal = False,
- stateify = False,
- visibility = ["//tools:__subpackages__"],
-)
diff --git a/tools/tags/tags.go b/tools/tags/tags.go
deleted file mode 100644
index f35904e0a..000000000
--- a/tools/tags/tags.go
+++ /dev/null
@@ -1,89 +0,0 @@
-// Copyright 2020 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Package tags is a utility for parsing build tags.
-package tags
-
-import (
- "fmt"
- "io/ioutil"
- "strings"
-)
-
-// OrSet is a set of tags on a single line.
-//
-// Note that tags may include ",", and we don't distinguish this case in the
-// logic below. Ideally, these constraints could be split into separate top-level
-// build tags in order to resolve any issues.
-type OrSet []string
-
-// Line returns the line for this or.
-func (or OrSet) Line() string {
- return fmt.Sprintf("// +build %s", strings.Join([]string(or), " "))
-}
-
-// AndSet is the set of all OrSets.
-type AndSet []OrSet
-
-// Lines returns the lines to be printed.
-func (and AndSet) Lines() (ls []string) {
- for _, or := range and {
- ls = append(ls, or.Line())
- }
- return
-}
-
-// Join joins this AndSet with another.
-func (and AndSet) Join(other AndSet) AndSet {
- return append(and, other...)
-}
-
-// Tags returns the unique set of +build tags.
-//
-// Derived from the runtime's canBuild.
-func Tags(file string) (tags AndSet) {
- data, err := ioutil.ReadFile(file)
- if err != nil {
- return nil
- }
- // Check file contents for // +build lines.
- for _, p := range strings.Split(string(data), "\n") {
- p = strings.TrimSpace(p)
- if p == "" {
- continue
- }
- if !strings.HasPrefix(p, "//") {
- break
- }
- if !strings.Contains(p, "+build") {
- continue
- }
- fields := strings.Fields(p[2:])
- if len(fields) < 1 || fields[0] != "+build" {
- continue
- }
- tags = append(tags, OrSet(fields[1:]))
- }
- return tags
-}
-
-// Aggregate aggregates all tags from a set of files.
-//
-// Note that these may be in conflict, in which case the build will fail.
-func Aggregate(files []string) (tags AndSet) {
- for _, file := range files {
- tags = tags.Join(Tags(file))
- }
- return tags
-}
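The removed tags package exposed a tiny API for collecting // +build constraints from source files. A minimal usage sketch, assuming the gvisor.dev/gvisor/tools/tags import path and hypothetical file names:

```
package main

import (
	"fmt"

	"gvisor.dev/gvisor/tools/tags" // assumed import path for the deleted package
)

func main() {
	// Aggregate build constraints across a set of hypothetical source files.
	all := tags.Aggregate([]string{"impl_linux.go", "impl_amd64.go"})
	for _, line := range all.Lines() {
		fmt.Println(line) // e.g. "// +build linux"
	}
}
```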
diff --git a/tools/vm/BUILD b/tools/vm/BUILD
deleted file mode 100644
index d95ca6c63..000000000
--- a/tools/vm/BUILD
+++ /dev/null
@@ -1,63 +0,0 @@
-load("//tools:defs.bzl", "bzl_library", "cc_binary", "gtest")
-load("//tools/vm:defs.bzl", "vm_image", "vm_test")
-
-package(
- default_visibility = ["//:sandbox"],
- licenses = ["notice"],
-)
-
-sh_binary(
- name = "zone",
- srcs = ["zone.sh"],
-)
-
-sh_binary(
- name = "builder",
- srcs = ["build.sh"],
-)
-
-sh_binary(
- name = "executer",
- srcs = ["execute.sh"],
-)
-
-cc_binary(
- name = "test",
- testonly = 1,
- srcs = ["test.cc"],
- linkstatic = 1,
- deps = [
- gtest,
- "//test/util:test_main",
- ],
-)
-
-vm_image(
- name = "ubuntu1604",
- family = "ubuntu-1604-lts",
- project = "ubuntu-os-cloud",
- scripts = [
- "//tools/vm/ubuntu1604",
- ],
-)
-
-vm_image(
- name = "ubuntu1804",
- family = "ubuntu-1804-lts",
- project = "ubuntu-os-cloud",
- scripts = [
- "//tools/vm/ubuntu1804",
- ],
-)
-
-vm_test(
- name = "vm_test",
- shard_count = 2,
- targets = [":test"],
-)
-
-bzl_library(
- name = "defs_bzl",
- srcs = ["defs.bzl"],
- visibility = ["//visibility:private"],
-)
diff --git a/tools/vm/README.md b/tools/vm/README.md
deleted file mode 100644
index 1e9859e66..000000000
--- a/tools/vm/README.md
+++ /dev/null
@@ -1,48 +0,0 @@
-# VM Images & Tests
-
-All commands in this directory require the `gcloud` project to be set.
-
-For example: `gcloud config set project gvisor-kokoro-testing`.
-
-Images can be generated by using the `vm_image` rule. This rule will generate a
-binary target that builds an image in an idempotent way, and can be referenced
-from other rules.
-
-For example:
-
-```
-vm_image(
- name = "ubuntu",
-    project = "ubuntu-os-cloud",
-    family = "ubuntu-1604-lts",
- scripts = [
- "script.sh",
- "other.sh",
- ],
-)
-```
-
-These images can be built manually by executing the target. The output on
-`stdout` will be the image id (in the current project).
-
-For example:
-
-```
-$ bazel build :ubuntu
-```
-
-Images are always named per the hash of all the hermetic input scripts. This
-allows images to be memoized quickly and easily.
-
-The `vm_test` rule can be used to execute a command remotely. This is still
-under development however, and will likely change over time.
-
-For example:
-
-```
-vm_test(
- name = "mycommand",
- image = ":ubuntu",
- targets = [":test"],
-)
-```
diff --git a/tools/vm/build.sh b/tools/vm/build.sh
deleted file mode 100755
index 752b2b77b..000000000
--- a/tools/vm/build.sh
+++ /dev/null
@@ -1,117 +0,0 @@
-#!/bin/bash
-
-# Copyright 2019 The gVisor Authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# This script is responsible for building a new GCP image that: 1) has nested
-# virtualization enabled, and 2) has been completely set up with the
-# image_setup.sh script. This script should be idempotent, as we memoize the
-# setup script with a hash and check for that name.
-
-set -eou pipefail
-
-# Parameters.
-declare -r USERNAME=${USERNAME:-test}
-declare -r IMAGE_PROJECT=${IMAGE_PROJECT:-ubuntu-os-cloud}
-declare -r IMAGE_FAMILY=${IMAGE_FAMILY:-ubuntu-1604-lts}
-declare -r ZONE=${ZONE:-us-central1-f}
-
-# Random names.
-declare -r DISK_NAME=$(mktemp -u disk-XXXXXX | tr A-Z a-z)
-declare -r SNAPSHOT_NAME=$(mktemp -u snapshot-XXXXXX | tr A-Z a-z)
-declare -r INSTANCE_NAME=$(mktemp -u build-XXXXXX | tr A-Z a-z)
-
-# Hash inputs in order to memoize the produced image.
-declare -r SETUP_HASH=$( (echo ${USERNAME} ${IMAGE_PROJECT} ${IMAGE_FAMILY} && cat "$@") | sha256sum - | cut -d' ' -f1 | cut -c 1-16)
-declare -r IMAGE_NAME=${IMAGE_FAMILY:-image}-${SETUP_HASH}
-
-# Does the image already exist? Skip the build.
-declare -r existing=$(set -x; gcloud compute images list --filter="name=(${IMAGE_NAME})" --format="value(name)")
-if ! [[ -z "${existing}" ]]; then
- echo "${existing}"
- exit 0
-fi
-
-# Standard arguments (applies only on script execution).
-declare -ar SSH_ARGS=("-o" "ConnectTimeout=60" "--")
-
-# gcloud has path errors; is this a result of being a genrule?
-export PATH=${PATH:-/bin:/usr/bin:/usr/local/bin}
-
-# Start a unique instance. Note that this instance will have a unique persistent
-# disk as its boot disk, with the same name as the instance.
-(set -x; gcloud compute instances create \
- --quiet \
- --image-project "${IMAGE_PROJECT}" \
- --image-family "${IMAGE_FAMILY}" \
- --boot-disk-size "200GB" \
- --zone "${ZONE}" \
- "${INSTANCE_NAME}" >/dev/null)
-function cleanup {
- (set -x; gcloud compute instances delete --quiet --zone "${ZONE}" "${INSTANCE_NAME}")
-}
-trap cleanup EXIT
-
-# Wait for the instance to become available (up to 5 minutes).
-echo -n "Waiting for ${INSTANCE_NAME}" >&2
-declare timeout=300
-declare success=0
-declare internal=""
-declare -r start=$(date +%s)
-declare -r end=$((${start}+${timeout}))
-while [[ "$(date +%s)" -lt "${end}" ]] && [[ "${success}" -lt 3 ]]; do
- echo -n "." >&2
- if gcloud compute ssh --zone "${ZONE}" "${USERNAME}"@"${INSTANCE_NAME}" -- true 2>/dev/null; then
- success=$((${success}+1))
- elif gcloud compute ssh --internal-ip --zone "${ZONE}" "${USERNAME}"@"${INSTANCE_NAME}" -- true 2>/dev/null; then
- success=$((${success}+1))
- internal="--internal-ip"
- fi
-done
-
-if [[ "${success}" -eq "0" ]]; then
- echo "connect timed out after ${timeout} seconds." >&2
- exit 1
-else
- echo "done." >&2
-fi
-
-# Run the install scripts provided.
-for arg; do
- (set -x; gcloud compute ssh ${internal} \
- --zone "${ZONE}" \
- "${USERNAME}"@"${INSTANCE_NAME}" -- \
- "${SSH_ARGS[@]}" \
- sudo bash - <"${arg}" >/dev/null)
-done
-
-# Stop the instance; required before creating an image.
-(set -x; gcloud compute instances stop --quiet --zone "${ZONE}" "${INSTANCE_NAME}" >/dev/null)
-
-# Create a snapshot of the instance disk.
-(set -x; gcloud compute disks snapshot \
- --quiet \
- --zone "${ZONE}" \
- --snapshot-names="${SNAPSHOT_NAME}" \
- "${INSTANCE_NAME}" >/dev/null)
-
-# Create the disk image.
-(set -x; gcloud compute images create \
- --quiet \
- --source-snapshot="${SNAPSHOT_NAME}" \
- --licenses="https://www.googleapis.com/compute/v1/projects/vm-options/global/licenses/enable-vmx" \
- "${IMAGE_NAME}" >/dev/null)
-
-# Finish up.
-echo "${IMAGE_NAME}"
diff --git a/tools/vm/defs.bzl b/tools/vm/defs.bzl
deleted file mode 100644
index 9af5ad3b4..000000000
--- a/tools/vm/defs.bzl
+++ /dev/null
@@ -1,202 +0,0 @@
-"""Image configuration. See README.md."""
-
-load("//tools:defs.bzl", "default_installer")
-
-# vm_image_builder is a rule that will construct a shell script that actually
-# generates a given VM image. Note that this does not _run_ the shell script
-# (although it can be run manually). It will be run manually during generation
-# of the vm_image target itself. This level of indirection is used so that the
-# build system itself only runs the builder once when multiple targets depend
-# on it, avoiding a set of races and conflicts.
-def _vm_image_builder_impl(ctx):
- # Generate a binary that actually builds the image.
- builder = ctx.actions.declare_file(ctx.label.name)
- script_paths = []
- for script in ctx.files.scripts:
- script_paths.append(script.short_path)
- builder_content = "\n".join([
- "#!/bin/bash",
- "export ZONE=$(%s)" % ctx.files.zone[0].short_path,
- "export USERNAME=%s" % ctx.attr.username,
- "export IMAGE_PROJECT=%s" % ctx.attr.project,
- "export IMAGE_FAMILY=%s" % ctx.attr.family,
- "%s %s" % (ctx.files._builder[0].short_path, " ".join(script_paths)),
- "",
- ])
- ctx.actions.write(builder, builder_content, is_executable = True)
-
-    # Note that the scripts should only be files, and should not include any
-    # indirect transitive dependencies; otherwise the build script wouldn't work.
- return [DefaultInfo(
- executable = builder,
- runfiles = ctx.runfiles(
- files = ctx.files.scripts + ctx.files._builder + ctx.files.zone,
- ),
- )]
-
-vm_image_builder = rule(
- attrs = {
- "_builder": attr.label(
- executable = True,
- default = "//tools/vm:builder",
- cfg = "host",
- ),
- "username": attr.string(default = "$(whoami)"),
- "zone": attr.label(
- executable = True,
- default = "//tools/vm:zone",
- cfg = "host",
- ),
- "family": attr.string(mandatory = True),
- "project": attr.string(mandatory = True),
- "scripts": attr.label_list(allow_files = True),
- },
- executable = True,
- implementation = _vm_image_builder_impl,
-)
-
-# See vm_image_builder above.
-def _vm_image_impl(ctx):
- # Run the builder to generate our output.
- echo = ctx.actions.declare_file(ctx.label.name)
- resolved_inputs, argv, runfiles_manifests = ctx.resolve_command(
- command = "\n".join([
- "set -e",
- "image=$(%s)" % ctx.files.builder[0].path,
- "echo -ne \"#!/bin/bash\\necho ${image}\\n\" > %s" % echo.path,
- "chmod 0755 %s" % echo.path,
- ]),
- tools = [ctx.attr.builder],
- )
- ctx.actions.run_shell(
- tools = resolved_inputs,
- outputs = [echo],
- progress_message = "Building image...",
- execution_requirements = {"local": "true"},
- command = argv,
- input_manifests = runfiles_manifests,
- )
-
- # Return just the echo command. All of the builder runfiles have been
- # resolved and consumed in the generation of the trivial echo script.
- return [DefaultInfo(executable = echo)]
-
-_vm_image_test = rule(
- attrs = {
- "builder": attr.label(
- executable = True,
- cfg = "host",
- ),
- },
- test = True,
- implementation = _vm_image_impl,
-)
-
-def vm_image(name, **kwargs):
- vm_image_builder(
- name = name + "_builder",
- **kwargs
- )
- _vm_image_test(
- name = name,
- builder = ":" + name + "_builder",
- tags = [
- "local",
- "manual",
- ],
- )
-
-def _vm_test_impl(ctx):
- runner = ctx.actions.declare_file("%s-executer" % ctx.label.name)
-
- # Note that the remote execution case must actually generate an
- # intermediate target in order to collect all the relevant runfiles so that
- # they can be copied over for remote execution.
- runner_content = "\n".join([
- "#!/bin/bash",
- "export ZONE=$(%s)" % ctx.files.zone[0].short_path,
- "export USERNAME=%s" % ctx.attr.username,
- "export IMAGE=$(%s)" % ctx.files.image[0].short_path,
-        "export SUDO=%s" % ("true" if ctx.attr.sudo else "false"),
- "%s %s" % (
- ctx.executable.executer.short_path,
- " ".join([
- target.files_to_run.executable.short_path
- for target in ctx.attr.targets
- ]),
- ),
- "",
- ])
- ctx.actions.write(runner, runner_content, is_executable = True)
-
- # Return with all transitive files.
- runfiles = ctx.runfiles(
- transitive_files = depset(transitive = [
- depset(target.data_runfiles.files)
- for target in ctx.attr.targets
- if hasattr(target, "data_runfiles")
- ]),
- files = ctx.files.executer + ctx.files.zone + ctx.files.image +
- ctx.files.targets,
- collect_default = True,
- collect_data = True,
- )
- return [DefaultInfo(executable = runner, runfiles = runfiles)]
-
-_vm_test = rule(
- attrs = {
- "image": attr.label(
- executable = True,
- default = "//tools/vm:ubuntu1804",
- cfg = "host",
- ),
- "executer": attr.label(
- executable = True,
- default = "//tools/vm:executer",
- cfg = "host",
- ),
- "username": attr.string(default = "$(whoami)"),
- "zone": attr.label(
- executable = True,
- default = "//tools/vm:zone",
- cfg = "host",
- ),
- "sudo": attr.bool(default = True),
- "machine": attr.string(default = "n1-standard-1"),
- "targets": attr.label_list(
- mandatory = True,
- allow_empty = False,
- cfg = "target",
- ),
- },
- test = True,
- implementation = _vm_test_impl,
-)
-
-def vm_test(
- installers = None,
- **kwargs):
- """Runs the given targets as a remote test.
-
- Args:
-      installers: Scripts to run before all targets.
- **kwargs: All test arguments. Should include targets and image.
- """
- targets = kwargs.pop("targets", [])
- if installers == None:
- installers = [
- "//tools/installers:head",
- "//tools/installers:images",
- ]
- targets = installers + targets
- if default_installer():
- targets = [default_installer()] + targets
- _vm_test(
- tags = [
- "local",
- "manual",
- ],
- targets = targets,
- local = 1,
- **kwargs
- )
diff --git a/tools/vm/execute.sh b/tools/vm/execute.sh
deleted file mode 100755
index 1f1f3ce01..000000000
--- a/tools/vm/execute.sh
+++ /dev/null
@@ -1,160 +0,0 @@
-#!/bin/bash
-
-# Copyright 2019 The gVisor Authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -xeo pipefail
-
-# Required input.
-if ! [[ -v IMAGE ]]; then
- echo "no image provided: set IMAGE."
- exit 1
-fi
-
-# Parameters.
-declare -r USERNAME=${USERNAME:-test}
-declare -r KEYNAME=$(mktemp --tmpdir -u key-XXXXXX)
-declare -r SSHKEYS=$(mktemp --tmpdir -u sshkeys-XXXXXX)
-declare -r INSTANCE_NAME=$(mktemp -u test-XXXXXX | tr A-Z a-z)
-declare -r MACHINE=${MACHINE:-n1-standard-1}
-declare -r ZONE=${ZONE:-us-central1-f}
-declare -r SUDO=${SUDO:-false}
-
-# Standard arguments (applies only on script execution).
-declare -ar SSH_ARGS=("-o" "ConnectTimeout=60" "--")
-
-# This script is executed as a test rule, which will reset the value of HOME.
-# Unfortunately, it is needed to load the gconfig credentials. We will reset
-# HOME when we actually execute in the remote environment, defined below.
-export HOME=$(eval echo ~$(whoami))
-
-# Generate unique keys for this test.
-[[ -f "${KEYNAME}" ]] || ssh-keygen -t rsa -N "" -f "${KEYNAME}" -C "${USERNAME}"
-cat > "${SSHKEYS}" <<EOF
-${USERNAME}:$(cat ${KEYNAME}.pub)
-EOF
-
-# Start a unique instance. This means that we first generate a unique set of ssh
-# keys to ensure that only we have access to this instance. Note that we must
-# constrain ourselves to Haswell or greater in order to have nested
-# virtualization available.
-gcloud compute instances create \
- --min-cpu-platform "Intel Haswell" \
- --preemptible \
- --no-scopes \
- --metadata block-project-ssh-keys=TRUE \
- --metadata-from-file ssh-keys="${SSHKEYS}" \
- --machine-type "${MACHINE}" \
- --image "${IMAGE}" \
- --zone "${ZONE}" \
- "${INSTANCE_NAME}"
-function cleanup {
- gcloud compute instances delete --quiet --zone "${ZONE}" "${INSTANCE_NAME}"
-}
-trap cleanup EXIT
-
-# Wait for the instance to become available (up to 5 minutes).
-declare timeout=300
-declare success=0
-declare -r start=$(date +%s)
-declare -r end=$((${start}+${timeout}))
-while [[ "$(date +%s)" -lt "${end}" ]] && [[ "${success}" -lt 3 ]]; do
- if gcloud compute ssh --ssh-key-file="${KEYNAME}" --zone "${ZONE}" "${USERNAME}"@"${INSTANCE_NAME}" -- true 2>/dev/null; then
- success=$((${success}+1))
- fi
-done
-if [[ "${success}" -eq "0" ]]; then
- echo "connect timed out after ${timeout} seconds."
- exit 1
-fi
-
-# Copy the local directory over.
-tar czf - --dereference --exclude=.git . |
- gcloud compute ssh \
- --ssh-key-file="${KEYNAME}" \
- --zone "${ZONE}" \
- "${USERNAME}"@"${INSTANCE_NAME}" -- \
- "${SSH_ARGS[@]}" \
- tar xzf -
-
-# Execute the command remotely.
-for cmd; do
- # Setup relevant environment.
- #
- # N.B. This is not a complete test environment, but is complete enough to
- # provide rudimentary sharding and test output support.
- declare -a PREFIX=( "env" )
- if [[ -v TEST_SHARD_INDEX ]]; then
- PREFIX+=( "TEST_SHARD_INDEX=${TEST_SHARD_INDEX}" )
- fi
- if [[ -v TEST_SHARD_STATUS_FILE ]]; then
- SHARD_STATUS_FILE=$(mktemp -u test-shard-status-XXXXXX)
- PREFIX+=( "TEST_SHARD_STATUS_FILE=/tmp/${SHARD_STATUS_FILE}" )
- fi
- if [[ -v TEST_TOTAL_SHARDS ]]; then
- PREFIX+=( "TEST_TOTAL_SHARDS=${TEST_TOTAL_SHARDS}" )
- fi
- if [[ -v TEST_TMPDIR ]]; then
- REMOTE_TMPDIR=$(mktemp -u test-XXXXXX)
- PREFIX+=( "TEST_TMPDIR=/tmp/${REMOTE_TMPDIR}" )
- # Create remotely.
- gcloud compute ssh \
- --ssh-key-file="${KEYNAME}" \
- --zone "${ZONE}" \
- "${USERNAME}"@"${INSTANCE_NAME}" -- \
- "${SSH_ARGS[@]}" \
- mkdir -p "/tmp/${REMOTE_TMPDIR}"
- fi
- if [[ -v XML_OUTPUT_FILE ]]; then
- TEST_XML_OUTPUT=$(mktemp -u xml-output-XXXXXX)
- PREFIX+=( "XML_OUTPUT_FILE=/tmp/${TEST_XML_OUTPUT}" )
- fi
- if [[ "${SUDO}" == "true" ]]; then
- PREFIX+=( "sudo" "-E" )
- fi
-
- # Execute the command.
- gcloud compute ssh \
- --ssh-key-file="${KEYNAME}" \
- --zone "${ZONE}" \
- "${USERNAME}"@"${INSTANCE_NAME}" -- \
- "${SSH_ARGS[@]}" \
- "${PREFIX[@]}" "${cmd}"
-
- # Collect relevant results.
- if [[ -v TEST_SHARD_STATUS_FILE ]]; then
- gcloud compute scp \
- --ssh-key-file="${KEYNAME}" \
- --zone "${ZONE}" \
- "${USERNAME}"@"${INSTANCE_NAME}":/tmp/"${SHARD_STATUS_FILE}" \
- "${TEST_SHARD_STATUS_FILE}" 2>/dev/null || true # Allowed to fail.
- fi
- if [[ -v XML_OUTPUT_FILE ]]; then
- gcloud compute scp \
- --ssh-key-file="${KEYNAME}" \
- --zone "${ZONE}" \
- "${USERNAME}"@"${INSTANCE_NAME}":/tmp/"${TEST_XML_OUTPUT}" \
- "${XML_OUTPUT_FILE}" 2>/dev/null || true # Allowed to fail.
- fi
-
- # Clean up the temporary directory.
- if [[ -v TEST_TMPDIR ]]; then
- gcloud compute ssh \
- --ssh-key-file="${KEYNAME}" \
- --zone "${ZONE}" \
- "${USERNAME}"@"${INSTANCE_NAME}" -- \
- "${SSH_ARGS[@]}" \
- rm -rf "/tmp/${REMOTE_TMPDIR}"
- fi
-done
diff --git a/tools/vm/test.cc b/tools/vm/test.cc
deleted file mode 100644
index c0ceacda1..000000000
--- a/tools/vm/test.cc
+++ /dev/null
@@ -1,27 +0,0 @@
-// Copyright 2020 The gVisor Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-#include "gtest/gtest.h"
-
-namespace {
-
-TEST(Image, Sanity0) {
- // Do nothing (in shard 0).
-}
-
-TEST(Image, Sanity1) {
- // Do nothing (in shard 1).
-}
-
-} // namespace
diff --git a/tools/vm/ubuntu1604/10_core.sh b/tools/vm/ubuntu1604/10_core.sh
deleted file mode 100755
index 629f7cf7a..000000000
--- a/tools/vm/ubuntu1604/10_core.sh
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/bin/bash
-
-# Copyright 2019 The gVisor Authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -xeo pipefail
-
-# Install all essential build tools.
-while true; do
- if (apt-get update && apt-get install -y \
- make \
- git-core \
- build-essential \
- linux-headers-$(uname -r) \
- pkg-config); then
- break
- fi
- result=$?
- if [[ $result -ne 100 ]]; then
- exit $result
- fi
-done
-
-# Install a recent go toolchain.
-if ! [[ -d /usr/local/go ]]; then
- wget https://dl.google.com/go/go1.13.5.linux-amd64.tar.gz
- tar -xvf go1.13.5.linux-amd64.tar.gz
- mv go /usr/local
-fi
-
-# Link the Go binary from /usr/bin; replacing anything there.
-(cd /usr/bin && rm -f go && ln -fs /usr/local/go/bin/go go)
diff --git a/tools/vm/ubuntu1604/15_gcloud.sh b/tools/vm/ubuntu1604/15_gcloud.sh
deleted file mode 100755
index bc2e5eccc..000000000
--- a/tools/vm/ubuntu1604/15_gcloud.sh
+++ /dev/null
@@ -1,50 +0,0 @@
-#!/bin/bash
-
-# Copyright 2019 The gVisor Authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -xeo pipefail
-
-# Install all essential build tools.
-while true; do
- if (apt-get update && apt-get install -y \
- apt-transport-https \
- ca-certificates \
- gnupg); then
- break
- fi
- result=$?
- if [[ $result -ne 100 ]]; then
- exit $result
- fi
-done
-
-# Add gcloud repositories.
-echo "deb [signed-by=/usr/share/keyrings/cloud.google.gpg] https://packages.cloud.google.com/apt cloud-sdk main" | \
- tee -a /etc/apt/sources.list.d/google-cloud-sdk.list
-
-# Add the appropriate key.
-curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | \
- apt-key --keyring /usr/share/keyrings/cloud.google.gpg add -
-
-# Install the gcloud SDK.
-while true; do
- if (apt-get update && apt-get install -y google-cloud-sdk); then
- break
- fi
- result=$?
- if [[ $result -ne 100 ]]; then
- exit $result
- fi
-done
diff --git a/tools/vm/ubuntu1604/20_bazel.sh b/tools/vm/ubuntu1604/20_bazel.sh
deleted file mode 100755
index bb7afa676..000000000
--- a/tools/vm/ubuntu1604/20_bazel.sh
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/bin/bash
-
-# Copyright 2019 The gVisor Authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -xeo pipefail
-
-declare -r BAZEL_VERSION=2.0.0
-
-# Install bazel dependencies.
-while true; do
- if (apt-get update && apt-get install -y \
- openjdk-8-jdk-headless \
- unzip); then
- break
- fi
- result=$?
- if [[ $result -ne 100 ]]; then
- exit $result
- fi
-done
-
-# Use the release installer.
-curl -L -o bazel-${BAZEL_VERSION}-installer-linux-x86_64.sh https://github.com/bazelbuild/bazel/releases/download/${BAZEL_VERSION}/bazel-${BAZEL_VERSION}-installer-linux-x86_64.sh
-chmod a+x bazel-${BAZEL_VERSION}-installer-linux-x86_64.sh
-./bazel-${BAZEL_VERSION}-installer-linux-x86_64.sh
-rm -f bazel-${BAZEL_VERSION}-installer-linux-x86_64.sh
diff --git a/tools/vm/ubuntu1604/30_docker.sh b/tools/vm/ubuntu1604/30_docker.sh
deleted file mode 100755
index d393133e4..000000000
--- a/tools/vm/ubuntu1604/30_docker.sh
+++ /dev/null
@@ -1,64 +0,0 @@
-#!/bin/bash
-
-# Copyright 2019 The gVisor Authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Add dependencies.
-while true; do
- if (apt-get update && apt-get install -y \
- apt-transport-https \
- ca-certificates \
- curl \
- gnupg-agent \
- software-properties-common); then
- break
- fi
- result=$?
- if [[ $result -ne 100 ]]; then
- exit $result
- fi
-done
-
-# Install the key.
-curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add -
-
-# Add the repository.
-add-apt-repository \
- "deb [arch=amd64] https://download.docker.com/linux/ubuntu \
- $(lsb_release -cs) \
- stable"
-
-# Install docker.
-while true; do
- if (apt-get update && apt-get install -y \
- docker-ce \
- docker-ce-cli \
- containerd.io); then
- break
- fi
- result=$?
- if [[ $result -ne 100 ]]; then
- exit $result
- fi
-done
-
-# Enable experimental features, for cross-building aarch64 images.
-# Enable Docker IPv6.
-cat > /etc/docker/daemon.json <<EOF
-{
- "experimental": true,
- "fixed-cidr-v6": "2001:db8:1::/64",
- "ipv6": true
-}
-EOF
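
Note: the daemon.json written above enables experimental mode (the comment cites cross-building aarch64 images) and IPv6 on the default bridge network. Docker only reads this file at daemon start, so the settings take effect after a restart. A short sketch of applying and checking the configuration, assuming a systemd-managed Docker host:

    # Restart the daemon so it re-reads /etc/docker/daemon.json.
    systemctl restart docker

    # Verify the settings were picked up.
    docker info --format '{{.ExperimentalBuild}}'              # expect: true
    docker network inspect bridge --format '{{.EnableIPv6}}'   # expect: true
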
diff --git a/tools/vm/ubuntu1604/40_kokoro.sh b/tools/vm/ubuntu1604/40_kokoro.sh
deleted file mode 100755
index d3b96c9ad..000000000
--- a/tools/vm/ubuntu1604/40_kokoro.sh
+++ /dev/null
@@ -1,72 +0,0 @@
-#!/bin/bash
-
-# Copyright 2019 The gVisor Authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -xeo pipefail
-
-# Declare kokoro's required public keys.
-declare -r ssh_public_keys=(
- "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDg7L/ZaEauETWrPklUTky3kvxqQfe2Ax/2CsSqhNIGNMnK/8d79CHlmY9+dE1FFQ/RzKNCaltgy7XcN/fCYiCZr5jm2ZtnLuGNOTzupMNhaYiPL419qmL+5rZXt4/dWTrsHbFRACxT8j51PcRMO5wgbL0Bg2XXimbx8kDFaurL2gqduQYqlu4lxWCaJqOL71WogcimeL63Nq/yeH5PJPWpqE4P9VUQSwAzBWFK/hLeds/AiP3MgVS65qHBnhq0JsHy8JQsqjZbG7Iidt/Ll0+gqzEbi62gDIcczG4KC0iOVzDDP/1BxDtt1lKeA23ll769Fcm3rJyoBMYxjvdw1TDx sabujp@trigger.mtv.corp.google.com"
- "ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBNgGK/hCdjmulHfRE3hp4rZs38NCR8yAh0eDsztxqGcuXnuSnL7jOlRrbcQpremJ84omD4eKrIpwJUs+YokMdv4= sabujp@trigger.svl.corp.google.com"
-)
-
-# Install dependencies.
-while true; do
- if (apt-get update && apt-get install -y \
- rsync \
- coreutils \
- python-psutil \
- qemu-kvm \
- python-pip \
- python3-pip \
- zip); then
- break
- fi
- result=$?
- if [[ $result -ne 100 ]]; then
- exit $result
- fi
-done
-
-# junitparser is used to merge junit xml files.
-pip install --no-cache-dir junitparser
-
-# We need a kbuilder user, which may already exist.
-useradd -c "kbuilder user" -m -s /bin/bash kbuilder || true
-
-# We need to provision appropriate keys.
-mkdir -p ~kbuilder/.ssh
-(IFS=$'\n'; echo "${ssh_public_keys[*]}") > ~kbuilder/.ssh/authorized_keys
-chmod 0600 ~kbuilder/.ssh/authorized_keys
-chown -R kbuilder ~kbuilder/.ssh
-
-# Give passwordless sudo access.
-cat > /etc/sudoers.d/kokoro <<EOF
-kbuilder ALL=(ALL) NOPASSWD:ALL
-EOF
-
-# Ensure we can run Docker without sudo.
-usermod -aG docker kbuilder
-
-# Ensure that we can access kvm.
-usermod -aG kvm kbuilder
-
-# Ensure that /tmpfs exists and is writable by kokoro.
-#
-# Note that kokoro will typically attach a second disk (sdb) to the instance
-# that is used for the /tmpfs volume. In the future we could setup an init
-# script that formats and mounts this here; however, we don't expect our build
-# artifacts to be that large.
-mkdir -p /tmpfs && chmod 0777 /tmpfs && touch /tmpfs/READY
diff --git a/tools/vm/ubuntu1604/BUILD b/tools/vm/ubuntu1604/BUILD
deleted file mode 100644
index ab1df0c4c..000000000
--- a/tools/vm/ubuntu1604/BUILD
+++ /dev/null
@@ -1,7 +0,0 @@
-package(licenses = ["notice"])
-
-filegroup(
- name = "ubuntu1604",
- srcs = glob(["*.sh"]),
- visibility = ["//:sandbox"],
-)
diff --git a/tools/vm/ubuntu1804/BUILD b/tools/vm/ubuntu1804/BUILD
deleted file mode 100644
index 0c8856dde..000000000
--- a/tools/vm/ubuntu1804/BUILD
+++ /dev/null
@@ -1,7 +0,0 @@
-package(licenses = ["notice"])
-
-alias(
- name = "ubuntu1804",
- actual = "//tools/vm/ubuntu1604",
- visibility = ["//:sandbox"],
-)
diff --git a/tools/vm/zone.sh b/tools/vm/zone.sh
deleted file mode 100755
index 79569fb19..000000000
--- a/tools/vm/zone.sh
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/bin/bash
-
-# Copyright 2020 The gVisor Authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-exec gcloud config get-value compute/zone
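
Note: zone.sh is a thin wrapper that prints the locally configured gcloud compute zone, letting other tooling pin VM operations to the operator's default zone. A hypothetical usage sketch (the instance name and flags are illustrative only):

    # Create a test VM in whatever zone the local gcloud config selects.
    zone=$(tools/vm/zone.sh)
    gcloud compute instances create example-vm --zone="${zone}"
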
diff --git a/tools/workspace_status.sh b/tools/workspace_status.sh
deleted file mode 100755
index a22c8c9f2..000000000
--- a/tools/workspace_status.sh
+++ /dev/null
@@ -1,18 +0,0 @@
-#!/bin/bash
-
-# Copyright 2018 The gVisor Authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# The STABLE_ prefix will trigger a re-link if it changes.
-echo STABLE_VERSION $(git describe --always --tags --abbrev=12 --dirty || echo 0.0.0)
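
Note: workspace_status.sh follows Bazel's workspace status convention: keys prefixed with STABLE_ are written to stable-status.txt, and a change in their value invalidates (re-links) stamped binaries, which is how the git describe output gets embedded in release builds. A rough sketch of how such a script is typically hooked up on the command line (the target name is illustrative):

    # Stamp builds with the output of the status script.
    bazel build --stamp \
      --workspace_status_command=tools/workspace_status.sh //runsc

    # The stable keys land here; a changed STABLE_VERSION forces a re-link.
    cat bazel-out/stable-status.txt
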