path: root/benchmarks/harness
Diffstat (limited to 'benchmarks/harness')
-rw-r--r--  benchmarks/harness/BUILD                                  | 184
-rw-r--r--  benchmarks/harness/__init__.py                            |  36
-rw-r--r--  benchmarks/harness/machine.py                             |  46
-rw-r--r--  benchmarks/harness/machine_producers/BUILD                |   6
-rw-r--r--  benchmarks/harness/machine_producers/gcloud_producer.py   | 114
-rw-r--r--  benchmarks/harness/ssh_connection.py                      |  32
6 files changed, 296 insertions, 122 deletions
diff --git a/benchmarks/harness/BUILD b/benchmarks/harness/BUILD
index 52d4e42f8..48c548d59 100644
--- a/benchmarks/harness/BUILD
+++ b/benchmarks/harness/BUILD
@@ -1,13 +1,33 @@
-load("//tools:defs.bzl", "py_library", "py_requirement")
+load("//tools:defs.bzl", "pkg_tar", "py_library", "py_requirement")
package(
default_visibility = ["//benchmarks:__subpackages__"],
licenses = ["notice"],
)
+pkg_tar(
+ name = "installers",
+ srcs = [
+ "//tools/installers:head",
+ "//tools/installers:master",
+ "//tools/installers:runsc",
+ ],
+ mode = "0755",
+)
+
+filegroup(
+ name = "files",
+ srcs = [
+ ":installers",
+ ],
+)
+
py_library(
name = "harness",
srcs = ["__init__.py"],
+ data = [
+ ":files",
+ ],
)
py_library(
@@ -25,16 +45,43 @@ py_library(
srcs = ["container.py"],
deps = [
"//benchmarks/workloads",
- py_requirement("asn1crypto", False),
- py_requirement("chardet", False),
- py_requirement("certifi", False),
- py_requirement("docker", True),
- py_requirement("docker-pycreds", False),
- py_requirement("idna", False),
- py_requirement("ptyprocess", False),
- py_requirement("requests", False),
- py_requirement("urllib3", False),
- py_requirement("websocket-client", False),
+ py_requirement(
+ "asn1crypto",
+ direct = False,
+ ),
+ py_requirement(
+ "chardet",
+ direct = False,
+ ),
+ py_requirement(
+ "certifi",
+ direct = False,
+ ),
+ py_requirement("docker"),
+ py_requirement(
+ "docker-pycreds",
+ direct = False,
+ ),
+ py_requirement(
+ "idna",
+ direct = False,
+ ),
+ py_requirement(
+ "ptyprocess",
+ direct = False,
+ ),
+ py_requirement(
+ "requests",
+ direct = False,
+ ),
+ py_requirement(
+ "urllib3",
+ direct = False,
+ ),
+ py_requirement(
+ "websocket-client",
+ direct = False,
+ ),
],
)
@@ -47,17 +94,47 @@ py_library(
"//benchmarks/harness:ssh_connection",
"//benchmarks/harness:tunnel_dispatcher",
"//benchmarks/harness/machine_mocks",
- py_requirement("asn1crypto", False),
- py_requirement("chardet", False),
- py_requirement("certifi", False),
- py_requirement("docker", True),
- py_requirement("docker-pycreds", False),
- py_requirement("idna", False),
- py_requirement("ptyprocess", False),
- py_requirement("requests", False),
- py_requirement("six", False),
- py_requirement("urllib3", False),
- py_requirement("websocket-client", False),
+ py_requirement(
+ "asn1crypto",
+ direct = False,
+ ),
+ py_requirement(
+ "chardet",
+ direct = False,
+ ),
+ py_requirement(
+ "certifi",
+ direct = False,
+ ),
+ py_requirement("docker"),
+ py_requirement(
+ "docker-pycreds",
+ direct = False,
+ ),
+ py_requirement(
+ "idna",
+ direct = False,
+ ),
+ py_requirement(
+ "ptyprocess",
+ direct = False,
+ ),
+ py_requirement(
+ "requests",
+ direct = False,
+ ),
+ py_requirement(
+ "six",
+ direct = False,
+ ),
+ py_requirement(
+ "urllib3",
+ direct = False,
+ ),
+ py_requirement(
+ "websocket-client",
+ direct = False,
+ ),
],
)
@@ -66,10 +143,16 @@ py_library(
srcs = ["ssh_connection.py"],
deps = [
"//benchmarks/harness",
- py_requirement("bcrypt", False),
- py_requirement("cffi", True),
- py_requirement("paramiko", True),
- py_requirement("cryptography", False),
+ py_requirement(
+ "bcrypt",
+ direct = False,
+ ),
+ py_requirement("cffi"),
+ py_requirement("paramiko"),
+ py_requirement(
+ "cryptography",
+ direct = False,
+ ),
],
)
@@ -77,16 +160,43 @@ py_library(
name = "tunnel_dispatcher",
srcs = ["tunnel_dispatcher.py"],
deps = [
- py_requirement("asn1crypto", False),
- py_requirement("chardet", False),
- py_requirement("certifi", False),
- py_requirement("docker", True),
- py_requirement("docker-pycreds", False),
- py_requirement("idna", False),
- py_requirement("pexpect", True),
- py_requirement("ptyprocess", False),
- py_requirement("requests", False),
- py_requirement("urllib3", False),
- py_requirement("websocket-client", False),
+ py_requirement(
+ "asn1crypto",
+ direct = False,
+ ),
+ py_requirement(
+ "chardet",
+ direct = False,
+ ),
+ py_requirement(
+ "certifi",
+ direct = False,
+ ),
+ py_requirement("docker"),
+ py_requirement(
+ "docker-pycreds",
+ direct = False,
+ ),
+ py_requirement(
+ "idna",
+ direct = False,
+ ),
+ py_requirement("pexpect"),
+ py_requirement(
+ "ptyprocess",
+ direct = False,
+ ),
+ py_requirement(
+ "requests",
+ direct = False,
+ ),
+ py_requirement(
+ "urllib3",
+ direct = False,
+ ),
+ py_requirement(
+ "websocket-client",
+ direct = False,
+ ),
],
)
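
The mechanical change running through this BUILD file is uniform: py_requirement's second positional boolean becomes an explicit "direct" keyword, with the true case left as the default. A hedged sketch of what such a macro signature might look like (hypothetical; the real definition lives in //tools:defs.bzl and is not part of this diff):

    # Sketch only; the actual py_requirement macro in //tools:defs.bzl may differ.
    def py_requirement(name, direct = True):
        """Declares a pip requirement; direct = False marks it as transitive."""
        pass  # resolution against the pip repository happens here

With the keyword form, py_requirement("docker") replaces the old py_requirement("docker", True), and the indirect dependencies read unambiguously at the call site.
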
diff --git a/benchmarks/harness/__init__.py b/benchmarks/harness/__init__.py
index 61fd25f73..15aa2a69a 100644
--- a/benchmarks/harness/__init__.py
+++ b/benchmarks/harness/__init__.py
@@ -1,5 +1,5 @@
# python3
-# Copyright 2019 Google LLC
+# Copyright 2019 The gVisor Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,18 +15,48 @@
import getpass
import os
+import subprocess
+import tempfile
# LOCAL_WORKLOADS_PATH defines the path to use for local workloads. This is a
# format string that accepts a single string parameter.
-LOCAL_WORKLOADS_PATH = os.path.join(
- os.path.dirname(__file__), "../workloads/{}/tar.tar")
+LOCAL_WORKLOADS_PATH = os.path.dirname(__file__) + "/../workloads/{}/tar.tar"
# REMOTE_WORKLOADS_PATH defines the path to use for storing the workloads on the
# remote host. This is a format string that accepts a single string parameter.
REMOTE_WORKLOADS_PATH = "workloads/{}"
+# INSTALLER_ARCHIVE is the archive of installers that needs to be copied.
+INSTALLER_ARCHIVE = os.readlink(os.path.join(
+ os.path.dirname(__file__), "installers.tar"))
+
+# SSH_KEY_DIR holds SSH_PRIVATE_KEY for this run. The paramiko version used
+# by bm-tools requires keys generated with the '-t rsa -m PEM' options from
+# ssh-keygen. This is abstracted away from the user.
+SSH_KEY_DIR = tempfile.TemporaryDirectory()
+SSH_PRIVATE_KEY = "key"
+
# DEFAULT_USER is the default user running this script.
DEFAULT_USER = getpass.getuser()
# DEFAULT_USER_HOME is the home directory of the user running the script.
DEFAULT_USER_HOME = os.environ["HOME"] if "HOME" in os.environ else ""
+
+# Default remote directory into which "installer" targets are installed.
+REMOTE_INSTALLERS_PATH = "installers"
+
+
+def make_key():
+ """Wraps a valid ssh key in a temporary directory."""
+ path = os.path.join(SSH_KEY_DIR.name, SSH_PRIVATE_KEY)
+ if not os.path.exists(path):
+ cmd = "ssh-keygen -t rsa -m PEM -b 4096 -f {key} -q -N".format(
+ key=path).split(" ")
+ cmd.append("")
+ subprocess.run(cmd, check=True)
+ return path
+
+
+def delete_key():
+ """Deletes temporary directory containing private key."""
+ SSH_KEY_DIR.cleanup()
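
The new make_key()/delete_key() pair exists because paramiko only accepts RSA keys in PEM format; generation happens lazily on first use. A minimal usage sketch (the host name is hypothetical; RSAKey.from_private_key_file, SSHClient, and AutoAddPolicy are paramiko's real API):

    import paramiko
    from benchmarks import harness

    key_path = harness.make_key()  # generates the PEM key once, reuses it after
    pkey = paramiko.RSAKey.from_private_key_file(key_path)
    client = paramiko.SSHClient()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    client.connect("example-host", username=harness.DEFAULT_USER, pkey=pkey)
    client.close()
    harness.delete_key()           # removes the temporary key directory
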
diff --git a/benchmarks/harness/machine.py b/benchmarks/harness/machine.py
index 2df4c9e31..5bdc4aa85 100644
--- a/benchmarks/harness/machine.py
+++ b/benchmarks/harness/machine.py
@@ -1,5 +1,5 @@
# python3
-# Copyright 2019 Google LLC
+# Copyright 2019 The gVisor Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -29,10 +29,11 @@ to run contianers.
"""
import logging
+import os
import re
import subprocess
import time
-from typing import Tuple
+from typing import List, Tuple
import docker
@@ -42,6 +43,8 @@ from benchmarks.harness import machine_mocks
from benchmarks.harness import ssh_connection
from benchmarks.harness import tunnel_dispatcher
+log = logging.getLogger(__name__)
+
class Machine(object):
"""The machine object is the primary object for benchmarks.
@@ -201,6 +204,7 @@ class RemoteMachine(Machine):
self._tunnel = tunnel_dispatcher.Tunnel(name, **kwargs)
self._tunnel.connect()
self._docker_client = self._tunnel.get_docker_client()
+ self._has_installers = False
def run(self, cmd: str) -> Tuple[str, str]:
return self._ssh_connection.run(cmd)
@@ -210,14 +214,46 @@ class RemoteMachine(Machine):
stdout, stderr = self._ssh_connection.run("cat '{}'".format(path))
return stdout + stderr
+ def install(self,
+ installer: str,
+ results: List[bool] = None,
+ index: int = -1):
+ """Method unique to RemoteMachine to handle installation of installers.
+
+ Handles installers, which install things that may change between runs (e.g.
+ runsc). Usually called from gcloud_producer, which expects this method
+ to store results.
+
+ Args:
+ installer: the installer target to run.
+ results: List passed by the caller in which to store success.
+ index: Index into results at which this method stores its result.
+ """
+ # This generates a tarball of the full installer root (which will generally
+ # be the full bazel root directory) and sends it over.
+ if not self._has_installers:
+ archive = self._ssh_connection.send_installers()
+ self.run("tar -xvf {archive} -C {dir}".format(
+ archive=archive, dir=harness.REMOTE_INSTALLERS_PATH))
+ self._has_installers = True
+
+ # Execute the remote installer.
+ self.run("sudo {dir}/{file}".format(
+ dir=harness.REMOTE_INSTALLERS_PATH, file=installer))
+
+ if results:
+ results[index] = True
+
def pull(self, workload: str) -> str:
# Push to the remote machine and build.
logging.info("Building %s@%s remotely...", workload, self._name)
remote_path = self._ssh_connection.send_workload(workload)
+ remote_dir = os.path.dirname(remote_path)
# Workloads are all tarballs.
- self.run("tar -xvf {remote_path}/tar.tar -C {remote_path}".format(
- remote_path=remote_path))
- self.run("docker build --tag={} {}".format(workload, remote_path))
+ self.run("tar -xvf {remote_path} -C {remote_dir}".format(
+ remote_path=remote_path, remote_dir=remote_dir))
+ self.run("docker build --tag={} {}".format(workload, remote_dir))
return workload # Workload is the tag.
def container(self, image: str, **kwargs) -> container.Container:
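
The pull() change follows from send_workload() now returning the remote tarball path itself rather than a directory, so the extraction directory must be derived from it. A small sketch of the path arithmetic, using a hypothetical workload name:

    import os

    remote_path = "workloads/nginx/tar.tar"    # what send_workload() now returns
    remote_dir = os.path.dirname(remote_path)  # "workloads/nginx"
    # The machine then runs, over SSH:
    #   tar -xvf workloads/nginx/tar.tar -C workloads/nginx
    #   docker build --tag=nginx workloads/nginx
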
diff --git a/benchmarks/harness/machine_producers/BUILD b/benchmarks/harness/machine_producers/BUILD
index 48ea0ef39..81f19bd08 100644
--- a/benchmarks/harness/machine_producers/BUILD
+++ b/benchmarks/harness/machine_producers/BUILD
@@ -31,7 +31,10 @@ py_library(
deps = [
"//benchmarks/harness:machine",
"//benchmarks/harness/machine_producers:machine_producer",
- py_requirement("PyYAML", False),
+ py_requirement(
+ "PyYAML",
+ direct = False,
+ ),
],
)
@@ -76,5 +79,6 @@ py_test(
python_version = "PY3",
tags = [
"local",
+ "manual",
],
)
diff --git a/benchmarks/harness/machine_producers/gcloud_producer.py b/benchmarks/harness/machine_producers/gcloud_producer.py
index e0b77d52b..513d16e4f 100644
--- a/benchmarks/harness/machine_producers/gcloud_producer.py
+++ b/benchmarks/harness/machine_producers/gcloud_producer.py
@@ -1,5 +1,5 @@
# python3
-# Copyright 2019 Google LLC
+# Copyright 2019 The gVisor Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -46,12 +46,11 @@ class GCloudProducer(machine_producer.MachineProducer):
Produces Machine objects backed by GCP instances.
Attributes:
- project: The GCP project name under which to create the machines.
- ssh_key_file: path to a valid ssh private key. See README on vaild ssh keys.
image: image name as a string.
- image_project: image project as a string.
- machine_type: type of GCP to create. e.g. n1-standard-4
zone: string to a valid GCP zone.
+ machine_type: type of GCP machine to create (e.g. n1-standard-4).
+ installers: list of installers to run post-boot.
+ ssh_key_file: path to a valid ssh private key. See README on valid ssh keys.
ssh_user: string of user name for ssh_key
ssh_password: string of password for ssh key
mock: a mock printer which will print mock data if required. Mock data is
@@ -60,21 +59,19 @@ class GCloudProducer(machine_producer.MachineProducer):
"""
def __init__(self,
- project: str,
- ssh_key_file: str,
image: str,
- image_project: str,
- machine_type: str,
zone: str,
+ machine_type: str,
+ installers: List[str],
+ ssh_key_file: str,
ssh_user: str,
ssh_password: str,
mock: gcloud_mock_recorder.MockPrinter = None):
- self.project = project
- self.ssh_key_file = ssh_key_file
self.image = image
- self.image_project = image_project
- self.machine_type = machine_type
self.zone = zone
+ self.machine_type = machine_type
+ self.installers = installers
+ self.ssh_key_file = ssh_key_file
self.ssh_user = ssh_user
self.ssh_password = ssh_password
self.mock = mock
@@ -87,10 +84,34 @@ class GCloudProducer(machine_producer.MachineProducer):
"Cannot ask for {num} machines!".format(num=num_machines))
with self.condition:
names = self._get_unique_names(num_machines)
- self._build_instances(names)
- instances = self._start_command(names)
+ instances = self._build_instances(names)
self._add_ssh_key_to_instances(names)
- return self._machines_from_instances(instances)
+ machines = self._machines_from_instances(instances)
+
+ # Install all bits in lock-step.
+ #
+ # This will perform parallel installations for however many machines we
+ # have, but it's easy to track errors because if installing (a, b, c), we
+ # won't install "c" until "b" is installed on all machines.
+ for installer in self.installers:
+ threads = [None] * len(machines)
+ results = [False] * len(machines)
+ for i in range(len(machines)):
+ threads[i] = threading.Thread(
+ target=machines[i].install, args=(installer, results, i))
+ threads[i].start()
+ for thread in threads:
+ thread.join()
+ for result in results:
+ if not result:
+ raise NotImplementedError(
+ "Installers failed on at least one machine!")
+
+ # Add this user to each machine's docker group.
+ for m in machines:
+ m.run("sudo setfacl -m user:$USER:rw /var/run/docker.sock")
+
+ return machines
def release_machines(self, machine_list: List[machine.Machine]):
"""Releases the requested number of machines, deleting the instances."""
@@ -123,15 +144,7 @@ class GCloudProducer(machine_producer.MachineProducer):
def _get_unique_names(self, num_names) -> List[str]:
"""Returns num_names unique names based on data from the GCP project."""
- curr_machines = self._list_machines()
- curr_names = set([machine["name"] for machine in curr_machines])
- ret = []
- while len(ret) < num_names:
- new_name = "machine-" + str(uuid.uuid4())
- if new_name not in curr_names:
- ret.append(new_name)
- curr_names.update(new_name)
- return ret
+ return ["machine-" + str(uuid.uuid4()) for _ in range(0, num_names)]
def _build_instances(self, names: List[str]) -> List[Dict[str, Any]]:
"""Creates instances using gcloud command.
@@ -151,34 +164,9 @@ class GCloudProducer(machine_producer.MachineProducer):
"_build_instances cannot create instances without names.")
cmd = "gcloud compute instances create".split(" ")
cmd.extend(names)
- cmd.extend(
- "--preemptible --image={image} --zone={zone} --machine-type={machine_type}"
- .format(
- image=self.image, zone=self.zone,
- machine_type=self.machine_type).split(" "))
- if self.image_project:
- cmd.append("--image-project={project}".format(project=self.image_project))
- res = self._run_command(cmd)
- return json.loads(res.stdout)
-
- def _start_command(self, names):
- """Starts instances using gcloud command.
-
- Runs the command `gcloud compute instances start` on list of instances by
- name and returns json data on started instances on success.
-
- Args:
- names: list of names of instances to start.
-
- Returns:
- List of json data describing started machines.
- """
- if not names:
- raise ValueError("_start_command cannot start empty instance list.")
- cmd = "gcloud compute instances start".split(" ")
- cmd.extend(names)
- cmd.append("--zone={zone}".format(zone=self.zone))
- cmd.append("--project={project}".format(project=self.project))
+ cmd.append("--image=" + self.image)
+ cmd.append("--zone=" + self.zone)
+ cmd.append("--machine-type=" + self.machine_type)
res = self._run_command(cmd)
return json.loads(res.stdout)
@@ -186,7 +174,7 @@ class GCloudProducer(machine_producer.MachineProducer):
"""Adds ssh key to instances by calling gcloud ssh command.
Runs the command `gcloud compute ssh instance_name` on list of images by
- name. Tries to ssh into given instance
+ name. Tries to ssh into given instance.
Args:
names: list of machine names to which to add the ssh-key
@@ -202,30 +190,18 @@ class GCloudProducer(machine_producer.MachineProducer):
cmd.append("--ssh-key-file={key}".format(key=self.ssh_key_file))
cmd.append("--zone={zone}".format(zone=self.zone))
cmd.append("--command=uname")
+ cmd.append("--ssh-key-expire-after=60m")
timeout = datetime.timedelta(seconds=5 * 60)
start = datetime.datetime.now()
while datetime.datetime.now() <= timeout + start:
try:
self._run_command(cmd)
break
- except subprocess.CalledProcessError as e:
+ except subprocess.CalledProcessError:
if datetime.datetime.now() > timeout + start:
raise TimeoutError(
"Could not SSH into instance after 5 min: {name}".format(
name=name))
- # 255 is the returncode for ssh connection refused.
- elif e.returncode == 255:
-
- continue
- else:
- raise e
-
- def _list_machines(self) -> List[Dict[str, Any]]:
- """Runs `list` gcloud command and returns list of Machine data."""
- cmd = "gcloud compute instances list --project {project}".format(
- project=self.project).split(" ")
- res = self._run_command(cmd)
- return json.loads(res.stdout)
def _run_command(self,
cmd: List[str],
@@ -261,7 +237,7 @@ class GCloudProducer(machine_producer.MachineProducer):
self.mock.record(res)
if res.returncode != 0:
raise subprocess.CalledProcessError(
- cmd=res.args,
+ cmd=" ".join(res.args),
output=res.stdout,
stderr=res.stderr,
returncode=res.returncode)
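
The lock-step loop above is the heart of this change: installer N+1 starts nowhere until installer N has succeeded on every machine, which keeps failures attributable. A self-contained sketch of the pattern with a stub standing in for machine.install (all names here are hypothetical):

    import threading

    def install(installer, results, index):
        # The real method runs the installer over SSH; the stub just reports.
        results[index] = True

    machines = [0, 1, 2]  # stand-ins for Machine objects
    for installer in ("head", "runsc"):
        results = [False] * len(machines)
        threads = [
            threading.Thread(target=install, args=(installer, results, i))
            for i in range(len(machines))
        ]
        for t in threads:
            t.start()
        for t in threads:
            t.join()
        if not all(results):
            raise RuntimeError(installer + " failed on at least one machine")
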
diff --git a/benchmarks/harness/ssh_connection.py b/benchmarks/harness/ssh_connection.py
index e0bf258f1..b8c8e42d4 100644
--- a/benchmarks/harness/ssh_connection.py
+++ b/benchmarks/harness/ssh_connection.py
@@ -1,5 +1,5 @@
# python3
-# Copyright 2019 Google LLC
+# Copyright 2019 The gVisor Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,6 +13,7 @@
# limitations under the License.
"""SSHConnection handles the details of SSH connections."""
+import logging
import os
import warnings
@@ -23,19 +24,27 @@ from benchmarks import harness
# Get rid of paramiko Cryptography Warnings.
warnings.filterwarnings(action="ignore", module=".*paramiko.*")
+log = logging.getLogger(__name__)
-def send_one_file(client: paramiko.SSHClient, path: str, remote_dir: str):
+
+def send_one_file(client: paramiko.SSHClient, path: str,
+ remote_dir: str) -> str:
"""Sends a single file via an SSH client.
Args:
client: The existing SSH client.
path: The local path.
remote_dir: The remote directory.
+
+ Returns:
+ The remote path as a string.
"""
filename = path.split("/").pop()
- client.exec_command("mkdir -p " + remote_dir)
+ if remote_dir != ".":
+ client.exec_command("mkdir -p " + remote_dir)
with client.open_sftp() as ftp_client:
ftp_client.put(path, os.path.join(remote_dir, filename))
+ return os.path.join(remote_dir, filename)
class SSHConnection:
@@ -87,10 +96,13 @@ class SSHConnection:
The contents of stdout and stderr.
"""
with self._client() as client:
+ log.info("running command: %s", cmd)
_, stdout, stderr = client.exec_command(command=cmd)
- stdout.channel.recv_exit_status()
+ log.info("returned status: %d", stdout.channel.recv_exit_status())
stdout = stdout.read().decode("utf-8")
stderr = stderr.read().decode("utf-8")
+ log.info("stdout: %s", stdout)
+ log.info("stderr: %s", stderr)
return stdout, stderr
def send_workload(self, name: str) -> str:
@@ -103,6 +115,12 @@ class SSHConnection:
The remote path.
"""
with self._client() as client:
- send_one_file(client, harness.LOCAL_WORKLOADS_PATH.format(name),
- harness.REMOTE_WORKLOADS_PATH.format(name))
- return harness.REMOTE_WORKLOADS_PATH.format(name)
+ return send_one_file(client, harness.LOCAL_WORKLOADS_PATH.format(name),
+ harness.REMOTE_WORKLOADS_PATH.format(name))
+
+ def send_installers(self) -> str:
+ with self._client() as client:
+ return send_one_file(
+ client,
+ path=harness.INSTALLER_ARCHIVE,
+ remote_dir=harness.REMOTE_INSTALLERS_PATH)
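
send_one_file() now returns the remote path it wrote, which lets send_workload() and send_installers() simply forward its result. A usage sketch (host and paths are hypothetical; SSHClient, connect, and open_sftp are paramiko's real API, the latter used inside send_one_file):

    import paramiko

    client = paramiko.SSHClient()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    client.connect("example-host", username="user", key_filename="key")
    remote = send_one_file(client, "workloads/nginx/tar.tar", "workloads/nginx")
    # remote == "workloads/nginx/tar.tar" on the remote host; callers now just
    # return this value instead of reconstructing the path themselves.
    client.close()
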