Diffstat (limited to 'benchmarks')
-rw-r--r--  benchmarks/BUILD                                          4
-rw-r--r--  benchmarks/README.md                                     26
-rw-r--r--  benchmarks/harness/machine_producers/gcloud_producer.py 28
-rw-r--r--  benchmarks/runner/__init__.py                            13
-rw-r--r--  benchmarks/runner/commands.py                            16
5 files changed, 52 insertions(+), 35 deletions(-)
diff --git a/benchmarks/BUILD b/benchmarks/BUILD
index 2a2d15d7e..ac44f479d 100644
--- a/benchmarks/BUILD
+++ b/benchmarks/BUILD
@@ -13,8 +13,8 @@ py_binary(
data = select({
":gcloud_rule": [],
"//conditions:default": [
- "//tools/images:ubuntu1604",
- "//tools/images:zone",
+ "//tools/vm:ubuntu1604",
+ "//tools/vm:zone",
],
}),
main = "run.py",
diff --git a/benchmarks/README.md b/benchmarks/README.md
index 6d1ea3ae2..814bcb220 100644
--- a/benchmarks/README.md
+++ b/benchmarks/README.md
@@ -10,7 +10,7 @@ The scripts assume the following:
(controller) and one or more machines on which docker containers will be run
(environment).
* The controller machine must have bazel installed along with this source
- code. You should be able to run a command like `bazel run :benchmarks --
+ code. You should be able to run a command like `bazel run //benchmarks --
--list`
* Environment machines must have docker and the required runtimes installed.
More specifically, you should be able to run a command like: `docker run
@@ -33,7 +33,7 @@ but it does support GCP workflows. To run locally, run the following from the
benchmarks directory:
```bash
-bazel run --define gcloud=off :benchmarks -- run-local startup
+bazel run --define gcloud=off //benchmarks -- run-local startup
...
method,metric,result
@@ -48,16 +48,20 @@ runtime, runc. Running on another installed runtime, like say runsc, is as
simple as:
```bash
-bazel run --define gcloud=off :benchmarks -- run-local startup --runtime=runsc
+bazel run --define gcloud=off //benchmarks -- run-local startup --runtime=runsc
```
-There is help: `bash bazel run --define gcloud=off :benchmarks -- --help bazel
-run --define gcloud=off :benchmarks -- run-local --help`
+There is help:
+
+```bash
+bazel run --define gcloud=off //benchmarks -- --help
+bazel run --define gcloud=off //benchmarks -- run-local --help
+```
To list available benchmarks, use the `list` command:
```bash
-bazel --define gcloud=off run :benchmarks -- list
+bazel --define gcloud=off run //benchmarks -- list
...
Benchmark: sysbench.cpu
@@ -70,7 +74,7 @@ Metrics: events_per_second
You can choose benchmarks by name or regex like:
```bash
-bazel run --define gcloud=off :benchmarks -- run-local startup.node
+bazel run --define gcloud=off //benchmarks -- run-local startup.node
...
metric,result
startup_time_ms,1671.7178000000001
@@ -80,7 +84,7 @@ startup_time_ms,1671.7178000000001
or
```bash
-bazel run --define gcloud=off :benchmarks -- run-local s
+bazel run --define gcloud=off //benchmarks -- run-local s
...
method,metric,result
startup.empty,startup_time_ms,1792.8292
@@ -98,13 +102,13 @@ You can run parameterized benchmarks, for example to run with different
runtimes:
```bash
-bazel run --define gcloud=off :benchmarks -- run-local --runtime=runc --runtime=runsc sysbench.cpu
+bazel run --define gcloud=off //benchmarks -- run-local --runtime=runc --runtime=runsc sysbench.cpu
```
Or with different parameters:
```bash
-bazel run --define gcloud=off :benchmarks -- run-local --max_prime=10 --max_prime=100 sysbench.cpu
+bazel run --define gcloud=off //benchmarks -- run-local --max_prime=10 --max_prime=100 sysbench.cpu
```
### On Google Compute Engine (GCE)
@@ -117,7 +121,7 @@ runtime is installed from the workspace. See the files in `tools/installers` for
supported install targets.
```bash
-bazel run :benchmarks -- run-gcp --installers=head --runtime=runsc sysbench.cpu
+bazel run //benchmarks -- run-gcp --installers=head --runtime=runsc sysbench.cpu
```
When running on GCE, the scripts generate a per-run SSH key, which is added to
diff --git a/benchmarks/harness/machine_producers/gcloud_producer.py b/benchmarks/harness/machine_producers/gcloud_producer.py
index 513d16e4f..44d72f575 100644
--- a/benchmarks/harness/machine_producers/gcloud_producer.py
+++ b/benchmarks/harness/machine_producers/gcloud_producer.py
@@ -53,6 +53,8 @@ class GCloudProducer(machine_producer.MachineProducer):
ssh_key_file: path to a valid ssh private key. See README on valid ssh keys.
ssh_user: string of user name for ssh_key
ssh_password: string of password for ssh key
+ internal: if true, use the instances' internal IPs. Useful when bm-tools
+ runs on a GCP VM and a firewall blocks the instances' external IPs.
mock: a mock printer which will print mock data if required. Mock data is
recorded output from subprocess calls (returncode, stdout, args).
condition: mutex for this class around machine creation and deletion.
@@ -66,6 +68,7 @@ class GCloudProducer(machine_producer.MachineProducer):
ssh_key_file: str,
ssh_user: str,
ssh_password: str,
+ internal: bool,
mock: gcloud_mock_recorder.MockPrinter = None):
self.image = image
self.zone = zone
@@ -74,6 +77,7 @@ class GCloudProducer(machine_producer.MachineProducer):
self.ssh_key_file = ssh_key_file
self.ssh_user = ssh_user
self.ssh_password = ssh_password
+ self.internal = internal
self.mock = mock
self.condition = threading.Condition()
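
For context, a minimal sketch of how a caller might construct `GCloudProducer` with the new `internal` flag. The keyword values are placeholders, not the project's real defaults, and the parameter names for `image`, `zone`, `machine_type`, and `installers` are inferred from the attribute assignments and call sites visible in this diff.

```python
from benchmarks.harness.machine_producers import gcloud_producer

# Hypothetical values for illustration only.
producer = gcloud_producer.GCloudProducer(
    image="ubuntu-1604-benchmark",     # placeholder image name
    zone="us-central1-b",              # placeholder zone
    machine_type="n1-standard-4",
    installers=["head"],
    ssh_key_file="/tmp/bm-tools-key",  # placeholder key path
    ssh_user="bm-tools",               # placeholder user
    ssh_password="",
    internal=True,                     # prefer internal IPs for SSH
)
```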
@@ -129,15 +133,13 @@ class GCloudProducer(machine_producer.MachineProducer):
machines = []
for instance in instances:
name = instance["name"]
+ external = instance["networkInterfaces"][0]["accessConfigs"][0]["natIP"]
+ internal = instance["networkInterfaces"][0]["networkIP"]
kwargs = {
- "hostname":
- instance["networkInterfaces"][0]["accessConfigs"][0]["natIP"],
- "key_path":
- self.ssh_key_file,
- "username":
- self.ssh_user,
- "key_password":
- self.ssh_password
+ "hostname": internal if self.internal else external,
+ "key_path": self.ssh_key_file,
+ "username": self.ssh_user,
+ "key_password": self.ssh_password
}
machines.append(machine.RemoteMachine(name=name, **kwargs))
return machines
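
The hunk above switches the SSH hostname between the instance's NAT (external) IP and its VPC-internal IP. A small standalone sketch of that selection, using the same field names the `gcloud ... --format=json` instance output exposes; the sample dictionary is made up for illustration:

```python
def pick_hostname(instance: dict, use_internal: bool) -> str:
    """Returns the address to SSH to, mirroring the selection in the hunk above."""
    nic = instance["networkInterfaces"][0]
    if use_internal:
        return nic["networkIP"]                  # VPC-internal address
    return nic["accessConfigs"][0]["natIP"]      # external (NAT) address

example = {
    "name": "bm-node-1",
    "networkInterfaces": [{
        "networkIP": "10.128.0.7",
        "accessConfigs": [{"natIP": "35.192.0.42"}],
    }],
}
assert pick_hostname(example, use_internal=True) == "10.128.0.7"
```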
@@ -168,7 +170,9 @@ class GCloudProducer(machine_producer.MachineProducer):
cmd.append("--zone=" + self.zone)
cmd.append("--machine-type=" + self.machine_type)
res = self._run_command(cmd)
- return json.loads(res.stdout)
+ data = res.stdout
+ data = str(data, "utf-8") if isinstance(data, (bytes, bytearray)) else data
+ return json.loads(data)
def _add_ssh_key_to_instances(self, names: List[str]) -> None:
"""Adds ssh key to instances by calling gcloud ssh command.
@@ -186,11 +190,13 @@ class GCloudProducer(machine_producer.MachineProducer):
TimeoutError: when 3 unsuccessful tries to ssh into the host return 255.
"""
for name in names:
- cmd = "gcloud compute ssh {name}".format(name=name).split(" ")
+ cmd = "gcloud compute ssh {user}@{name}".format(
+ user=self.ssh_user, name=name).split(" ")
+ if self.internal:
+ cmd.append("--internal-ip")
cmd.append("--ssh-key-file={key}".format(key=self.ssh_key_file))
cmd.append("--zone={zone}".format(zone=self.zone))
cmd.append("--command=uname")
- cmd.append("--ssh-key-expire-after=60m")
timeout = datetime.timedelta(seconds=5 * 60)
start = datetime.datetime.now()
while datetime.datetime.now() <= timeout + start:
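
The final hunk in this file builds the `gcloud compute ssh` probe (now targeting `user@name` and adding `--internal-ip` when requested) and retries it until a deadline passes. A rough sketch of that retry-until-timeout shape, with a placeholder command and a short sleep between attempts that the hunk itself does not have:

```python
import datetime
import subprocess
import time

def wait_for_ssh(cmd, timeout_seconds=300):
    """Retries cmd until it exits 0 or the deadline passes (sketch only)."""
    deadline = datetime.datetime.now() + datetime.timedelta(seconds=timeout_seconds)
    while datetime.datetime.now() <= deadline:
        res = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        if res.returncode == 0:
            return
        time.sleep(5)  # brief pause between attempts (not in the hunk above)
    raise TimeoutError("host did not become reachable over ssh")

# e.g. wait_for_ssh(["gcloud", "compute", "ssh", "user@name", "--command=uname"])
```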
diff --git a/benchmarks/runner/__init__.py b/benchmarks/runner/__init__.py
index ba27dc69f..fc59cf505 100644
--- a/benchmarks/runner/__init__.py
+++ b/benchmarks/runner/__init__.py
@@ -19,6 +19,7 @@ import logging
import pkgutil
import pydoc
import re
+import subprocess
import sys
import types
from typing import List
@@ -120,14 +121,13 @@ def run_mock(ctx, **kwargs):
@runner.command("run-gcp", commands.GCPCommand)
@click.pass_context
-def run_gcp(ctx, image_file: str, zone_file: str, machine_type: str,
- installers: List[str], **kwargs):
+def run_gcp(ctx, image_file: str, zone_file: str, internal: bool,
+ machine_type: str, installers: List[str], **kwargs):
"""Runs all benchmarks on GCP instances."""
# Resolve all files.
- image = open(image_file).read().rstrip()
- zone = open(zone_file).read().rstrip()
-
+ image = subprocess.check_output([image_file]).rstrip()
+ zone = subprocess.check_output([zone_file]).rstrip()
key_file = harness.make_key()
producer = gcloud_producer.GCloudProducer(
@@ -137,7 +137,8 @@ def run_gcp(ctx, image_file: str, zone_file: str, machine_type: str,
installers,
ssh_key_file=key_file,
ssh_user=harness.DEFAULT_USER,
- ssh_password="")
+ ssh_password="",
+ internal=internal)
try:
run(ctx, producer, **kwargs)
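
The `run_gcp` change above replaces reading static text files with executing the image/zone targets and capturing what they print, since `--image_file` and `--zone_file` now default to binaries under `tools/vm`. A sketch of that pattern in isolation; the decode step is an addition here because `check_output` returns bytes, and the example path is only illustrative:

```python
import subprocess

def read_emitter(path: str) -> str:
    """Runs a binary that prints a single value and returns it as a clean string."""
    out = subprocess.check_output([path])   # raises CalledProcessError on failure
    return out.decode("utf-8").rstrip()     # bytes -> str, drop trailing newline

# e.g. zone = read_emitter("tools/vm/zone")
```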
diff --git a/benchmarks/runner/commands.py b/benchmarks/runner/commands.py
index 0fccb2fad..9a391eb01 100644
--- a/benchmarks/runner/commands.py
+++ b/benchmarks/runner/commands.py
@@ -101,15 +101,20 @@ class GCPCommand(RunCommand):
image_file = click.core.Option(
("--image_file",),
- help="The file containing the image for VMs.",
+ help="The binary that emits the GCP image.",
default=os.path.join(
- os.path.dirname(__file__), "../../tools/images/ubuntu1604.txt"),
+ os.path.dirname(__file__), "../../tools/vm/ubuntu1604"),
)
zone_file = click.core.Option(
("--zone_file",),
- help="The file containing the GCP zone.",
- default=os.path.join(
- os.path.dirname(__file__), "../../tools/images/zone.txt"),
+ help="The binary that emits the GCP zone.",
+ default=os.path.join(os.path.dirname(__file__), "../../tools/vm/zone"),
+ )
+ internal = click.core.Option(
+ ("--internal/--no-internal",),
+ help="""Use instance internal IPs. Used if bm-tools runner is running on
+ GCP instance with firewall rules blocking external IPs.""",
+ default=False,
)
installers = click.core.Option(
("--installers",),
@@ -124,6 +129,7 @@ class GCPCommand(RunCommand):
self.params.extend([
image_file,
zone_file,
+ internal,
machine_type,
installers,
])
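
The `--internal/--no-internal` option added above uses click's paired on/off flag form, which yields a single boolean parameter with an explicit negative spelling. A toy, standalone example of that pattern (not the benchmarks CLI itself):

```python
import click

@click.command()
@click.option("--internal/--no-internal", default=False,
              help="Use instance internal IPs.")
def demo(internal: bool):
    # Prints which address family would be used, for illustration only.
    click.echo("internal" if internal else "external")

if __name__ == "__main__":
    demo()
```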