Diffstat (limited to 'benchmarks/harness')
-rw-r--r--  benchmarks/harness/machine.py                            |  9
-rw-r--r--  benchmarks/harness/machine_producers/gcloud_producer.py  | 28
-rw-r--r--  benchmarks/harness/ssh_connection.py                     |  9
3 files changed, 30 insertions(+), 16 deletions(-)
diff --git a/benchmarks/harness/machine.py b/benchmarks/harness/machine.py
index 3d32d3dda..5bdc4aa85 100644
--- a/benchmarks/harness/machine.py
+++ b/benchmarks/harness/machine.py
@@ -43,6 +43,8 @@ from benchmarks.harness import machine_mocks
from benchmarks.harness import ssh_connection
from benchmarks.harness import tunnel_dispatcher
+log = logging.getLogger(__name__)
+
class Machine(object):
"""The machine object is the primary object for benchmarks.
@@ -236,9 +238,10 @@ class RemoteMachine(Machine):
archive=archive, dir=harness.REMOTE_INSTALLERS_PATH))
self._has_installers = True
- # Execute the remote installer.
- self.run("sudo {dir}/{file}".format(
- dir=harness.REMOTE_INSTALLERS_PATH, file=installer))
+ # Execute the remote installer.
+ self.run("sudo {dir}/{file}".format(
+ dir=harness.REMOTE_INSTALLERS_PATH, file=installer))
+
if results:
results[index] = True
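For context, machine.py now creates a module-level logger. The sketch below shows one way a caller could surface that output; it assumes only the standard logging module, and the entry point shown is hypothetical, not part of this change:

    # Hypothetical harness entry point; only logging.basicConfig is assumed.
    import logging

    if __name__ == "__main__":
        logging.basicConfig(
            level=logging.INFO,
            format="%(asctime)s %(name)s %(levelname)s: %(message)s")
        # ... start the benchmark run; harness modules now emit INFO records
        # through logging.getLogger(__name__).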
diff --git a/benchmarks/harness/machine_producers/gcloud_producer.py b/benchmarks/harness/machine_producers/gcloud_producer.py
index 513d16e4f..44d72f575 100644
--- a/benchmarks/harness/machine_producers/gcloud_producer.py
+++ b/benchmarks/harness/machine_producers/gcloud_producer.py
@@ -53,6 +53,8 @@ class GCloudProducer(machine_producer.MachineProducer):
ssh_key_file: path to a valid ssh private key. See README on valid ssh keys.
ssh_user: string of user name for ssh_key
ssh_password: string of password for ssh key
+  internal: if true, use the internal IPs of instances. Useful when bm-tools
+    runs on a GCP VM and a firewall blocks access to external IPs.
mock: a mock printer which will print mock data if required. Mock data is
recorded output from subprocess calls (returncode, stdout, args).
condition: mutex for this class around machine creation and deletion.
@@ -66,6 +68,7 @@ class GCloudProducer(machine_producer.MachineProducer):
ssh_key_file: str,
ssh_user: str,
ssh_password: str,
+ internal: bool,
mock: gcloud_mock_recorder.MockPrinter = None):
self.image = image
self.zone = zone
@@ -74,6 +77,7 @@ class GCloudProducer(machine_producer.MachineProducer):
self.ssh_key_file = ssh_key_file
self.ssh_user = ssh_user
self.ssh_password = ssh_password
+ self.internal = internal
self.mock = mock
self.condition = threading.Condition()
@@ -129,15 +133,13 @@ class GCloudProducer(machine_producer.MachineProducer):
machines = []
for instance in instances:
name = instance["name"]
+ external = instance["networkInterfaces"][0]["accessConfigs"][0]["natIP"]
+ internal = instance["networkInterfaces"][0]["networkIP"]
kwargs = {
- "hostname":
- instance["networkInterfaces"][0]["accessConfigs"][0]["natIP"],
- "key_path":
- self.ssh_key_file,
- "username":
- self.ssh_user,
- "key_password":
- self.ssh_password
+ "hostname": internal if self.internal else external,
+ "key_path": self.ssh_key_file,
+ "username": self.ssh_user,
+ "key_password": self.ssh_password
}
machines.append(machine.RemoteMachine(name=name, **kwargs))
return machines
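As a standalone illustration of the hostname selection above, the sketch below picks the internal or external (NAT) IP from an instance record shaped like the output of `gcloud compute instances list --format=json`; the sample record and addresses are made up:

    import json

    # Illustrative instance record; field names match those used in get_machines.
    sample = json.loads("""
    {
      "name": "bm-instance-0",
      "networkInterfaces": [
        {"networkIP": "10.128.0.2",
         "accessConfigs": [{"natIP": "35.224.0.1"}]}
      ]
    }
    """)

    def pick_hostname(instance, internal):
        """Returns the internal or external (NAT) IP of an instance record."""
        nic = instance["networkInterfaces"][0]
        return nic["networkIP"] if internal else nic["accessConfigs"][0]["natIP"]

    print(pick_hostname(sample, internal=True))   # 10.128.0.2
    print(pick_hostname(sample, internal=False))  # 35.224.0.1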
@@ -168,7 +170,9 @@ class GCloudProducer(machine_producer.MachineProducer):
cmd.append("--zone=" + self.zone)
cmd.append("--machine-type=" + self.machine_type)
res = self._run_command(cmd)
- return json.loads(res.stdout)
+ data = res.stdout
+ data = str(data, "utf-8") if isinstance(data, (bytes, bytearray)) else data
+ return json.loads(data)
def _add_ssh_key_to_instances(self, names: List[str]) -> None:
"""Adds ssh key to instances by calling gcloud ssh command.
@@ -186,11 +190,13 @@ class GCloudProducer(machine_producer.MachineProducer):
TimeoutError: when 3 unsuccessful tries to ssh into the host return 255.
"""
for name in names:
- cmd = "gcloud compute ssh {name}".format(name=name).split(" ")
+ cmd = "gcloud compute ssh {user}@{name}".format(
+ user=self.ssh_user, name=name).split(" ")
+ if self.internal:
+ cmd.append("--internal-ip")
cmd.append("--ssh-key-file={key}".format(key=self.ssh_key_file))
cmd.append("--zone={zone}".format(zone=self.zone))
cmd.append("--command=uname")
- cmd.append("--ssh-key-expire-after=60m")
timeout = datetime.timedelta(seconds=5 * 60)
start = datetime.datetime.now()
while datetime.datetime.now() <= timeout + start:
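The earlier hunk in this file also normalizes subprocess output before handing it to json.loads. A minimal sketch of that pattern, with an illustrative command standing in for the real gcloud invocation:

    import json
    import subprocess

    # Illustrative command; the real code runs a gcloud command with --format=json.
    res = subprocess.run(["echo", '[{"name": "bm-instance-0"}]'],
                         stdout=subprocess.PIPE, check=True)
    data = res.stdout
    # subprocess yields bytes unless text=True is set; json.loads accepts bytes
    # on Python >= 3.6, so decoding explicitly keeps older interpreters working.
    data = str(data, "utf-8") if isinstance(data, (bytes, bytearray)) else data
    print(json.loads(data)[0]["name"])  # bm-instance-0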
diff --git a/benchmarks/harness/ssh_connection.py b/benchmarks/harness/ssh_connection.py
index a50e34293..b8c8e42d4 100644
--- a/benchmarks/harness/ssh_connection.py
+++ b/benchmarks/harness/ssh_connection.py
@@ -13,7 +13,7 @@
# limitations under the License.
"""SSHConnection handles the details of SSH connections."""
-
+import logging
import os
import warnings
@@ -24,6 +24,8 @@ from benchmarks import harness
# Get rid of paramiko Cryptography Warnings.
warnings.filterwarnings(action="ignore", module=".*paramiko.*")
+log = logging.getLogger(__name__)
+
def send_one_file(client: paramiko.SSHClient, path: str,
remote_dir: str) -> str:
@@ -94,10 +96,13 @@ class SSHConnection:
The contents of stdout and stderr.
"""
with self._client() as client:
+ log.info("running command: %s", cmd)
_, stdout, stderr = client.exec_command(command=cmd)
- stdout.channel.recv_exit_status()
+ log.info("returned status: %d", stdout.channel.recv_exit_status())
stdout = stdout.read().decode("utf-8")
stderr = stderr.read().decode("utf-8")
+ log.info("stdout: %s", stdout)
+ log.info("stderr: %s", stderr)
return stdout, stderr
def send_workload(self, name: str) -> str:
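The SSHConnection.run change is shown in isolation below: a minimal sketch assuming an already-connected paramiko.SSHClient, with an illustrative helper name:

    import logging

    import paramiko

    log = logging.getLogger(__name__)

    def run_logged(client: paramiko.SSHClient, cmd: str):
        """Runs cmd over an open SSH client, logging command, exit status, and output."""
        log.info("running command: %s", cmd)
        _, stdout, stderr = client.exec_command(command=cmd)
        # recv_exit_status blocks until the remote command finishes.
        log.info("returned status: %d", stdout.channel.recv_exit_status())
        out = stdout.read().decode("utf-8")
        err = stderr.read().decode("utf-8")
        log.info("stdout: %s", out)
        log.info("stderr: %s", err)
        return out, err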