# iperf3 scenario
# iperf3 homepage at: http://software.es.net/iperf/
+from __future__ import absolute_import
+from __future__ import print_function
+
import logging
-import json
+
import pkg_resources
+from oslo_serialization import jsonutils
import yardstick.ssh as ssh
from yardstick.benchmark.scenarios import base
LOG = logging.getLogger(__name__)
-LOG.setLevel(logging.DEBUG)
class Iperf(base.Scenario):
    """Measure network throughput between two hosts with iperf3.

    The target node runs an iperf3 server (daemonized); the host node runs
    the iperf3 client against it and the parsed JSON report is merged into
    the benchmark ``result`` dict.

    Recognized scenario options (all optional):
        udp        -- run in UDP mode instead of TCP
        bandwidth  -- target bandwidth (UDP mode only)
        nodelay    -- set TCP no-delay, disabling Nagle's algorithm
        bytes      -- number of bytes to transmit instead of running for a
                      fixed duration

    Recognized SLA keys:
        bytes_per_second -- minimum required throughput (TCP mode)
        jitter           -- maximum allowed jitter in ms (UDP mode)
    """

    __scenario_type__ = "Iperf3"

    def __init__(self, scenario_cfg, context_cfg):
        """Store scenario/context configuration; SSH setup is deferred.

        :param scenario_cfg: dict with 'options', optional 'runner' and
            optional 'sla' sections
        :param context_cfg: dict with 'host' and 'target' node descriptions
        """
        self.scenario_cfg = scenario_cfg
        self.context_cfg = context_cfg
        self.setup_done = False

    def setup(self):
        """Open SSH connections to both nodes and start the iperf3 server.

        :raises RuntimeError: if the iperf3 server fails to start on the
            target node.
        """
        host = self.context_cfg['host']
        host_user = host.get('user', 'ubuntu')
        host_ssh_port = host.get('ssh_port', ssh.DEFAULT_PORT)
        host_ip = host.get('ip', None)
        host_key_filename = host.get('key_filename', '~/.ssh/id_rsa')

        target = self.context_cfg['target']
        target_user = target.get('user', 'ubuntu')
        target_ssh_port = target.get('ssh_port', ssh.DEFAULT_PORT)
        target_ip = target.get('ip', None)
        target_key_filename = target.get('key_filename', '~/.ssh/id_rsa')

        LOG.info("user:%s, target:%s", target_user, target_ip)
        self.target = ssh.SSH(target_user, target_ip,
                              key_filename=target_key_filename,
                              port=target_ssh_port)
        self.target.wait(timeout=600)

        LOG.info("user:%s, host:%s", host_user, host_ip)
        self.host = ssh.SSH(host_user, host_ip,
                            key_filename=host_key_filename,
                            port=host_ssh_port)
        self.host.wait(timeout=600)

        # -s: server mode, -D: daemonize so the SSH command returns
        cmd = "iperf3 -s -D"
        LOG.debug("Starting iperf3 server with command: %s", cmd)
        status, _, stderr = self.target.execute(cmd)
        if status:
            raise RuntimeError(stderr)

        self.setup_done = True

    def teardown(self):
        """Stop the iperf3 server and close both SSH connections."""
        LOG.debug("teardown")
        self.host.close()
        status, stdout, stderr = self.target.execute("pkill iperf3")
        if status:
            # best-effort cleanup: the server may already have exited
            LOG.warning(stderr)
        self.target.close()

    def run(self, result):
        """Execute the benchmark and merge the iperf3 report into *result*.

        :param result: dict updated in place with the parsed iperf3 JSON
            output (all ints parsed as floats)
        :raises RuntimeError: if the iperf3 client exits non-zero; the
            error cause is in the JSON dict iperf3 prints on stdout
        :raises AssertionError: if an SLA threshold is violated
        """
        if not self.setup_done:
            self.setup()

        # if run by a duration runner, get the duration time and setup as arg
        time = self.scenario_cfg["runner"].get("duration", None) \
            if "runner" in self.scenario_cfg else None
        options = self.scenario_cfg['options']

        cmd = "iperf3 -c %s --json" % (self.context_cfg['target']['ipaddr'])

        # If there are no options specified
        if not options:
            options = ""

        use_UDP = False
        if "udp" in options:
            cmd += " --udp"
            use_UDP = True
            if "bandwidth" in options:
                cmd += " --bandwidth %s" % options["bandwidth"]
        else:
            # tcp obviously
            if "nodelay" in options:
                cmd += " --nodelay"

        # NOTE(review): this middle section was elided in the reviewed diff;
        # reconstructed from what the surrounding code requires ('time' is
        # computed above and stdout must come from executing cmd on the host).
        # --time and --bytes are mutually exclusive in iperf3
        if time:
            cmd += " --time %d" % time
        elif "bytes" in options:
            cmd += " --bytes %d" % options["bytes"]

        LOG.debug("Executing command: %s", cmd)

        status, stdout, stderr = self.host.execute(cmd)
        if status:
            # error cause in json dict on stdout
            raise RuntimeError(stdout)

        # Note: convert all ints to floats in order to avoid
        # schema conflicts in influxdb. We probably should add
        # a format func in the future.
        result.update(
            jsonutils.loads(stdout, parse_int=float))

        if "sla" in self.scenario_cfg:
            sla_iperf = self.scenario_cfg["sla"]
            if not use_UDP:
                sla_bytes_per_second = int(sla_iperf["bytes_per_second"])

                # convert bits per second to bytes per second
                bit_per_second = \
                    int(result["end"]["sum_received"]["bits_per_second"])
                bytes_per_second = bit_per_second / 8
                assert bytes_per_second >= sla_bytes_per_second, \
                    "bytes_per_second %d < sla:bytes_per_second (%d); " % \
                    (bytes_per_second, sla_bytes_per_second)
            else:
                sla_jitter = float(sla_iperf["jitter"])

                jitter_ms = float(result["end"]["sum"]["jitter_ms"])
                assert jitter_ms <= sla_jitter, \
                    "jitter_ms %f > sla:jitter %f; " % \
                    (jitter_ms, sla_jitter)
def _test():
    """Internal test function: run the scenario once against a fixed node."""
    key_filename = pkg_resources.resource_filename('yardstick.resources',
                                                   'files/yardstick_key')
    # host and target are the same node in this smoke test
    node = {
        'ip': '10.229.47.137',
        'user': 'root',
        'key_filename': key_filename,
    }
    ctx = {
        'host': dict(node),
        'target': dict(node, ipaddr='10.229.47.137'),
    }

    logger = logging.getLogger('yardstick')
    logger.setLevel(logging.DEBUG)

    args = {'options': {'packetsize': 120}}
    result = {}

    scenario = Iperf(args, ctx)
    scenario.run(result)
    print(result)
+
if __name__ == '__main__':
_test()