X-Git-Url: https://gerrit.opnfv.org/gerrit/gitweb?a=blobdiff_plain;f=yardstick%2Fbenchmark%2Fscenarios%2Fnetworking%2Fiperf3.py;h=a3d273750b43ac9f381406fd39cd0c0d753ea28a;hb=c894c814d62f0c839d381b7370f3d20bf02db0b0;hp=e31a892d2ea606a29fe3b0676c1903b769eca92f;hpb=a1378b700a9234e7fcbf77c7890030b306bc65ea;p=yardstick.git

diff --git a/yardstick/benchmark/scenarios/networking/iperf3.py b/yardstick/benchmark/scenarios/networking/iperf3.py
index e31a892d2..a3d273750 100644
--- a/yardstick/benchmark/scenarios/networking/iperf3.py
+++ b/yardstick/benchmark/scenarios/networking/iperf3.py
@@ -10,11 +10,16 @@
 # iperf3 scenario
 # iperf3 homepage at: http://software.es.net/iperf/
 
+from __future__ import absolute_import
+from __future__ import print_function
+
 import logging
-import json
+
 import pkg_resources
+from oslo_serialization import jsonutils
 
 import yardstick.ssh as ssh
+from yardstick.common import utils
 from yardstick.benchmark.scenarios import base
 
 LOG = logging.getLogger(__name__)
@@ -45,51 +50,64 @@ For more info see http://software.es.net/iperf
         type:    int
         unit:    bytes
         default: -
+    length - length of buffer to read or write,
+        (default 128 KB for TCP, 8 KB for UDP)
+        type:    int
+        unit:    k
+        default: -
+    window - set window size / socket buffer size
+        set TCP windows size. for UDP way to test, this will set to accept UDP
+        packet buffer size, limit the max size of acceptable data packet.
+        type:    int
+        unit:    k
+        default: -
     """
     __scenario_type__ = "Iperf3"
 
-    def __init__(self, context):
-        self.context = context
-        self.user = context.get('user', 'ubuntu')
-        self.host_ipaddr = context['host']
-        self.target_ipaddr = context['target']
-        self.key_filename = self.context.get('key_filename', '~/.ssh/id_rsa')
+    def __init__(self, scenario_cfg, context_cfg):
+        self.scenario_cfg = scenario_cfg
+        self.context_cfg = context_cfg
         self.setup_done = False
 
     def setup(self):
-        LOG.debug("setup, key %s", self.key_filename)
-        LOG.info("host:%s, user:%s", self.host_ipaddr, self.user)
-        self.host = ssh.SSH(self.user, self.host_ipaddr,
-                            key_filename=self.key_filename)
-        self.host.wait(timeout=600)
+        host = self.context_cfg['host']
+        target = self.context_cfg['target']
 
-        LOG.info("target:%s, user:%s", self.target_ipaddr, self.user)
-        self.target = ssh.SSH(self.user, self.target_ipaddr,
-                              key_filename=self.key_filename)
+        LOG.info("user:%s, target:%s", target['user'], target['ip'])
+        self.target = ssh.SSH.from_node(target, defaults={"user": "ubuntu"})
         self.target.wait(timeout=600)
 
+        LOG.info("user:%s, host:%s", host['user'], host['ip'])
+        self.host = ssh.SSH.from_node(host, defaults={"user": "ubuntu"})
+        self.host.wait(timeout=600)
+
         cmd = "iperf3 -s -D"
         LOG.debug("Starting iperf3 server with command: %s", cmd)
         status, _, stderr = self.target.execute(cmd)
         if status:
             raise RuntimeError(stderr)
 
+        self.setup_done = True
+
     def teardown(self):
         LOG.debug("teardown")
         self.host.close()
         status, stdout, stderr = self.target.execute("pkill iperf3")
         if status:
-            LOG.warn(stderr)
+            LOG.warning(stderr)
         self.target.close()
 
-    def run(self, args):
+    def run(self, result):
         """execute the benchmark"""
+        if not self.setup_done:
+            self.setup()
 
         # if run by a duration runner, get the duration time and setup as arg
-        time = self.context.get('duration', None)
-        options = args['options']
+        time = self.scenario_cfg["runner"].get("duration", None) \
+            if "runner" in self.scenario_cfg else None
+        options = self.scenario_cfg['options']
 
-        cmd = "iperf3 -c %s --json" % (self.target_ipaddr)
+        cmd = "iperf3 -c %s --json" % (self.context_cfg['target']['ipaddr'])
 
         # If there are no options specified
         if not options:
@@ -115,6 +133,12 @@ For more info see http://software.es.net/iperf
         elif "blockcount" in options:
             cmd += " --blockcount %d" % options["blockcount"]
 
+        if "length" in options:
+            cmd += " --length %s" % options["length"]
+
+        if "window" in options:
+            cmd += " --window %s" % options["window"]
+
         LOG.debug("Executing command: %s", cmd)
         status, stdout, stderr = self.host.execute(cmd)
@@ -122,58 +146,62 @@ For more info see http://software.es.net/iperf
             # error cause in json dict on stdout
             raise RuntimeError(stdout)
 
-        output = json.loads(stdout)
+        # Note: convert all ints to floats in order to avoid
+        # schema conflicts in influxdb. We probably should add
+        # a format func in the future.
+        iperf_result = jsonutils.loads(stdout, parse_int=float)
+        result.update(utils.flatten_dict_key(iperf_result))
 
-        if "sla" in args:
-            sla_iperf = args["sla"]
+        if "sla" in self.scenario_cfg:
+            sla_iperf = self.scenario_cfg["sla"]
             if not use_UDP:
                 sla_bytes_per_second = int(sla_iperf["bytes_per_second"])
 
                 # convert bits per second to bytes per second
                 bit_per_second = \
-                    int(output["end"]["sum_received"]["bits_per_second"])
+                    int(iperf_result["end"]["sum_received"]["bits_per_second"])
                 bytes_per_second = bit_per_second / 8
 
                 assert bytes_per_second >= sla_bytes_per_second, \
-                    "bytes_per_second %d < sla:bytes_per_second (%d)" % \
+                    "bytes_per_second %d < sla:bytes_per_second (%d); " % \
                     (bytes_per_second, sla_bytes_per_second)
             else:
                 sla_jitter = float(sla_iperf["jitter"])
 
                 jitter_ms = float(iperf_result["end"]["sum"]["jitter_ms"])
-                jitter_ms = float(output["end"]["sum"]["jitter_ms"])
+                jitter_ms = float(iperf_result["end"]["sum"]["jitter_ms"])
                 assert jitter_ms <= sla_jitter, \
-                    "jitter_ms %f > sla:jitter %f" % \
+                    "jitter_ms %f > sla:jitter %f; " % \
                    (jitter_ms, sla_jitter)
 
-        return output
-
 
 def _test():
-    '''internal test function'''
+    """internal test function"""
+    key_filename = pkg_resources.resource_filename('yardstick.resources',
+                                                   'files/yardstick_key')
+    ctx = {
+        'host': {
+            'ip': '10.229.47.137',
+            'user': 'root',
+            'key_filename': key_filename
+        },
+        'target': {
+            'ip': '10.229.47.137',
+            'user': 'root',
+            'key_filename': key_filename,
+            'ipaddr': '10.229.47.137',
+        }
+    }
     logger = logging.getLogger('yardstick')
     logger.setLevel(logging.DEBUG)
 
-    key_filename = pkg_resources.resource_filename('yardstick.resources',
-                                                   'files/yardstick_key')
-    runner_cfg = {}
-    runner_cfg['type'] = 'Duration'
-    runner_cfg['duration'] = 5
-    runner_cfg['host'] = '10.0.2.33'
-    runner_cfg['target_ipaddr'] = '10.0.2.53'
-    runner_cfg['user'] = 'ubuntu'
-    runner_cfg['output_filename'] = "/tmp/yardstick.out"
-    runner_cfg['key_filename'] = key_filename
-
-    scenario_args = {}
-    scenario_args['options'] = {"bytes": 10000000000}
-    scenario_args['sla'] = \
-        {"bytes_per_second": 2900000000, "action": "monitor"}
-
-    from yardstick.benchmark.runners import base as base_runner
-    runner = base_runner.Runner.get(runner_cfg)
-    runner.run("Iperf3", scenario_args)
-    runner.join()
-    base_runner.Runner.release(runner)
+    options = {'packetsize': 120}
+    args = {'options': options}
+    result = {}
+
+    p = Iperf(args, ctx)
+    p.run(result)
+    print(result)
+
 
 if __name__ == '__main__':
     _test()
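For context on the result handling added above: jsonutils.loads() forwards parse_int to the standard json decoder, so every integer in the iperf3 JSON comes back as a float, and the nested dict is then flattened into single-level keys before being merged into the result. A minimal, self-contained sketch of that behaviour follows; the flatten() helper and its '.' separator are stand-ins, since the exact implementation of yardstick.common.utils.flatten_dict_key is not part of this diff.

    from __future__ import print_function

    from oslo_serialization import jsonutils


    def flatten(d, prefix=''):
        """Stand-in for utils.flatten_dict_key: join nested keys into one level."""
        out = {}
        for key, value in d.items():
            name = prefix + key
            if isinstance(value, dict):
                out.update(flatten(value, name + '.'))
            else:
                out[name] = value
        return out


    # Trimmed iperf3 --json output; parse_int=float keeps every numeric field
    # a float, so the influxdb schema never mixes ints and floats.
    stdout = '{"end": {"sum_received": {"bits_per_second": 941000000}}}'
    iperf_result = jsonutils.loads(stdout, parse_int=float)
    print(flatten(iperf_result))
    # {'end.sum_received.bits_per_second': 941000000.0}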
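The new "length" and "window" options are appended to the client command the same way the existing ones are. A small illustration of just that part of run(), using placeholder option values and the target address from the removed _test() code rather than anything from a real deployment:

    # Only the option handling added in this change is reproduced here;
    # the udp/bandwidth/time/bytes branches that run() also builds are omitted.
    options = {'length': '128K', 'window': '416K'}
    cmd = "iperf3 -c %s --json" % '10.0.2.53'
    if "length" in options:
        cmd += " --length %s" % options["length"]
    if "window" in options:
        cmd += " --window %s" % options["window"]
    print(cmd)
    # iperf3 -c 10.0.2.53 --json --length 128K --window 416K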