1 ##############################################################################
2 # Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
4 # All rights reserved. This program and the accompanying materials
5 # are made available under the terms of the Apache License, Version 2.0
6 # which accompanies this distribution, and is available at
7 # http://www.apache.org/licenses/LICENSE-2.0
8 ##############################################################################
9 # bulk data test and req/rsp test are supported
14 import yardstick.ssh as ssh
15 from yardstick.benchmark.scenarios import base
17 LOG = logging.getLogger(__name__)
class Netperf(base.Scenario):
    """Execute netperf between two hosts.

    Options (from the scenario 'options' dict):
      testname - the netperf test to perform; valid testnames are
        TCP_STREAM, TCP_RR, UDP_STREAM, UDP_RR (default: TCP_STREAM)
      send_msg_size - set the local send size to value bytes (netperf -m)
      recv_msg_size - set the receive size for the remote system (netperf -M)
      req_rsp_size - set the request and/or response sizes based on
        sizespec (netperf -r)
      duration - duration of the test in seconds

    read link below for more netperf args description:
    http://www.netperf.org/netperf/training/Netperf.html
    """
    __scenario_type__ = "Netperf"

    TARGET_SCRIPT = 'netperf_benchmark.bash'

    def __init__(self, context):
        # context carries user/host/target/key_filename and runner options
        self.context = context
        self.setup_done = False

    def setup(self):
        """Prepare both VMs: wait for SSH and push the benchmark script."""
        self.target_script = pkg_resources.resource_filename(
            'yardstick.benchmark.scenarios.networking',
            Netperf.TARGET_SCRIPT)
        user = self.context.get('user', 'ubuntu')
        host = self.context.get('host', None)
        target = self.context.get('target', None)
        key_filename = self.context.get('key_filename', '~/.ssh/id_rsa')

        # netserver start automatically during the vm boot
        LOG.info("user:%s, target:%s", user, target)
        self.server = ssh.SSH(user, target, key_filename=key_filename)
        self.server.wait(timeout=600)

        LOG.info("user:%s, host:%s", user, host)
        self.client = ssh.SSH(user, host, key_filename=key_filename)
        self.client.wait(timeout=600)

        # copy the benchmark script to the client VM; use a context
        # manager so the local file handle is not leaked
        with open(self.target_script, "rb") as script_file:
            self.client.run("cat > ~/netperf.sh", stdin=script_file)

        self.setup_done = True

    def run(self, args, result):
        """Execute the benchmark and fill *result* with the parsed output.

        Raises RuntimeError when the remote script fails or produces no
        latency figure; raises AssertionError when an SLA is configured
        and the measured mean latency exceeds it.
        """
        # lazily set up the two SSH sessions on first invocation
        if not self.setup_done:
            self.setup()

        ipaddr = args.get("ipaddr", '127.0.0.1')
        options = args['options']
        testname = options.get("testname", 'TCP_STREAM')

        # test length precedence: runner-level duration, then the
        # scenario-level option, then a 20 second default
        duration_time = self.context.get("duration", None)
        arithmetic_time = options.get("duration", None)
        if duration_time:
            testlen = duration_time
        elif arithmetic_time:
            testlen = arithmetic_time
        else:
            testlen = 20

        cmd_args = "-H %s -l %s -t %s" % (ipaddr, testlen, testname)

        # get test specific options
        default_args = "-O 'THROUGHPUT,THROUGHPUT_UNITS,MEAN_LATENCY'"
        cmd_args += " -- %s" % default_args
        option_pair_list = [("send_msg_size", "-m"),
                            ("recv_msg_size", "-M"),
                            ("req_rsp_size", "-r")]
        for option_name, switch in option_pair_list:
            if option_name in options:
                cmd_args += " %s %s" % (switch, options[option_name])

        cmd = "sudo bash netperf.sh %s" % cmd_args
        LOG.debug("Executing command: %s", cmd)
        status, stdout, stderr = self.client.execute(cmd)
        if status:
            # non-zero exit status from the remote benchmark script
            raise RuntimeError(stderr)

        result.update(json.loads(stdout))

        # an empty latency field means netperf produced no usable output
        if result['mean_latency'] == '':
            raise RuntimeError(stdout)

        # sla check -- only enforced when the scenario declares one
        mean_latency = float(result['mean_latency'])
        if "sla" in args:
            sla_max_mean_latency = int(args["sla"]["mean_latency"])
            assert mean_latency <= sla_max_mean_latency, \
                "mean_latency %f > sla_max_mean_latency(%f); " % \
                (mean_latency, sla_max_mean_latency)
def _test():
    """Internal test function: run one Netperf scenario via a Duration runner."""
    logger = logging.getLogger('yardstick')
    logger.setLevel(logging.DEBUG)

    key_filename = pkg_resources.resource_filename('yardstick.resources',
                                                   'files/yardstick_key')

    runner_cfg = {}
    runner_cfg['type'] = 'Duration'
    runner_cfg['duration'] = 5
    # the scenario context reads 'host' and 'target' (see Netperf.setup);
    # the old keys 'clinet' (typo) and 'server' were never consumed
    runner_cfg['host'] = '10.0.2.33'
    runner_cfg['target'] = '10.0.2.53'
    runner_cfg['user'] = 'ubuntu'
    runner_cfg['output_filename'] = "/tmp/yardstick.out"
    runner_cfg['key_filename'] = key_filename

    scenario_args = {}
    scenario_args['options'] = {"testname": 'TCP_STREAM'}

    from yardstick.benchmark.runners import base as base_runner
    runner = base_runner.Runner.get(runner_cfg)
    runner.run("Netperf", scenario_args)
    # wait for the runner to finish before releasing it
    runner.join()
    base_runner.Runner.release(runner)


if __name__ == '__main__':
    _test()