yardstick/benchmark/scenarios/networking/netperf.py
##############################################################################
# Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
# bulk data test and req/rsp test are supported
import pkg_resources
import logging
import json

import yardstick.ssh as ssh
from yardstick.benchmark.scenarios import base

LOG = logging.getLogger(__name__)


class Netperf(base.Scenario):
    """Execute netperf between two hosts

    Parameters
    testname - name of the test to perform
        valid testnames are TCP_STREAM, TCP_RR, UDP_STREAM, UDP_RR
        type:    string
        unit:    na
        default: TCP_STREAM
    send_msg_size - set the local send size to the given number of bytes
        type:    int
        unit:    bytes
        default: na
    recv_msg_size - set the remote receive size to the given number of bytes
        type:    int
        unit:    bytes
        default: na
    req_rsp_size - set the request and/or response sizes based on sizespec
        type:    string
        unit:    na
        default: na
    duration - duration of the test
        type:    int
        unit:    seconds
        default: 20

    see the link below for a more detailed description of the netperf options:
    http://www.netperf.org/netperf/training/Netperf.html
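
    Example task "options" section using the parameters above
    (values are illustrative, not defaults):
        options:
          testname: TCP_RR
          req_rsp_size: 32,1024
          duration: 30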
    """
    __scenario_type__ = "Netperf"

    TARGET_SCRIPT = 'netperf_benchmark.bash'

    def __init__(self, context):
        self.context = context
        self.setup_done = False

    def setup(self):
        """scenario setup"""
        self.target_script = pkg_resources.resource_filename(
            'yardstick.benchmark.scenarios.networking',
            Netperf.TARGET_SCRIPT)
        user = self.context.get('user', 'ubuntu')
        host = self.context.get('host', None)
        target = self.context.get('target', None)
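        # 'host' is the VM that runs the netperf client; 'target' is the VM
        # that is expected to be running netserver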
        key_filename = self.context.get('key_filename', '~/.ssh/id_rsa')

        # netserver starts automatically during VM boot
        LOG.info("user:%s, target:%s", user, target)
        self.server = ssh.SSH(user, target, key_filename=key_filename)
        self.server.wait(timeout=600)

        LOG.info("user:%s, host:%s", user, host)
        self.client = ssh.SSH(user, host, key_filename=key_filename)
        self.client.wait(timeout=600)

        # copy script to host
        self.client.run("cat > ~/netperf.sh",
                        stdin=open(self.target_script, "rb"))

        self.setup_done = True

    def run(self, args, result):
        """execute the benchmark"""

        if not self.setup_done:
            self.setup()

        # get global options
        ipaddr = args.get("ipaddr", '127.0.0.1')
        options = args['options']
        testname = options.get("testname", 'TCP_STREAM')
        duration_time = self.context.get("duration", None)
        arithmetic_time = options.get("duration", None)
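        # a runner-level duration from the context takes precedence over the
        # scenario option; fall back to 20 seconds if neither is given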
        if duration_time:
            testlen = duration_time
        elif arithmetic_time:
            testlen = arithmetic_time
        else:
            testlen = 20

        cmd_args = "-H %s -l %s -t %s" % (ipaddr, testlen, testname)

        # get test specific options
        default_args = "-O 'THROUGHPUT,THROUGHPUT_UNITS,MEAN_LATENCY'"
        cmd_args += " -- %s" % default_args
        option_pair_list = [("send_msg_size", "-m"),
                            ("recv_msg_size", "-M"),
                            ("req_rsp_size", "-r")]
        for option_pair in option_pair_list:
            if option_pair[0] in options:
                cmd_args += " %s %s" % (option_pair[1],
                                        options[option_pair[0]])

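        # e.g. with ipaddr=10.0.0.5, testname=TCP_RR and req_rsp_size=32,1024
        # (illustrative values) the script is invoked roughly as:
        #   netperf.sh -H 10.0.0.5 -l 20 -t TCP_RR -- \
        #     -O 'THROUGHPUT,THROUGHPUT_UNITS,MEAN_LATENCY' -r 32,1024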
        cmd = "sudo bash netperf.sh %s" % cmd_args
        LOG.debug("Executing command: %s", cmd)
        status, stdout, stderr = self.client.execute(cmd)

        if status:
            raise RuntimeError(stderr)

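        # netperf_benchmark.bash is expected to print a flat JSON object that
        # includes at least a 'mean_latency' field along with the throughput
        # fields selected via -O above (assumption based on the keys used here)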
        result.update(json.loads(stdout))

        if result['mean_latency'] == '':
            raise RuntimeError(stdout)

        # sla check
        mean_latency = float(result['mean_latency'])
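        # the sla section of the task args is optional; illustrative form:
        #   sla: {mean_latency: 100}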
        if "sla" in args:
            sla_max_mean_latency = int(args["sla"]["mean_latency"])

            assert mean_latency <= sla_max_mean_latency, \
                "mean_latency %f > sla_max_mean_latency(%f); " % \
                (mean_latency, sla_max_mean_latency)


def _test():
    """internal test function"""
    logger = logging.getLogger('yardstick')
    logger.setLevel(logging.DEBUG)

    key_filename = pkg_resources.resource_filename('yardstick.resources',
                                                   'files/yardstick_key')
    runner_cfg = {}
    runner_cfg['type'] = 'Duration'
    runner_cfg['duration'] = 5
    runner_cfg['host'] = '10.0.2.33'
    runner_cfg['target'] = '10.0.2.53'
    runner_cfg['user'] = 'ubuntu'
    runner_cfg['output_filename'] = "/tmp/yardstick.out"
    runner_cfg['key_filename'] = key_filename

    scenario_args = {}
    scenario_args['options'] = {"testname": 'TCP_STREAM'}

    from yardstick.benchmark.runners import base as base_runner
    runner = base_runner.Runner.get(runner_cfg)
    runner.run("Netperf", scenario_args)
    runner.join()
    base_runner.Runner.release(runner)


if __name__ == '__main__':
    _test()