1 ##############################################################################
2 # Copyright (c) 2015 Ericsson AB and others.
4 # All rights reserved. This program and the accompanying materials
5 # are made available under the terms of the Apache License, Version 2.0
6 # which accompanies this distribution, and is available at
7 # http://www.apache.org/licenses/LICENSE-2.0
8 ##############################################################################
from __future__ import absolute_import
from __future__ import print_function

import logging

import pkg_resources
from oslo_serialization import jsonutils

import yardstick.ssh as ssh
from yardstick.benchmark.scenarios import base
# Module-level logger named after this module, per the logging convention.
LOG = logging.getLogger(__name__)
class Perf(base.Scenario):
    """Execute perf benchmark in a host.

    Parameters
      events - perf tool software, hardware or tracepoint events
          type:       [str]
          unit:       na
          default:    ['task-clock']
      load - simulate load on the host by doing IO operations
          type:       bool
          unit:       na
          default:    false

    For more info about perf and perf events see https://perf.wiki.kernel.org
    """

    __scenario_type__ = "Perf"

    TARGET_SCRIPT = 'perf_benchmark.bash'

    def __init__(self, scenario_cfg, context_cfg):
        self.scenario_cfg = scenario_cfg
        self.context_cfg = context_cfg
        # setup() is deferred until the first run() call
        self.setup_done = False

    def setup(self):
        """Scenario setup: open an SSH connection to the target host and
        copy the benchmark script onto it."""
        self.target_script = pkg_resources.resource_filename(
            'yardstick.benchmark.scenarios.compute', Perf.TARGET_SCRIPT)
        host = self.context_cfg['host']
        user = host.get('user', 'ubuntu')
        ssh_port = host.get("ssh_port", ssh.DEFAULT_PORT)
        ip = host.get('ip', None)
        key_filename = host.get('key_filename', '~/.ssh/id_rsa')

        LOG.info("user:%s, host:%s", user, ip)
        self.client = ssh.SSH(user, ip, key_filename=key_filename,
                              port=ssh_port)
        # block (up to 10 minutes) until the host accepts SSH connections
        self.client.wait(timeout=600)

        # copy the benchmark script to the target so it can be run remotely
        self.client._put_file_shell(self.target_script, '~/perf_benchmark.sh')

        self.setup_done = True

    def run(self, result):
        """Execute the benchmark on the remote host and update *result*
        with the parsed perf counters.

        :param result: dict that receives the JSON-decoded perf output
        :raises RuntimeError: if the remote command exits non-zero
        :raises AssertionError: if an SLA check fails
        """
        if not self.setup_done:
            self.setup()

        options = self.scenario_cfg['options']
        events = options.get('events', ['task-clock'])

        # space-separated event list passed straight to the perf command line
        events_string = " ".join(events)

        # if run by a duration runner, use its configured duration
        duration_time = self.scenario_cfg["runner"].get("duration", None) \
            if "runner" in self.scenario_cfg else None
        # if run by an arithmetic runner, the duration comes from the options
        arithmetic_time = options.get("duration", None)
        if duration_time:
            duration = duration_time
        elif arithmetic_time:
            duration = arithmetic_time
        else:
            # fall back to a fixed 30 second measurement window
            duration = 30

        if options.get('load'):
            # generate IO load on the host while measuring
            load = "dd if=/dev/urandom of=/dev/null"
        else:
            # idle measurement: just keep the shell alive for the window
            load = "sleep %d" % duration

        cmd = "sudo bash perf_benchmark.sh '%s' %d %s" \
            % (load, duration, events_string)

        LOG.debug("Executing command: %s", cmd)
        status, stdout, stderr = self.client.execute(cmd)

        # non-zero exit status means the remote script failed
        if status:
            raise RuntimeError(stdout)

        result.update(jsonutils.loads(stdout))

        if "sla" in self.scenario_cfg:
            metric = self.scenario_cfg['sla']['metric']
            exp_val = self.scenario_cfg['sla']['expected_value']
            smaller_than_exp = 'smaller_than_expected' \
                in self.scenario_cfg['sla']

            # SLA violations are reported via AssertionError, matching the
            # convention used by the other scenarios in this package
            if metric not in result:
                assert False, "Metric (%s) not found." % metric
            else:
                if smaller_than_exp:
                    assert result[metric] < exp_val, "%s %d >= %d (sla); " \
                        % (metric, result[metric], exp_val)
                else:
                    assert result[metric] >= exp_val, "%s %d < %d (sla); " \
                        % (metric, result[metric], exp_val)
127 """internal test function"""
128 key_filename = pkg_resources.resource_filename('yardstick.resources',
129 'files/yardstick_key')
132 'ip': '10.229.47.137',
134 'key_filename': key_filename
138 logger = logging.getLogger('yardstick')
139 logger.setLevel(logging.DEBUG)
141 options = {'load': True}
142 args = {'options': options}
# Allow running this scenario standalone for manual testing.
if __name__ == '__main__':
    _test()