1 ##############################################################################
2 # Copyright (c) 2015 Ericsson AB and others.
4 # All rights reserved. This program and the accompanying materials
5 # are made available under the terms of the Apache License, Version 2.0
6 # which accompanies this distribution, and is available at
7 # http://www.apache.org/licenses/LICENSE-2.0
8 ##############################################################################
import json
import logging

import pkg_resources

import yardstick.ssh as ssh
from yardstick.benchmark.scenarios import base
# Module-level logger; level is explicitly lowered to DEBUG so this
# scenario's debug output is emitted regardless of the root logger default.
LOG = logging.getLogger(__name__)
LOG.setLevel(logging.DEBUG)
class Perf(base.Scenario):
    """Execute perf benchmark in a host

    Parameters
      events - perf tool software, hardware or tracepoint events
        type:    [str]
        unit:    na
        default: ['task-clock']
      load - simulate load on the host by doing IO operations
        type:    bool
        unit:    na
        default: false

    For more info about perf and perf events see https://perf.wiki.kernel.org
    """

    __scenario_type__ = "Perf"

    # benchmark script shipped with the package and copied to the target host
    TARGET_SCRIPT = 'perf_benchmark.bash'

    def __init__(self, context):
        # context: scenario context dict; keys read later include
        # 'user', 'host', 'key_filename' and 'duration'
        self.context = context
        self.setup_done = False

    def setup(self):
        """Scenario setup: connect to the host and push the benchmark script."""
        self.target_script = pkg_resources.resource_filename(
            'yardstick.benchmark.scenarios.compute', Perf.TARGET_SCRIPT)
        user = self.context.get('user', 'ubuntu')
        host = self.context.get('host', None)
        key_filename = self.context.get('key_filename', '~/.ssh/id_rsa')

        LOG.debug("user:%s, host:%s", user, host)
        self.client = ssh.SSH(user, host, key_filename=key_filename)
        self.client.wait(timeout=600)

        # copy script to the remote host; use a context manager so the local
        # file handle is closed (the original leaked it)
        with open(self.target_script, "rb") as script_file:
            self.client.run("cat > ~/perf_benchmark.sh", stdin=script_file)

        self.setup_done = True

    def run(self, args):
        """Execute the benchmark.

        args: dict from the task file; must contain 'options' and may
        contain 'sla'. Returns the benchmark output parsed from JSON.
        Raises RuntimeError if the remote command exits non-zero;
        AssertionError if an SLA check fails.
        """
        if not self.setup_done:
            self.setup()

        options = args['options']
        events = options.get('events', ['task-clock'])

        # perf takes a space-separated list of events
        events_string = ""
        for event in events:
            events_string += event + " "

        # if run by a duration runner
        duration_time = self.context.get("duration", None)
        # if run by an arithmetic runner
        arithmetic_time = options.get("duration", None)
        if duration_time:
            duration = duration_time
        elif arithmetic_time:
            duration = arithmetic_time
        else:
            # NOTE(review): fallback restored from the upstream scenario;
            # confirm 30s is the intended default when no runner sets one
            duration = 30

        # simulate load with IO when requested, otherwise just idle for
        # the measurement interval
        if 'load' in options:
            load = "dd if=/dev/urandom of=/dev/null"
        else:
            load = "sleep %d" % duration

        cmd = "sudo bash perf_benchmark.sh '%s' %d %s" \
            % (load, duration, events_string)

        LOG.debug("Executing command: %s", cmd)
        status, stdout, stderr = self.client.execute(cmd)

        if status:
            # non-zero exit status: surface the remote output as the error
            raise RuntimeError(stdout)

        output = json.loads(stdout)

        if "sla" in args:
            metric = args['sla']['metric']
            exp_val = args['sla']['expected_value']
            smaller_than_exp = 'smaller_than_expected' in args['sla']

            if metric not in output:
                assert False, "Metric (%s) not found." % metric
            else:
                if smaller_than_exp:
                    assert output[metric] < exp_val, "%s %d >= %d (sla)" \
                        % (metric, output[metric], exp_val)
                else:
                    assert output[metric] >= exp_val, "%s %d < %d (sla)" \
                        % (metric, output[metric], exp_val)

        return output
120 """internal test function"""
121 key_filename = pkg_resources.resource_filename('yardstick.resources',
122 'files/yardstick_key')
123 ctx = {'host': '172.16.0.137',
125 'key_filename': key_filename
128 logger = logging.getLogger('yardstick')
129 logger.setLevel(logging.DEBUG)
133 options = {'load': True}
134 args = {'options': options}
if __name__ == '__main__':
    # run the internal smoke test when invoked as a script
    _test()