'private_ip_attr': 'private_ip'}
result = heat.HeatContext._get_server(self.mock_context, attr_name)
- self.assertEqual(result.public_ip, '127.0.0.1')
- self.assertEqual(result.private_ip, '10.0.0.1')
+ self.assertEqual(result['ip'], '127.0.0.1')
+ self.assertEqual(result['private_ip'], '10.0.0.1')
def setUp(self):
self.ctx = {
- "host": "192.168.50.28",
- "user": "root",
- "key_filename": "mykey.key"
+ "host": {
+ "ip": "192.168.50.28",
+ "user": "root",
+ "key_filename": "mykey.key"
+ }
}
def test_cyclictest_successful_setup(self, mock_ssh):
- c = cyclictest.Cyclictest(self.ctx)
+ c = cyclictest.Cyclictest({}, self.ctx)
c.setup()
mock_ssh.SSH().execute.return_value = (0, '', '')
def test_cyclictest_successful_no_sla(self, mock_ssh):
- c = cyclictest.Cyclictest(self.ctx)
options = {
"affinity": 2,
"interval": 100,
args = {
"options": options,
}
+ c = cyclictest.Cyclictest(args, self.ctx)
result = {}
c.server = mock_ssh.SSH()
sample_output = '{"min": 100, "avg": 500, "max": 1000}'
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
- c.run(args, result)
+ c.run(result)
expected_result = json.loads(sample_output)
self.assertEqual(result, expected_result)
def test_cyclictest_successful_sla(self, mock_ssh):
- c = cyclictest.Cyclictest(self.ctx)
options = {
"affinity": 2,
"interval": 100,
"options": options,
"sla": sla
}
+ c = cyclictest.Cyclictest(args, self.ctx)
result = {}
c.server = mock_ssh.SSH()
sample_output = '{"min": 100, "avg": 500, "max": 1000}'
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
- c.run(args, result)
+ c.run(result)
expected_result = json.loads(sample_output)
self.assertEqual(result, expected_result)
def test_cyclictest_unsuccessful_sla_min_latency(self, mock_ssh):
- c = cyclictest.Cyclictest(self.ctx)
args = {
"options": {},
"sla": {"max_min_latency": 10}
}
+ c = cyclictest.Cyclictest(args, self.ctx)
result = {}
c.server = mock_ssh.SSH()
sample_output = '{"min": 100, "avg": 500, "max": 1000}'
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
- self.assertRaises(AssertionError, c.run, args, result)
+ self.assertRaises(AssertionError, c.run, result)
def test_cyclictest_unsuccessful_sla_avg_latency(self, mock_ssh):
- c = cyclictest.Cyclictest(self.ctx)
args = {
"options": {},
"sla": {"max_avg_latency": 10}
}
+ c = cyclictest.Cyclictest(args, self.ctx)
result = {}
c.server = mock_ssh.SSH()
sample_output = '{"min": 100, "avg": 500, "max": 1000}'
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
- self.assertRaises(AssertionError, c.run, args, result)
+ self.assertRaises(AssertionError, c.run, result)
def test_cyclictest_unsuccessful_sla_max_latency(self, mock_ssh):
- c = cyclictest.Cyclictest(self.ctx)
args = {
"options": {},
"sla": {"max_max_latency": 10}
}
+ c = cyclictest.Cyclictest(args, self.ctx)
result = {}
c.server = mock_ssh.SSH()
sample_output = '{"min": 100, "avg": 500, "max": 1000}'
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
- self.assertRaises(AssertionError, c.run, args, result)
+ self.assertRaises(AssertionError, c.run, result)
def test_cyclictest_unsuccessful_script_error(self, mock_ssh):
- c = cyclictest.Cyclictest(self.ctx)
options = {
"affinity": 2,
"interval": 100,
"options": options,
"sla": sla
}
+ c = cyclictest.Cyclictest(args, self.ctx)
result = {}
c.server = mock_ssh.SSH()
mock_ssh.SSH().execute.return_value = (1, '', 'FOOBAR')
- self.assertRaises(RuntimeError, c.run, args, result)
+ self.assertRaises(RuntimeError, c.run, result)
def main():
def setUp(self):
self.ctx = {
- 'host': '172.16.0.137',
- 'target': '172.16.0.138',
- 'user': 'cirros',
- 'key_filename': "mykey.key"
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'root',
+ 'key_filename': 'mykey.key'
+ },
+ 'target': {
+ 'ip': '172.16.0.138',
+ 'user': 'root',
+ 'key_filename': 'mykey.key',
+ 'ipaddr': '172.16.0.138',
+ }
}
def test_iperf_successful_setup(self, mock_ssh):
- p = iperf3.Iperf(self.ctx)
+ p = iperf3.Iperf({}, self.ctx)
mock_ssh.SSH().execute.return_value = (0, '', '')
p.setup()
def test_iperf_unsuccessful_setup(self, mock_ssh):
- p = iperf3.Iperf(self.ctx)
+ p = iperf3.Iperf({}, self.ctx)
mock_ssh.SSH().execute.return_value = (1, '', 'FOOBAR')
self.assertRaises(RuntimeError, p.setup)
def test_iperf_successful_teardown(self, mock_ssh):
- p = iperf3.Iperf(self.ctx)
+ p = iperf3.Iperf({}, self.ctx)
mock_ssh.SSH().execute.return_value = (0, '', '')
p.host = mock_ssh.SSH()
p.target = mock_ssh.SSH()
def test_iperf_successful_no_sla(self, mock_ssh):
- p = iperf3.Iperf(self.ctx)
- mock_ssh.SSH().execute.return_value = (0, '', '')
- p.host = mock_ssh.SSH()
-
options = {}
args = {'options': options}
result = {}
+ p = iperf3.Iperf(args, self.ctx)
+ mock_ssh.SSH().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH()
+
sample_output = self._read_sample_output(self.output_name_tcp)
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
expected_result = json.loads(sample_output)
- p.run(args, result)
+ p.run(result)
self.assertEqual(result, expected_result)
def test_iperf_successful_sla(self, mock_ssh):
- p = iperf3.Iperf(self.ctx)
- mock_ssh.SSH().execute.return_value = (0, '', '')
- p.host = mock_ssh.SSH()
-
options = {}
args = {
'options': options,
}
result = {}
+ p = iperf3.Iperf(args, self.ctx)
+ mock_ssh.SSH().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH()
+
sample_output = self._read_sample_output(self.output_name_tcp)
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
expected_result = json.loads(sample_output)
- p.run(args, result)
+ p.run(result)
self.assertEqual(result, expected_result)
def test_iperf_unsuccessful_sla(self, mock_ssh):
- p = iperf3.Iperf(self.ctx)
- mock_ssh.SSH().execute.return_value = (0, '', '')
- p.host = mock_ssh.SSH()
-
options = {}
args = {
'options': options,
}
result = {}
+ p = iperf3.Iperf(args, self.ctx)
+ mock_ssh.SSH().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH()
+
sample_output = self._read_sample_output(self.output_name_tcp)
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
- self.assertRaises(AssertionError, p.run, args, result)
+ self.assertRaises(AssertionError, p.run, result)
def test_iperf_successful_sla_jitter(self, mock_ssh):
- p = iperf3.Iperf(self.ctx)
- mock_ssh.SSH().execute.return_value = (0, '', '')
- p.host = mock_ssh.SSH()
-
options = {"udp":"udp","bandwidth":"20m"}
args = {
'options': options,
}
result = {}
+ p = iperf3.Iperf(args, self.ctx)
+ mock_ssh.SSH().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH()
+
sample_output = self._read_sample_output(self.output_name_udp)
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
expected_result = json.loads(sample_output)
- p.run(args, result)
+ p.run(result)
self.assertEqual(result, expected_result)
def test_iperf_unsuccessful_sla_jitter(self, mock_ssh):
- p = iperf3.Iperf(self.ctx)
- mock_ssh.SSH().execute.return_value = (0, '', '')
- p.host = mock_ssh.SSH()
-
options = {"udp":"udp","bandwidth":"20m"}
args = {
'options': options,
}
result = {}
+ p = iperf3.Iperf(args, self.ctx)
+ mock_ssh.SSH().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH()
+
sample_output = self._read_sample_output(self.output_name_udp)
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
- self.assertRaises(AssertionError, p.run, args, result)
+ self.assertRaises(AssertionError, p.run, result)
def test_iperf_unsuccessful_script_error(self, mock_ssh):
- p = iperf3.Iperf(self.ctx)
- mock_ssh.SSH().execute.return_value = (0, '', '')
- p.host = mock_ssh.SSH()
-
options = {}
args = {'options': options}
result = {}
+ p = iperf3.Iperf(args, self.ctx)
+ mock_ssh.SSH().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH()
+
mock_ssh.SSH().execute.return_value = (1, '', 'FOOBAR')
- self.assertRaises(RuntimeError, p.run, args, result)
+ self.assertRaises(RuntimeError, p.run, result)
def _read_sample_output(self,filename):
curr_path = os.path.dirname(os.path.abspath(__file__))
def setUp(self):
self.ctx = {
- 'host': '172.16.0.137',
- 'target': '172.16.0.138',
- 'user': 'cirros',
- 'key_filename': "mykey.key"
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'cirros',
+ 'key_filename': 'mykey.key'
+ },
+ 'target': {
+ 'ip': '172.16.0.138',
+ 'user': 'cirros',
+ 'key_filename': 'mykey.key',
+ 'ipaddr': '172.16.0.138'
+ }
}
def test_netperf_successful_setup(self, mock_ssh):
- p = netperf.Netperf(self.ctx)
+ p = netperf.Netperf({}, self.ctx)
mock_ssh.SSH().execute.return_value = (0, '', '')
p.setup()
def test_netperf_successful_no_sla(self, mock_ssh):
- p = netperf.Netperf(self.ctx)
- mock_ssh.SSH().execute.return_value = (0, '', '')
- p.host = mock_ssh.SSH()
-
options = {}
args = {'options': options}
result = {}
+ p = netperf.Netperf(args, self.ctx)
+ mock_ssh.SSH().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH()
+
sample_output = self._read_sample_output()
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
expected_result = json.loads(sample_output)
- p.run(args, result)
+ p.run(result)
self.assertEqual(result, expected_result)
def test_netperf_successful_sla(self, mock_ssh):
- p = netperf.Netperf(self.ctx)
- mock_ssh.SSH().execute.return_value = (0, '', '')
- p.host = mock_ssh.SSH()
-
options = {}
args = {
'options': options,
}
result = {}
+ p = netperf.Netperf(args, self.ctx)
+ mock_ssh.SSH().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH()
+
sample_output = self._read_sample_output()
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
expected_result = json.loads(sample_output)
- p.run(args, result)
+ p.run(result)
self.assertEqual(result, expected_result)
def test_netperf_unsuccessful_sla(self, mock_ssh):
- p = netperf.Netperf(self.ctx)
- mock_ssh.SSH().execute.return_value = (0, '', '')
- p.host = mock_ssh.SSH()
-
options = {}
args = {
'options': options,
}
result = {}
+ p = netperf.Netperf(args, self.ctx)
+ mock_ssh.SSH().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH()
+
sample_output = self._read_sample_output()
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
- self.assertRaises(AssertionError, p.run, args, result)
+ self.assertRaises(AssertionError, p.run, result)
def test_netperf_unsuccessful_script_error(self, mock_ssh):
- p = netperf.Netperf(self.ctx)
- mock_ssh.SSH().execute.return_value = (0, '', '')
- p.host = mock_ssh.SSH()
-
options = {}
args = {'options': options}
result = {}
+ p = netperf.Netperf(args, self.ctx)
+ mock_ssh.SSH().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH()
+
mock_ssh.SSH().execute.return_value = (1, '', 'FOOBAR')
- self.assertRaises(RuntimeError, p.run, args, result)
+ self.assertRaises(RuntimeError, p.run, result)
def _read_sample_output(self):
curr_path = os.path.dirname(os.path.abspath(__file__))
def setUp(self):
self.ctx = {
- 'host': '172.16.0.137',
- 'user': 'cirros',
- 'key_filename': "mykey.key"
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'cirros',
+ 'key_filename': "mykey.key"
+ },
+ "target": {
+ "ipaddr": "10.229.17.105",
}
+ }
@mock.patch('yardstick.benchmark.scenarios.networking.ping.ssh')
def test_ping_successful_no_sla(self, mock_ssh):
- p = ping.Ping(self.ctx)
-
args = {
'options': {'packetsize': 200},
- 'ipaddr': '172.16.0.138'
}
result = {}
+ p = ping.Ping(args, self.ctx)
+
mock_ssh.SSH().execute.return_value = (0, '100', '')
- p.run(args, result)
+ p.run(result)
self.assertEqual(result, {'rtt': 100.0})
@mock.patch('yardstick.benchmark.scenarios.networking.ping.ssh')
def test_ping_successful_sla(self, mock_ssh):
- p = ping.Ping(self.ctx)
-
args = {
'options': {'packetsize': 200},
- 'ipaddr': '172.16.0.138',
'sla': {'max_rtt': 150}
}
result = {}
+ p = ping.Ping(args, self.ctx)
+
mock_ssh.SSH().execute.return_value = (0, '100', '')
- p.run(args, result)
+ p.run(result)
self.assertEqual(result, {'rtt': 100.0})
@mock.patch('yardstick.benchmark.scenarios.networking.ping.ssh')
def test_ping_unsuccessful_sla(self, mock_ssh):
- p = ping.Ping(self.ctx)
-
args = {
'options': {'packetsize': 200},
- 'ipaddr': '172.16.0.138',
'sla': {'max_rtt': 50}
- }
+ }
result = {}
+ p = ping.Ping(args, self.ctx)
+
mock_ssh.SSH().execute.return_value = (0, '100', '')
- self.assertRaises(AssertionError, p.run, args, result)
+ self.assertRaises(AssertionError, p.run, result)
@mock.patch('yardstick.benchmark.scenarios.networking.ping.ssh')
def test_ping_unsuccessful_script_error(self, mock_ssh):
- p = ping.Ping(self.ctx)
-
args = {
'options': {'packetsize': 200},
- 'ipaddr': '172.16.0.138',
'sla': {'max_rtt': 50}
- }
+ }
result = {}
+ p = ping.Ping(args, self.ctx)
+
mock_ssh.SSH().execute.return_value = (1, '', 'FOOBAR')
- self.assertRaises(RuntimeError, p.run, args, result)
+ self.assertRaises(RuntimeError, p.run, result)
def main():
def setUp(self):
self.ctx = {
- 'host': '172.16.0.137',
- 'target': '172.16.0.138',
- 'user': 'cirros',
- 'key_filename': "mykey.key"
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'root',
+ 'key_filename': 'mykey.key'
+ },
+ 'target': {
+ 'ip': '172.16.0.138',
+ 'user': 'root',
+ 'key_filename': 'mykey.key',
+ 'ipaddr': '172.16.0.138'
+ }
}
def test_pktgen_successful_setup(self, mock_ssh):
- p = pktgen.Pktgen(self.ctx)
args = {
'options': {'packetsize': 60},
- 'ipaddr': '172.16.0.139'
}
+ p = pktgen.Pktgen(args, self.ctx)
p.setup()
mock_ssh.SSH().execute.return_value = (0, '', '')
def test_pktgen_successful_iptables_setup(self, mock_ssh):
- p = pktgen.Pktgen(self.ctx)
args = {
'options': {'packetsize': 60, 'number_of_ports': 10},
- 'ipaddr': '172.16.0.139'
}
+ p = pktgen.Pktgen(args, self.ctx)
p.server = mock_ssh.SSH()
p.number_of_ports = args['options']['number_of_ports']
def test_pktgen_unsuccessful_iptables_setup(self, mock_ssh):
- p = pktgen.Pktgen(self.ctx)
args = {
'options': {'packetsize': 60, 'number_of_ports': 10},
- 'ipaddr': '172.16.0.139'
}
+
+ p = pktgen.Pktgen(args, self.ctx)
p.server = mock_ssh.SSH()
p.number_of_ports = args['options']['number_of_ports']
def test_pktgen_successful_iptables_get_result(self, mock_ssh):
- p = pktgen.Pktgen(self.ctx)
args = {
'options': {'packetsize': 60, 'number_of_ports': 10},
- 'ipaddr': '172.16.0.139'
}
+
+ p = pktgen.Pktgen(args, self.ctx)
p.server = mock_ssh.SSH()
p.number_of_ports = args['options']['number_of_ports']
def test_pktgen_unsuccessful_iptables_get_result(self, mock_ssh):
- p = pktgen.Pktgen(self.ctx)
args = {
'options': {'packetsize': 60, 'number_of_ports': 10},
- 'ipaddr': '172.16.0.139'
}
+
+ p = pktgen.Pktgen(args, self.ctx)
+
p.server = mock_ssh.SSH()
p.number_of_ports = args['options']['number_of_ports']
def test_pktgen_successful_no_sla(self, mock_ssh):
- p = pktgen.Pktgen(self.ctx)
args = {
'options': {'packetsize': 60, 'number_of_ports': 10},
- 'ipaddr': '172.16.0.139'
}
result = {}
+ p = pktgen.Pktgen(args, self.ctx)
+
p.server = mock_ssh.SSH()
p.client = mock_ssh.SSH()
"packets_sent": 149776, "flows": 110}'
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
- p.run(args, result)
+ p.run(result)
expected_result = json.loads(sample_output)
expected_result["packets_received"] = 149300
self.assertEqual(result, expected_result)
def test_pktgen_successful_sla(self, mock_ssh):
- p = pktgen.Pktgen(self.ctx)
args = {
'options': {'packetsize': 60, 'number_of_ports': 10},
- 'ipaddr': '172.16.0.139',
'sla': {'max_ppm': 10000}
}
result = {}
+
+ p = pktgen.Pktgen(args, self.ctx)
+
p.server = mock_ssh.SSH()
p.client = mock_ssh.SSH()
"packets_sent": 149776, "flows": 110}'
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
- p.run(args, result)
+ p.run(result)
expected_result = json.loads(sample_output)
expected_result["packets_received"] = 149300
self.assertEqual(result, expected_result)
def test_pktgen_unsuccessful_sla(self, mock_ssh):
- p = pktgen.Pktgen(self.ctx)
args = {
'options': {'packetsize': 60, 'number_of_ports': 10},
- 'ipaddr': '172.16.0.139',
'sla': {'max_ppm': 1000}
}
result = {}
+ p = pktgen.Pktgen(args, self.ctx)
+
p.server = mock_ssh.SSH()
p.client = mock_ssh.SSH()
sample_output = '{"packets_per_second": 9753, "errors": 0, \
"packets_sent": 149776, "flows": 110}'
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
- self.assertRaises(AssertionError, p.run, args, result)
+ self.assertRaises(AssertionError, p.run, result)
def test_pktgen_unsuccessful_script_error(self, mock_ssh):
- p = pktgen.Pktgen(self.ctx)
args = {
'options': {'packetsize': 60, 'number_of_ports': 10},
- 'ipaddr': '172.16.0.139',
'sla': {'max_ppm': 1000}
}
result = {}
+ p = pktgen.Pktgen(args, self.ctx)
+
p.server = mock_ssh.SSH()
p.client = mock_ssh.SSH()
mock_ssh.SSH().execute.return_value = (1, '', 'FOOBAR')
- self.assertRaises(RuntimeError, p.run, args, result)
+ self.assertRaises(RuntimeError, p.run, result)
def main():
def setUp(self):
self.ctx = {
- 'host': '172.16.0.137',
- 'user': 'cirros',
- 'key_filename': 'mykey.key'
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'cirros',
+ 'key_filename': 'mykey.key'
+ }
}
self.sample_output = {
'read': 'fio_read_sample_output.json',
def test_fio_successful_setup(self, mock_ssh):
- p = fio.Fio(self.ctx)
options = {
'filename': '/home/ec2-user/data.raw',
'bs': '4k',
'ramp_time': 10
}
args = {'options': options}
+ p = fio.Fio(args, self.ctx)
p.setup()
mock_ssh.SSH().execute.return_value = (0, '', '')
def test_fio_successful_no_sla(self, mock_ssh):
- p = fio.Fio(self.ctx)
options = {
'filename': '/home/ec2-user/data.raw',
'bs': '4k',
'ramp_time': 10
}
args = {'options': options}
+ p = fio.Fio(args, self.ctx)
result = {}
p.client = mock_ssh.SSH()
sample_output = self._read_sample_output(self.sample_output['rw'])
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
- p.run(args, result)
+ p.run(result)
expected_result = '{"read_bw": 83888, "read_iops": 20972,' \
'"read_lat": 236.8, "write_bw": 84182, "write_iops": 21045,'\
def test_fio_successful_read_no_sla(self, mock_ssh):
- p = fio.Fio(self.ctx)
options = {
'filename': '/home/ec2-user/data.raw',
'bs': '4k',
'ramp_time': 10
}
args = {'options': options}
+ p = fio.Fio(args, self.ctx)
result = {}
p.client = mock_ssh.SSH()
sample_output = self._read_sample_output(self.sample_output['read'])
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
- p.run(args, result)
+ p.run(result)
expected_result = '{"read_bw": 36113, "read_iops": 9028,' \
'"read_lat": 108.7}'
def test_fio_successful_write_no_sla(self, mock_ssh):
- p = fio.Fio(self.ctx)
options = {
'filename': '/home/ec2-user/data.raw',
'bs': '4k',
'ramp_time': 10
}
args = {'options': options}
+ p = fio.Fio(args, self.ctx)
result = {}
p.client = mock_ssh.SSH()
sample_output = self._read_sample_output(self.sample_output['write'])
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
- p.run(args, result)
+ p.run(result)
expected_result = '{"write_bw": 35107, "write_iops": 8776,'\
'"write_lat": 111.74}'
def test_fio_successful_lat_sla(self, mock_ssh):
- p = fio.Fio(self.ctx)
options = {
'filename': '/home/ec2-user/data.raw',
'bs': '4k',
'options': options,
'sla': {'write_lat': 300.1}
}
+ p = fio.Fio(args, self.ctx)
result = {}
p.client = mock_ssh.SSH()
sample_output = self._read_sample_output(self.sample_output['rw'])
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
- p.run(args, result)
+ p.run(result)
expected_result = '{"read_bw": 83888, "read_iops": 20972,' \
'"read_lat": 236.8, "write_bw": 84182, "write_iops": 21045,'\
def test_fio_unsuccessful_lat_sla(self, mock_ssh):
- p = fio.Fio(self.ctx)
options = {
'filename': '/home/ec2-user/data.raw',
'bs': '4k',
'options': options,
'sla': {'write_lat': 200.1}
}
+ p = fio.Fio(args, self.ctx)
result = {}
p.client = mock_ssh.SSH()
sample_output = self._read_sample_output(self.sample_output['rw'])
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
- self.assertRaises(AssertionError, p.run, args, result)
+ self.assertRaises(AssertionError, p.run, result)
def test_fio_successful_bw_iops_sla(self, mock_ssh):
- p = fio.Fio(self.ctx)
options = {
'filename': '/home/ec2-user/data.raw',
'bs': '4k',
'options': options,
'sla': {'read_iops': 20000}
}
+ p = fio.Fio(args, self.ctx)
result = {}
p.client = mock_ssh.SSH()
sample_output = self._read_sample_output(self.sample_output['rw'])
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
- p.run(args, result)
+ p.run(result)
expected_result = '{"read_bw": 83888, "read_iops": 20972,' \
'"read_lat": 236.8, "write_bw": 84182, "write_iops": 21045,'\
def test_fio_unsuccessful_bw_iops_sla(self, mock_ssh):
- p = fio.Fio(self.ctx)
options = {
'filename': '/home/ec2-user/data.raw',
'bs': '4k',
'options': options,
'sla': {'read_iops': 30000}
}
+ p = fio.Fio(args, self.ctx)
result = {}
p.client = mock_ssh.SSH()
sample_output = self._read_sample_output(self.sample_output['rw'])
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
- self.assertRaises(AssertionError, p.run, args, result)
+ self.assertRaises(AssertionError, p.run, result)
def test_fio_unsuccessful_script_error(self, mock_ssh):
- p = fio.Fio(self.ctx)
options = {
'filename': '/home/ec2-user/data.raw',
'bs': '4k',
'ramp_time': 10
}
args = {'options': options}
+ p = fio.Fio(args, self.ctx)
result = {}
p.client = mock_ssh.SSH()
mock_ssh.SSH().execute.return_value = (1, '', 'FOOBAR')
- self.assertRaises(RuntimeError, p.run, args, result)
+ self.assertRaises(RuntimeError, p.run, result)
def _read_sample_output(self, file_name):
curr_path = os.path.dirname(os.path.abspath(__file__))
@abc.abstractmethod
def _get_server(self, attr_name):
- '''get server object by name from context
+ '''get server info by name from context
'''
@staticmethod
def get_server(attr_name):
- '''lookup server object by name from context
+ '''lookup server info by name from context
attr_name: either a name for a server created by yardstick or a dict
with attribute name mapping when using external heat templates
'''
break
if server is None:
- raise ValueError("context not found for server '%s'" %
- attr_name["name"])
+ raise ValueError("context not found for server '%r'" %
+ attr_name)
return server
##############################################################################
import sys
+import pkg_resources
from yardstick.benchmark.contexts.base import Context
from yardstick.benchmark.contexts.model import Server
print "Context '%s' undeployed" % self.name
def _get_server(self, attr_name):
- '''lookup server object by name from context
+ '''lookup server info by name from context
attr_name: either a name for a server created by yardstick or a dict
with attribute name mapping when using external heat templates
'''
+ key_filename = pkg_resources.resource_filename(
+ 'yardstick.resources', 'files/yardstick_key')
+
if type(attr_name) is dict:
cname = attr_name["name"].split(".")[1]
if cname != self.name:
server = Server(attr_name["name"].split(".")[0], self, {})
server.public_ip = public_ip
server.private_ip = private_ip
- return server
else:
if attr_name not in self._server_map:
return None
- return self._server_map[attr_name]
+ server = self._server_map[attr_name]
+
+ if server is None:
+ return None
+
+ result = {
+ "user": server.context.user,
+ "key_filename": key_filename,
+ "private_ip": server.private_ip
+ }
+ # Target server may only have private_ip
+ if server.public_ip:
+ result["ip"] = server.public_ip
+
+ return result
LOG = logging.getLogger(__name__)
-def _worker_process(queue, cls, method_name, scenario_cfg):
+def _worker_process(queue, cls, method_name, scenario_cfg, context_cfg):
sequence = 1
LOG.info("worker START, step(%s, %d, %d, %d), class %s",
arg_name, start, stop, step, cls)
- benchmark = cls(runner_cfg)
+ benchmark = cls(scenario_cfg, context_cfg)
benchmark.setup()
method = getattr(benchmark, method_name)
queue.put({'runner_id': runner_cfg['runner_id'],
- 'scenario_cfg': scenario_cfg})
+ 'scenario_cfg': scenario_cfg,
+ 'context_cfg': context_cfg})
sla_action = None
if "sla" in scenario_cfg:
errors = ""
try:
- method(scenario_cfg, data)
+ method(data)
except AssertionError as assertion:
# SLA validation failed in scenario, determine what to do now
if sla_action == "assert":
__execution_type__ = 'Arithmetic'
- def _run_benchmark(self, cls, method, scenario_cfg):
+ def _run_benchmark(self, cls, method, scenario_cfg, context_cfg):
self.process = multiprocessing.Process(
target=_worker_process,
- args=(self.result_queue, cls, method, scenario_cfg))
+ args=(self.result_queue, cls, method, scenario_cfg, context_cfg))
self.process.start()
Runner.release(runner)
def __init__(self, config, queue):
- self.context = {}
self.config = config
self.periodic_action_process = None
self.result_queue = queue
log.debug("post-stop data: \n%s" % data)
self.result_queue.put({'post-stop-action-data': data})
- def run(self, scenario_type, scenario_cfg):
+ def run(self, scenario_cfg, context_cfg):
+ scenario_type = scenario_cfg["type"]
class_name = base_scenario.Scenario.get(scenario_type)
path_split = class_name.split(".")
module_path = ".".join(path_split[:-1])
self.result_queue))
self.periodic_action_process.start()
- self._run_benchmark(cls, "run", scenario_cfg)
+ self._run_benchmark(cls, "run", scenario_cfg, context_cfg)
def join(self):
self.process.join()
LOG = logging.getLogger(__name__)
-def _worker_process(queue, cls, method_name, scenario_cfg):
+def _worker_process(queue, cls, method_name, scenario_cfg, context_cfg):
sequence = 1
runner_cfg['runner_id'] = os.getpid()
- benchmark = cls(runner_cfg)
+ benchmark = cls(scenario_cfg, context_cfg)
benchmark.setup()
method = getattr(benchmark, method_name)
sla_action = scenario_cfg["sla"].get("action", "assert")
queue.put({'runner_id': runner_cfg['runner_id'],
- 'scenario_cfg': scenario_cfg})
+ 'scenario_cfg': scenario_cfg,
+ 'context_cfg': context_cfg})
start = time.time()
while True:
errors = ""
try:
- method(scenario_cfg, data)
+ method(data)
except AssertionError as assertion:
# SLA validation failed in scenario, determine what to do now
if sla_action == "assert":
'''
__execution_type__ = 'Duration'
- def _run_benchmark(self, cls, method, scenario_cfg):
+ def _run_benchmark(self, cls, method, scenario_cfg, context_cfg):
self.process = multiprocessing.Process(
target=_worker_process,
- args=(self.result_queue, cls, method, scenario_cfg))
+ args=(self.result_queue, cls, method, scenario_cfg, context_cfg))
self.process.start()
LOG = logging.getLogger(__name__)
-def _worker_process(queue, cls, method_name, scenario_cfg):
+def _worker_process(queue, cls, method_name, scenario_cfg, context_cfg):
sequence = 1
runner_cfg['runner_id'] = os.getpid()
- benchmark = cls(runner_cfg)
+ benchmark = cls(scenario_cfg, context_cfg)
benchmark.setup()
method = getattr(benchmark, method_name)
queue.put({'runner_id': runner_cfg['runner_id'],
- 'scenario_cfg': scenario_cfg})
+ 'scenario_cfg': scenario_cfg,
+ 'context_cfg': context_cfg})
sla_action = None
if "sla" in scenario_cfg:
errors = ""
try:
- method(scenario_cfg, data)
+ method(data)
except AssertionError as assertion:
# SLA validation failed in scenario, determine what to do now
if sla_action == "assert":
'''
__execution_type__ = 'Iteration'
- def _run_benchmark(self, cls, method, scenario_cfg):
+ def _run_benchmark(self, cls, method, scenario_cfg, context_cfg):
self.process = multiprocessing.Process(
target=_worker_process,
- args=(self.result_queue, cls, method, scenario_cfg))
+ args=(self.result_queue, cls, method, scenario_cfg, context_cfg))
self.process.start()
LOG = logging.getLogger(__name__)
-def _worker_process(queue, cls, method_name, scenario_cfg):
+def _worker_process(queue, cls, method_name, scenario_cfg, context_cfg):
sequence = 1
LOG.info("worker START, sequence_values(%s, %s), class %s",
arg_name, sequence_values, cls)
- benchmark = cls(runner_cfg)
+ benchmark = cls(scenario_cfg, context_cfg)
benchmark.setup()
method = getattr(benchmark, method_name)
queue.put({'runner_id': runner_cfg['runner_id'],
- 'scenario_cfg': scenario_cfg})
+ 'scenario_cfg': scenario_cfg,
+ 'context_cfg': context_cfg})
sla_action = None
if "sla" in scenario_cfg:
errors = ""
try:
- method(scenario_cfg, data)
+ method(data)
except AssertionError as assertion:
# SLA validation failed in scenario, determine what to do now
if sla_action == "assert":
__execution_type__ = 'Sequence'
- def _run_benchmark(self, cls, method, scenario_cfg):
+ def _run_benchmark(self, cls, method, scenario_cfg, context_cfg):
self.process = multiprocessing.Process(
target=_worker_process,
- args=(self.result_queue, cls, method, scenario_cfg))
+ args=(self.result_queue, cls, method, scenario_cfg, context_cfg))
self.process.start()
TARGET_SCRIPT = "cyclictest_benchmark.bash"
- def __init__(self, context):
- self.context = context
+ def __init__(self, scenario_cfg, context_cfg):
+ self.scenario_cfg = scenario_cfg
+ self.context_cfg = context_cfg
self.setup_done = False
def setup(self):
self.target_script = pkg_resources.resource_filename(
"yardstick.benchmark.scenarios.compute",
Cyclictest.TARGET_SCRIPT)
- user = self.context.get("user", "root")
- host = self.context.get("host", None)
- key_filename = self.context.get("key_filename", "~/.ssh/id_rsa")
+ host = self.context_cfg["host"]
+ user = host.get("user", "root")
+ ip = host.get("ip", None)
+ key_filename = host.get("key_filename", "~/.ssh/id_rsa")
- LOG.debug("user:%s, host:%s", user, host)
+ LOG.debug("user:%s, host:%s", user, ip)
print "key_filename:" + key_filename
- self.client = ssh.SSH(user, host, key_filename=key_filename)
+ self.client = ssh.SSH(user, ip, key_filename=key_filename)
self.client.wait(timeout=600)
# copy script to host
self.setup_done = True
- def run(self, args, result):
+ def run(self, result):
"""execute the benchmark"""
default_args = "-m -n -q"
if not self.setup_done:
self.setup()
- options = args["options"]
+ options = self.scenario_cfg["options"]
affinity = options.get("affinity", 1)
interval = options.get("interval", 1000)
priority = options.get("priority", 99)
result.update(json.loads(stdout))
- if "sla" in args:
+ if "sla" in self.scenario_cfg:
sla_error = ""
for t, latency in result.items():
- if 'max_%s_latency' % t not in args['sla']:
+ if 'max_%s_latency' % t not in self.scenario_cfg['sla']:
continue
- sla_latency = int(args['sla']['max_%s_latency' % t])
+ sla_latency = int(self.scenario_cfg['sla'][
+ 'max_%s_latency' % t])
latency = int(latency)
if latency > sla_latency:
sla_error += "%s latency %d > sla:max_%s_latency(%d); " % \
key_filename = pkg_resources.resource_filename("yardstick.resources",
"files/yardstick_key")
ctx = {
- "host": "192.168.50.28",
- "user": "root",
- "key_filename": key_filename
+ "host": {
+ "ip": "10.229.47.137",
+ "user": "root",
+ "key_filename": key_filename
+ }
}
logger = logging.getLogger("yardstick")
logger.setLevel(logging.DEBUG)
- cyclictest = Cyclictest(ctx)
-
options = {
"affinity": 2,
"interval": 100,
"options": options,
"sla": sla
}
+ result = {}
- result = cyclictest.run(args)
+ cyclictest = Cyclictest(args, ctx)
+ cyclictest.run(result)
print result
if __name__ == '__main__':
TARGET_SCRIPT = "lmbench_benchmark.bash"
- def __init__(self, context):
- self.context = context
+ def __init__(self, scenario_cfg, context_cfg):
+ self.scenario_cfg = scenario_cfg
+ self.context_cfg = context_cfg
self.setup_done = False
def setup(self):
self.target_script = pkg_resources.resource_filename(
"yardstick.benchmark.scenarios.compute",
Lmbench.TARGET_SCRIPT)
- user = self.context.get("user", "ubuntu")
- host = self.context.get("host", None)
- key_filename = self.context.get('key_filename', "~/.ssh/id_rsa")
+ host = self.context_cfg["host"]
+ user = host.get("user", "ubuntu")
+ ip = host.get("ip", None)
+ key_filename = host.get('key_filename', "~/.ssh/id_rsa")
- LOG.info("user:%s, host:%s", user, host)
- self.client = ssh.SSH(user, host, key_filename=key_filename)
+ LOG.info("user:%s, host:%s", user, ip)
+ self.client = ssh.SSH(user, ip, key_filename=key_filename)
self.client.wait(timeout=600)
# copy script to host
self.setup_done = True
- def run(self, args, result):
+ def run(self, result):
"""execute the benchmark"""
if not self.setup_done:
self.setup()
- options = args['options']
+ options = self.scenario_cfg['options']
stride = options.get('stride', 128)
stop_size = options.get('stop_size', 16)
if status:
raise RuntimeError(stderr)
- result.update(json.loads(stdout))
-
- if "sla" in args:
+ result.update({"latencies": json.loads(stdout)})
+ if "sla" in self.scenario_cfg:
sla_error = ""
- sla_max_latency = int(args['sla']['max_latency'])
+ sla_max_latency = int(self.scenario_cfg['sla']['max_latency'])
for t_latency in result:
latency = t_latency['latency']
if latency > sla_max_latency:
"""internal test function"""
key_filename = pkg_resources.resource_filename('yardstick.resources',
'files/yardstick_key')
- ctx = {'host': '172.16.0.137',
- 'user': 'ubuntu',
- 'key_filename': key_filename
- }
+ ctx = {
+ 'host': {
+ 'ip': '10.229.47.137',
+ 'user': 'root',
+ 'key_filename': key_filename
+ }
+ }
logger = logging.getLogger('yardstick')
logger.setLevel(logging.DEBUG)
- p = Lmbench(ctx)
-
options = {'stride': 128, 'stop_size': 16}
-
args = {'options': options}
- result = p.run(args)
+ result = {}
+
+ p = Lmbench(args, ctx)
+ p.run(result)
print result
if __name__ == '__main__':
TARGET_SCRIPT = 'perf_benchmark.bash'
- def __init__(self, context):
- self.context = context
+ def __init__(self, scenario_cfg, context_cfg):
+ self.scenario_cfg = scenario_cfg
+ self.context_cfg = context_cfg
self.setup_done = False
def setup(self):
"""scenario setup"""
self.target_script = pkg_resources.resource_filename(
'yardstick.benchmark.scenarios.compute', Perf.TARGET_SCRIPT)
- user = self.context.get('user', 'ubuntu')
- host = self.context.get('host', None)
- key_filename = self.context.get('key_filename', '~/.ssh/id_rsa')
+ host = self.context_cfg['host']
+ user = host.get('user', 'ubuntu')
+ ip = host.get('ip', None)
+ key_filename = host.get('key_filename', '~/.ssh/id_rsa')
- LOG.info("user:%s, host:%s", user, host)
- self.client = ssh.SSH(user, host, key_filename=key_filename)
+ LOG.info("user:%s, host:%s", user, ip)
+ self.client = ssh.SSH(user, ip, key_filename=key_filename)
self.client.wait(timeout=600)
# copy script to host
self.setup_done = True
- def run(self, args, result):
+ def run(self, result):
"""execute the benchmark"""
if not self.setup_done:
self.setup()
- options = args['options']
+ options = self.scenario_cfg['options']
events = options.get('events', ['task-clock'])
events_string = ""
events_string += event + " "
# if run by a duration runner
- duration_time = self.context.get("duration", None)
+ duration_time = self.scenario_cfg["runner"].get("duration", None) \
+ if "runner" in self.scenario_cfg else None
# if run by an arithmetic runner
arithmetic_time = options.get("duration", None)
if duration_time:
result.update(json.loads(stdout))
- if "sla" in args:
- metric = args['sla']['metric']
- exp_val = args['sla']['expected_value']
- smaller_than_exp = 'smaller_than_expected' in args['sla']
+ if "sla" in self.scenario_cfg:
+ metric = self.scenario_cfg['sla']['metric']
+ exp_val = self.scenario_cfg['sla']['expected_value']
+ smaller_than_exp = 'smaller_than_expected' \
+ in self.scenario_cfg['sla']
if metric not in result:
assert False, "Metric (%s) not found." % metric
"""internal test function"""
key_filename = pkg_resources.resource_filename('yardstick.resources',
'files/yardstick_key')
- ctx = {'host': '172.16.0.137',
- 'user': 'ubuntu',
- 'key_filename': key_filename
- }
+ ctx = {
+ 'host': {
+ 'ip': '10.229.47.137',
+ 'user': 'root',
+ 'key_filename': key_filename
+ }
+ }
logger = logging.getLogger('yardstick')
logger.setLevel(logging.DEBUG)
- p = Perf(ctx)
-
options = {'load': True}
args = {'options': options}
+ result = {}
- result = p.run(args)
+ p = Perf(args, ctx)
+ p.run(result)
print result
if __name__ == '__main__':
"""
__scenario_type__ = "Iperf3"
- def __init__(self, context):
- self.context = context
- self.user = context.get('user', 'ubuntu')
- self.host_ipaddr = context['host']
- self.target_ipaddr = context['target']
- self.key_filename = self.context.get('key_filename', '~/.ssh/id_rsa')
+ def __init__(self, scenario_cfg, context_cfg):
+ self.scenario_cfg = scenario_cfg
+ self.context_cfg = context_cfg
self.setup_done = False
def setup(self):
- LOG.debug("setup, key %s", self.key_filename)
- LOG.info("host:%s, user:%s", self.host_ipaddr, self.user)
- self.host = ssh.SSH(self.user, self.host_ipaddr,
- key_filename=self.key_filename)
- self.host.wait(timeout=600)
-
- LOG.info("target:%s, user:%s", self.target_ipaddr, self.user)
- self.target = ssh.SSH(self.user, self.target_ipaddr,
- key_filename=self.key_filename)
+ host = self.context_cfg['host']
+ host_user = host.get('user', 'ubuntu')
+ host_ip = host.get('ip', None)
+ host_key_filename = host.get('key_filename', '~/.ssh/id_rsa')
+ target = self.context_cfg['target']
+ target_user = target.get('user', 'ubuntu')
+ target_ip = target.get('ip', None)
+ target_key_filename = target.get('key_filename', '~/.ssh/id_rsa')
+
+ LOG.info("user:%s, target:%s", target_user, target_ip)
+ self.target = ssh.SSH(target_user, target_ip,
+ key_filename=target_key_filename)
self.target.wait(timeout=600)
+ LOG.info("user:%s, host:%s", host_user, host_ip)
+ self.host = ssh.SSH(host_user, host_ip,
+ key_filename=host_key_filename)
+ self.host.wait(timeout=600)
+
cmd = "iperf3 -s -D"
LOG.debug("Starting iperf3 server with command: %s", cmd)
status, _, stderr = self.target.execute(cmd)
if status:
raise RuntimeError(stderr)
+ self.setup_done = True
+
def teardown(self):
LOG.debug("teardown")
self.host.close()
LOG.warn(stderr)
self.target.close()
- def run(self, args, result):
+ def run(self, result):
"""execute the benchmark"""
+ if not self.setup_done:
+ self.setup()
# if run by a duration runner, get the duration time and setup as arg
- time = self.context.get('duration', None)
- options = args['options']
+ time = self.scenario_cfg["runner"].get("duration", None) \
+ if "runner" in self.scenario_cfg else None
+ options = self.scenario_cfg['options']
- cmd = "iperf3 -c %s --json" % (self.target_ipaddr)
+ cmd = "iperf3 -c %s --json" % (self.context_cfg['target']['ipaddr'])
# If there are no options specified
if not options:
result.update(json.loads(stdout))
- if "sla" in args:
- sla_iperf = args["sla"]
+ if "sla" in self.scenario_cfg:
+ sla_iperf = self.scenario_cfg["sla"]
if not use_UDP:
sla_bytes_per_second = int(sla_iperf["bytes_per_second"])
def _test():
'''internal test function'''
+ key_filename = pkg_resources.resource_filename('yardstick.resources',
+ 'files/yardstick_key')
+ ctx = {
+ 'host': {
+ 'ip': '10.229.47.137',
+ 'user': 'root',
+ 'key_filename': key_filename
+ },
+ 'target': {
+ 'ip': '10.229.47.137',
+ 'user': 'root',
+ 'key_filename': key_filename,
+ 'ipaddr': '10.229.47.137',
+ }
+ }
logger = logging.getLogger('yardstick')
logger.setLevel(logging.DEBUG)
- key_filename = pkg_resources.resource_filename('yardstick.resources',
- 'files/yardstick_key')
- runner_cfg = {}
- runner_cfg['type'] = 'Duration'
- runner_cfg['duration'] = 5
- runner_cfg['host'] = '10.0.2.33'
- runner_cfg['target_ipaddr'] = '10.0.2.53'
- runner_cfg['user'] = 'ubuntu'
- runner_cfg['output_filename'] = "/tmp/yardstick.out"
- runner_cfg['key_filename'] = key_filename
-
- scenario_args = {}
- scenario_args['options'] = {"bytes": 10000000000}
- scenario_args['sla'] = \
- {"bytes_per_second": 2900000000, "action": "monitor"}
-
- from yardstick.benchmark.runners import base as base_runner
- runner = base_runner.Runner.get(runner_cfg)
- runner.run("Iperf3", scenario_args)
- runner.join()
- base_runner.Runner.release(runner)
+ options = {'packetsize': 120}
+ args = {'options': options}
+ result = {}
+
+ p = Iperf(args, ctx)
+ p.run(result)
+ print result
if __name__ == '__main__':
_test()
TARGET_SCRIPT = 'netperf_benchmark.bash'
- def __init__(self, context):
- self.context = context
+ def __init__(self, scenario_cfg, context_cfg):
+ self.scenario_cfg = scenario_cfg
+ self.context_cfg = context_cfg
self.setup_done = False
def setup(self):
self.target_script = pkg_resources.resource_filename(
'yardstick.benchmark.scenarios.networking',
Netperf.TARGET_SCRIPT)
- user = self.context.get('user', 'ubuntu')
- host = self.context.get('host', None)
- target = self.context.get('target', None)
- key_filename = self.context.get('key_filename', '~/.ssh/id_rsa')
+ host = self.context_cfg['host']
+ host_user = host.get('user', 'ubuntu')
+ host_ip = host.get('ip', None)
+ host_key_filename = host.get('key_filename', '~/.ssh/id_rsa')
+ target = self.context_cfg['target']
+ target_user = target.get('user', 'ubuntu')
+ target_ip = target.get('ip', None)
+ target_key_filename = target.get('key_filename', '~/.ssh/id_rsa')
# netserver start automatically during the vm boot
- LOG.info("user:%s, target:%s", user, target)
- self.server = ssh.SSH(user, target, key_filename=key_filename)
+ LOG.info("user:%s, target:%s", target_user, target_ip)
+ self.server = ssh.SSH(target_user, target_ip,
+ key_filename=target_key_filename)
self.server.wait(timeout=600)
- LOG.info("user:%s, host:%s", user, host)
- self.client = ssh.SSH(user, host, key_filename=key_filename)
+ LOG.info("user:%s, host:%s", host_user, host_ip)
+ self.client = ssh.SSH(host_user, host_ip,
+ key_filename=host_key_filename)
self.client.wait(timeout=600)
# copy script to host
self.setup_done = True
- def run(self, args, result):
+ def run(self, result):
"""execute the benchmark"""
if not self.setup_done:
self.setup()
# get global options
- ipaddr = args.get("ipaddr", '127.0.0.1')
- options = args['options']
+ ipaddr = self.context_cfg['target'].get("ipaddr", '127.0.0.1')
+ options = self.scenario_cfg['options']
testname = options.get("testname", 'TCP_STREAM')
- duration_time = self.context.get("duration", None)
+ duration_time = self.scenario_cfg["runner"].get("duration", None) \
+ if "runner" in self.scenario_cfg else None
arithmetic_time = options.get("duration", None)
if duration_time:
testlen = duration_time
# sla check
mean_latency = float(result['mean_latency'])
- if "sla" in args:
- sla_max_mean_latency = int(args["sla"]["mean_latency"])
+ if "sla" in self.scenario_cfg:
+ sla_max_mean_latency = int(
+ self.scenario_cfg["sla"]["mean_latency"])
assert mean_latency <= sla_max_mean_latency, \
"mean_latency %f > sla_max_mean_latency(%f); " % \
def _test():
'''internal test function'''
- logger = logging.getLogger('yardstick')
+ key_filename = pkg_resources.resource_filename("yardstick.resources",
+ "files/yardstick_key")
+ ctx = {
+ "host": {
+ "ip": "10.229.47.137",
+ "user": "root",
+ "key_filename": key_filename
+ },
+ "target": {
+ "ip": "10.229.47.137",
+ "user": "root",
+ "key_filename": key_filename,
+ "ipaddr": "10.229.47.137"
+ }
+ }
+
+ logger = logging.getLogger("yardstick")
logger.setLevel(logging.DEBUG)
- key_filename = pkg_resources.resource_filename('yardstick.resources',
- 'files/yardstick_key')
- runner_cfg = {}
- runner_cfg['type'] = 'Duration'
- runner_cfg['duration'] = 5
- runner_cfg['clinet'] = '10.0.2.33'
- runner_cfg['server'] = '10.0.2.53'
- runner_cfg['user'] = 'ubuntu'
- runner_cfg['output_filename'] = "/tmp/yardstick.out"
- runner_cfg['key_filename'] = key_filename
-
- scenario_args = {}
- scenario_args['options'] = {"testname": 'TCP_STREAM'}
-
- from yardstick.benchmark.runners import base as base_runner
- runner = base_runner.Runner.get(runner_cfg)
- runner.run("Netperf", scenario_args)
- runner.join()
- base_runner.Runner.release(runner)
+ options = {
+ "testname": 'TCP_STREAM'
+ }
+
+ args = {"options": options}
+ result = {}
+
+ netperf = Netperf(args, ctx)
+ netperf.run(result)
+ print result
if __name__ == '__main__':
_test()
TARGET_SCRIPT = 'ping_benchmark.bash'
- def __init__(self, context):
- self.context = context
+ def __init__(self, scenario_cfg, context_cfg):
+ self.scenario_cfg = scenario_cfg
+ self.context_cfg = context_cfg
self.target_script = pkg_resources.resource_filename(
'yardstick.benchmark.scenarios.networking', Ping.TARGET_SCRIPT)
- user = self.context.get('user', 'ubuntu')
- host = self.context.get('host', None)
- key_filename = self.context.get('key_filename', '~/.ssh/id_rsa')
+ host = self.context_cfg['host']
+ user = host.get('user', 'ubuntu')
+ ip = host.get('ip', None)
+ key_filename = host.get('key_filename', '~/.ssh/id_rsa')
- LOG.info("user:%s, host:%s", user, host)
+ LOG.info("user:%s, host:%s", user, ip)
- self.connection = ssh.SSH(user, host, key_filename=key_filename)
+ self.connection = ssh.SSH(user, ip, key_filename=key_filename)
self.connection.wait()
- def run(self, args, result):
+ def run(self, result):
"""execute the benchmark"""
- if "options" in args:
- options = "-s %s" % args['options'].get("packetsize", '56')
+ if "options" in self.scenario_cfg:
+ options = "-s %s" % \
+ self.scenario_cfg['options'].get("packetsize", '56')
else:
options = ""
- destination = args.get("ipaddr", '127.0.0.1')
+ destination = self.context_cfg['target'].get("ipaddr", '127.0.0.1')
LOG.debug("ping '%s' '%s'", options, destination)
result["rtt"] = float(stdout)
- if "sla" in args:
- sla_max_rtt = int(args["sla"]["max_rtt"])
+ if "sla" in self.scenario_cfg:
+ sla_max_rtt = int(self.scenario_cfg["sla"]["max_rtt"])
assert result["rtt"] <= sla_max_rtt, "rtt %f > sla:max_rtt(%f); " % \
(result["rtt"], sla_max_rtt)
+
+
+def _test():
+ '''internal test function'''
+ key_filename = pkg_resources.resource_filename("yardstick.resources",
+ "files/yardstick_key")
+ ctx = {
+ "host": {
+ "ip": "10.229.47.137",
+ "user": "root",
+ "key_filename": key_filename
+ },
+ "target": {
+ "ipaddr": "10.229.17.105",
+ }
+ }
+
+ logger = logging.getLogger("yardstick")
+ logger.setLevel(logging.DEBUG)
+
+ args = {}
+ result = {}
+
+ p = Ping(args, ctx)
+ p.run(result)
+ print result
+
+if __name__ == '__main__':
+ _test()
TARGET_SCRIPT = 'pktgen_benchmark.bash'
- def __init__(self, context):
- self.context = context
+ def __init__(self, scenario_cfg, context_cfg):
+ self.scenario_cfg = scenario_cfg
+ self.context_cfg = context_cfg
self.setup_done = False
def setup(self):
self.target_script = pkg_resources.resource_filename(
'yardstick.benchmark.scenarios.networking',
Pktgen.TARGET_SCRIPT)
- user = self.context.get('user', 'ubuntu')
- host = self.context.get('host', None)
- target = self.context.get('target', None)
- key_filename = self.context.get('key_filename', '~/.ssh/id_rsa')
-
- LOG.info("user:%s, target:%s", user, target)
- self.server = ssh.SSH(user, target, key_filename=key_filename)
+ host = self.context_cfg['host']
+ host_user = host.get('user', 'ubuntu')
+ host_ip = host.get('ip', None)
+ host_key_filename = host.get('key_filename', '~/.ssh/id_rsa')
+ target = self.context_cfg['target']
+ target_user = target.get('user', 'ubuntu')
+ target_ip = target.get('ip', None)
+ target_key_filename = target.get('key_filename', '~/.ssh/id_rsa')
+
+ LOG.info("user:%s, target:%s", target_user, target_ip)
+ self.server = ssh.SSH(target_user, target_ip,
+ key_filename=target_key_filename)
self.server.wait(timeout=600)
- LOG.info("user:%s, host:%s", user, host)
- self.client = ssh.SSH(user, host, key_filename=key_filename)
+ LOG.info("user:%s, host:%s", host_user, host_ip)
+ self.client = ssh.SSH(host_user, host_ip,
+ key_filename=host_key_filename)
self.client.wait(timeout=600)
# copy script to host
raise RuntimeError(stderr)
return int(stdout)
- def run(self, args, result):
+ def run(self, result):
"""execute the benchmark"""
if not self.setup_done:
self.setup()
- ipaddr = args.get("ipaddr", '127.0.0.1')
+ ipaddr = self.context_cfg["target"].get("ipaddr", '127.0.0.1')
- options = args['options']
+ options = self.scenario_cfg['options']
packetsize = options.get("packetsize", 60)
self.number_of_ports = options.get("number_of_ports", 10)
# if run by a duration runner
- duration_time = self.context.get("duration", None)
+ duration_time = self.scenario_cfg["runner"].get("duration", None) \
+ if "runner" in self.scenario_cfg else None
# if run by an arithmetic runner
arithmetic_time = options.get("duration", None)
result['packets_received'] = self._iptables_get_result()
- if "sla" in args:
+ if "sla" in self.scenario_cfg:
sent = result['packets_sent']
received = result['packets_received']
ppm = 1000000 * (sent - received) / sent
- sla_max_ppm = int(args["sla"]["max_ppm"])
+ sla_max_ppm = int(self.scenario_cfg["sla"]["max_ppm"])
assert ppm <= sla_max_ppm, "ppm %d > sla_max_ppm %d; " \
% (ppm, sla_max_ppm)
'''internal test function'''
key_filename = pkg_resources.resource_filename('yardstick.resources',
'files/yardstick_key')
- ctx = {'host': '172.16.0.137',
- 'target': '172.16.0.138',
- 'user': 'ubuntu',
- 'key_filename': key_filename
- }
+ ctx = {
+ 'host': {
+ 'ip': '10.229.47.137',
+ 'user': 'root',
+ 'key_filename': key_filename
+ },
+ 'target': {
+ 'ip': '10.229.47.137',
+ 'user': 'root',
+ 'key_filename': key_filename,
+ 'ipaddr': '10.229.47.137',
+ }
+ }
logger = logging.getLogger('yardstick')
logger.setLevel(logging.DEBUG)
- p = Pktgen(ctx)
-
options = {'packetsize': 120}
+ args = {'options': options}
+ result = {}
- args = {'options': options,
- 'ipaddr': '192.168.111.31'}
- result = p.run(args)
+ p = Pktgen(args, ctx)
+ p.run(result)
print result
if __name__ == '__main__':
TARGET_SCRIPT = "fio_benchmark.bash"
- def __init__(self, context):
- self.context = context
+ def __init__(self, scenario_cfg, context_cfg):
+ self.scenario_cfg = scenario_cfg
+ self.context_cfg = context_cfg
self.setup_done = False
def setup(self):
self.target_script = pkg_resources.resource_filename(
"yardstick.benchmark.scenarios.storage",
Fio.TARGET_SCRIPT)
- user = self.context.get("user", "root")
- host = self.context.get("host", None)
- key_filename = self.context.get("key_filename", "~/.ssh/id_rsa")
+ host = self.context_cfg["host"]
+ user = host.get("user", "root")
+ ip = host.get("ip", None)
+ key_filename = host.get("key_filename", "~/.ssh/id_rsa")
- LOG.info("user:%s, host:%s", user, host)
- self.client = ssh.SSH(user, host, key_filename=key_filename)
+ LOG.info("user:%s, host:%s", user, ip)
+ self.client = ssh.SSH(user, ip, key_filename=key_filename)
self.client.wait(timeout=600)
# copy script to host
self.setup_done = True
- def run(self, args, result):
+ def run(self, result):
"""execute the benchmark"""
default_args = "-ioengine=libaio -direct=1 -group_reporting " \
"-numjobs=1 -time_based --output-format=json"
if not self.setup_done:
self.setup()
- options = args["options"]
+ options = self.scenario_cfg["options"]
filename = options.get("filename", "/home/ec2-user/data.raw")
bs = options.get("bs", "4k")
iodepth = options.get("iodepth", "1")
ramp_time = options.get("ramp_time", 20)
name = "yardstick-fio"
# if run by a duration runner
- duration_time = self.context.get("duration", None)
+ duration_time = self.scenario_cfg["runner"].get("duration", None) \
+ if "runner" in self.scenario_cfg else None
# if run by an arithmetic runner
arithmetic_time = options.get("duration", None)
if duration_time:
result["write_iops"] = raw_data["jobs"][0]["write"]["iops"]
result["write_lat"] = raw_data["jobs"][0]["write"]["lat"]["mean"]
- if "sla" in args:
+ if "sla" in self.scenario_cfg:
sla_error = ""
for k, v in result.items():
- if k not in args['sla']:
+ if k not in self.scenario_cfg['sla']:
continue
if "lat" in k:
# For lattency small value is better
- max_v = float(args['sla'][k])
+ max_v = float(self.scenario_cfg['sla'][k])
if v > max_v:
sla_error += "%s %f > sla:%s(%f); " % (k, v, k, max_v)
else:
# For bandwidth and iops big value is better
- min_v = int(args['sla'][k])
+ min_v = int(self.scenario_cfg['sla'][k])
if v < min_v:
sla_error += "%s %d < " \
"sla:%s(%d); " % (k, v, k, min_v)
key_filename = pkg_resources.resource_filename("yardstick.resources",
"files/yardstick_key")
ctx = {
- "host": "10.0.0.101",
- "user": "ec2-user",
- "key_filename": key_filename
+ "host": {
+ "ip": "10.229.47.137",
+ "user": "root",
+ "key_filename": key_filename
+ }
}
logger = logging.getLogger("yardstick")
logger.setLevel(logging.DEBUG)
- fio = Fio(ctx)
-
options = {
"filename": "/home/ec2-user/data.raw",
"bs": "4k",
"ramp_time": 1,
"duration": 10
}
+ result = {}
args = {"options": options}
- result = fio.run(args)
+ fio = Fio(args, ctx)
+ fio.run(result)
print result
if __name__ == '__main__':
import os
import yaml
import atexit
-import pkg_resources
import ipaddress
from yardstick.benchmark.contexts.base import Context
return False
-def run_one_scenario(scenario_cfg, output_file):
- '''run one scenario using context'''
- key_filename = pkg_resources.resource_filename(
- 'yardstick.resources', 'files/yardstick_key')
+def _is_same_heat_context(host_attr, target_attr):
+ '''check if two servers are in the same heat context
+ host_attr: either a name for a server created by yardstick or a dict
+ with attribute name mapping when using external heat templates
+ target_attr: either a name for a server created by yardstick or a dict
+ with attribute name mapping when using external heat templates
+ '''
+ host = None
+ target = None
+ for context in Context.list:
+ if context.__context_type__ != "Heat":
+ continue
+
+ host = context._get_server(host_attr)
+ if host is None:
+ continue
+
+ target = context._get_server(target_attr)
+ if target is None:
+ return False
+
+            # Both host and target are not None, so they must be in the
+            # same heat context.
+            return True
+
+ return False
- # TODO support get multi hosts/vms info
- host = Context.get_server(scenario_cfg["host"])
+def run_one_scenario(scenario_cfg, output_file):
+ '''run one scenario using context'''
runner_cfg = scenario_cfg["runner"]
- runner_cfg['host'] = host.public_ip
- runner_cfg['user'] = host.context.user
- runner_cfg['key_filename'] = key_filename
runner_cfg['output_filename'] = output_file
+ # TODO support get multi hosts/vms info
+ context_cfg = {}
+ context_cfg['host'] = Context.get_server(scenario_cfg["host"])
+
if "target" in scenario_cfg:
if is_ip_addr(scenario_cfg["target"]):
- scenario_cfg["ipaddr"] = scenario_cfg["target"]
+ context_cfg['target'] = {}
+ context_cfg['target']["ipaddr"] = scenario_cfg["target"]
else:
- target = Context.get_server(scenario_cfg["target"])
-
- # get public IP for target server, some scenarios require it
- if target.public_ip:
- runner_cfg['target'] = target.public_ip
-
- # TODO scenario_cfg["ipaddr"] is bad naming
- if host.context != target.context:
- # target is in another context, get its public IP
- scenario_cfg["ipaddr"] = target.public_ip
+ context_cfg['target'] = Context.get_server(scenario_cfg["target"])
+ if _is_same_heat_context(scenario_cfg["host"],
+ scenario_cfg["target"]):
+ context_cfg["target"]["ipaddr"] = \
+ context_cfg["target"]["private_ip"]
else:
- # target is in the same context, get its private IP
- scenario_cfg["ipaddr"] = target.private_ip
+ context_cfg["target"]["ipaddr"] = \
+ context_cfg["target"]["ip"]
runner = base_runner.Runner.get(runner_cfg)
print "Starting runner of type '%s'" % runner_cfg["type"]
- runner.run(scenario_cfg["type"], scenario_cfg)
+ runner.run(scenario_cfg, context_cfg)
return runner