Extend lmbench scenario to measure memory bandwidth
[yardstick.git] / tests / unit / benchmark / scenarios / compute / test_lmbench.py
1 #!/usr/bin/env python
2
3 ##############################################################################
4 # Copyright (c) 2015 Ericsson AB and others.
5 #
6 # All rights reserved. This program and the accompanying materials
7 # are made available under the terms of the Apache License, Version 2.0
8 # which accompanies this distribution, and is available at
9 # http://www.apache.org/licenses/LICENSE-2.0
10 ##############################################################################
11
12 # Unittest for yardstick.benchmark.scenarios.compute.lmbench.Lmbench
13
14 import mock
15 import unittest
16 import json
17
18 from yardstick.benchmark.scenarios.compute import lmbench
19
20
@mock.patch('yardstick.benchmark.scenarios.compute.lmbench.ssh')
class LmbenchTestCase(unittest.TestCase):
    """Unit tests for the Lmbench compute scenario.

    Covers both measurement modes ("latency" and "bandwidth"), with and
    without an SLA, plus the unknown-test-type and remote-script-failure
    error paths. The ssh module is patched at class level, so every test
    method receives the mock as its second argument.
    """

    def setUp(self):
        # Minimal scenario context: one target host reachable over SSH.
        self.ctx = {
            'host': {
                'ip': '172.16.0.137',
                'user': 'cirros',
                'key_filename': "mykey.key"
            }
        }
        self.result = {}

    def test_successful_setup(self, mock_ssh):
        scenario = lmbench.Lmbench({}, self.ctx)
        mock_ssh.SSH().execute.return_value = (0, '', '')

        scenario.setup()
        # setup() must establish an SSH client and flag completion.
        self.assertIsNotNone(scenario.client)
        self.assertTrue(scenario.setup_done)

    def test_unsuccessful_unknown_type_run(self, mock_ssh):
        # A test_type the scenario does not recognize must raise.
        args = {'options': {"test_type": "foo"}}
        scenario = lmbench.Lmbench(args, self.ctx)

        self.assertRaises(RuntimeError, scenario.run, self.result)

    def test_successful_latency_run_no_sla(self, mock_ssh):
        args = {
            'options': {"test_type": "latency", "stride": 64, "stop_size": 16}
        }
        scenario = lmbench.Lmbench(args, self.ctx)

        stdout = '[{"latency": 4.944, "size": 0.00049}]'
        mock_ssh.SSH().execute.return_value = (0, stdout, '')
        scenario.run(self.result)
        # Latency output is wrapped under a "latencies" key by the scenario.
        self.assertEqual(self.result, {"latencies": json.loads(stdout)})

    def test_successful_bandwidth_run_no_sla(self, mock_ssh):
        args = {
            "options": {
                "test_type": "bandwidth",
                "size": 500,
                "benchmark": "rd",
                "warmup": 0
            }
        }
        scenario = lmbench.Lmbench(args, self.ctx)

        stdout = '{"size(MB)": 0.262144, "bandwidth(MBps)": 11025.5}'
        mock_ssh.SSH().execute.return_value = (0, stdout, '')
        scenario.run(self.result)
        # Bandwidth output is copied into the result dict as-is.
        self.assertEqual(self.result, json.loads(stdout))

    def test_successful_latency_run_sla(self, mock_ssh):
        args = {
            "options": {"test_type": "latency", "stride": 64, "stop_size": 16},
            "sla": {"max_latency": 35}
        }
        scenario = lmbench.Lmbench(args, self.ctx)

        stdout = '[{"latency": 4.944, "size": 0.00049}]'
        mock_ssh.SSH().execute.return_value = (0, stdout, '')
        # 4.944 < max_latency 35, so the SLA check passes silently.
        scenario.run(self.result)
        self.assertEqual(self.result, {"latencies": json.loads(stdout)})

    def test_successful_bandwidth_run_sla(self, mock_ssh):
        args = {
            "options": {
                "test_type": "bandwidth",
                "size": 500,
                "benchmark": "rd",
                "warmup": 0
            },
            "sla": {"min_bandwidth": 10000}
        }
        scenario = lmbench.Lmbench(args, self.ctx)

        stdout = '{"size(MB)": 0.262144, "bandwidth(MBps)": 11025.5}'
        mock_ssh.SSH().execute.return_value = (0, stdout, '')
        # 11025.5 > min_bandwidth 10000, so the SLA check passes silently.
        scenario.run(self.result)
        self.assertEqual(self.result, json.loads(stdout))

    def test_unsuccessful_latency_run_sla(self, mock_ssh):
        args = {
            "options": {"test_type": "latency", "stride": 64, "stop_size": 16},
            "sla": {"max_latency": 35}
        }
        scenario = lmbench.Lmbench(args, self.ctx)

        # 37.5 exceeds max_latency 35 -> SLA assertion must fire.
        stdout = '[{"latency": 37.5, "size": 0.00049}]'
        mock_ssh.SSH().execute.return_value = (0, stdout, '')
        self.assertRaises(AssertionError, scenario.run, self.result)

    def test_unsuccessful_bandwidth_run_sla(self, mock_ssh):
        args = {
            "options": {
                "test_type": "bandwidth",
                "size": 500,
                "benchmark": "rd",
                "warmup": 0
            },
            "sla": {"min_bandwidth": 10000}
        }
        scenario = lmbench.Lmbench(args, self.ctx)

        # 9925.5 is below min_bandwidth 10000 -> SLA assertion must fire.
        stdout = '{"size(MB)": 0.262144, "bandwidth(MBps)": 9925.5}'
        mock_ssh.SSH().execute.return_value = (0, stdout, '')
        self.assertRaises(AssertionError, scenario.run, self.result)

    def test_unsuccessful_script_error(self, mock_ssh):
        args = {"options": {"test_type": "bandwidth"}}
        scenario = lmbench.Lmbench(args, self.ctx)

        # A non-zero exit status from the remote script must raise.
        mock_ssh.SSH().execute.return_value = (1, '', 'FOOBAR')
        self.assertRaises(RuntimeError, scenario.run, self.result)