move flatten dict key to common utils (35/36935/6)
author    rexlee8776 <limingjiang@huawei.com>    Wed, 5 Jul 2017 02:50:06 +0000 (02:50 +0000)
committer rexlee8776 <limingjiang@huawei.com>    Thu, 6 Jul 2017 07:49:10 +0000 (07:49 +0000)
So it can easily be used by other test cases to unify results

JIRA: YARDSTICK-702

Change-Id: Id4fde38a9a0c2a87a6c870bdb7b0c8f3a3b371ac
Signed-off-by: rexlee8776 <limingjiang@huawei.com>
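
For context, a minimal sketch of what the relocated helper does to a nested result
dict. The values are taken from the expected results in the updated unit tests below;
the import path is the one this change adds to the scenarios:

    from yardstick.common import utils

    nested = {
        "rtt": {"ares": 100.0},
        "latencies": [{"latency": 4.944, "size": 0.00049}],
    }

    # nested mappings become dotted keys, list items get positional suffixes
    flat = utils.flatten_dict_key(nested)
    # flat == {"rtt.ares": 100.0,
    #          "latencies0.latency": 4.944,
    #          "latencies0.size": 0.00049}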
12 files changed:
tests/unit/benchmark/scenarios/compute/test_lmbench.py
tests/unit/benchmark/scenarios/compute/test_ramspeed.py
tests/unit/benchmark/scenarios/networking/test_iperf3.py
tests/unit/benchmark/scenarios/networking/test_ping.py
tests/unit/common/test_utils.py
tests/unit/dispatcher/test_influxdb.py
yardstick/benchmark/scenarios/compute/lmbench.py
yardstick/benchmark/scenarios/compute/ramspeed.py
yardstick/benchmark/scenarios/networking/iperf3.py
yardstick/benchmark/scenarios/networking/ping.py
yardstick/common/utils.py
yardstick/dispatcher/influxdb.py

diff --git a/tests/unit/benchmark/scenarios/compute/test_lmbench.py b/tests/unit/benchmark/scenarios/compute/test_lmbench.py
index 08f5da3..65939c6 100644
@@ -68,8 +68,7 @@ class LmbenchTestCase(unittest.TestCase):
         sample_output = '[{"latency": 4.944, "size": 0.00049}]'
         mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
         l.run(self.result)
-        expected_result = jsonutils.loads(
-            '{"latencies": ' + sample_output + "}")
+        expected_result = {"latencies0.latency": 4.944, "latencies0.size": 0.00049}
         self.assertEqual(self.result, expected_result)
 
     def test_successful_bandwidth_run_no_sla(self, mock_ssh):
@@ -105,8 +104,7 @@ class LmbenchTestCase(unittest.TestCase):
         sample_output = '[{"latency": 4.944, "size": 0.00049}]'
         mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
         l.run(self.result)
-        expected_result = jsonutils.loads(
-            '{"latencies": ' + sample_output + "}")
+        expected_result = {"latencies0.latency": 4.944, "latencies0.size": 0.00049}
         self.assertEqual(self.result, expected_result)
 
     def test_successful_bandwidth_run_sla(self, mock_ssh):
@@ -191,3 +189,10 @@ class LmbenchTestCase(unittest.TestCase):
 
         mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
         self.assertRaises(RuntimeError, l.run, self.result)
+
+
+def main():
+    unittest.main()
+
+if __name__ == '__main__':
+    main()
diff --git a/tests/unit/benchmark/scenarios/compute/test_ramspeed.py b/tests/unit/benchmark/scenarios/compute/test_ramspeed.py
index 85d4964..4f71fbb 100644
@@ -18,6 +18,7 @@ import unittest
 import mock
 from oslo_serialization import jsonutils
 
+from yardstick.common import utils
 from yardstick.benchmark.scenarios.compute import ramspeed
 
 
@@ -77,7 +78,7 @@ class RamspeedTestCase(unittest.TestCase):
  "Block_size(kb)": 32768, "Bandwidth(MBps)": 8340.85}]}'
         mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
         r.run(self.result)
-        expected_result = jsonutils.loads(sample_output)
+        expected_result = utils.flatten_dict_key(jsonutils.loads(sample_output))
         self.assertEqual(self.result, expected_result)
 
     def test_ramspeed_successful_run_sla(self, mock_ssh):
@@ -113,7 +114,7 @@ class RamspeedTestCase(unittest.TestCase):
  "Block_size(kb)": 32768, "Bandwidth(MBps)": 8340.85}]}'
         mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
         r.run(self.result)
-        expected_result = jsonutils.loads(sample_output)
+        expected_result = utils.flatten_dict_key(jsonutils.loads(sample_output))
         self.assertEqual(self.result, expected_result)
 
     def test_ramspeed_unsuccessful_run_sla(self, mock_ssh):
@@ -179,7 +180,7 @@ class RamspeedTestCase(unittest.TestCase):
  "Bandwidth(MBps)": 9401.58}]}'
         mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
         r.run(self.result)
-        expected_result = jsonutils.loads(sample_output)
+        expected_result = utils.flatten_dict_key(jsonutils.loads(sample_output))
         self.assertEqual(self.result, expected_result)
 
     def test_ramspeed_mem_successful_run_sla(self, mock_ssh):
@@ -200,7 +201,7 @@ class RamspeedTestCase(unittest.TestCase):
  "Bandwidth(MBps)": 9401.58}]}'
         mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
         r.run(self.result)
-        expected_result = jsonutils.loads(sample_output)
+        expected_result = utils.flatten_dict_key(jsonutils.loads(sample_output))
         self.assertEqual(self.result, expected_result)
 
     def test_ramspeed_mem_unsuccessful_run_sla(self, mock_ssh):
diff --git a/tests/unit/benchmark/scenarios/networking/test_iperf3.py b/tests/unit/benchmark/scenarios/networking/test_iperf3.py
index 45ff1b7..3312453 100644
@@ -19,6 +19,7 @@ import unittest
 import mock
 from oslo_serialization import jsonutils
 
+from yardstick.common import utils
 from yardstick.benchmark.scenarios.networking import iperf3
 
 
@@ -81,7 +82,7 @@ class IperfTestCase(unittest.TestCase):
 
         sample_output = self._read_sample_output(self.output_name_tcp)
         mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
-        expected_result = jsonutils.loads(sample_output)
+        expected_result = utils.flatten_dict_key(jsonutils.loads(sample_output))
         p.run(result)
         self.assertEqual(result, expected_result)
 
@@ -100,7 +101,7 @@ class IperfTestCase(unittest.TestCase):
 
         sample_output = self._read_sample_output(self.output_name_tcp)
         mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
-        expected_result = jsonutils.loads(sample_output)
+        expected_result = utils.flatten_dict_key(jsonutils.loads(sample_output))
         p.run(result)
         self.assertEqual(result, expected_result)
 
@@ -135,7 +136,7 @@ class IperfTestCase(unittest.TestCase):
 
         sample_output = self._read_sample_output(self.output_name_udp)
         mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
-        expected_result = jsonutils.loads(sample_output)
+        expected_result = utils.flatten_dict_key(jsonutils.loads(sample_output))
         p.run(result)
         self.assertEqual(result, expected_result)
 
diff --git a/tests/unit/benchmark/scenarios/networking/test_ping.py b/tests/unit/benchmark/scenarios/networking/test_ping.py
index 5269309..0635324 100644
@@ -45,7 +45,7 @@ class PingTestCase(unittest.TestCase):
 
         mock_ssh.SSH.from_node().execute.return_value = (0, '100', '')
         p.run(result)
-        self.assertEqual(result, {'rtt': {'ares': 100.0}})
+        self.assertEqual(result, {'rtt.ares': 100.0})
 
     @mock.patch('yardstick.benchmark.scenarios.networking.ping.ssh')
     def test_ping_successful_sla(self, mock_ssh):
@@ -61,7 +61,7 @@ class PingTestCase(unittest.TestCase):
 
         mock_ssh.SSH.from_node().execute.return_value = (0, '100', '')
         p.run(result)
-        self.assertEqual(result, {'rtt': {'ares': 100.0}})
+        self.assertEqual(result, {'rtt.ares': 100.0})
 
     @mock.patch('yardstick.benchmark.scenarios.networking.ping.ssh')
     def test_ping_unsuccessful_sla(self, mock_ssh):
diff --git a/tests/unit/common/test_utils.py b/tests/unit/common/test_utils.py
index 8f52b53..c4c61ce 100644
@@ -109,6 +109,37 @@ class GetParaFromYaml(unittest.TestCase):
         return file_path
 
 
+class CommonUtilTestCase(unittest.TestCase):
+    def setUp(self):
+        self.data = {
+            "benchmark": {
+                "data": {
+                    "mpstat": {
+                        "cpu0": {
+                            "%sys": "0.00",
+                            "%idle": "99.00"
+                        },
+                        "loadavg": [
+                            "1.09",
+                            "0.29"
+                        ]
+                    },
+                    "rtt": "1.03"
+                }
+            }
+        }
+    def test__dict_key_flatten(self):
+        line = 'mpstat.loadavg1=0.29,rtt=1.03,mpstat.loadavg0=1.09,' \
+               'mpstat.cpu0.%idle=99.00,mpstat.cpu0.%sys=0.00'
+        # need to sort for assert to work
+        line = ",".join(sorted(line.split(',')))
+        flattened_data = utils.flatten_dict_key(
+            self.data['benchmark']['data'])
+        result = ",".join(
+            ("=".join(item) for item in sorted(flattened_data.items())))
+        self.assertEqual(result, line)
+
+
 def main():
     unittest.main()
 
diff --git a/tests/unit/dispatcher/test_influxdb.py b/tests/unit/dispatcher/test_influxdb.py
index a5d9b07..7ebe8c9 100644
@@ -76,23 +76,6 @@ class InfluxdbDispatcherTestCase(unittest.TestCase):
             },
             "runner_id": 8921
         }
-        self.data3 = {
-            "benchmark": {
-                "data": {
-                    "mpstat": {
-                        "cpu0": {
-                            "%sys": "0.00",
-                            "%idle": "99.00"
-                        },
-                        "loadavg": [
-                            "1.09",
-                            "0.29"
-                        ]
-                    },
-                    "rtt": "1.03"
-                }
-            }
-        }
 
         self.yardstick_conf = {'dispatcher_influxdb': {}}
 
@@ -113,18 +96,6 @@ class InfluxdbDispatcherTestCase(unittest.TestCase):
         }
         self.assertEqual(influxdb.flush_result_data(data), 0)
 
-    def test__dict_key_flatten(self):
-        line = 'mpstat.loadavg1=0.29,rtt=1.03,mpstat.loadavg0=1.09,' \
-               'mpstat.cpu0.%idle=99.00,mpstat.cpu0.%sys=0.00'
-        # need to sort for assert to work
-        line = ",".join(sorted(line.split(',')))
-        influxdb = InfluxdbDispatcher(self.yardstick_conf)
-        flattened_data = influxdb._dict_key_flatten(
-            self.data3['benchmark']['data'])
-        result = ",".join(
-            [k + "=" + v for k, v in sorted(flattened_data.items())])
-        self.assertEqual(result, line)
-
     def test__get_nano_timestamp(self):
         influxdb = InfluxdbDispatcher(self.yardstick_conf)
         results = {'timestamp': '1451461248.925574'}
diff --git a/yardstick/benchmark/scenarios/compute/lmbench.py b/yardstick/benchmark/scenarios/compute/lmbench.py
index c99fc98..801f7fa 100644
@@ -15,6 +15,7 @@ import pkg_resources
 from oslo_serialization import jsonutils
 
 import yardstick.ssh as ssh
+from yardstick.common import utils
 from yardstick.benchmark.scenarios import base
 
 LOG = logging.getLogger(__name__)
@@ -127,30 +128,32 @@ class Lmbench(base.Scenario):
         if status:
             raise RuntimeError(stderr)
 
+        lmbench_result = {}
         if test_type == 'latency':
-            result.update(
+            lmbench_result.update(
                 {"latencies": jsonutils.loads(stdout)})
         else:
-            result.update(jsonutils.loads(stdout))
+            lmbench_result.update(jsonutils.loads(stdout))
+        result.update(utils.flatten_dict_key(lmbench_result))
 
         if "sla" in self.scenario_cfg:
             sla_error = ""
             if test_type == 'latency':
                 sla_max_latency = int(self.scenario_cfg['sla']['max_latency'])
-                for t_latency in result["latencies"]:
+                for t_latency in lmbench_result["latencies"]:
                     latency = t_latency['latency']
                     if latency > sla_max_latency:
                         sla_error += "latency %f > sla:max_latency(%f); " \
                             % (latency, sla_max_latency)
             elif test_type == 'bandwidth':
                 sla_min_bw = int(self.scenario_cfg['sla']['min_bandwidth'])
-                bw = result["bandwidth(MBps)"]
+                bw = lmbench_result["bandwidth(MBps)"]
                 if bw < sla_min_bw:
                     sla_error += "bandwidth %f < " \
                                  "sla:min_bandwidth(%f)" % (bw, sla_min_bw)
             elif test_type == 'latency_for_cache':
                 sla_latency = float(self.scenario_cfg['sla']['max_latency'])
-                cache_latency = float(result['L1cache'])
+                cache_latency = float(lmbench_result['L1cache'])
                 if sla_latency < cache_latency:
                     sla_error += "latency %f > sla:max_latency(%f); " \
                         % (cache_latency, sla_latency)
diff --git a/yardstick/benchmark/scenarios/compute/ramspeed.py b/yardstick/benchmark/scenarios/compute/ramspeed.py
index 850ee59..ca64935 100644
@@ -14,6 +14,7 @@ import pkg_resources
 from oslo_serialization import jsonutils
 
 import yardstick.ssh as ssh
+from yardstick.common import utils
 from yardstick.benchmark.scenarios import base
 
 LOG = logging.getLogger(__name__)
@@ -128,12 +129,13 @@ class Ramspeed(base.Scenario):
         if status:
             raise RuntimeError(stderr)
 
-        result.update(jsonutils.loads(stdout))
+        ramspeed_result = jsonutils.loads(stdout)
+        result.update(utils.flatten_dict_key(ramspeed_result))
 
         if "sla" in self.scenario_cfg:
             sla_error = ""
             sla_min_bw = int(self.scenario_cfg['sla']['min_bandwidth'])
-            for i in result["Result"]:
+            for i in ramspeed_result["Result"]:
                 bw = i["Bandwidth(MBps)"]
                 if bw < sla_min_bw:
                     sla_error += "Bandwidth %f < " \
diff --git a/yardstick/benchmark/scenarios/networking/iperf3.py b/yardstick/benchmark/scenarios/networking/iperf3.py
index 334f3a9..3135af9 100644
@@ -19,6 +19,7 @@ import pkg_resources
 from oslo_serialization import jsonutils
 
 import yardstick.ssh as ssh
+from yardstick.common import utils
 from yardstick.benchmark.scenarios import base
 
 LOG = logging.getLogger(__name__)
@@ -131,8 +132,8 @@ For more info see http://software.es.net/iperf
         # Note: convert all ints to floats in order to avoid
         # schema conflicts in influxdb. We probably should add
         # a format func in the future.
-        result.update(
-            jsonutils.loads(stdout, parse_int=float))
+        iperf_result = jsonutils.loads(stdout, parse_int=float)
+        result.update(utils.flatten_dict_key(iperf_result))
 
         if "sla" in self.scenario_cfg:
             sla_iperf = self.scenario_cfg["sla"]
@@ -141,7 +142,7 @@ For more info see http://software.es.net/iperf
 
                 # convert bits per second to bytes per second
                 bit_per_second = \
-                    int(result["end"]["sum_received"]["bits_per_second"])
+                    int(iperf_result["end"]["sum_received"]["bits_per_second"])
                 bytes_per_second = bit_per_second / 8
                 assert bytes_per_second >= sla_bytes_per_second, \
                     "bytes_per_second %d < sla:bytes_per_second (%d); " % \
@@ -149,7 +150,7 @@ For more info see http://software.es.net/iperf
             else:
                 sla_jitter = float(sla_iperf["jitter"])
 
-                jitter_ms = float(result["end"]["sum"]["jitter_ms"])
+                jitter_ms = float(iperf_result["end"]["sum"]["jitter_ms"])
                 assert jitter_ms <= sla_jitter, \
                     "jitter_ms  %f > sla:jitter %f; " % \
                     (jitter_ms, sla_jitter)
diff --git a/yardstick/benchmark/scenarios/networking/ping.py b/yardstick/benchmark/scenarios/networking/ping.py
index a929e53..6a7927d 100644
@@ -15,6 +15,7 @@ import pkg_resources
 import logging
 
 import yardstick.ssh as ssh
+from yardstick.common import utils
 from yardstick.benchmark.scenarios import base
 
 LOG = logging.getLogger(__name__)
@@ -57,8 +58,8 @@ class Ping(base.Scenario):
         destination = self.context_cfg['target'].get('ipaddr', '127.0.0.1')
         dest_list = [s.strip() for s in destination.split(',')]
 
-        result["rtt"] = {}
-        rtt_result = result["rtt"]
+        rtt_result = {}
+        ping_result = {"rtt": rtt_result}
 
         for pos, dest in enumerate(dest_list):
             if 'targets' in self.scenario_cfg:
@@ -88,6 +89,7 @@ class Ping(base.Scenario):
                         (rtt_result[target_vm_name], sla_max_rtt)
             else:
                 LOG.error("ping '%s' '%s' timeout", options, target_vm)
+        result.update(utils.flatten_dict_key(ping_result))
 
 
 def _test():    # pragma: no cover
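
The four scenario edits above (lmbench, ramspeed, iperf3, ping) all follow the same
pattern: keep the parsed, nested output locally for SLA evaluation and publish only the
flattened copy into the shared result dict. A minimal standalone sketch of that pattern
(the sample output matches the lmbench unit test; the SLA threshold is illustrative):

    from oslo_serialization import jsonutils

    from yardstick.common import utils

    stdout = '[{"latency": 4.944, "size": 0.00049}]'  # sample lmbench latency output
    result = {}

    # nested form is kept for SLA checks ...
    lmbench_result = {"latencies": jsonutils.loads(stdout)}
    # ... while only flat keys reach result and, from there, the dispatchers
    result.update(utils.flatten_dict_key(lmbench_result))

    sla_max_latency = 35  # illustrative; the scenario reads this from scenario_cfg['sla']
    for t_latency in lmbench_result["latencies"]:
        assert t_latency["latency"] <= sla_max_latency, "latency above SLA"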
index 7aab469..7633777 100644 (file)
@@ -23,6 +23,8 @@ import logging
 import os
 import subprocess
 import sys
+import collections
+import six
 from functools import reduce
 
 import yaml
@@ -189,3 +191,24 @@ def get_port_ip(sshclient, port):
     if status:
         raise RuntimeError(stderr)
     return stdout.rstrip()
+
+
+def flatten_dict_key(data):
+    next_data = {}
+
+    # use list, because iterable is too generic
+    if not any(isinstance(v, (collections.Mapping, list)) for v in data.values()):
+        return data
+
+    for k, v in six.iteritems(data):
+        if isinstance(v, collections.Mapping):
+            for n_k, n_v in six.iteritems(v):
+                next_data["%s.%s" % (k, n_k)] = n_v
+        # use list because iterable is too generic
+        elif isinstance(v, list):
+            for index, item in enumerate(v):
+                next_data["%s%d" % (k, index)] = item
+        else:
+            next_data[k] = v
+
+    return flatten_dict_key(next_data)
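
For reference, a quick worked example of the relocated helper, mirroring the data in the
new CommonUtilTestCase above:

    from yardstick.common import utils

    data = {
        "mpstat": {
            "cpu0": {"%sys": "0.00", "%idle": "99.00"},
            "loadavg": ["1.09", "0.29"],
        },
        "rtt": "1.03",
    }

    flat = utils.flatten_dict_key(data)
    for key in sorted(flat):
        print("%s=%s" % (key, flat[key]))
    # mpstat.cpu0.%idle=99.00
    # mpstat.cpu0.%sys=0.00
    # mpstat.loadavg0=1.09
    # mpstat.loadavg1=0.29
    # rtt=1.03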
diff --git a/yardstick/dispatcher/influxdb.py b/yardstick/dispatcher/influxdb.py
index 373aae1..f157e91 100644
@@ -12,10 +12,9 @@ from __future__ import absolute_import
 import logging
 import time
 
-import collections
 import requests
-import six
 
+from yardstick.common import utils
 from third_party.influxdb.influxdb_line_protocol import make_lines
 from yardstick.dispatcher.base import Base as DispatchBase
 
@@ -80,7 +79,7 @@ class InfluxdbDispatcher(DispatchBase):
         msg = {}
         point = {
             "measurement": case,
-            "fields": self._dict_key_flatten(data["data"]),
+            "fields": utils.flatten_dict_key(data["data"]),
             "time": self._get_nano_timestamp(data),
             "tags": self._get_extended_tags(criteria),
         }
@@ -89,27 +88,6 @@ class InfluxdbDispatcher(DispatchBase):
 
         return make_lines(msg).encode('utf-8')
 
-    def _dict_key_flatten(self, data):
-        next_data = {}
-
-        # use list, because iterable is too generic
-        if not [v for v in data.values() if
-                isinstance(v, (collections.Mapping, list))]:
-            return data
-
-        for k, v in six.iteritems(data):
-            if isinstance(v, collections.Mapping):
-                for n_k, n_v in six.iteritems(v):
-                    next_data["%s.%s" % (k, n_k)] = n_v
-            # use list because iterable is too generic
-            elif isinstance(v, list):
-                for index, item in enumerate(v):
-                    next_data["%s%d" % (k, index)] = item
-            else:
-                next_data[k] = v
-
-        return self._dict_key_flatten(next_data)
-
     def _get_nano_timestamp(self, results):
         try:
             timestamp = results["timestamp"]