Standardize Output 09/40409/4
author    mbeierl <mark.beierl@dell.com>
Mon, 28 Aug 2017 13:43:31 +0000 (09:43 -0400)
committer mbeierl <mark.beierl@dell.com>
Mon, 28 Aug 2017 23:09:24 +0000 (19:09 -0400)
Re-arrange the JSON output so that it matches the format that is
sent to, and retrieved from, the testresults DB.

Change-Id: I9f15e553e94b27b9e5a36f4984789c5e710feac4
JIRA: STORPERF-200
Signed-off-by: mbeierl <mark.beierl@dell.com>
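
This changes the shape of the metadata dict that TestExecutor carries and
that DataHandler ultimately POSTs: the top-level keys now mirror the
testresults DB schema, and all StorPerf-specific data moves under a
'details' key. A rough sketch of the new layout (field values here are
illustrative, not taken from a real run):

    metadata = {
        'project_name': 'storperf',
        'test_case': 'snia_steady_state',   # example name
        'case_name': 'snia_steady_state',   # mirrored from 'test_case'
        'installer': 'Unknown_installer',   # from $INSTALLER_TYPE when set
        'pod_name': 'Unknown',
        'version': 'Unknown',
        'scenario': 'Unknown',
        'build_tag': 'Unknown',
        'criteria': 'PASS',                 # PASS only if every workload
                                            # reached steady state
        'start_time': '2017-09-04 21:05:00',
        'end_time': '2017-09-04 21:20:00',
        'details': {
            'metrics': {},                  # flattened per-metric averages
            'report_data': {},              # per-workload slope/range/average
            'steady_state': {},             # per-workload True/False
        },
    }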
docker/local-docker-compose.yaml
docker/storperf-master/rest_server.py
docker/storperf-master/storperf/db/test_results_db.py
docker/storperf-master/storperf/storperf_master.py
docker/storperf-master/storperf/test_executor.py
docker/storperf-master/storperf/utilities/data_handler.py
docker/storperf-master/tests/utilities_tests/data_handler_test.py

diff --git a/docker/local-docker-compose.yaml b/docker/local-docker-compose.yaml
index fe83d22..13382d6 100644
@@ -16,8 +16,6 @@ services:
             context: storperf-master
             args:
                 ARCH: ${ARCH}
-        ports:
-            - "8001:8000"
         env_file: ${ENV_FILE}
         volumes:
             - ${CARBON_DIR}:/opt/graphite/storage/whisper
@@ -45,7 +43,7 @@ services:
         container_name: "storperf-graphite"
         build: storperf-graphite
         ports:
-            - "8080:8080"
+            - "8000:8080"
 
     storperf-httpfrontend:
         container_name: "storperf-httpfrontend"
diff --git a/docker/storperf-master/rest_server.py b/docker/storperf-master/rest_server.py
index bd4b66b..6da2004 100644
@@ -224,13 +224,13 @@ class Job(Resource):
     )
     def get(self):
 
-        metrics_type = "metrics"
-        if request.args.get('type'):
-            metrics_type = request.args.get('type')
-
         workload_id = request.args.get('id')
 
         if workload_id:
+            metrics_type = "metrics"
+            if request.args.get('type'):
+                metrics_type = request.args.get('type')
+
             if metrics_type == "metrics":
                 return jsonify(storperf.fetch_results(workload_id))
 
@@ -240,7 +240,10 @@ class Job(Resource):
             if metrics_type == "status":
                 return jsonify(storperf.fetch_job_status(workload_id))
         else:
-            return jsonify({"job_ids": storperf.fetch_all_jobs()})
+            metrics_type = None
+            if request.args.get('type'):
+                metrics_type = request.args.get('type')
+            return jsonify(storperf.fetch_all_jobs(metrics_type))
 
     @swagger.operation(
         parameters=[
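
With this refactor the 'type' query parameter is honoured even when no job
id is given: a GET on the jobs resource with type=metadata now returns
report summaries for all jobs. A hypothetical client call (host and port
are assumptions about a local deployment, not part of this change):

    import requests

    # All job ids, as before:
    r = requests.get('http://127.0.0.1:5000/api/v1.0/jobs')
    print(r.json())                      # {'job_ids': [...]}

    # New: one report entry per completed job:
    r = requests.get('http://127.0.0.1:5000/api/v1.0/jobs',
                     params={'type': 'metadata'})
    print(r.json())                      # {'results': [...]}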
diff --git a/docker/storperf-master/storperf/db/test_results_db.py b/docker/storperf-master/storperf/db/test_results_db.py
index a2f7038..d6aabee 100644
@@ -8,38 +8,19 @@
 ##############################################################################
 
 import json
-import os
 import requests
 
 
-def get_installer_type(logger=None):
-    """
-    Get installer type (fuel, apex, joid, compass)
-    """
-    try:
-        installer = os.environ['INSTALLER_TYPE']
-    except KeyError:
-        if logger:
-            logger.error("Impossible to retrieve the installer type")
-        installer = "Unknown_installer"
-
-    return installer
-
-
-def push_results_to_db(db_url, project, case_name,
-                       test_start, test_stop, logger, pod_name,
-                       version, scenario, criteria, build_tag, details):
+def push_results_to_db(db_url, details, logger):
     """
     POST results to the Result target DB
     """
     url = db_url + "/results"
-    installer = get_installer_type(logger)
 
-    params = {"project_name": project, "case_name": case_name,
-              "pod_name": pod_name, "installer": installer,
-              "version": version, "scenario": scenario, "criteria": criteria,
-              "build_tag": build_tag, "start_date": test_start,
-              "stop_date": test_stop, "details": details}
+    params = details.copy()
+    params.pop('details')
+
+    logger.info("popped params= %s" % params)
 
     headers = {'Content-Type': 'application/json'}
     try:
@@ -53,9 +34,8 @@ def push_results_to_db(db_url, project, case_name,
             logger.debug(r.status_code)
             logger.debug(r.content)
         return json.loads(r.content)
-    except Exception, e:
-        logger.error("Error [push_results_to_db('%s', '%s', '%s', " +
-                     "'%s', '%s', '%s', '%s', '%s', '%s')]:" %
-                     (db_url, project, case_name, pod_name, version,
-                      scenario, criteria, build_tag, details), e)
+    except Exception:
+        if logger:
+            logger.exception("Error [push_results_to_db('%s', '%s', '%s')]:" %
+                             (db_url, params, details['details']))
         return None
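
push_results_to_db() now receives the fully assembled metadata dict instead
of ten positional fields; everything except the nested 'details' entry is
already in the flat shape the DB expects. A minimal sketch of the new
calling convention (the DB URL is an example value for TEST_DB_URL):

    from storperf.db import test_results_db

    response = test_results_db.push_results_to_db(
        'http://testresults.opnfv.org/test/api/v1',  # example URL
        executor.metadata,                           # full dict, incl. 'details'
        logger)
    if response is not None:
        result_url = response['href']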
diff --git a/docker/storperf-master/storperf/storperf_master.py b/docker/storperf-master/storperf/storperf_master.py
index 3b0af78..4e99e57 100644
@@ -363,7 +363,7 @@ class StorPerfMaster(object):
 
     def fetch_results(self, job_id):
         if self._test_executor.job_db.job_id == job_id:
-            return self._test_executor.metadata['metrics']
+            return self._test_executor.metadata['details']['metrics']
 
         workload_params = self.job_db.fetch_workload_params(job_id)
         if 'report' in workload_params:
@@ -377,8 +377,25 @@ class StorPerfMaster(object):
     def fetch_job_status(self, job_id):
         return self._test_executor.execution_status(job_id)
 
-    def fetch_all_jobs(self):
-        return self.job_db.fetch_jobs()
+    def fetch_all_jobs(self, metrics_type):
+        job_list = self.job_db.fetch_jobs()
+        job_report = {}
+        if metrics_type is None:
+            job_report['job_ids'] = job_list
+        elif metrics_type == "metadata":
+            job_report['results'] = []
+            for job in job_list:
+                if metrics_type == 'metadata':
+                    metadata = self.fetch_metadata(job)
+                    if 'report' in metadata:
+                        metadata['report']['_id'] = job
+                        metadata['report']['start_date'] = \
+                            metadata['report']['start_time']
+                        metadata['report']['end_date'] = \
+                            metadata['report']['end_time']
+                        metadata['report']['_id'] = job
+                        job_report['results'].append(metadata['report'])
+        return job_report
 
     def _setup_slave(self, slave):
         logger = logging.getLogger(__name__ + ":" + slave)
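
fetch_all_jobs() keeps its old behaviour when no type is requested, and
otherwise folds each stored report into a testresults-style record, with
start_date/end_date duplicated from start_time/end_time so the local answer
matches what the DB would return. Illustrative return shapes (ids
shortened):

    master.fetch_all_jobs(None)
    # {'job_ids': ['6fd1e5b2-...', 'a3c9d807-...']}

    master.fetch_all_jobs('metadata')
    # {'results': [{'_id': '6fd1e5b2-...',
    #               'start_date': '2017-09-04 21:05:00',
    #               'end_date': '2017-09-04 21:20:00',
    #               ...}]}

Any other type value yields an empty dict.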
diff --git a/docker/storperf-master/storperf/test_executor.py b/docker/storperf-master/storperf/test_executor.py
index dc178d8..3d1d9f2 100644
@@ -39,7 +39,6 @@ class TestExecutor(object):
         self.filename = None
         self.deadline = None
         self.steady_state_samples = 10
-        self.metadata = {}
         self.start_time = None
         self.end_time = None
         self.current_workload = None
@@ -57,6 +56,27 @@ class TestExecutor(object):
         self._workload_executors = []
         self._workload_thread = None
         self._thread_gate = None
+        self._setup_metadata({})
+
+    def _setup_metadata(self, metadata={}):
+        try:
+            installer = os.environ['INSTALLER_TYPE']
+        except KeyError:
+            self.logger.error("Cannot determine installer")
+            installer = "Unknown_installer"
+
+        self.metadata = {}
+        self.metadata['project_name'] = 'storperf'
+        self.metadata['installer'] = installer
+        self.metadata['pod_name'] = 'Unknown'
+        self.metadata['version'] = 'Unknown'
+        self.metadata['scenario'] = 'Unknown'
+        self.metadata['build_tag'] = 'Unknown'
+        self.metadata['test_case'] = 'Unknown'
+        self.metadata['details'] = {}
+        self.metadata['details']['metrics'] = {}
+        self.metadata.update(metadata)
+        self.metadata['case_name'] = self.metadata['test_case']
 
     @property
     def slaves(self):
@@ -171,8 +191,7 @@ class TestExecutor(object):
     def execute(self, metadata):
         self.job_db.create_job_id()
         self.job_db.record_workload_params(metadata)
-        self.metadata = metadata
-        self.metadata['metrics'] = {}
+        self._setup_metadata(metadata)
         self._workload_thread = Thread(target=self.execute_workloads,
                                        args=(),
                                        name="Workload thread")
@@ -313,10 +332,10 @@ class TestExecutor(object):
 
         self.end_time = time.time()
         self._terminated = True
-        report = {'report': json.dumps(self.metadata)}
-        self.job_db.record_workload_params(report)
         self.broadcast_event()
         self.unregister(data_handler.data_event)
+        report = {'report': json.dumps(self.metadata)}
+        self.job_db.record_workload_params(report)
         self.job_db.job_id = None
 
     def execute_on_node(self, workload):
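
_setup_metadata() overlays caller-supplied fields on the defaults, so a
bare constructor still yields a complete dict and execute() only passes
through whatever the REST layer received. Recording the report is also
moved after the final broadcast, so the last data_event results land in
the stored report. A small sketch (the override values are examples):

    executor = TestExecutor()
    executor._setup_metadata({'test_case': 'snia_steady_state',
                              'pod_name': 'intel-pod9'})
    executor.metadata['case_name']   # 'snia_steady_state' (mirrored)
    executor.metadata['installer']   # $INSTALLER_TYPE or 'Unknown_installer'
    executor.metadata['details']     # {'metrics': {}}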
diff --git a/docker/storperf-master/storperf/utilities/data_handler.py b/docker/storperf-master/storperf/utilities/data_handler.py
index 44b1f6b..471c295 100644
@@ -15,7 +15,6 @@ from storperf.db import test_results_db
 from storperf.db.graphite_db import GraphiteDB
 from storperf.db.job_db import JobDB
 from storperf.utilities import data_treatment as DataTreatment
-from storperf.utilities import dictionary
 from storperf.utilities import math as math
 from storperf.utilities import steady_state as SteadyState
 
@@ -36,8 +35,6 @@ class DataHandler(object):
             self._push_to_db(executor)
         else:
             workload = '.'.join(executor.current_workload.split('.')[1:6])
-            if 'metrics' not in executor.metadata:
-                executor.metadata['metrics'] = {}
 
             steady_state = True
             metrics = {}
@@ -67,19 +64,21 @@ class DataHandler(object):
                     metrics[metric][io_type]['average'] = average
 
                     metrics_key = '%s.%s.%s' % (workload, io_type, metric)
-                    executor.metadata['metrics'][metrics_key] = average
+                    executor.metadata['details']['metrics'][metrics_key] = \
+                        average
 
                     if not steady:
                         steady_state = False
 
-            if 'report_data' not in executor.metadata:
-                executor.metadata['report_data'] = {}
+            if 'report_data' not in executor.metadata['details']:
+                executor.metadata['details']['report_data'] = {}
 
-            if 'steady_state' not in executor.metadata:
-                executor.metadata['steady_state'] = {}
+            if 'steady_state' not in executor.metadata['details']:
+                executor.metadata['details']['steady_state'] = {}
 
-            executor.metadata['report_data'][workload] = metrics
-            executor.metadata['steady_state'][workload] = steady_state
+            executor.metadata['details']['report_data'][workload] = metrics
+            executor.metadata['details']['steady_state'][workload] = \
+                steady_state
 
             workload_name = executor.current_workload.split('.')[1]
 
@@ -128,60 +127,36 @@ class DataHandler(object):
         return SteadyState.steady_state(data_series)
 
     def _push_to_db(self, executor):
-        pod_name = dictionary.get_key_from_dict(executor.metadata,
-                                                'pod_name',
-                                                'Unknown')
-        version = dictionary.get_key_from_dict(executor.metadata,
-                                               'version',
-                                               'Unknown')
-        scenario = dictionary.get_key_from_dict(executor.metadata,
-                                                'scenario_name',
-                                                'Unknown')
-        build_tag = dictionary.get_key_from_dict(executor.metadata,
-                                                 'build_tag',
-                                                 'Unknown')
-        test_case = dictionary.get_key_from_dict(executor.metadata,
-                                                 'test_case',
-                                                 'Unknown')
-        duration = executor.end_time - executor.start_time
-
-        payload = executor.metadata
+        executor.metadata['duration'] = executor.end_time - executor.start_time
 
         steady_state = True
-        for _, value in executor.metadata['steady_state'].items():
+        for _, value in executor.metadata['details']['steady_state'].items():
             steady_state = steady_state and value
 
-        payload['timestart'] = executor.start_time
-        payload['duration'] = duration
+        executor.metadata['timestart'] = executor.start_time
 
         if steady_state:
             criteria = 'PASS'
         else:
             criteria = 'FAIL'
+        executor.metadata['criteria'] = criteria
 
-        start_time = time.strftime('%Y-%m-%d %H:%M:%S',
-                                   time.gmtime(executor.start_time))
+        executor.metadata['start_time'] = \
+            time.strftime('%Y-%m-%d %H:%M:%S',
+                          time.gmtime(executor.start_time))
 
-        end_time = time.strftime('%Y-%m-%d %H:%M:%S',
-                                 time.gmtime(executor.end_time))
+        executor.metadata['end_time'] = \
+            time.strftime('%Y-%m-%d %H:%M:%S',
+                          time.gmtime(executor.end_time))
 
         test_db = os.environ.get('TEST_DB_URL')
         if test_db is not None:
             self.logger.info("Pushing results to %s" % (test_db))
             try:
-                response = test_results_db.push_results_to_db(test_db,
-                                                              "storperf",
-                                                              test_case,
-                                                              start_time,
-                                                              end_time,
-                                                              self.logger,
-                                                              pod_name,
-                                                              version,
-                                                              scenario,
-                                                              criteria,
-                                                              build_tag,
-                                                              payload)
+                response = test_results_db.push_results_to_db(
+                    test_db,
+                    executor.metadata,
+                    self.logger)
                 executor.result_url = response['href']
-            except Exception as e:
-                self.logger.exception("Error pushing results into Database",
-                                      e)
+            except Exception:
+                self.logger.exception("Error pushing results into Database")
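
With the dictionary helper gone, _push_to_db() mutates executor.metadata in
place (criteria, timestart, duration, start_time, end_time) instead of
building a separate payload, and averages are written straight into the
nested details dict. The flattened key joins workload, I/O direction and
metric name with dots; one illustrative entry (the number is invented):

    executor.metadata['details']['metrics'][
        'rw.queue-depth.8.block-size.8192.read.lat_ns.mean'] = 1234.5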
diff --git a/docker/storperf-master/tests/utilities_tests/data_handler_test.py b/docker/storperf-master/tests/utilities_tests/data_handler_test.py
index 6010bd3..f028bd0 100644
@@ -35,6 +35,8 @@ class DataHandlerTest(unittest.TestCase):
         self.steady_state_samples = 10
         self.end_time = 1
         self.metadata = {}
+        self.metadata['details'] = {}
+        self.metadata['details']['metrics'] = {}
         self.block_sizes = "1"
         self.queue_depths = "1"
         mock.job_id = "1"
@@ -134,7 +136,7 @@ class DataHandlerTest(unittest.TestCase):
         self._terminated = True
         mock_results_db.side_effect = self.push_results_to_db
         mock_graphite_db.side_effect = MockGraphiteDB
-        self.metadata = {
+        self.metadata['details'] = {
             "steady_state": {
                 "rr.queue-depth.8.block-size.16384": True,
                 "rr.queue-depth.8.block-size.2048": False,
@@ -182,17 +184,20 @@ class DataHandlerTest(unittest.TestCase):
         self.assertEqual(False, self.pushed)
         self.assertEqual(False, self._terminated)
 
-        self.assertEqual(expected_slope, self.metadata['report_data']
+        self.assertEqual(expected_slope, self.metadata['details']
+                         ['report_data']
                          ['rw.queue-depth.8.block-size.8192']
                          ['lat_ns.mean']
                          ['read']
                          ['slope'])
-        self.assertEqual(expected_range, self.metadata['report_data']
+        self.assertEqual(expected_range, self.metadata['details']
+                         ['report_data']
                          ['rw.queue-depth.8.block-size.8192']
                          ['lat_ns.mean']
                          ['read']
                          ['range'])
-        self.assertEqual(expected_average, self.metadata['report_data']
+        self.assertEqual(expected_average, self.metadata['details']
+                         ['report_data']
                          ['rw.queue-depth.8.block-size.8192']
                          ['lat_ns.mean']
                          ['read']
@@ -246,22 +251,26 @@ class DataHandlerTest(unittest.TestCase):
 
         self.data_handler.data_event(self)
 
-        self.assertEqual(expected_slope, self.metadata['report_data']
+        self.assertEqual(expected_slope, self.metadata['details']
+                         ['report_data']
                          ['rw.queue-depth.8.block-size.8192']
                          ['lat_ns.mean']
                          ['read']
                          ['slope'])
-        self.assertEqual(expected_range, self.metadata['report_data']
+        self.assertEqual(expected_range, self.metadata['details']
+                         ['report_data']
                          ['rw.queue-depth.8.block-size.8192']
                          ['lat_ns.mean']
                          ['read']
                          ['range'])
-        self.assertEqual(expected_average, self.metadata['report_data']
+        self.assertEqual(expected_average, self.metadata['details']
+                         ['report_data']
                          ['rw.queue-depth.8.block-size.8192']
                          ['lat_ns.mean']
                          ['read']
                          ['average'])
-        self.assertEqual(report_data, self.metadata['report_data']
+        self.assertEqual(report_data, self.metadata['details']
+                         ['report_data']
                          ['rw.queue-depth.8.block-size.8192']
                          ['lat_ns.mean']
                          ['read']
@@ -277,7 +286,7 @@ class DataHandlerTest(unittest.TestCase):
         mock_results_db.side_effect = self.push_results_to_db
         self.start_time = 1504559100
         self.end_time = 1504560000
-        self.metadata = {
+        self.metadata['details'] = {
             "scenario_name": "ceph_ws,wr,rs,rr,rw",
             "status": "OK",
             "steady_state": {
@@ -289,9 +298,11 @@ class DataHandlerTest(unittest.TestCase):
             "volume_size": 10
         }
         self.data_handler._push_to_db(self)
-        self.assertEqual('FAIL', self.db_results[9],
+        self.assertEqual('FAIL', self.db_results[1]['criteria'],
                          'Expected FAIL in criteria')
-        self.assertEqual('2017-09-04 21:05:00', self.db_results[3],
+        self.assertEqual('2017-09-04 21:05:00',
+                         self.db_results[1]['start_time'],
                          'Start time')
-        self.assertEqual('2017-09-04 21:20:00', self.db_results[4],
+        self.assertEqual('2017-09-04 21:20:00',
+                         self.db_results[1]['end_time'],
                          'End time')