Integrate Report with CLI. 85/36685/6
author    Taseer <taseer94@gmail.com>
          Thu, 29 Jun 2017 00:24:25 +0000 (05:24 +0500)
committer Taseer <taseer94@gmail.com>
          Thu, 29 Jun 2017 16:04:03 +0000 (21:04 +0500)
- Migrate fixtures to conftest.py so they can be used by multiple test files

Change-Id: I9a705b93189ac1f2e6fd2fc4e3f05aec7af379f1
Signed-off-by: Taseer Ahmed <taseer94@gmail.com>
qtip/cli/commands/cmd_report.py
qtip/reporter/console.py
tests/conftest.py
tests/unit/ansible_library/plugins/action/calculate_test.py
tests/unit/cli/cmd_report_test.py
tests/unit/reporter/console_test.py

qtip/cli/commands/cmd_report.py
index 4176fd9..1a58aa6 100644
@@ -7,11 +7,42 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
+from asq.initiators import query
 import click
+from prettytable import PrettyTable
 
 from qtip.reporter.console import ConsoleReporter
 
 
+def extract_section(sections, section_name, node):
+    """ Extract information related to QPI """
+    qpi = query(sections).where(lambda child: child['name'] == node) \
+                         .select_many(lambda child: child['sections']) \
+                         .where(lambda child: child['name'] == section_name) \
+                         .first()
+    return qpi
+
+
+def display_report(report, section_name, node):
+    table_workload = PrettyTable(['Workload', 'Description',
+                                  'Result', 'Score'])
+    table_workload.align = 'l'
+
+    section_report = extract_section(report['nodes'], section_name, node)
+
+    for metric in section_report['metrics']:
+        for wl in metric['workloads']:
+            table_workload.add_row([wl['name'],
+                                    wl['description'],
+                                    wl['result'],
+                                    wl['score']])
+    return {
+        "ss": section_report['score'],
+        "desc": section_report['description'],
+        "table": table_workload
+    }
+
+
 @click.group()
 def cli():
     """ View QTIP results"""
@@ -19,9 +50,13 @@ def cli():
 
 
 @cli.command('show')
-@click.argument('metric')
-@click.option('-p', '--path', help='Path to result directory')
-def show(metric, path):
-    reporter = ConsoleReporter({})
-    report = reporter.render(metric, path)
-    click.echo(report)
+@click.option('-n', '--node', help="Compute node in OPNFV cluster")
+@click.argument('section-name')
+def show(node, section_name):
+    qpi = ConsoleReporter.load_result()
+    result = display_report(qpi, section_name, node)
+
+    click.echo("Node Score: {}".format(qpi['score']))
+    click.echo("Section Score: {}".format(result['ss']))
+    click.echo("Description: {}".format(result['desc']))
+    click.echo(result['table'])
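(Not part of the change itself: a minimal sketch of how the new display_report() helper is consumed, using a stripped-down report dict shaped like the aggregated_report fixture added in tests/conftest.py below; assumes asq and prettytable are installed.)

    from qtip.cli.commands.cmd_report import display_report

    # Hypothetical aggregated report, mirroring the tests/conftest.py fixtures
    report = {
        'score': 2017,
        'nodes': [{
            'name': 'compute',
            'score': 2048,
            'sections': [{
                'name': 'ssl',
                'score': 1.0,
                'description': 'cryptography and SSL/TLS performance',
                'metrics': [{
                    'workloads': [{'name': 'rsa_sign',
                                   'description': 'rsa x bits sign per second',
                                   'result': 500,
                                   'score': 1.0}]
                }]
            }]
        }]
    }

    result = display_report(report, 'ssl', 'compute')
    print(result['ss'])     # 1.0 (section score)
    print(result['desc'])   # section description
    print(result['table'])  # PrettyTable of workloads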
qtip/reporter/console.py
index da04930..9aaa5f7 100644
@@ -7,14 +7,12 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
-import glob
 import json
+import os
 from os import path
-from jinja2 import Environment
-from jinja2 import FileSystemLoader
 
 from qtip.base import BaseActor
-from qtip.reporter import filters
+
 
 ROOT_DIR = path.join(path.dirname(__file__), path.pardir, path.pardir)
 
@@ -22,25 +20,9 @@ ROOT_DIR = path.join(path.dirname(__file__), path.pardir, path.pardir)
 class ConsoleReporter(BaseActor):
     """ report benchmark result to console """
 
-    def __init__(self, config, parent=None):
-        super(ConsoleReporter, self).__init__(config, parent=parent)
-
-        # TODO (taseer) load template from config
-        tpl_path = path.join(path.dirname(__file__), 'templates')
-        tpl_loader = FileSystemLoader(tpl_path)
-        self._env = Environment(loader=tpl_loader)
-        self._env.filters['justify'] = filters.justify
-
-    def load_result(self, result_path):
-        result_dirs = glob.glob('{}/qtip-*'.format(result_path))
-        # select the last (latest) directory for rendering report, result_dirs[-1]
-        with open(path.join(result_path, result_dirs[-1], 'result.json')) as sample:
-            result = json.load(sample)
+    @staticmethod
+    def load_result():
+        result_path = path.join(os.getcwd(), 'results', 'current', 'qpi.json')
+        with open(result_path) as qpi:
+            result = json.load(qpi)
         return result
-
-    def render(self, metric, result_path):
-        template = self._env.get_template('base.j2')
-        var_dict = self.load_result(result_path)
-        var_dict['metric_name'] = metric
-        out = template.render(var_dict)
-        return out
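(For reference, a minimal sketch of the new loading path: load_result() is now a staticmethod that reads results/current/qpi.json relative to the current working directory instead of globbing qtip-* result directories and rendering Jinja2 templates.)

    from qtip.reporter.console import ConsoleReporter

    # Requires <cwd>/results/current/qpi.json to exist
    qpi = ConsoleReporter.load_result()
    print(qpi['score'])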
tests/conftest.py
index 32042f2..ddec94b 100644
@@ -43,3 +43,151 @@ def collectors_config(plan_config):
 @pytest.fixture(scope='session')
 def logfile_config(collectors_config):
     return collectors_config[0]
+
+
+@pytest.fixture(scope='session')
+def metrics():
+    return {
+        "ssl_rsa": {
+            "rsa_sign": [500],
+            "rsa_verify": [600]
+        }
+    }
+
+
+@pytest.fixture(scope='session')
+def metric_spec():
+    return {
+        "name": "ssl_rsa",
+        "workloads": [
+            {"name": "rsa_sign"},
+            {"name": "rsa_verify"}
+        ]
+    }
+
+
+@pytest.fixture(scope='session')
+def section_spec(metric_spec):
+    return {
+        "name": "ssl",
+        "description": "cryptography and SSL/TLS performance",
+        "metrics": [metric_spec]
+    }
+
+
+@pytest.fixture(scope='session')
+def qpi_spec(section_spec):
+    return {
+        "name": "compute",
+        "description": "QTIP Performance Index of compute",
+        "sections": [section_spec]
+    }
+
+
+@pytest.fixture(scope='session')
+def rsa_sign_baseline():
+    return {'name': 'rsa_sign', 'baseline': '500'}
+
+
+@pytest.fixture(scope='session')
+def rsa_verify_baseline():
+    return {"name": "rsa_verify", "baseline": 600}
+
+
+@pytest.fixture(scope='session')
+def metric_baseline(rsa_sign_baseline, rsa_verify_baseline):
+    return {
+        "name": "ssl_rsa",
+        "workloads": [rsa_sign_baseline, rsa_verify_baseline]
+    }
+
+
+@pytest.fixture(scope='session')
+def section_baseline(metric_baseline):
+    return {
+        "name": "ssl",
+        "metrics": [metric_baseline]
+    }
+
+
+@pytest.fixture(scope='session')
+def qpi_baseline(section_baseline):
+    return {
+        "name": "compute-baseline",
+        "description": "The baseline for compute QPI",
+        "score": 2048,
+        "sections": [section_baseline]
+    }
+
+
+@pytest.fixture(scope='session')
+def metric_result():
+    return {'score': 1.0,
+            'name': 'ssl_rsa',
+            'description': 'metric',
+            'workloads': [{'description': 'workload', 'name': 'rsa_sign',
+                           'score': 1.0, 'result': 500},
+                          {'description': 'workload', 'name': 'rsa_verify',
+                           'score': 1.0, 'result': 600}]}
+
+
+@pytest.fixture(scope='session')
+def section_result(metric_result):
+    return {'score': 1.0,
+            'name': 'ssl',
+            'description': 'cryptography and SSL/TLS performance',
+            'metrics': [metric_result]}
+
+
+@pytest.fixture(scope='session')
+def info():
+    return {
+        "system_info": {
+            "kernel": "4.4.0-72-generic x86_64 (64 bit)",
+            "product": "EC600G3",
+            "os": "Ubuntu 16.04 xenial",
+            "cpu": "2 Deca core Intel Xeon E5-2650 v3s (-HT-MCP-SMP-)",
+            "disk": "1200.3GB (25.1% used)",
+            "memory": "30769.7/128524.1MB"
+        }
+    }
+
+
+@pytest.fixture(scope='session')
+def spec():
+    """ refers to resources/QPI/compute.yaml """
+    return {
+        "sections": [{
+            "name": "ssl",
+            "description": "cryptography and SSL/TLS performance",
+            "metrics": [{
+                "workloads": [
+                    {"name": "rsa_sign",
+                     "description": "rsa x bits sign per second"},
+                    {"name": "rsa_verify",
+                     "description": "rsa x bits verify per second"}]
+            }]
+        }]
+    }
+
+
+@pytest.fixture(scope='session')
+def qpi_result(section_result, info):
+    return {'score': 2048,
+            'name': 'compute',
+            'description': 'QTIP Performance Index of compute',
+            'system_info': info,
+            'sections': [section_result],
+            'spec': "https://git.opnfv.org/qtip/tree/resources/QPI/compute.yaml",
+            'baseline': "https://git.opnfv.org/qtip/tree/resources/QPI/compute-baseline.json",
+            }
+
+
+@pytest.fixture(scope='session')
+def aggregated_report(qpi_result):
+    return {
+        'score': 2017,
+        'name': 'compute',
+        'description': 'POD compute QPI',
+        'nodes': [qpi_result]
+    }
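(Because these fixtures now live in tests/conftest.py with session scope, any test module under tests/ can request them by parameter name, without imports; a hypothetical example:)

    # hypothetical test module anywhere under tests/
    def test_qpi_baseline_score(qpi_baseline):
        assert qpi_baseline['score'] == 2048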
tests/unit/ansible_library/plugins/action/calculate_test.py
index 2a0b6a7..b053a4b 100644
@@ -12,126 +12,6 @@ import pytest
 from qtip.ansible_library.plugins.action import calculate
 
 
-@pytest.fixture()
-def metrics():
-    return {
-        "ssl_rsa": {
-            "rsa_sign": [500],
-            "rsa_verify": [600]
-        }
-    }
-
-
-@pytest.fixture()
-def metric_spec():
-    return {
-        "name": "ssl_rsa",
-        "workloads": [
-            {"name": "rsa_sign"},
-            {"name": "rsa_verify"}
-        ]
-    }
-
-
-@pytest.fixture()
-def section_spec(metric_spec):
-    return {
-        "name": "ssl",
-        "description": "cryptography and SSL/TLS performance",
-        "metrics": [metric_spec]
-    }
-
-
-@pytest.fixture()
-def qpi_spec(section_spec):
-    return {
-        "name": "compute",
-        "description": "QTIP Performance Index of compute",
-        "sections": [section_spec]
-    }
-
-
-@pytest.fixture()
-def rsa_sign_baseline():
-    return {'name': 'rsa_sign', 'baseline': '500'}
-
-
-@pytest.fixture()
-def rsa_verify_baseline():
-    return {"name": "rsa_verify", "baseline": 600}
-
-
-@pytest.fixture()
-def metric_baseline(rsa_sign_baseline, rsa_verify_baseline):
-    return {
-        "name": "ssl_rsa",
-        "workloads": [rsa_sign_baseline, rsa_verify_baseline]
-    }
-
-
-@pytest.fixture()
-def section_baseline(metric_baseline):
-    return {
-        "name": "ssl",
-        "metrics": [metric_baseline]
-    }
-
-
-@pytest.fixture()
-def qpi_baseline(section_baseline):
-    return {
-        "name": "compute-baseline",
-        "description": "The baseline for compute QPI",
-        "score": 2048,
-        "sections": [section_baseline]
-    }
-
-
-@pytest.fixture()
-def metric_result():
-    return {'score': 1.0,
-            'name': 'ssl_rsa',
-            'description': 'metric',
-            'workloads': [{'description': 'workload', 'name': 'rsa_sign',
-                           'score': 1.0, 'result': 500},
-                          {'description': 'workload', 'name': 'rsa_verify',
-                           'score': 1.0, 'result': 600}]}
-
-
-@pytest.fixture()
-def section_result(metric_result):
-    return {'score': 1.0,
-            'name': 'ssl',
-            'description': 'cryptography and SSL/TLS performance',
-            'metrics': [metric_result]}
-
-
-@pytest.fixture()
-def info():
-    return {
-        "system_info": {
-            "kernel": "4.4.0-72-generic x86_64 (64 bit)",
-            "product": "EC600G3",
-            "os": "Ubuntu 16.04 xenial",
-            "cpu": "2 Deca core Intel Xeon E5-2650 v3s (-HT-MCP-SMP-)",
-            "disk": "1200.3GB (25.1% used)",
-            "memory": "30769.7/128524.1MB"
-        }
-    }
-
-
-@pytest.fixture()
-def qpi_result(section_result, info):
-    return {'score': 2048,
-            'name': 'compute',
-            'description': 'QTIP Performance Index of compute',
-            'system_info': info,
-            'sections': [section_result],
-            'spec': "https://git.opnfv.org/qtip/tree/resources/QPI/compute.yaml",
-            'baseline': "https://git.opnfv.org/qtip/tree/resources/QPI/compute-baseline.json",
-            }
-
-
 def test_calc_metric(metric_spec, metrics, metric_baseline, metric_result):
     assert calculate.calc_metric(metric_spec,
                                  metrics['ssl_rsa'],
tests/unit/cli/cmd_report_test.py
index 064ed8f..e4e68fe 100644
@@ -7,35 +7,13 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
-import pytest
-from os import path
 
-from click.testing import CliRunner
-from qtip.cli.entry import cli
+from qtip.cli.commands import cmd_report as report
 
 
-@pytest.fixture(scope="module")
-def runner():
-    return CliRunner()
+def test_display_report(aggregated_report):
+    """Test display_report for the ssl section of the aggregated report"""
 
-
-@pytest.fixture(scope="module")
-def result_path():
-    result = path.join(path.dirname(__file__), path.pardir, path.pardir,
-                       'data/reporter')
-    return result
-
-
-@pytest.mark.parametrize(['report_name'], [
-    ('dhrystone',),
-    ('whetstone',),
-    ('dpi',),
-    ('ramspeed',),
-    ('ssl',)
-])
-def test_dhrystone(report_name, runner, result_path):
-    """Test dhrystone report"""
-
-    result = runner.invoke(cli, ['report', 'show', report_name, '-p', result_path])
-    for line in str(result).split('\n'):
-        assert len(line) <= 80
+    result = report.display_report(aggregated_report, 'ssl', 'compute')
+    assert result['ss'] == 1.0
+    assert result['desc'] == 'cryptography and SSL/TLS performance'
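(The rewritten command can still be driven end-to-end through click's CliRunner, which this test no longer uses; a sketch assuming results/current/qpi.json exists in the working directory:)

    from click.testing import CliRunner
    from qtip.cli.entry import cli

    runner = CliRunner()
    # 'report show' takes the section name as argument and the node via -n
    result = runner.invoke(cli, ['report', 'show', 'ssl', '-n', 'compute'])
    print(result.output)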
tests/unit/reporter/console_test.py
index 11fb3f7..64e32c4 100644
@@ -8,7 +8,6 @@
 ##############################################################################
 
 import pytest
-from os import path
 
 from qtip.reporter.console import ConsoleReporter
 
@@ -18,27 +17,5 @@ def console_reporter():
     return ConsoleReporter({})
 
 
-@pytest.fixture
-def result_path():
-    result = path.join(path.dirname(__file__), path.pardir, path.pardir,
-                       'data/reporter')
-    return result
-
-
 def test_constructor(console_reporter):
     assert isinstance(console_reporter, ConsoleReporter)
-
-
-@pytest.mark.parametrize(['template_name'], [
-    ('dhrystone',),
-    ('whetstone',),
-    ('dpi',),
-    ('ramspeed',),
-    ('ssl',)
-])
-def test_templates(template_name, console_reporter, result_path):
-    """ Test dhrystone report"""
-
-    result = console_reporter.render(template_name, result_path)
-    for line in result.split('\n'):
-        assert len(line) <= 80