Modified CSV verification, added Gnocchi and AODH testcases 01/41901/3
author     Sharada Shiddibhavi <sharada.shiddibhavi@intel.com>
           Wed, 13 Sep 2017 14:36:55 +0000 (14:36 +0000)
committer  Sharada Shiddibhavi <sharada.shiddibhavi@intel.com>
           Wed, 13 Sep 2017 17:17:15 +0000 (17:17 +0000)
This patch changes the CSV directory path used for metrics verification and
adds test cases for the Gnocchi and AODH plugins.

Change-Id: Icff77546be0b747279522c18e7b3aaa32df2f7c4
Signed-off-by: Sharada Shiddibhavi <sharada.shiddibhavi@intel.com>
baro_tests/collectd.py
baro_tests/config_server.py

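The core CSV change below replaces the hard-coded '<node>.jf.intel.com' directory
component with the first name reported by 'hostname -A' on the compute node
itself. As a minimal sketch of that path construction (illustrative only, not
part of the patch; build_csv_path() and the sample values are assumptions):

    def build_csv_path(hostname_output, plugin_subdir, meter_category, date):
        """Build the collectd CSV file path for one meter category."""
        # 'hostname -A' can return several names; the patch takes the first one.
        fqdn = hostname_output.split()[0]
        return "/var/lib/collectd/csv/{0}/{1}/{2}-{3}".format(
            fqdn, plugin_subdir, meter_category, date)

    # Example with a hypothetical hostname:
    # build_csv_path("overcloud-novacompute-0.example.com ",
    #                "hugepages-mm-2048Kb", "vmpage_action-free", "2017-09-13")
    # -> /var/lib/collectd/csv/overcloud-novacompute-0.example.com/
    #    hugepages-mm-2048Kb/vmpage_action-free-2017-09-13
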
index a002314..cd492d8 100644
@@ -224,13 +224,15 @@ class CSVClient(object):
             if compute_name == node.get_dict()['name']:
                 date = node.run_cmd(
                     "date '+%Y-%m-%d'")
+                hostname = node.run_cmd('hostname -A')
+                hostname = hostname.split()[0]
                 metrics = []
                 for plugin_subdir in plugin_subdirectories:
                     for meter_category in meter_categories:
                         stdout1 = node.run_cmd(
                             "tail -2 /var/lib/collectd/csv/"
-                            + "{0}.jf.intel.com/{1}/{2}-{3}".format(
-                                compute_node.get_name(), plugin_subdir,
+                            + "{0}/{1}/{2}-{3}".format(
+                                hostname, plugin_subdir,
                                 meter_category, date))
                         stdout2 = node.run_cmd(
                             "tail -1 /var/lib/collectd/csv/"
@@ -272,7 +274,7 @@ def get_csv_categories_for_ipmi(conf, compute_node):
     return [category.strip()[:-11] for category in categories]
 
 
-def _process_result(compute_node, test, result, results_list):
+def _process_result(compute_node, out_plugin, test, result, results_list):
     """Print test result and append it to results list.
 
     Keyword arguments:
@@ -282,13 +284,13 @@ def _process_result(compute_node, test, result, results_list):
     """
     if result:
         logger.info(
-            'Compute node {0} test case {1} PASSED.'.format(
-                compute_node, test))
+            'Test case {0} PASSED with {1}.'.format(
+                test, out_plugin))
     else:
         logger.error(
-            'Compute node {0} test case {1} FAILED.'.format(
-                compute_node, test))
-    results_list.append((compute_node, test, result))
+            'Test case {0} FAILED with {1}.'.format(
+                test, out_plugin))
+    results_list.append((compute_node, out_plugin, test, result))
 
 
 def _print_label(label):
@@ -333,22 +335,34 @@ def _print_final_result_of_plugin(
     """
     print_line = ''
     for id in compute_ids:
-        if out_plugins[id] == out_plugin:
-            if (id, plugin, True) in results:
+        if out_plugin == 'Gnocchi':
+            if (id, out_plugin, plugin, True) in results:
+                print_line += ' PASS   |'
+            elif (id, out_plugin, plugin, False) in results:
+                print_line += ' FAIL   |'
+            else:
+                print_line += ' NOT EX |'
+        elif out_plugin == 'AODH':
+            if (id, out_plugin, plugin, True) in results:
+                print_line += ' PASS   |'
+            elif (id, out_plugin, plugin, False) in results:
+                print_line += ' FAIL   |'
+            else:
+                print_line += ' NOT EX |'
+        elif out_plugin == 'CSV':
+            if (id, out_plugin, plugin, True) in results:
                 print_line += ' PASS   |'
-            elif (id, plugin, False) in results \
-                    and out_plugins[id] == out_plugin:
+            elif (id, out_plugin, plugin, False) in results:
                 print_line += ' FAIL   |'
             else:
                 print_line += ' NOT EX |'
-        elif out_plugin == 'Gnocchi':
-            print_line += ' NOT EX |'
         else:
-            print_line += ' NOT EX |'
+            print_line += ' SKIP   |'
     return print_line
 
 
-def print_overall_summary(compute_ids, tested_plugins, results, out_plugins):
+def print_overall_summary(
+        compute_ids, tested_plugins, aodh_plugins, results, out_plugins):
     """Print overall summary table.
 
     Keyword arguments:
@@ -359,7 +373,6 @@ def print_overall_summary(compute_ids, tested_plugins, results, out_plugins):
     """
     compute_node_names = ['Node-{}'.format(i) for i in range(
         len((compute_ids)))]
-    # compute_node_names = ['Node-{}'.format(id) for id in compute_ids]
     all_computes_in_line = ''
     for compute in compute_node_names:
         all_computes_in_line += '| ' + compute + (' ' * (7 - len(compute)))
@@ -377,46 +390,61 @@ def print_overall_summary(compute_ids, tested_plugins, results, out_plugins):
     logger.info(line_of_nodes)
     logger.info(
         '+' + ('-' * 16) + '+' + (('-' * 8) + '+') * len(compute_node_names))
-    out_plugins_print = ['Gnocchi']
-    if 'SNMP' in out_plugins.values():
-        out_plugins_print.append('SNMP')
-    if 'AODH' in out_plugins.values():
-        out_plugins_print.append('AODH')
-    if 'CSV' in out_plugins.values():
-        out_plugins_print.append('CSV')
+    out_plugins_print = []
+    out_plugins_print1 = []
+    for key in out_plugins.keys():
+        if 'Gnocchi' in out_plugins[key]:
+            out_plugins_print1.append('Gnocchi')
+        if 'AODH' in out_plugins[key]:
+            out_plugins_print1.append('AODH')
+        if 'SNMP' in out_plugins[key]:
+            out_plugins_print1.append('SNMP')
+        if 'CSV' in out_plugins[key]:
+            out_plugins_print1.append('CSV')
+    for i in out_plugins_print1:
+        if i not in out_plugins_print:
+            out_plugins_print.append(i)
     for out_plugin in out_plugins_print:
         output_plugins_line = ''
         for id in compute_ids:
-            out_plugin_result = 'FAIL'
+            out_plugin_result = '----'
             if out_plugin == 'Gnocchi':
                 out_plugin_result = \
-                    'PASS' if out_plugins[id] == out_plugin else 'FAIL'
+                    'PASS' if 'Gnocchi' in out_plugins[id] else 'FAIL'
             if out_plugin == 'AODH':
-                if out_plugins[id] == out_plugin:
-                    out_plugin_result = \
-                        'PASS' if out_plugins[id] == out_plugin else 'FAIL'
+                out_plugin_result = \
+                    'PASS' if 'AODH' in out_plugins[id] else 'FAIL'
             if out_plugin == 'SNMP':
-                if out_plugins[id] == out_plugin:
-                    out_plugin_result = \
-                        'PASS' if out_plugins[id] == out_plugin else 'FAIL'
+                out_plugin_result = \
+                    'PASS' if [
+                        plugin for comp_id, out_pl, plugin, res in results
+                        if comp_id == id and res] else 'FAIL'
             if out_plugin == 'CSV':
-                if out_plugins[id] == out_plugin:
-                    out_plugin_result = \
-                        'PASS' if [
-                            plugin for comp_id, plugin, res in results
-                            if comp_id == id and res] else 'FAIL'
-                else:
-                    out_plugin_result = 'SKIP'
+                out_plugin_result = \
+                    'PASS' if [
+                        plugin for comp_id, out_pl, plugin, res in results
+                        if comp_id == id and res] else 'FAIL'
             output_plugins_line += '| ' + out_plugin_result + '   '
         logger.info(
             '| OUT:{}'.format(out_plugin) + (' ' * (11 - len(out_plugin)))
             + output_plugins_line + '|')
-        for plugin in sorted(tested_plugins.values()):
-            line_plugin = _print_final_result_of_plugin(
-                plugin, compute_ids, results, out_plugins, out_plugin)
-            logger.info(
-                '|  IN:{}'.format(plugin) + (' ' * (11-len(plugin)))
-                + '|' + line_plugin)
+
+        if out_plugin == 'AODH':
+            for plugin in sorted(aodh_plugins.values()):
+                line_plugin = _print_final_result_of_plugin(
+                    plugin, compute_ids, results, out_plugins, out_plugin)
+                logger.info(
+                    '|  IN:{}'.format(plugin) + (' ' * (11-len(plugin)))
+                    + '|' + line_plugin)
+        else:
+            for plugin in sorted(tested_plugins.values()):
+                line_plugin = _print_final_result_of_plugin(
+                    plugin, compute_ids, results, out_plugins, out_plugin)
+                logger.info(
+                    '|  IN:{}'.format(plugin) + (' ' * (11-len(plugin)))
+                    + '|' + line_plugin)
         logger.info(
             '+' + ('-' * 16) + '+'
             + (('-' * 8) + '+') * len(compute_node_names))
@@ -424,8 +452,8 @@ def print_overall_summary(compute_ids, tested_plugins, results, out_plugins):
 
 
 def _exec_testcase(
-        test_labels, name, gnocchi_running, aodh_running, snmp_running,
-        controllers, compute_node, conf, results, error_plugins, out_plugins):
+        test_labels, name, out_plugin, controllers, compute_node,
+        conf, results, error_plugins, out_plugins):
     """Execute the testcase.
 
     Keyword arguments:
@@ -457,7 +485,7 @@ def _exec_testcase(
             conf.is_libpqos_on_node(compute_node),
             'libpqos must be installed.')],
         'mcelog': [(
-            conf.is_installed(compute_node, 'mcelog'),
+            conf.is_mcelog_installed(compute_node, 'mcelog'),
             'mcelog must be installed.')],
         'ovs_events': [(
             len(ovs_existing_configured_int) > 0 or len(ovs_interfaces) > 0,
@@ -466,13 +494,13 @@ def _exec_testcase(
             len(ovs_existing_configured_bridges) > 0,
             'Bridges must be configured.')]}
     gnocchi_criteria_lists = {
-        'hugepages': ['hugepages'],
-        'mcelog': ['mcelog'],
-        'ovs_events': ['interface-ovs-system'],
-        'ovs_stats': ['ovs_stats-br0.br0']}
+        'hugepages': 'hugepages',
+        'mcelog': 'mcelog',
+        'ovs_events': 'interface-ovs-system',
+        'ovs_stats': 'ovs_stats-br0.br0'}
     aodh_criteria_lists = {
-        'mcelog': ['mcelog.errors'],
-        'ovs_events': ['ovs_events.gauge']}
+        'mcelog': 'mcelog',
+        'ovs_events': 'ovs_events'}
     snmp_mib_files = {
         'intel_rdt': '/usr/share/snmp/mibs/Intel-Rdt.txt',
         'hugepages': '/usr/share/snmp/mibs/Intel-Hugepages.txt',
@@ -501,8 +529,7 @@ def _exec_testcase(
                 compute_node, 'intel_rdt', 'Cores')],
         'hugepages': [
             'hugepages-mm-2048Kb', 'hugepages-node0-2048Kb',
-            'hugepages-node1-2048Kb', 'hugepages-mm-1048576Kb',
-            'hugepages-node0-1048576Kb', 'hugepages-node1-1048576Kb'],
+            'hugepages-node1-2048Kb'],
         # 'ipmi': ['ipmi'],
         'mcelog': [
             'mcelog-SOCKET_0_CHANNEL_0_DIMM_any',
@@ -521,13 +548,9 @@ def _exec_testcase(
         # 'ipmi': csv_meter_categories_ipmi,
         'mcelog': [
             'errors-corrected_memory_errors',
-            'errors-uncorrected_memory_errors',
-            'errors-corrected_memory_errors_in_24h',
-            'errors-uncorrected_memory_errors_in_24h'],
+            'errors-uncorrected_memory_errors'],
         'ovs_stats': [
-            'if_collisions', 'if_dropped', 'if_errors', 'if_packets',
-            'if_rx_errors-crc', 'if_rx_errors-frame', 'if_rx_errors-over',
-            'if_rx_octets', 'if_tx_octets'],
+            'if_dropped', 'if_errors', 'if_packets'],
         'ovs_events': ['gauge-link_status']}
 
     _print_plugin_label(
@@ -541,7 +564,8 @@ def _exec_testcase(
         for error in plugin_critical_errors:
             logger.error(' * ' + error)
         _process_result(
-            compute_node.get_id(), test_labels[name], False, results)
+            compute_node.get_id(), out_plugin, test_labels[name], False,
+            results)
     else:
         plugin_errors = [
             error for plugin, error, critical in error_plugins
@@ -563,35 +587,37 @@ def _exec_testcase(
             for prerequisite in failed_prerequisites:
                 logger.error(' * {}'.format(prerequisite))
         else:
-            if gnocchi_running:
-                plugin_interval = conf.get_plugin_interval(compute_node, name)
+            plugin_interval = conf.get_plugin_interval(compute_node, name)
+            if out_plugin == 'Gnocchi':
                 res = conf.test_plugins_with_gnocchi(
-                    compute_node.get_id(), plugin_interval, logger,
-                    criteria_list=gnocchi_criteria_lists[name])
-            elif aodh_running:
+                    compute_node.get_name(), plugin_interval,
+                    logger, criteria_list=gnocchi_criteria_lists[name])
+            if out_plugin == 'AODH':
                 res = conf.test_plugins_with_aodh(
-                   compute_node.get_id(), plugin_interval,
-                   logger, creteria_list=aodh_criteria_lists[name])
-            elif snmp_running:
+                    compute_node.get_name(), plugin_interval,
+                    logger, criteria_list=aodh_criteria_lists[name])
+            if out_plugin == 'SNMP':
                 res = \
                     name in snmp_mib_files and name in snmp_mib_strings \
                     and tests.test_snmp_sends_data(
                         compute_node,
-                        conf.get_plugin_interval(compute_node, name), logger,
+                        plugin_interval, logger,
                         SNMPClient(conf, compute_node), snmp_mib_files[name],
                         snmp_mib_strings[name], snmp_in_commands[name], conf)
-            else:
+            if out_plugin == 'CSV':
                 res = tests.test_csv_handles_plugin_data(
                     compute_node, conf.get_plugin_interval(compute_node, name),
                     name, csv_subdirs[name], csv_meter_categories[name],
                     logger, CSVClient(conf))
+
             if res and plugin_errors:
                 logger.info(
                     'Test works, but will be reported as failure,'
                     + 'because of non-critical errors.')
                 res = False
             _process_result(
-                compute_node.get_id(), test_labels[name], res, results)
+                compute_node.get_id(), out_plugin, test_labels[name],
+                res, results)
 
 
 def get_results_for_ovs_events(
@@ -618,7 +644,7 @@ def create_ovs_bridge():
         if node.is_compute():
             node.run_cmd('sudo ovs-vsctl add-br br0')
             node.run_cmd('sudo ovs-vsctl set-manager ptcp:6640')
-        logger.info('OVS Bridges created on compute nodes')
+    logger.info('OVS Bridges created on compute nodes')
 
 
 def mcelog_install():
@@ -635,18 +661,18 @@ def mcelog_install():
             if '3.10.0-514.26.2.el7.x86_64' not in centos_release:
                 logger.info(
                     'Mcelog will not be enabled '
-                    + 'on node-{0}, '.format(node.get_dict()['id'])
+                    + 'on node-{0}, '.format(node.get_dict()['name'])
                     + 'unsupported CentOS release found ({1}).'.format(
                         centos_release))
             else:
                 logger.info(
                     'Checking if  mcelog is enabled'
-                    + ' on node-{}...'.format(node.get_dict()['id']))
+                    + ' on node-{}...'.format(node.get_dict()['name']))
                 res = node.run_cmd('ls')
             if 'mce-inject_ea' and 'corrected' in res:
                 logger.info(
                     'Mcelog seems to be already installed '
-                    + 'on node-{}.'.format(node.get_dict()['id']))
+                    + 'on node-{}.'.format(node.get_dict()['name']))
                 node.run_cmd('sudo modprobe mce-inject')
                 node.run_cmd('sudo ./mce-inject_ea < corrected')
             else:
@@ -734,43 +760,23 @@ def main(bt_logger=None):
 
     _print_label(
         'Display of Control and Compute nodes available in the set up')
-    logger.info('controllers: {}'.format([('{0}: {1} ({2})'.format(
-        node.get_id(), node.get_name(),
-        node.get_ip())) for node in controllers]))
-    logger.info('computes: {}'.format([('{0}: {1} ({2})'.format(
-        node.get_id(), node.get_name(), node.get_ip()))
-        for node in computes]))
+    logger.info('controllers: {}'.format([('{0}: {1}'.format(
+        node.get_name(), node.get_ip())) for node in controllers]))
+    logger.info('computes: {}'.format([('{0}: {1}'.format(
+        node.get_name(), node.get_ip())) for node in computes]))
 
     mcelog_install()
     create_ovs_bridge()
     gnocchi_running_on_con = False
     aodh_running_on_con = False
     snmp_running = False
-    _print_label('Testing Gnocchi, AODH and SNMP on controller nodes')
+    _print_label('Testing Gnocchi, AODH and SNMP on nodes')
 
     for controller in controllers:
-        gnocchi_client = GnocchiClient()
-        gnocchi_client.auth_token()
         gnocchi_running = (
             gnocchi_running_on_con and conf.is_gnocchi_running(controller))
-        aodh_client = AodhClient()
-        aodh_client.auth_token()
         aodh_running = (
-            aodh_running_on_con and conf.is_aodh_running(controller))
-    if gnocchi_running:
-        logger.info("Gnocchi is running on controller.")
-    elif aodh_running:
-        logger.error("Gnocchi is not running on controller.")
-        logger.info("AODH is running on controller.")
-    elif snmp_running:
-        logger.error("Gnocchi is not running on Controller")
-        logger.error("AODH is not running on controller.")
-        logger.info("SNMP is running on controller.")
-    else:
-        logger.error("Gnocchi is not running on Controller")
-        logger.error("AODH is not running on controller.")
-        logger.error("SNMP is not running on controller.")
-        logger.info("CSV will be enabled on compute nodes.")
+            aodh_running_on_con or conf.is_aodh_running(controller))
 
     compute_ids = []
     compute_node_names = []
@@ -782,113 +788,87 @@ def main(bt_logger=None):
         'mcelog': 'Mcelog',
         'ovs_stats': 'OVS stats',
         'ovs_events': 'OVS events'}
-    out_plugins = {
-        'gnocchi': 'Gnocchi',
-        'aodh': 'AODH',
-        'snmp': 'SNMP',
-        'csv': 'CSV'}
+    aodh_plugin_labels = {
+        'mcelog': 'Mcelog',
+        'ovs_events': 'OVS events'}
+    out_plugins = {}
+    out_plugins_to_test = []
     for compute_node in computes:
         node_id = compute_node.get_id()
         node_name = compute_node.get_name()
-        out_plugins[node_id] = 'CSV'
+        out_plugins[node_id] = []
         compute_ids.append(node_id)
         compute_node_names.append(node_name)
         plugins_to_enable = []
-        _print_label('NODE {}: Test Gnocchi Plug-in'.format(node_name))
-        logger.info('Checking if gnocchi plug-in is included in compute nodes.')
-        if not conf.check_gnocchi_plugin_included(compute_node):
-            logger.error('Gnocchi plug-in is not included.')
-            logger.info(
-                'Testcases on node {} will not be executed'.format(node_name))
-        else:
-            collectd_restarted, collectd_warnings = \
-                conf.restart_collectd(compute_node)
-            sleep_time = 30
-            logger.info(
-                'Sleeping for {} seconds after collectd restart...'.format(
-                    sleep_time))
-            time.sleep(sleep_time)
-            if not collectd_restarted:
-                for warning in collectd_warnings:
-                    logger.warning(warning)
+        error_plugins = []
+        gnocchi_running = (
+            gnocchi_running or conf.check_gnocchi_plugin_included(
+                compute_node))
+        aodh_running = (
+            aodh_running and conf.check_aodh_plugin_included(compute_node))
+        if gnocchi_running:
+            out_plugins[node_id].append("Gnocchi")
+        if aodh_running:
+            out_plugins[node_id].append("AODH")
+        if snmp_running:
+            out_plugins_to_test.append("SNMP")
+
+        if 'Gnocchi' not in out_plugins[node_id]:
+            logger.info("CSV will be enabled for verification")
+            plugins_to_enable.append('csv')
+            out_plugins[node_id].append("CSV")
+            if plugins_to_enable:
+                _print_label(
+                    'NODE {}: Enabling Test Plug-in '.format(node_name)
+                    + 'and Test case execution')
+            if plugins_to_enable and not conf.enable_plugins(
+                    compute_node, plugins_to_enable, error_plugins,
+                    create_backup=False):
                 logger.error(
-                    'Restart of collectd on node {} failed'.format(node_name))
+                    'Failed to test plugins on node {}.'.format(node_id))
                 logger.info(
                     'Testcases on node {} will not be executed'.format(
-                        node_name))
+                        node_id))
             else:
-                for warning in collectd_warnings:
-                    logger.warning(warning)
-
-                if gnocchi_running:
-                    out_plugins[node_id] = 'Gnocchi'
-                    logger.info("Gnocchi is active and collecting data")
-                elif aodh_running:
-                    out_plugins[node_id] = 'AODH'
-                    logger.info("AODH withh be tested")
-                    _print_label('Node {}: Test AODH' .format(node_name))
-                    logger.info("Checking if AODH is running")
-                    logger.info("AODH is running")
-                elif snmp_running:
-                    out_plugins[node_id] = 'SNMP'
-                    logger.info("SNMP will be tested.")
-                    _print_label('NODE {}: Test SNMP'.format(node_id))
-                    logger.info("Checking if SNMP is running.")
-                    logger.info("SNMP is running.")
-                else:
-                    plugins_to_enable.append('csv')
-                    out_plugins[node_id] = 'CSV'
-                    logger.error("Gnocchi, AODH, SNMP are not running")
-                    logger.info(
-                        "CSV will be enabled for verification "
-                        + "of test plugins.")
                 if plugins_to_enable:
-                    _print_label(
-                        'NODE {}: Enabling Test Plug-in '.format(node_name)
-                        + 'and Test case execution')
-                error_plugins = []
-                if plugins_to_enable and not conf.enable_plugins(
-                        compute_node, plugins_to_enable, error_plugins,
-                        create_backup=False):
-                    logger.error(
-                        'Failed to test plugins on node {}.'.format(node_id))
+                    collectd_restarted, collectd_warnings = \
+                        conf.restart_collectd(compute_node)
+                    sleep_time = 10
                     logger.info(
-                        'Testcases on node {} will not be executed'.format(
+                        'Sleeping for {} seconds'.format(sleep_time)
+                        + ' after collectd restart...')
+                    time.sleep(sleep_time)
+                if plugins_to_enable and not collectd_restarted:
+                    for warning in collectd_warnings:
+                        logger.warning(warning)
+                    logger.error(
+                        'Restart of collectd on node {} failed'.format(
                             node_id))
+                    logger.info(
+                        'Testcases on node {}'.format(node_id)
+                        + ' will not be executed.')
                 else:
-                    if plugins_to_enable:
-                        collectd_restarted, collectd_warnings = \
-                            conf.restart_collectd(compute_node)
-                        sleep_time = 30
-                        logger.info(
-                            'Sleeping for {} seconds'.format(sleep_time)
-                            + ' after collectd restart...')
-                        time.sleep(sleep_time)
-                    if plugins_to_enable and not collectd_restarted:
+                    if collectd_warnings:
                         for warning in collectd_warnings:
                             logger.warning(warning)
-                        logger.error(
-                            'Restart of collectd on node {} failed'.format(
-                                node_id))
-                        logger.info(
-                            'Testcases on node {}'.format(node_id)
-                            + ' will not be executed.')
-                    else:
-                        if collectd_warnings:
-                            for warning in collectd_warnings:
-                                logger.warning(warning)
-
-                        for plugin_name in sorted(plugin_labels.keys()):
-                            _exec_testcase(
-                                plugin_labels, plugin_name, gnocchi_running,
-                                aodh_running, snmp_running, controllers,
-                                compute_node, conf, results, error_plugins,
-                                out_plugins[node_id])
-
-            # _print_label('NODE {}: Restoring config file'.format(node_name))
-            # conf.restore_config(compute_node)
-        mcelog_delete()
-    print_overall_summary(compute_ids, plugin_labels, results, out_plugins)
+
+        for i in out_plugins[node_id]:
+            if i == 'AODH':
+                for plugin_name in sorted(aodh_plugin_labels.keys()):
+                    _exec_testcase(
+                        aodh_plugin_labels, plugin_name, i,
+                        controllers, compute_node, conf, results,
+                        error_plugins, out_plugins[node_id])
+            else:
+                for plugin_name in sorted(plugin_labels.keys()):
+                    _exec_testcase(
+                        plugin_labels, plugin_name, i,
+                        controllers, compute_node, conf, results,
+                        error_plugins, out_plugins[node_id])
+
+    mcelog_delete()
+    print_overall_summary(
+        compute_ids, plugin_labels, aodh_plugin_labels, results, out_plugins)
 
     if ((len([res for res in results if not res[2]]) > 0)
             or (len(results) < len(computes) * len(plugin_labels))):
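With Gnocchi and AODH in the mix, each results entry now also carries the output
plugin, i.e. (node_id, out_plugin, test_name, passed) instead of the old
3-tuple, and the summary table is built per output plugin. A minimal sketch of
that bookkeeping (illustrative only; the sample data is invented):

    results = []

    def process_result(node_id, out_plugin, test, passed):
        # Mirrors the reworked _process_result(): store a 4-tuple, not a 3-tuple.
        results.append((node_id, out_plugin, test, passed))

    process_result('node-4', 'Gnocchi', 'Mcelog', True)
    process_result('node-4', 'AODH', 'OVS events', False)
    process_result('node-5', 'CSV', 'Hugepages', True)

    # Per-output-plugin filtering, as print_overall_summary() now does:
    for out_plugin in ('Gnocchi', 'AODH', 'CSV'):
        row = ['PASS' if ok else 'FAIL'
               for _id, out, _test, ok in results if out == out_plugin]
        print(out_plugin, row if row else ['NOT EX'])
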
index f156fcf..0b16ea6 100644
@@ -257,28 +257,39 @@ class ConfigServer(object):
         Return boolean value whether Gnocchi is running.
         """
         gnocchi_present = False
-        lines = self.execute_command(
-            'source overcloudrc.v3;systemctl status openstack-gnocchi-api | '
-            + 'grep running', controller.get_ip())
-        for line in lines:
-            if '(running)' in line:
-                gnocchi_present = True
+        controller_name = controller.get_name()
+        nodes = get_apex_nodes()
+        for node in nodes:
+            if controller_name == node.get_dict()['name']:
+                node.put_file(
+                    '/home/opnfv/functest/conf/openstack.creds',
+                    'overcloudrc.v3')
+                stdout = node.run_cmd(
+                    "source overcloudrc.v3;"
+                    + "openstack catalog list | grep gnocchi")
+                if 'gnocchi' in stdout:
+                    gnocchi_present = True
         return gnocchi_present
 
     def is_aodh_running(self, controller):
         """Check whether aodh service is running on controller
         """
         aodh_present = False
-        lines = self.execute_command(
-            'source overcloudrc.v3;systemctl openstack-aodh-api | grep running',
-            controller.get_ip())
-        for line in lines:
-            self.__logger.info("Line = {}" .format(line))
-            if '(running)' in line:
-                aodh_present = True
+        controller_name = controller.get_name()
+        nodes = get_apex_nodes()
+        for node in nodes:
+            if controller_name == node.get_dict()['name']:
+                node.put_file(
+                    '/home/opnfv/functest/conf/openstack.creds',
+                    'overcloudrc.v3')
+                stdout = node.run_cmd(
+                    "source overcloudrc.v3;"
+                    + "openstack catalog list | grep aodh")
+                if 'aodh' in stdout:
+                    aodh_present = True
         return aodh_present
 
-    def is_installed(self, compute, package):
+    def is_mcelog_installed(self, compute, package):
         """Check whether package exists on compute node.
 
         Keyword arguments:
@@ -310,6 +321,32 @@ class ConfigServer(object):
                     return True
         return False
 
+    def check_aodh_plugin_included(self, compute):
+        """Check if aodh plugin is included in collectd.conf file.
+        If not, try to enable it.
+
+        Keyword arguments:
+        compute -- compute node instance
+
+        Return boolean value whether AODH plugin is included
+        or it's enabling was successful.
+        """
+        compute_name = compute.get_name()
+        nodes = get_apex_nodes()
+        for node in nodes:
+            if compute_name == node.get_dict()['name']:
+                aodh_conf = node.run_cmd('ls /etc/collectd/collectd.conf.d')
+                if 'aodh.conf' not in aodh_conf:
+                    self.__logger.info(
+                        "AODH Plugin not included in compute node")
+                    return False
+                else:
+                    self.__logger.info(
+                        "AODH plugin present in compute node {}" .format(
+                            compute_name))
+                    return True
+        return True
+
     def check_gnocchi_plugin_included(self, compute):
         """Check if gnocchi plugin is included in collectd.conf file.
         If not, try to enable it.
@@ -324,13 +361,14 @@ class ConfigServer(object):
         nodes = get_apex_nodes()
         for node in nodes:
             if compute_name == node.get_dict()['name']:
-                # node.run_cmd('su; "opnfvapex"')
                 gnocchi_conf = node.run_cmd('ls /etc/collectd/collectd.conf.d')
                 if 'collectd-ceilometer-plugin.conf' not in gnocchi_conf:
                     self.__logger.info("Gnocchi Plugin not included")
-                    return True
+                    return False
                 else:
-                    self.__logger.info("Gnochi plugin present")
+                    self.__logger.info(
+                        "Gnochi plugin available in compute node {}" .format(
+                            compute_name))
                     return True
         return True
 
@@ -341,14 +379,6 @@ class ConfigServer(object):
         Keyword arguments:
         compute -- compute node instance
         plugins -- list of plugins to be enabled
-        error_plugins -- list of tuples with found errors, new entries
-            may be added there (plugin, error_description, is_critical):
-                plugin -- plug-in name
-                error_decription -- description of the error
-                is_critical -- boolean value indicating whether error
-                    is critical
-        create_backup -- boolean value indicating whether backup
-            shall be created
 
         Return boolean value indicating whether function was successful.
         """
@@ -365,19 +395,6 @@ class ConfigServer(object):
                     + '/etc/collectd/collectd.conf.d/csv.conf')
         return True
 
-    def restore_config(self, compute):
-        """Restore collectd config file from backup on compute node.
-
-        Keyword arguments:
-        compute -- compute node instance
-        """
-        ssh, sftp = self.__open_sftp_session(
-            compute.get_ip(), 'root', 'opnfvapex')
-
-        self.__logger.info('Restoring config file from backup...')
-        ssh.exec_command("cp {0} {0}.used".format(COLLECTD_CONF))
-        ssh.exec_command("cp {0}.backup {0}".format(COLLECTD_CONF))
-
     def restart_collectd(self, compute):
         """Restart collectd on compute node.
 
@@ -419,142 +436,103 @@ class ConfigServer(object):
                     return False, warning
         return True, warning
 
-    def test_gnocchi_is_sending_data(self, controller):
-        """ Checking if Gnocchi is sending metrics to controller"""
-        metric_ids = []
-        timestamps1 = {}
-        timestamps2 = {}
-        ssh, sftp = self.__open_sftp_session(
-            controller.get_ip(), 'root', 'opnfvapex')
-
-        self.__logger.info('Getting gnocchi metric list on{}'.format(
-            controller.get_name()))
-        stdout = self.execute_command(
-            "source overcloudrc.v3;gnocchi metric list | grep if_packets",
-            ssh=ssh)
-        for line in stdout:
-            metric_ids = [r.split('|')[1] for r in stdout]
-        self.__logger.info("Metric ids = {}" .format(metric_ids))
-        for metric_id in metric_ids:
-            metric_id = metric_id.replace("u", "")
-            stdout = self.execute_command(
-                "source overcloudrc.v3;gnocchi measures show {}" .format(
-                    metric_id), ssh=ssh)
-            self.__logger.info("stdout measures ={}" .format(stdout))
-            for line in stdout:
-                if line[0] == '+':
-                    pass
-                else:
-                    self.__logger.info("Line = {}" .format(line))
-                    timestamps1 = [line.split('|')[1]]
-            self.__logger.info("Last line timetamp1 = {}" .format(timestamps1))
-            time.sleep(10)
-            stdout = self.execute_command(
-                "source overcloudrc.v3;gnocchi measures show {}" .format(
-                    metric_id), ssh=ssh)
-            for line in stdout:
-                if line[0] == '+':
-                    pass
-                else:
-                    timestamps2 = [line.split('|')[1]]
-            self.__logger.info("Last line timetamp2 = {}" .format(timestamps2))
-            if timestamps1 == timestamps2:
-                self.__logger.info("False")
-                # return False
-                return True
-            else:
-                self.__logger.info("True")
-                return True
+    def test_plugins_with_aodh(
+            self, compute, plugin_interval, logger,
+            criteria_list=[]):
 
-    def test_plugins_with_aodh(self, controller):
-        """Checking if AODH is sending metrics to controller"""
-        metric_ids = []
+        metric_id = {}
         timestamps1 = {}
         timestamps2 = {}
-        ssh, sftp = self.__open_sftp_session(
-            controller.get_ip(), 'root', 'opnfvapex')
-        self.__logger.info('Getting AODH alarm list on{}'.format(
-            controller.get_name()))
-        stdout = self.execute_command(
-            "source overcloudrc.v3;aodh alarm list | grep mcelog",
-            ssh=ssh)
-        for line in stdout:
-            metric_ids = [r.split('|')[1] for r in stdout]
-        self.__logger.info("Metric ids = {}" .format(metric_ids))
-        for metric_id in metric_ids:
-            metric_id = metric_id.replace("u", "")
-            stdout = self.execute_command(
-                "source overcloudrc.v3;aodh alarm show {}" .format(
-                    metric_id), ssh=ssh)
-            self.__logger.info("stdout alarms ={}" .format(stdout))
-            for line in stdout:
-                if line[0] == '+':
-                    pass
-                else:
-                    self.__logger.info("Line = {}" .format(line))
-                    timestamps1 = [line.split('|')[1]]
-            self.__logger.info("Last line timetamp1 = {}" .format(timestamps1))
-            time.sleep(10)
-            stdout = self.execute_command(
-                "source overcloudrc.v3;aodh alarm show {}" .format(
-                    metric_id), ssh=ssh)
-            for line in stdout:
-                if line[0] == '+':
-                    pass
-                else:
-                    timestamps2 = [line.split('|')[1]]
-            self.__logger.info("Last line timetamp2 = {}" .format(timestamps2))
-            if timestamps1 == timestamps2:
-                self.__logger.info("False")
-                # return False
-                return True
-            else:
-                self.__logger.info("True")
-                return True
+        nodes = get_apex_nodes()
+        for node in nodes:
+            if node.is_controller():
+                self.__logger.info('Getting AODH Alarm list on {}' .format(
+                    (node.get_dict()['name'])))
+                node.put_file(
+                    '/home/opnfv/functest/conf/openstack.creds',
+                    'overcloudrc.v3')
+                stdout = node.run_cmd(
+                    "source overcloudrc.v3;"
+                    + "aodh alarm list | grep {0} | grep {1}"
+                    .format(criteria_list, compute))
+                for line in stdout.splitlines():
+                    line = line.replace('|', "")
+                    metric_id = line.split()[0]
+                    stdout = node.run_cmd(
+                        'source overcloudrc.v3; aodh alarm show {}' .format(
+                            metric_id))
+                    for line in stdout.splitlines()[3: -1]:
+                        line = line.replace('|', "")
+                        if line.split()[0] == 'timestamp':
+                            timestamps1 = line.split()[1]
+                            self.__logger.info("timestamp_before = {}" .format(
+                                timestamps1))
+                        else:
+                            pass
+                    time.sleep(12)
+                    stdout = node.run_cmd(
+                        "source overcloudrc.v3; aodh alarm show {}" .format(
+                            metric_id))
+                    for line in stdout.splitlines()[3:-1]:
+                        line = line.replace('|', "")
+                        if line.split()[0] == 'timestamp':
+                            timestamps2 = line.split()[1]
+                            self.__logger.info("timestamp_after = {}" .format(
+                                timestamps2))
+                        else:
+                            pass
+                    if timestamps1 == timestamps2:
+                        self.__logger.info(
+                            "Data not updated after interval of 12 seconds")
+                        return False
+                    else:
+                        self.__logger.info("PASS")
+                        return True
 
     def test_plugins_with_gnocchi(
-            self, controller, compute_node, plugin_interval, logger,
+            self, compute, plugin_interval, logger,
             criteria_list=[]):
 
-        metric_ids = []
+        metric_id = {}
         timestamps1 = {}
         timestamps2 = {}
-        ssh, sftp = self.__open_sftp_session(
-            controller.get_ip(), 'root', 'opnfvapex')
-        self.__logger.info('Getting gnocchi metric list on{}'.format(
-            controller.get_name()))
-        stdout = self.execute_command(
-            "source overcloudrc.v3;gnocchi metric list | grep {0} | grep {1}"
-            .format(compute_node.get_name(), criteria_list), ssh=ssh)
-        for line in stdout:
-            metric_ids = [r.split('|')[1] for r in stdout]
-        self.__logger.info("Metric ids = {}" .format(metric_ids))
-        for metric_id in metric_ids:
-            metric_id = metric_id.replace("u", "")
-            stdout = self.execute_command(
-                "source overcloudrc.v3;gnocchi measures show {}" .format(
-                    metric_id), ssh=ssh)
-            self.__logger.info("stdout measures ={}" .format(stdout))
-            for line in stdout:
-                if line[0] == '+':
-                    pass
-                else:
-                    self.__logger.info("Line = {}" .format(line))
-                    timestamps1 = [line.split('|')[1]]
-            self.__logger.info("Last line timetamp1 = {}" .format(timestamps1))
-            time.sleep(10)
-            stdout = self.execute_command(
-                "source overcloudrc.v3;gnocchi measures show {}" .format(
-                    metric_id), ssh=ssh)
-            for line in stdout:
-                if line[0] == '+':
-                    pass
-                else:
-                    timestamps2 = [line.split('|')[1]]
-            self.__logger.info("Last line timetamp2 = {}" .format(timestamps2))
-            if timestamps1 == timestamps2:
-                self.__logger.info("False")
-                return False
-            else:
-                self.__logger.info("True")
-                return True
+        nodes = get_apex_nodes()
+        for node in nodes:
+            if node.is_controller():
+                self.__logger.info('Getting gnocchi metric list on {}' .format(
+                    (node.get_dict()['name'])))
+                node.put_file(
+                    '/home/opnfv/functest/conf/openstack.creds',
+                    'overcloudrc.v3')
+                stdout = node.run_cmd(
+                    "source overcloudrc.v3;"
+                    + "gnocchi metric list | grep {0} | grep {1}"
+                    .format(criteria_list, compute))
+                for line in stdout.splitlines():
+                    line = line.replace('|', "")
+                    metric_id = line.split()[0]
+                    stdout = node.run_cmd(
+                        'source overcloudrc.v3;gnocchi measures show {}'.format(
+                            metric_id))
+                    for line in stdout.splitlines()[3: -1]:
+                        if line[0] == '+':
+                            pass
+                        else:
+                            timestamps1 = line.replace('|', "")
+                            timestamps1 = timestamps1.split()[0]
+                    time.sleep(10)
+                    stdout = node.run_cmd(
+                        "source overcloudrc.v3;gnocchi measures show {}".format(
+                            metric_id))
+                    for line in stdout.splitlines()[3:-1]:
+                        if line[0] == '+':
+                            pass
+                        else:
+                            timestamps2 = line.replace('|', "")
+                            timestamps2 = timestamps2.split()[0]
+                    if timestamps1 == timestamps2:
+                        self.__logger.info("Data not updated after 12 seconds")
+                        return False
+                    else:
+                        self.__logger.info("PASS")
+                        return True
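
Both new verification paths (test_plugins_with_gnocchi and test_plugins_with_aodh)
boil down to the same freshness check: read the latest timestamp for a matching
metric or alarm, wait a fixed number of seconds, read it again, and pass only if
it advanced. A minimal sketch of that check (illustrative only;
get_latest_timestamp is a hypothetical stand-in for parsing the
'gnocchi measures show' / 'aodh alarm show' output):

    import time

    def measurements_are_fresh(get_latest_timestamp, wait_seconds):
        """Return True if the newest timestamp changes after waiting."""
        before = get_latest_timestamp()
        time.sleep(wait_seconds)
        after = get_latest_timestamp()
        return after != before

    # Example with canned timestamps standing in for CLI output:
    samples = iter(['2017-09-13T14:36:55', '2017-09-13T14:37:05'])
    print(measurements_are_fresh(lambda: next(samples), 0.1))  # True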