[ansible][fedora] Update package name
[barometer.git] / baro_tests / collectd.py
index 1b5a923..c1a05af 100644 (file)
@@ -1,5 +1,7 @@
 # -*- coding: utf-8 -*-
-
+#
+# Copyright 2017 OPNFV
+#
 # Licensed under the Apache License, Version 2.0 (the "License"); you may
 # not use this file except in compliance with the License. You may obtain
 # a copy of the License at
@@ -11,6 +13,7 @@
 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 # License for the specific language governing permissions and limitations
 # under the License.
+# Patch on October 10 2017
 
 """Executing test of plugins"""
 
@@ -22,6 +25,8 @@ import time
 import logging
 import config_server
 import tests
+import dma
+from distutils import version
 from opnfv.deployment import factory
 
 AODH_NAME = 'aodh'
@@ -210,21 +215,27 @@ class CSVClient(object):
                                 meter_category, date))
                         stdout2 = node.run_cmd(
                             "tail -1 /var/lib/collectd/csv/"
-                            + "{0}.jf.intel.com/{1}/{2}-{3}".format(
-                                compute_node.get_name(), plugin_subdir,
+                            + "{0}/{1}/{2}-{3}".format(
+                                hostname, plugin_subdir,
                                 meter_category, date))
                         # Storing last two values
                         values = stdout1
+                        values2 = stdout2
                         if values is None:
                             logger.error(
                                 'Getting last two CSV entries of meter category'
                                 + ' {0} in {1} subdir failed'.format(
                                     meter_category, plugin_subdir))
+                        elif values2 is None:
+                            logger.error(
+                                'Getting last CSV entries of meter category'
+                                + ' {0} in {1} subdir failed'.format(
+                                    meter_category, plugin_subdir))
                         else:
                             values = values.split(',')
                             old_value = float(values[0])
-                            stdout2 = stdout2.split(',')
-                            new_value = float(stdout2[0])
+                            values2 = values2.split(',')
+                            new_value = float(values2[0])
                             metrics.append((
                                 plugin_subdir, meter_category, old_value,
                                 new_value))
@@ -248,7 +259,7 @@ def get_csv_categories_for_ipmi(conf, compute_node):
     return [category.strip()[:-11] for category in categories]
 
 
-def _process_result(compute_node, out_plugin, test, result, results_list):
+def _process_result(compute_node, out_plugin, test, result, results_list, node):
     """Print test result and append it to results list.
 
     Keyword arguments:
@@ -258,12 +269,12 @@ def _process_result(compute_node, out_plugin, test, result, results_list):
     """
     if result:
         logger.info(
-            'Test case {0} PASSED with {1}.'.format(
-                test, out_plugin))
+            'Test case for {0} with {1} PASSED on {2}.'.format(
+                node, out_plugin, test))
     else:
         logger.error(
-            'Test case {0} FAILED with {1}.'.format(
-                test, out_plugin))
+            'Test case for {0} with {1} FAILED on {2}.'.format(
+                node, out_plugin, test))
     results_list.append((compute_node, out_plugin, test, result))
 
 
@@ -315,28 +326,28 @@ def _print_final_result_of_plugin(
             elif (id, out_plugin, plugin, False) in results:
                 print_line += ' FAIL   |'
             else:
-                print_line += ' NOT EX |'
+                print_line += ' SKIP   |'
         elif out_plugin == 'AODH':
             if (id, out_plugin, plugin, True) in results:
                 print_line += ' PASS   |'
             elif (id, out_plugin, plugin, False) in results:
                 print_line += ' FAIL   |'
             else:
-                print_line += ' FAIL   |'
+                print_line += ' SKIP   |'
         elif out_plugin == 'SNMP':
             if (id, out_plugin, plugin, True) in results:
                 print_line += ' PASS   |'
             elif (id, out_plugin, plugin, False) in results:
                 print_line += ' FAIL   |'
             else:
-                print_line += ' FAIL   |'
+                print_line += ' SKIP   |'
         elif out_plugin == 'CSV':
             if (id, out_plugin, plugin, True) in results:
                 print_line += ' PASS   |'
             elif (id, out_plugin, plugin, False) in results:
                 print_line += ' FAIL   |'
             else:
-                print_line += ' NOT EX |'
+                print_line += ' SKIP   |'
         else:
             print_line += ' SKIP   |'
     return print_line
@@ -497,8 +508,7 @@ def _exec_testcase(
         'intel_rdt': [
             'intel_rdt-0-2'],
         'hugepages': [
-            'hugepages-mm-2048Kb', 'hugepages-node0-2048Kb',
-            'hugepages-node1-2048Kb'],
+            'hugepages-mm-2048Kb', 'hugepages-node0-2048Kb'],
         # 'ipmi': ['ipmi'],
         'mcelog': [
             'mcelog-SOCKET_0_CHANNEL_0_DIMM_any',
@@ -533,7 +543,7 @@ def _exec_testcase(
             logger.error(' * ' + error)
         _process_result(
             compute_node.get_id(), out_plugin, test_labels[name], False,
-            results)
+            results, compute_node.get_name())
     else:
         plugin_errors = [
             error for plugin, error, critical in error_plugins
@@ -554,6 +564,18 @@ def _exec_testcase(
                 + 'following prerequisites failed:')
             for prerequisite in failed_prerequisites:
                 logger.error(' * {}'.format(prerequisite))
+        # optional plugin
+        elif "intel_rdt" == name and not conf.is_rdt_available(compute_node):
+            # intel_rdt is optional; skip the test on nodes without RDT support
+            logger.info("RDT is not available on virtual nodes, skipping test.")
+            res = True
+            logger.debug("Results for {}, pre-processing".format(test_labels[name]))
+            logger.debug(str(results))
+            _process_result(
+                compute_node.get_id(), out_plugin, test_labels[name],
+                res, results, compute_node.get_name())
+            logger.debug("Results for {}, post-processing".format(test_labels[name]))
+            logger.debug(str(results))
         else:
             plugin_interval = conf.get_plugin_interval(compute_node, name)
             if out_plugin == 'Gnocchi':
@@ -582,9 +604,13 @@ def _exec_testcase(
                     'Test works, but will be reported as failure,'
                     + 'because of non-critical errors.')
                 res = False
+            logger.debug("Results for {}, pre-processing".format(test_labels[name]))
+            logger.debug(str(results))
             _process_result(
                 compute_node.get_id(), out_plugin, test_labels[name],
-                res, results)
+                res, results, compute_node.get_name())
+            logger.debug("Results for {}, post-processing".format(test_labels[name]))
+            logger.debug(str(results))
 
 
 def get_results_for_ovs_events(
@@ -627,38 +653,40 @@ def mcelog_install():
     for node in nodes:
         if node.is_compute():
             centos_release = node.run_cmd('uname -r')
-            if '3.10.0-514.26.2.el7.x86_64' not in centos_release:
+            if version.LooseVersion(centos_release) < version.LooseVersion('3.10.0-514.26.2.el7.x86_64'):
                 logger.info(
-                    'Mcelog will not be enabled '
-                    + 'on node-{0}, '.format(node.get_dict()['name'])
-                    + 'unsupported CentOS release found ({1}).'.format(
-                        centos_release))
+                    'Mcelog will NOT be enabled on node-{0}.'
+                    ' Unsupported CentOS release found ({1}).'.format(
+                        node.get_dict()['name'], centos_release))
             else:
                 logger.info(
-                    'Checking if  mcelog is enabled'
+                    'Checking if mcelog is enabled'
                     + ' on node-{}...'.format(node.get_dict()['name']))
                 res = node.run_cmd('ls')
-            if 'mce-inject_ea' and 'corrected' in res:
-                logger.info(
-                    'Mcelog seems to be already installed '
-                    + 'on node-{}.'.format(node.get_dict()['name']))
-                node.run_cmd('sudo modprobe mce-inject')
-                node.run_cmd('sudo ./mce-inject_ea < corrected')
-            else:
-                logger.info(
-                    'Mcelog will be enabled on node-{}...'.format(
-                        node.get_dict()['id']))
-                node.put_file(mce_bin, 'mce-inject_ea')
-                node.run_cmd('chmod a+x mce-inject_ea')
-                node.run_cmd('echo "CPU 0 BANK 0" > corrected')
-                node.run_cmd(
-                    'echo "STATUS 0xcc00008000010090" >>'
-                    + ' corrected')
-                node.run_cmd(
-                    'echo "ADDR 0x0010FFFFFFF" >> corrected')
-                node.run_cmd('sudo modprobe mce-inject')
-                node.run_cmd('sudo ./mce-inject_ea < corrected')
-    logger.info('Mcelog is installed on all compute nodes')
+                if 'mce-inject_ea' in res and 'corrected' in res:
+                    logger.info(
+                        'Mcelog seems to be already installed '
+                        + 'on node-{}.'.format(node.get_dict()['name']))
+                    node.run_cmd('sudo modprobe mce-inject')
+                    node.run_cmd('sudo ./mce-inject_ea < corrected')
+                else:
+                    logger.info(
+                        'Mcelog will be enabled '
+                        + 'on node-{}...'.format(node.get_dict()['name']))
+                    node.put_file(mce_bin, 'mce-inject_ea')
+                    node.run_cmd('chmod a+x mce-inject_ea')
+                    node.run_cmd('echo "CPU 0 BANK 0" > corrected')
+                    node.run_cmd(
+                        'echo "STATUS 0xcc00008000010090" >>'
+                        + ' corrected')
+                    node.run_cmd(
+                        'echo "ADDR 0x0010FFFFFFF" >> corrected')
+                    node.run_cmd('sudo modprobe mce-inject')
+                    node.run_cmd('sudo ./mce-inject_ea < corrected')
+                    logger.info(
+                        'Mcelog was installed '
+                        + 'on node-{}.'.format(node.get_dict()['name']))
+
 
 
 def mcelog_delete():
@@ -736,8 +764,9 @@ def main(bt_logger=None):
     create_ovs_bridge()
     gnocchi_running_on_con = False
     aodh_running_on_con = False
-    snmp_running = True
-    _print_label('Testing Gnocchi, AODH and SNMP on nodes')
+    # Disabling SNMP write plug-in
+    snmp_running = False
+    _print_label('Testing Gnocchi and AODH plugins on nodes')
 
     for controller in controllers:
         gnocchi_running = (
@@ -767,21 +796,20 @@ def main(bt_logger=None):
         compute_node_names.append(node_name)
         plugins_to_enable = []
         error_plugins = []
-        gnocchi_running = (
+        gnocchi_running_com = (
             gnocchi_running and conf.check_gnocchi_plugin_included(
                 compute_node))
-        aodh_running = (
+        aodh_running_com = (
             aodh_running and conf.check_aodh_plugin_included(compute_node))
-        logger.info("SNMP enabled on {}" .format(node_name))
-        if gnocchi_running:
+        logger.info("SNMP enabled on {}" .format(node_name))
+        if gnocchi_running_com:
             out_plugins[node_id].append("Gnocchi")
-        if aodh_running:
+        if aodh_running_com:
             out_plugins[node_id].append("AODH")
         if snmp_running:
             out_plugins[node_id].append("SNMP")
 
         if 'Gnocchi' in out_plugins[node_id]:
-            logger.info("CSV will be enabled for verification")
             plugins_to_enable.append('csv')
             out_plugins[node_id].append("CSV")
             if plugins_to_enable:
@@ -796,36 +824,40 @@ def main(bt_logger=None):
                 logger.info(
                     'Testcases on node {} will not be executed'.format(
                         node_id))
-            else:
-                if plugins_to_enable:
-                    collectd_restarted, collectd_warnings = \
-                        conf.restart_collectd(compute_node)
-                    sleep_time = 10
-                    logger.info(
-                        'Sleeping for {} seconds'.format(sleep_time)
-                        + ' after collectd restart...')
-                    time.sleep(sleep_time)
-                if plugins_to_enable and not collectd_restarted:
+
+        for i in out_plugins[node_id]:
+            if i == 'AODH':
+                for plugin_name in sorted(aodh_plugin_labels.keys()):
+                    _exec_testcase(
+                        aodh_plugin_labels, plugin_name, i,
+                        controllers, compute_node, conf, results,
+                        error_plugins, out_plugins[node_id])
+            elif i == 'CSV':
+                _print_label("Node {}: Executing CSV Testcases".format(
+                    node_name))
+                logger.info("Restarting collectd for CSV tests")
+                collectd_restarted, collectd_warnings = \
+                    conf.restart_collectd(compute_node)
+                sleep_time = 10
+                logger.info(
+                    'Sleeping for {} seconds'.format(sleep_time)
+                    + ' after collectd restart...')
+                time.sleep(sleep_time)
+                if not collectd_restarted:
                     for warning in collectd_warnings:
                         logger.warning(warning)
                     logger.error(
                         'Restart of collectd on node {} failed'.format(
-                            node_id))
+                            compute_node.get_name()))
                     logger.info(
-                        'Testcases on node {}'.format(node_id)
+                        'CSV Testcases on node {}'.format(compute_node.get_name())
                         + ' will not be executed.')
-                else:
-                    if collectd_warnings:
-                        for warning in collectd_warnings:
-                            logger.warning(warning)
-
-        for i in out_plugins[node_id]:
-            if i == 'AODH':
-                for plugin_name in sorted(aodh_plugin_labels.keys()):
+                for plugin_name in sorted(plugin_labels.keys()):
                     _exec_testcase(
-                        aodh_plugin_labels, plugin_name, i,
+                        plugin_labels, plugin_name, i,
                         controllers, compute_node, conf, results,
                         error_plugins, out_plugins[node_id])
+
             else:
                 for plugin_name in sorted(plugin_labels.keys()):
                     _exec_testcase(
@@ -837,11 +869,19 @@ def main(bt_logger=None):
     print_overall_summary(
         compute_ids, plugin_labels, aodh_plugin_labels, results, out_plugins)
 
-    if ((len([res for res in results if not res[2]]) > 0)
-            or (len(results) < len(computes) * len(plugin_labels))):
-        logger.error('Some tests have failed or have not been executed')
-        return 1
-    return 0
+    res_overall = 0
+    for res in results:
+        if not res[3]:
+            logger.error('Some tests have failed or have not been executed')
+            logger.error('Overall Result is Fail')
+            res_overall = 1
+        else:
+            pass
+
+    _print_label('Testing DMA on compute nodes')
+    res_agent = dma.dma_main(logger, conf, computes)
+
+    return 0 if res_overall == 0 and res_agent == 0 else 1
 
 
 if __name__ == '__main__':