Merge "Added snmpd container"
diff --git a/baro_tests/collectd.py b/baro_tests/collectd.py
index 6ddb921..304b87b 100644
--- a/baro_tests/collectd.py
+++ b/baro_tests/collectd.py
@@ -11,6 +11,7 @@
 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 # License for the specific language governing permissions and limitations
 # under the License.
+# Patch on October 10 2017
 
 """Executing test of plugins"""
 
@@ -33,7 +34,6 @@ APEX_IP = os.getenv("INSTALLER_IP").rstrip('\n')
 APEX_USER = 'root'
 APEX_USER_STACK = 'stack'
 APEX_PKEY = '/root/.ssh/id_rsa'
-PATH = os.path.dirname(os.path.realpath(__file__))
 
 
 class KeystoneException(Exception):
@@ -211,21 +211,27 @@ class CSVClient(object):
                                 meter_category, date))
                         stdout2 = node.run_cmd(
                             "tail -1 /var/lib/collectd/csv/"
-                            + "{0}.jf.intel.com/{1}/{2}-{3}".format(
-                                compute_node.get_name(), plugin_subdir,
+                            + "{0}/{1}/{2}-{3}".format(
+                                hostname, plugin_subdir,
                                 meter_category, date))
                         # Storing last two values
                         values = stdout1
+                        values2 = stdout2
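+                        # Keep the second command's output separately so each result can be validated on its own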
                         if values is None:
                             logger.error(
                                 'Getting last two CSV entries of meter category'
                                 + ' {0} in {1} subdir failed'.format(
                                     meter_category, plugin_subdir))
+                        elif values2 is None:
+                            logger.error(
+                                'Getting last CSV entry of meter category'
+                                + ' {0} in {1} subdir failed'.format(
+                                    meter_category, plugin_subdir))
                         else:
                             values = values.split(',')
                             old_value = float(values[0])
-                            stdout2 = stdout2.split(',')
-                            new_value = float(stdout2[0])
+                            values2 = values2.split(',')
+                            new_value = float(values2[0])
                             metrics.append((
                                 plugin_subdir, meter_category, old_value,
                                 new_value))
@@ -249,7 +255,7 @@ def get_csv_categories_for_ipmi(conf, compute_node):
     return [category.strip()[:-11] for category in categories]
 
 
-def _process_result(compute_node, out_plugin, test, result, results_list):
+def _process_result(compute_node, out_plugin, test, result, results_list, node):
     """Print test result and append it to results list.
 
     Keyword arguments:
@@ -259,12 +265,12 @@ def _process_result(compute_node, out_plugin, test, result, results_list):
     """
     if result:
         logger.info(
-            'Test case {0} PASSED with {1}.'.format(
-                test, out_plugin))
+            'Test case {0} with {1} PASSED on {2}.'.format(
+                test, out_plugin, node))
     else:
         logger.error(
-            'Test case {0} FAILED with {1}.'.format(
-                test, out_plugin))
+            'Test case {0} with {1} FAILED on {2}.'.format(
+                test, out_plugin, node))
     results_list.append((compute_node, out_plugin, test, result))
 
 
@@ -534,7 +540,7 @@ def _exec_testcase(
             logger.error(' * ' + error)
         _process_result(
             compute_node.get_id(), out_plugin, test_labels[name], False,
-            results)
+            results, compute_node.get_name())
     else:
         plugin_errors = [
             error for plugin, error, critical in error_plugins
@@ -585,7 +591,7 @@ def _exec_testcase(
                 res = False
             _process_result(
                 compute_node.get_id(), out_plugin, test_labels[name],
-                res, results)
+                res, results, compute_node.get_name())
 
 
 def get_results_for_ovs_events(
@@ -624,6 +630,7 @@ def mcelog_install():
                                           APEX_USER_STACK,
                                           APEX_PKEY)
     nodes = handler.get_nodes()
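+    # Path of the mce-inject_ea binary shipped alongside this test script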
+    mce_bin = os.path.dirname(os.path.realpath(__file__)) + '/mce-inject_ea'
     for node in nodes:
         if node.is_compute():
             centos_release = node.run_cmd('uname -r')
@@ -648,9 +655,7 @@ def mcelog_install():
                 logger.info(
                     'Mcelog will be enabled on node-{}...'.format(
                         node.get_dict()['id']))
-                node.put_file(
-                    'PATH/'
-                    + 'mce-inject_ea', 'mce-inject_ea')
+                node.put_file(mce_bin, 'mce-inject_ea')
                 node.run_cmd('chmod a+x mce-inject_ea')
                 node.run_cmd('echo "CPU 0 BANK 0" > corrected')
                 node.run_cmd(
@@ -738,8 +743,9 @@ def main(bt_logger=None):
     create_ovs_bridge()
     gnocchi_running_on_con = False
     aodh_running_on_con = False
-    snmp_running = True
-    _print_label('Testing Gnocchi, AODH and SNMP on nodes')
+    # Disabling SNMP write plug-in
+    snmp_running = False
+    _print_label('Testing Gnocchi and AODH plugins on nodes')
 
     for controller in controllers:
         gnocchi_running = (
@@ -774,7 +780,7 @@ def main(bt_logger=None):
                 compute_node))
         aodh_running = (
             aodh_running and conf.check_aodh_plugin_included(compute_node))
-        logger.info("SNMP enabled on {}" .format(node_name))
+        logger.info("SNMP enabled on {}".format(node_name))
         if gnocchi_running:
             out_plugins[node_id].append("Gnocchi")
         if aodh_running:
@@ -783,7 +789,6 @@ def main(bt_logger=None):
             out_plugins[node_id].append("SNMP")
 
         if 'Gnocchi' in out_plugins[node_id]:
-            logger.info("CSV will be enabled for verification")
             plugins_to_enable.append('csv')
             out_plugins[node_id].append("CSV")
             if plugins_to_enable:
@@ -798,36 +803,40 @@ def main(bt_logger=None):
                 logger.info(
                     'Testcases on node {} will not be executed'.format(
                         node_id))
-            else:
-                if plugins_to_enable:
-                    collectd_restarted, collectd_warnings = \
-                        conf.restart_collectd(compute_node)
-                    sleep_time = 10
-                    logger.info(
-                        'Sleeping for {} seconds'.format(sleep_time)
-                        + ' after collectd restart...')
-                    time.sleep(sleep_time)
-                if plugins_to_enable and not collectd_restarted:
+
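+        # Run the test cases that correspond to each output plugin detected on this node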
+        for i in out_plugins[node_id]:
+            if i == 'AODH':
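+                # Run the AODH-specific test cases for this node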
+                for plugin_name in sorted(aodh_plugin_labels.keys()):
+                    _exec_testcase(
+                        aodh_plugin_labels, plugin_name, i,
+                        controllers, compute_node, conf, results,
+                        error_plugins, out_plugins[node_id])
+            elif i == 'CSV':
+                _print_label("Node {}: Executing CSV Testcases".format(
+                    node_name))
+                logger.info("Restarting collectd for CSV tests")
+                collectd_restarted, collectd_warnings = \
+                    conf.restart_collectd(compute_node)
+                sleep_time = 10
+                logger.info(
+                    'Sleeping for {} seconds'.format(sleep_time)
+                    + ' after collectd restart...')
+                time.sleep(sleep_time)
+                if not collectd_restarted:
                     for warning in collectd_warnings:
                         logger.warning(warning)
                     logger.error(
                         'Restart of collectd on node {} failed'.format(
-                            node_id))
+                            compute_node.get_name()))
                     logger.info(
-                        'Testcases on node {}'.format(node_id)
+                        'CSV Testcases on node {}'.format(compute_node.get_name())
                         + ' will not be executed.')
+                    continue
-                else:
-                    if collectd_warnings:
-                        for warning in collectd_warnings:
-                            logger.warning(warning)
-
-        for i in out_plugins[node_id]:
-            if i == 'AODH':
-                for plugin_name in sorted(aodh_plugin_labels.keys()):
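+                # Run every collectd plugin test case with CSV as the output plugin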
+                for plugin_name in sorted(plugin_labels.keys()):
                     _exec_testcase(
-                        aodh_plugin_labels, plugin_name, i,
+                        plugin_labels, plugin_name, i,
                         controllers, compute_node, conf, results,
                         error_plugins, out_plugins[node_id])
+
             else:
                 for plugin_name in sorted(plugin_labels.keys()):
                     _exec_testcase(
@@ -839,10 +848,13 @@ def main(bt_logger=None):
     print_overall_summary(
         compute_ids, plugin_labels, aodh_plugin_labels, results, out_plugins)
 
-    if ((len([res for res in results if not res[2]]) > 0)
-            or (len(results) < len(computes) * len(plugin_labels))):
-        logger.error('Some tests have failed or have not been executed')
-        return 1
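+    # Fail the overall run if any recorded test result is False or missing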
+    for res in results:
+        if not res[3]:
+            logger.error('Some tests have failed or have not been executed')
+            logger.error('Overall Result is Fail')
+            return 1
     return 0