1 # -*- coding: utf-8 -*-
3 # Licensed under the Apache License, Version 2.0 (the "License"); you may
4 # not use this file except in compliance with the License. You may obtain
5 # a copy of the License at
7 # http://www.apache.org/licenses/LICENSE-2.0
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
11 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
12 # License for the specific language governing permissions and limitations
15 """Executing test of plugins"""
18 from keystoneclient.v3 import client
25 from opnfv.deployment import factory
28 GNOCCHI_NAME = 'gnocchi'
29 ID_RSA_SRC = '/root/.ssh/id_rsa'
30 ID_RSA_DST_DIR = '/root/.ssh'
31 ID_RSA_DST = ID_RSA_DST_DIR + '/id_rsa'
32 APEX_IP = os.getenv("INSTALLER_IP").rstrip('\n')
34 APEX_USER_STACK = 'stack'
35 APEX_PKEY = '/root/.ssh/id_rsa'
38 class KeystoneException(Exception):
39 """Keystone exception class"""
40 def __init__(self, message, exc=None, response=None):
43 message -- error message
48 message += "\nReason: %s" % exc
49 super(KeystoneException, self).__init__(message)
51 self.response = response
class InvalidResponse(KeystoneException):
    """Exception raised when an HTTP response cannot be parsed."""

    def __init__(self, exc, response):
        """Wrap a parse failure together with the offending response.

        Keyword arguments:
        exc -- exception raised while processing the response
        response -- HTTP response object that could not be handled
        """
        super(InvalidResponse, self).__init__(
            "Invalid response", exc, response)
68 handler = factory.Factory.get_handler('apex',
72 nodes = handler.get_nodes()
76 class GnocchiClient(object):
77 # Gnocchi Client to authenticate and request meters
79 self._auth_token = None
80 self._gnocchi_url = None
81 self._meter_list = None
86 return self._auth_token
88 def get_gnocchi_url(self):
90 return self._gnocchi_url
92 def get_gnocchi_metrics(self, criteria=None):
93 # Subject to change if metric gathering is different for gnocchi
94 self._request_meters(criteria)
95 return self._meter_list
97 def _auth_server(self):
98 # Request token in authentication server
99 logger.debug('Connecting to the auth server {}'.format(
100 os.environ['OS_AUTH_URL']))
101 keystone = client.Client(username=os.environ['OS_USERNAME'],
102 password=os.environ['OS_PASSWORD'],
103 tenant_name=os.environ['OS_USERNAME'],
104 auth_url=os.environ['OS_AUTH_URL'])
105 self._auth_token = keystone.auth_token
106 for service in keystone.service_catalog.get_data():
107 if service['name'] == GNOCCHI_NAME:
108 for service_type in service['endpoints']:
109 if service_type['interface'] == 'internal':
110 self._gnocchi_url = service_type['url']
112 if self._gnocchi_url is None:
113 logger.warning('Gnocchi is not registered in service catalog')
115 def _request_meters(self, criteria):
116 """Request meter list values from ceilometer
119 criteria -- criteria for ceilometer meter list
122 url = self._gnocchi_url + ('/v2/metric?limit=400')
124 url = self._gnocchi_url \
125 + ('/v3/metric/%s?q.field=metric&limit=400' % criteria)
126 headers = {'X-Auth-Token': self._auth_token}
127 resp = requests.get(url, headers=headers)
129 resp.raise_for_status()
130 self._meter_list = resp.json()
131 except (KeyError, ValueError, requests.exceptions.HTTPError) as err:
132 raise InvalidResponse(err, resp)
135 class AodhClient(object):
136 # Gnocchi Client to authenticate and request meters
138 self._auth_token = None
139 self._aodh_url = None
140 self._meter_list = None
142 def auth_token(self):
145 return self._auth_token
147 def get_aodh_url(self):
149 return self._gnocchi_url
151 def get_aodh_metrics(self, criteria=None):
152 # Subject to change if metric gathering is different for gnocchi
153 self._request_meters(criteria)
154 return self._meter_list
156 def _auth_server(self):
157 # Request token in authentication server
158 logger.debug('Connecting to the AODH auth server {}'.format(
159 os.environ['OS_AUTH_URL']))
160 keystone = client.Client(username=os.environ['OS_USERNAME'],
161 password=os.environ['OS_PASSWORD'],
162 tenant_name=os.environ['OS_USERNAME'],
163 auth_url=os.environ['OS_AUTH_URL'])
164 self._auth_token = keystone.auth_token
165 for service in keystone.service_catalog.get_data():
166 if service['name'] == AODH_NAME:
167 for service_type in service['endpoints']:
168 if service_type['interface'] == 'internal':
169 self._gnocchi_url = service_type['url']
171 if self._aodh_url is None:
172 logger.warning('Aodh is not registered in service catalog')
175 class CSVClient(object):
176 """Client to request CSV meters"""
177 def __init__(self, conf):
180 conf -- ConfigServer instance
185 self, compute_node, plugin_subdirectories, meter_categories):
189 compute_node -- compute node instance
190 plugin_subdirectories -- list of subdirectories of plug-in
191 meter_categories -- categories which will be tested
193 Return list of metrics.
195 compute_name = compute_node.get_name()
196 nodes = get_apex_nodes()
198 if compute_name == node.get_dict()['name']:
201 hostname = node.run_cmd('hostname -A')
202 hostname = hostname.split()[0]
204 for plugin_subdir in plugin_subdirectories:
205 for meter_category in meter_categories:
206 stdout1 = node.run_cmd(
207 "tail -2 /var/lib/collectd/csv/"
208 + "{0}/{1}/{2}-{3}".format(
209 hostname, plugin_subdir,
210 meter_category, date))
211 stdout2 = node.run_cmd(
212 "tail -1 /var/lib/collectd/csv/"
213 + "{0}/{1}/{2}-{3}".format(
214 hostname, plugin_subdir,
215 meter_category, date))
216 # Storing last two values
221 'Getting last two CSV entries of meter category'
222 + ' {0} in {1} subdir failed'.format(
223 meter_category, plugin_subdir))
224 elif values2 is None:
226 'Getting last CSV entries of meter category'
227 + ' {0} in {1} subdir failed'.format(
228 meter_category, plugin_subdir))
230 values = values.split(',')
231 old_value = float(values[0])
232 values2 = values2.split(',')
233 new_value = float(values2[0])
235 plugin_subdir, meter_category, old_value,
def get_csv_categories_for_ipmi(conf, compute_node):
    """List today's IPMI CSV meter categories on a compute node.

    Keyword arguments:
    conf -- ConfigServer instance
    compute_node -- compute node instance

    Return list of categories (CSV file names with the trailing
    '-YYYY-MM-DD' date suffix stripped).
    """
    stdout = conf.execute_command(
        "date '+%Y-%m-%d'", compute_node.get_ip())
    date = stdout[0].strip()
    categories = conf.execute_command(
        "ls /var/lib/collectd/csv/{0}.jf.intel.com/ipmi | grep {1}".format(
            compute_node.get_name(), date), compute_node.get_ip())
    # Derive the suffix length ('-' + date) from the actual date string
    # instead of the previous hard-coded 11, so the slice stays correct
    # even if the date format ever changes.
    suffix_len = len(date) + 1
    return [category.strip()[:-suffix_len] for category in categories]
257 def _process_result(compute_node, out_plugin, test, result, results_list):
258 """Print test result and append it to results list.
261 test -- testcase name
262 result -- boolean test result
263 results_list -- results list
267 'Test case {0} PASSED with {1}.'.format(
271 'Test case {0} FAILED with {1}.'.format(
273 results_list.append((compute_node, out_plugin, test, result))
276 def _print_label(label):
277 """Print label on the screen
280 label -- label string
282 label = label.strip()
285 label = ' ' + label + ' '
286 length_label = len(label)
287 length1 = (length - length_label) / 2
288 length2 = length - length_label - length1
289 length1 = max(3, length1)
290 length2 = max(3, length2)
291 logger.info(('=' * length1) + label + ('=' * length2))
294 def _print_plugin_label(plugin, node_name):
295 """Print plug-in label.
298 plugin -- plug-in name
302 'Node {0}: Plug-in {1} Test case execution'.format(node_name, plugin))
305 def _print_final_result_of_plugin(
306 plugin, compute_ids, results, out_plugins, out_plugin):
307 """Print final results of plug-in.
310 plugin -- plug-in name
311 compute_ids -- list of compute node IDs
312 results -- results list
313 out_plugins -- list of out plug-ins
314 out_plugin -- used out plug-in
317 for id in compute_ids:
318 if out_plugin == 'Gnocchi':
319 if (id, out_plugin, plugin, True) in results:
320 print_line += ' PASS |'
321 elif (id, out_plugin, plugin, False) in results:
322 print_line += ' FAIL |'
324 print_line += ' NOT EX |'
325 elif out_plugin == 'AODH':
326 if (id, out_plugin, plugin, True) in results:
327 print_line += ' PASS |'
328 elif (id, out_plugin, plugin, False) in results:
329 print_line += ' FAIL |'
331 print_line += ' FAIL |'
332 elif out_plugin == 'SNMP':
333 if (id, out_plugin, plugin, True) in results:
334 print_line += ' PASS |'
335 elif (id, out_plugin, plugin, False) in results:
336 print_line += ' FAIL |'
338 print_line += ' FAIL |'
339 elif out_plugin == 'CSV':
340 if (id, out_plugin, plugin, True) in results:
341 print_line += ' PASS |'
342 elif (id, out_plugin, plugin, False) in results:
343 print_line += ' FAIL |'
345 print_line += ' NOT EX |'
347 print_line += ' SKIP |'
351 def print_overall_summary(
352 compute_ids, tested_plugins, aodh_plugins, results, out_plugins):
353 """Print overall summary table.
356 compute_ids -- list of compute IDs
357 tested_plugins -- list of plug-ins
358 results -- results list
359 out_plugins -- list of used out plug-ins
361 compute_node_names = ['Node-{}'.format(i) for i in range(
363 all_computes_in_line = ''
364 for compute in compute_node_names:
365 all_computes_in_line += '| ' + compute + (' ' * (7 - len(compute)))
366 line_of_nodes = '| Test ' + all_computes_in_line + '|'
367 logger.info('=' * 70)
368 logger.info('+' + ('-' * ((9 * len(compute_node_names))+16)) + '+')
370 '|' + ' ' * ((9*len(compute_node_names))/2)
373 9*len(compute_node_names) - (9*len(compute_node_names))/2)
376 '+' + ('-' * 16) + '+' + (('-' * 8) + '+') * len(compute_node_names))
377 logger.info(line_of_nodes)
379 '+' + ('-' * 16) + '+' + (('-' * 8) + '+') * len(compute_node_names))
380 out_plugins_print = []
381 out_plugins_print1 = []
382 for key in out_plugins.keys():
383 if 'Gnocchi' in out_plugins[key]:
384 out_plugins_print1.append('Gnocchi')
385 if 'AODH' in out_plugins[key]:
386 out_plugins_print1.append('AODH')
387 if 'SNMP' in out_plugins[key]:
388 out_plugins_print1.append('SNMP')
389 if 'CSV' in out_plugins[key]:
390 out_plugins_print1.append('CSV')
391 for i in out_plugins_print1:
392 if i not in out_plugins_print:
393 out_plugins_print.append(i)
394 for out_plugin in out_plugins_print:
395 output_plugins_line = ''
396 for id in compute_ids:
397 out_plugin_result = '----'
398 if out_plugin == 'Gnocchi':
399 out_plugin_result = \
401 elif out_plugin == 'AODH':
402 out_plugin_result = \
404 elif out_plugin == 'SNMP':
405 out_plugin_result = \
407 elif out_plugin == 'CSV':
408 out_plugin_result = \
410 plugin for comp_id, out_pl, plugin, res in results
411 if comp_id == id and res] else 'FAIL'
413 out_plugin_result = \
415 output_plugins_line += '| ' + out_plugin_result + ' '
417 '| OUT:{}'.format(out_plugin) + (' ' * (11 - len(out_plugin)))
418 + output_plugins_line + '|')
420 if out_plugin == 'AODH':
421 for plugin in sorted(aodh_plugins.values()):
422 line_plugin = _print_final_result_of_plugin(
423 plugin, compute_ids, results, out_plugins, out_plugin)
425 '| IN:{}'.format(plugin) + (' ' * (11-len(plugin)))
428 for plugin in sorted(tested_plugins.values()):
429 line_plugin = _print_final_result_of_plugin(
430 plugin, compute_ids, results, out_plugins, out_plugin)
432 '| IN:{}'.format(plugin) + (' ' * (11-len(plugin)))
435 '+' + ('-' * 16) + '+'
436 + (('-' * 8) + '+') * len(compute_node_names))
437 logger.info('=' * 70)
441 test_labels, name, out_plugin, controllers, compute_node,
442 conf, results, error_plugins, out_plugins):
443 """Execute the testcase.
446 test_labels -- dictionary of plug-in IDs and their display names
447 name -- plug-in ID, key of test_labels dictionary
448 ceilometer_running -- boolean indicating whether Ceilometer is running
449 compute_node -- compute node ID
450 conf -- ConfigServer instance
451 results -- results list
452 error_plugins -- list of tuples with plug-in errors
453 (plugin, error_description, is_critical):
454 plugin -- plug-in ID, key of test_labels dictionary
455 error_decription -- description of the error
456 is_critical -- boolean value indicating whether error is critical
458 ovs_interfaces = conf.get_ovs_interfaces(compute_node)
459 ovs_configured_interfaces = conf.get_plugin_config_values(
460 compute_node, 'ovs_events', 'Interfaces')
461 ovs_configured_bridges = conf.get_plugin_config_values(
462 compute_node, 'ovs_stats', 'Bridges')
463 ovs_existing_configured_int = [
464 interface for interface in ovs_interfaces
465 if interface in ovs_configured_interfaces]
466 ovs_existing_configured_bridges = [
467 bridge for bridge in ovs_interfaces
468 if bridge in ovs_configured_bridges]
469 plugin_prerequisites = {
471 conf.is_mcelog_installed(compute_node, 'mcelog'),
472 'mcelog must be installed.')],
474 len(ovs_existing_configured_int) > 0 or len(ovs_interfaces) > 0,
475 'Interfaces must be configured.')],
477 len(ovs_existing_configured_bridges) > 0,
478 'Bridges must be configured.')]}
479 gnocchi_criteria_lists = {
480 'hugepages': 'hugepages',
483 'ovs_events': 'interface-ovs-system',
484 'ovs_stats': 'ovs_stats-br0.br0'}
485 aodh_criteria_lists = {
487 'ovs_events': 'ovs_events'}
489 'intel_rdt': '/usr/share/snmp/mibs/Intel-Rdt.txt',
490 'hugepages': '/usr/share/snmp/mibs/Intel-Hugepages.txt',
491 'mcelog': '/usr/share/snmp/mibs/Intel-Mcelog.txt'}
493 'intel_rdt': 'INTEL-RDT-MIB::rdtLlc.1',
494 'hugepages': 'INTEL-HUGEPAGES-MIB::hugepagesPageFree',
495 'mcelog': 'INTEL-MCELOG-MIB::memoryCorrectedErrors.1'}
496 nr_hugepages = int(time.time()) % 10000
499 'hugepages': 'echo {} > /sys/kernel/'.format(nr_hugepages)
500 + 'mm/hugepages/hugepages-2048kB/nr_hugepages',
501 'mcelog': '/root/mce-inject_df < /root/corrected'}
506 'hugepages-mm-2048Kb', 'hugepages-node0-2048Kb',
507 'hugepages-node1-2048Kb'],
510 'mcelog-SOCKET_0_CHANNEL_0_DIMM_any',
511 'mcelog-SOCKET_0_CHANNEL_any_DIMM_any'],
513 'ovs_stats-br0.br0'],
516 # csv_meter_categories_ipmi = get_csv_categories_for_ipmi(conf,
518 csv_meter_categories = {
521 'hugepages': ['vmpage_number-free', 'vmpage_number-used'],
522 # 'ipmi': csv_meter_categories_ipmi,
524 'errors-corrected_memory_errors',
525 'errors-uncorrected_memory_errors'],
527 'if_dropped', 'if_errors', 'if_packets'],
528 'ovs_events': ['gauge-link_status']}
531 test_labels[name] if name in test_labels else name,
532 compute_node.get_name())
533 plugin_critical_errors = [
534 error for plugin, error, critical in error_plugins
535 if plugin == name and critical]
536 if plugin_critical_errors:
537 logger.error('Following critical errors occurred:'.format(name))
538 for error in plugin_critical_errors:
539 logger.error(' * ' + error)
541 compute_node.get_id(), out_plugin, test_labels[name], False,
545 error for plugin, error, critical in error_plugins
546 if plugin == name and not critical]
548 logger.warning('Following non-critical errors occured:')
549 for error in plugin_errors:
550 logger.warning(' * ' + error)
551 failed_prerequisites = []
552 if name in plugin_prerequisites:
553 failed_prerequisites = [
554 prerequisite_name for prerequisite_passed,
555 prerequisite_name in plugin_prerequisites[name]
556 if not prerequisite_passed]
557 if failed_prerequisites:
559 '{} test will not be executed, '.format(name)
560 + 'following prerequisites failed:')
561 for prerequisite in failed_prerequisites:
562 logger.error(' * {}'.format(prerequisite))
564 plugin_interval = conf.get_plugin_interval(compute_node, name)
565 if out_plugin == 'Gnocchi':
566 res = conf.test_plugins_with_gnocchi(
567 compute_node.get_name(), plugin_interval,
568 logger, criteria_list=gnocchi_criteria_lists[name])
569 if out_plugin == 'AODH':
570 res = conf.test_plugins_with_aodh(
571 compute_node.get_name(), plugin_interval,
572 logger, criteria_list=aodh_criteria_lists[name])
573 if out_plugin == 'SNMP':
575 name in snmp_mib_files and name in snmp_mib_strings \
576 and conf.test_plugins_with_snmp(
577 compute_node.get_name(), plugin_interval, logger, name,
578 snmp_mib_files[name], snmp_mib_strings[name],
579 snmp_in_commands[name])
580 if out_plugin == 'CSV':
581 res = tests.test_csv_handles_plugin_data(
582 compute_node, conf.get_plugin_interval(compute_node, name),
583 name, csv_subdirs[name], csv_meter_categories[name],
584 logger, CSVClient(conf))
586 if res and plugin_errors:
588 'Test works, but will be reported as failure,'
589 + 'because of non-critical errors.')
592 compute_node.get_id(), out_plugin, test_labels[name],
596 def get_results_for_ovs_events(
597 plugin_labels, plugin_name, gnocchi_running,
598 compute_node, conf, results, error_plugins):
599 """ Testing OVS Events with python plugin
601 plugin_label = 'OVS events'
602 res = conf.enable_ovs_events(
603 compute_node, plugin_label, error_plugins, create_backup=False)
605 compute_node.get_id(), plugin_label, res, results)
606 logger.info("Results for OVS Events = {}" .format(results))
609 def create_ovs_bridge():
610 """Create OVS brides on compute nodes"""
611 handler = factory.Factory.get_handler('apex',
615 nodes = handler.get_nodes()
616 logger.info("Creating OVS bridges on computes nodes")
618 if node.is_compute():
619 node.run_cmd('sudo ovs-vsctl add-br br0')
620 node.run_cmd('sudo ovs-vsctl set-manager ptcp:6640')
621 logger.info('OVS Bridges created on compute nodes')
624 def mcelog_install():
625 """Install mcelog on compute nodes."""
626 _print_label('Enabling mcelog and OVS bridges on compute nodes')
627 handler = factory.Factory.get_handler('apex',
631 nodes = handler.get_nodes()
632 mce_bin = os.path.dirname(os.path.realpath(__file__)) + '/mce-inject_ea'
634 if node.is_compute():
635 centos_release = node.run_cmd('uname -r')
636 if '3.10.0-514.26.2.el7.x86_64' not in centos_release:
638 'Mcelog will not be enabled '
639 + 'on node-{0}, '.format(node.get_dict()['name'])
640 + 'unsupported CentOS release found ({1}).'.format(
644 'Checking if mcelog is enabled'
645 + ' on node-{}...'.format(node.get_dict()['name']))
646 res = node.run_cmd('ls')
647 if 'mce-inject_ea' and 'corrected' in res:
649 'Mcelog seems to be already installed '
650 + 'on node-{}.'.format(node.get_dict()['name']))
651 node.run_cmd('sudo modprobe mce-inject')
652 node.run_cmd('sudo ./mce-inject_ea < corrected')
655 'Mcelog will be enabled on node-{}...'.format(
656 node.get_dict()['id']))
657 node.put_file(mce_bin, 'mce-inject_ea')
658 node.run_cmd('chmod a+x mce-inject_ea')
659 node.run_cmd('echo "CPU 0 BANK 0" > corrected')
661 'echo "STATUS 0xcc00008000010090" >>'
664 'echo "ADDR 0x0010FFFFFFF" >> corrected')
665 node.run_cmd('sudo modprobe mce-inject')
666 node.run_cmd('sudo ./mce-inject_ea < corrected')
667 logger.info('Mcelog is installed on all compute nodes')
671 """Uninstall mcelog from compute nodes."""
672 handler = factory.Factory.get_handler(
673 'apex', APEX_IP, APEX_USER, APEX_PKEY)
674 nodes = handler.get_nodes()
676 if node.is_compute():
677 output = node.run_cmd('ls')
678 if 'mce-inject_ea' in output:
679 node.run_cmd('rm mce-inject_ea')
680 if 'corrected' in output:
681 node.run_cmd('rm corrected')
682 node.run_cmd('sudo systemctl restart mcelog')
683 logger.info('Mcelog is deleted from all compute nodes')
687 if not os.path.isdir(ID_RSA_DST_DIR):
688 os.makedirs(ID_RSA_DST_DIR)
689 if not os.path.isfile(ID_RSA_DST):
691 "RSA key file {} doesn't exist".format(ID_RSA_DST)
692 + ", it will be downloaded from installer node.")
693 handler = factory.Factory.get_handler(
694 'apex', APEX_IP, APEX_USER, APEX_PKEY)
695 apex = handler.get_installer_node()
696 apex.get_file(ID_RSA_SRC, ID_RSA_DST)
698 logger.info("RSA key file {} exists.".format(ID_RSA_DST))
702 """Check whether there is global logger available and if not, define one."""
703 if 'logger' not in globals():
705 logger = logger.Logger("barometercollectd").getLogger()
708 def main(bt_logger=None):
709 """Check each compute node sends gnocchi metrics.
712 bt_logger -- logger instance
714 logging.getLogger("paramiko").setLevel(logging.WARNING)
715 logging.getLogger("stevedore").setLevel(logging.WARNING)
716 logging.getLogger("opnfv.deployment.manager").setLevel(logging.WARNING)
717 if bt_logger is None:
722 _print_label("Starting barometer tests suite")
724 conf = config_server.ConfigServer(APEX_IP, APEX_USER, logger)
725 controllers = conf.get_controllers()
726 if len(controllers) == 0:
727 logger.error('No controller nodes found!')
729 computes = conf.get_computes()
730 if len(computes) == 0:
731 logger.error('No compute nodes found!')
735 'Display of Control and Compute nodes available in the set up')
736 logger.info('controllers: {}'.format([('{0}: {1}'.format(
737 node.get_name(), node.get_ip())) for node in controllers]))
738 logger.info('computes: {}'.format([('{0}: {1}'.format(
739 node.get_name(), node.get_ip())) for node in computes]))
743 gnocchi_running_on_con = False
744 aodh_running_on_con = False
746 _print_label('Testing Gnocchi, AODH and SNMP on nodes')
748 for controller in controllers:
750 gnocchi_running_on_con or conf.is_gnocchi_running(controller))
752 aodh_running_on_con or conf.is_aodh_running(controller))
755 compute_node_names = []
758 'intel_rdt': 'Intel RDT',
759 'hugepages': 'Hugepages',
762 'ovs_stats': 'OVS stats',
763 'ovs_events': 'OVS events'}
764 aodh_plugin_labels = {
766 'ovs_events': 'OVS events'}
768 for compute_node in computes:
769 node_id = compute_node.get_id()
770 node_name = compute_node.get_name()
771 out_plugins[node_id] = []
772 compute_ids.append(node_id)
773 compute_node_names.append(node_name)
774 plugins_to_enable = []
777 gnocchi_running and conf.check_gnocchi_plugin_included(
780 aodh_running and conf.check_aodh_plugin_included(compute_node))
781 logger.info("SNMP enabled on {}" .format(node_name))
783 out_plugins[node_id].append("Gnocchi")
785 out_plugins[node_id].append("AODH")
787 out_plugins[node_id].append("SNMP")
789 if 'Gnocchi' in out_plugins[node_id]:
790 logger.info("CSV will be enabled for verification")
791 plugins_to_enable.append('csv')
792 out_plugins[node_id].append("CSV")
793 if plugins_to_enable:
795 'NODE {}: Enabling Test Plug-in '.format(node_name)
796 + 'and Test case execution')
797 if plugins_to_enable and not conf.enable_plugins(
798 compute_node, plugins_to_enable, error_plugins,
799 create_backup=False):
801 'Failed to test plugins on node {}.'.format(node_id))
803 'Testcases on node {} will not be executed'.format(
806 if plugins_to_enable:
807 collectd_restarted, collectd_warnings = \
808 conf.restart_collectd(compute_node)
811 'Sleeping for {} seconds'.format(sleep_time)
812 + ' after collectd restart...')
813 time.sleep(sleep_time)
814 if plugins_to_enable and not collectd_restarted:
815 for warning in collectd_warnings:
816 logger.warning(warning)
818 'Restart of collectd on node {} failed'.format(
821 'Testcases on node {}'.format(node_id)
822 + ' will not be executed.')
824 if collectd_warnings:
825 for warning in collectd_warnings:
826 logger.warning(warning)
828 for i in out_plugins[node_id]:
830 for plugin_name in sorted(aodh_plugin_labels.keys()):
832 aodh_plugin_labels, plugin_name, i,
833 controllers, compute_node, conf, results,
834 error_plugins, out_plugins[node_id])
836 for plugin_name in sorted(plugin_labels.keys()):
838 plugin_labels, plugin_name, i,
839 controllers, compute_node, conf, results,
840 error_plugins, out_plugins[node_id])
843 print_overall_summary(
844 compute_ids, plugin_labels, aodh_plugin_labels, results, out_plugins)
846 if ((len([res for res in results if not res[2]]) > 0)
847 or (len(results) < len(computes) * len(plugin_labels))):
848 logger.error('Some tests have failed or have not been executed')
853 if __name__ == '__main__':