1 # -*- coding: utf-8 -*-
5 # Licensed under the Apache License, Version 2.0 (the "License"); you may
6 # not use this file except in compliance with the License. You may obtain
7 # a copy of the License at
9 # http://www.apache.org/licenses/LICENSE-2.0
11 # Unless required by applicable law or agreed to in writing, software
12 # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
13 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
14 # License for the specific language governing permissions and limitations
16 # Patch on October 10 2017
18 """Executing test of plugins"""
21 from keystoneclient.v3 import client
29 from distutils import version
30 from opnfv.deployment import factory
# Name under which the Gnocchi service is registered in the Keystone catalog.
GNOCCHI_NAME = 'gnocchi'
# SSH private key: source path on the installer node and local destination.
ID_RSA_SRC = '/root/.ssh/id_rsa'
ID_RSA_DST_DIR = '/root/.ssh'
ID_RSA_DST = ID_RSA_DST_DIR + '/id_rsa'
# Apex installer address taken from the environment.
# NOTE(review): os.getenv() returns None when INSTALLER_IP is unset, so
# .rstrip() would raise AttributeError at import time — confirm the variable
# is always exported before this module is imported.
APEX_IP = os.getenv("INSTALLER_IP").rstrip('\n')
APEX_USER_STACK = 'stack'
APEX_PKEY = '/root/.ssh/id_rsa'
class KeystoneException(Exception):
    """Keystone exception class"""
    def __init__(self, message, exc=None, response=None):
        """Store the failure details.

        Keyword arguments:
        message -- error message
        exc -- underlying exception; its text is appended to the message
        response -- HTTP response object associated with the failure
        """
        # NOTE(review): in this view the guard line that conditionally
        # appends the reason (presumably 'if exc:') is missing.
        message += "\nReason: %s" % exc
        super(KeystoneException, self).__init__(message)
        self.response = response
class InvalidResponse(KeystoneException):
    """Invalid Keystone exception class"""
    def __init__(self, exc, response):
        """Wrap an invalid/unparsable HTTP response.

        Keyword arguments:
        exc -- exception raised while processing the response
        response -- the offending HTTP response object
        """
        super(InvalidResponse, self).__init__(
            "Invalid response", exc, response)
# NOTE(review): the enclosing 'def' header and the remaining arguments of the
# get_handler(...) call are missing from this view — presumably a helper that
# builds an Apex deployment handler and returns handler.get_nodes().
handler = factory.Factory.get_handler('apex',
nodes = handler.get_nodes()
class GnocchiClient(object):
    # Gnocchi Client to authenticate and request meters
    # NOTE(review): several source lines are missing from this view (the
    # '__init__' and 'auth_token' method headers, the 'if criteria'/'else'
    # around the two url assignments, and the 'try:' opener); statements
    # below are transcribed as seen.
        self._auth_token = None
        self._gnocchi_url = None
        self._meter_list = None

        return self._auth_token

    def get_gnocchi_url(self):
        """Return the Gnocchi endpoint URL found during authentication."""
        return self._gnocchi_url

    def get_gnocchi_metrics(self, criteria=None):
        """Request Gnocchi meters and return the collected meter list."""
        # Subject to change if metric gathering is different for gnocchi
        self._request_meters(criteria)
        return self._meter_list

    def _auth_server(self):
        # Request token in authentication server
        logger.debug('Connecting to the auth server {}'.format(
            os.environ['OS_AUTH_URL']))
        # NOTE(review): tenant_name is filled from OS_USERNAME — confirm this
        # is intentional (OS_TENANT_NAME would be the usual source).
        keystone = client.Client(username=os.environ['OS_USERNAME'],
                                 password=os.environ['OS_PASSWORD'],
                                 tenant_name=os.environ['OS_USERNAME'],
                                 auth_url=os.environ['OS_AUTH_URL'])
        self._auth_token = keystone.auth_token
        # Locate the 'internal' Gnocchi endpoint in the service catalog.
        for service in keystone.service_catalog.get_data():
            if service['name'] == GNOCCHI_NAME:
                for service_type in service['endpoints']:
                    if service_type['interface'] == 'internal':
                        self._gnocchi_url = service_type['url']
        if self._gnocchi_url is None:
            logger.warning('Gnocchi is not registered in service catalog')

    def _request_meters(self, criteria):
        """Request meter list values from ceilometer

        Keyword arguments:
        criteria -- criteria for ceilometer meter list
        """
        # NOTE(review): the branch selecting between the v2 and v3 URL forms
        # is missing from this view.
        url = self._gnocchi_url + ('/v2/metric?limit=400')
        url = self._gnocchi_url \
            + ('/v3/metric/%s?q.field=metric&limit=400' % criteria)
        headers = {'X-Auth-Token': self._auth_token}
        resp = requests.get(url, headers=headers)
        resp.raise_for_status()
        self._meter_list = resp.json()
        except (KeyError, ValueError, requests.exceptions.HTTPError) as err:
            raise InvalidResponse(err, resp)
class AodhClient(object):
    """Aodh client: authenticates against Keystone and locates the Aodh
    internal endpoint in the service catalog."""

    def __init__(self):
        self._auth_token = None
        self._aodh_url = None
        self._meter_list = None

    def auth_token(self):
        """Authenticate against Keystone and return the auth token."""
        self._auth_server()
        return self._auth_token

    def get_aodh_url(self):
        """Return the Aodh internal endpoint URL (None until authenticated)."""
        # Fixed: previously returned self._gnocchi_url — a copy-paste
        # leftover from GnocchiClient that exposed the wrong attribute.
        return self._aodh_url

    def get_aodh_metrics(self, criteria=None):
        """Request Aodh meters and return the resulting meter list.

        Keyword arguments:
        criteria -- optional filter criteria for the meter request
        """
        # Subject to change if metric gathering is different for gnocchi
        # NOTE(review): _request_meters is not defined on this class in the
        # visible source — confirm it exists before calling this method.
        self._request_meters(criteria)
        return self._meter_list

    def _auth_server(self):
        """Request a Keystone token and look up the Aodh internal endpoint."""
        logger.debug('Connecting to the AODH auth server {}'.format(
            os.environ['OS_AUTH_URL']))
        keystone = client.Client(username=os.environ['OS_USERNAME'],
                                 password=os.environ['OS_PASSWORD'],
                                 tenant_name=os.environ['OS_USERNAME'],
                                 auth_url=os.environ['OS_AUTH_URL'])
        self._auth_token = keystone.auth_token
        for service in keystone.service_catalog.get_data():
            if service['name'] == AODH_NAME:
                for service_type in service['endpoints']:
                    if service_type['interface'] == 'internal':
                        # Fixed: the URL was assigned to self._gnocchi_url,
                        # which made the warning below fire even when the
                        # Aodh endpoint had been found.
                        self._aodh_url = service_type['url']
        if self._aodh_url is None:
            logger.warning('Aodh is not registered in service catalog')
class CSVClient(object):
    """Client to request CSV meters"""
    # NOTE(review): many source lines are missing from this view (the
    # 'def get_csv_metrics(' header, the 'for node in nodes:' opener, the
    # computation defining 'date', the stdout->values extraction, the
    # 'logger.error(' openers and the return statement); statements below
    # are transcribed as seen.
    def __init__(self, conf):
        """
        Keyword arguments:
        conf -- ConfigServer instance
        """
            self, compute_node, plugin_subdirectories, meter_categories):
        """Get last two CSV entries per meter category.

        Keyword arguments:
        compute_node -- compute node instance
        plugin_subdirectories -- list of subdirectories of plug-in
        meter_categories -- categories which will be tested

        Return list of metrics.
        """
        compute_name = compute_node.get_name()
        nodes = get_apex_nodes()
            if compute_name == node.get_dict()['name']:
                # Fully-qualified host name selects the collectd CSV dir.
                hostname = node.run_cmd('hostname -A')
                hostname = hostname.split()[0]
                for plugin_subdir in plugin_subdirectories:
                    for meter_category in meter_categories:
                        stdout1 = node.run_cmd(
                            "tail -2 /var/lib/collectd/csv/"
                            + "{0}/{1}/{2}-{3}".format(
                                hostname, plugin_subdir,
                                meter_category, date))
                        stdout2 = node.run_cmd(
                            "tail -1 /var/lib/collectd/csv/"
                            + "{0}/{1}/{2}-{3}".format(
                                hostname, plugin_subdir,
                                meter_category, date))
                        # Storing last two values
                            'Getting last two CSV entries of meter category'
                            + ' {0} in {1} subdir failed'.format(
                                meter_category, plugin_subdir))
                        elif values2 is None:
                            'Getting last CSV entries of meter category'
                            + ' {0} in {1} subdir failed'.format(
                                meter_category, plugin_subdir))
                        # First CSV column holds the timestamp/value pair's
                        # leading field; parsed as float below.
                        values = values.split(',')
                        old_value = float(values[0])
                        values2 = values2.split(',')
                        new_value = float(values2[0])
                            plugin_subdir, meter_category, old_value,
def get_csv_categories_for_ipmi(conf, compute_node):
    """List today's IPMI CSV meter categories present on a compute node.

    Keyword arguments:
    conf -- ConfigServer instance used to run remote commands
    compute_node -- compute node instance

    Return list of categories (file names with the trailing
    '-YYYY-MM-DD' suffix stripped).
    """
    node_ip = compute_node.get_ip()
    date_out = conf.execute_command(
        "date '+%Y-%m-%d'", node_ip)
    today = date_out[0].strip()
    listing = conf.execute_command(
        "ls /var/lib/collectd/csv/{0}.jf.intel.com/ipmi | grep {1}".format(
            compute_node.get_name(), today), node_ip)
    categories = []
    for entry in listing:
        # Drop the trailing '-YYYY-MM-DD' (11 characters) from each name.
        categories.append(entry.strip()[:-11])
    return categories
def _process_result(compute_node, out_plugin, test, result, results_list, node):
    """Print test result and append it to results list.

    Keyword arguments:
    test -- testcase name
    result -- boolean test result
    results_list -- results list
    """
    # NOTE(review): the 'if result:' / 'else:' lines and the 'logger.info(' /
    # 'logger.error(' openers are missing from this view; the PASSED branch
    # logs at info level, the FAILED branch at error level.
        'Test case for {0} with {1} PASSED on {2}.'.format(
            node, out_plugin, test))
        'Test case for {0} with {1} FAILED on {2}.'.format(
            node, out_plugin, test))
    # Record the outcome regardless of pass/fail.
    results_list.append((compute_node, out_plugin, test, result))
def _print_label(label):
    """Print a label, centred inside a 70-character row of '=' signs.

    Keyword arguments:
    label -- label string (may be empty; then a full separator is printed)
    """
    label = label.strip()
    # Total width of the separator row; matches the '=' * 70 rows used by
    # the summary printer elsewhere in this module.
    length = 70
    if label != '':
        label = ' ' + label + ' '
    length_label = len(label)
    # '//' keeps the operands integral on both Python 2 and Python 3; the
    # original '/' yields a float under Python 3, which breaks '=' * n.
    length1 = (length - length_label) // 2
    length2 = length - length_label - length1
    # Always show at least a short run of '=' on each side.
    length1 = max(3, length1)
    length2 = max(3, length2)
    logger.info(('=' * length1) + label + ('=' * length2))
def _print_plugin_label(plugin, node_name):
    """Print plug-in label.

    Keyword arguments:
    plugin -- plug-in name
    node_name -- node name
    """
    # NOTE(review): the call wrapping the string below is missing from this
    # view (presumably '_print_label(').
        'Node {0}: Plug-in {1} Test case execution'.format(node_name, plugin))
def _print_final_result_of_plugin(
        plugin, compute_ids, results, out_plugins, out_plugin):
    """Print final results of plug-in.

    Keyword arguments:
    plugin -- plug-in name
    compute_ids -- list of compute node IDs
    results -- results list
    out_plugins -- list of out plug-ins
    out_plugin -- used out plug-in
    """
    # NOTE(review): the initialisation of 'print_line', the 'else:' line of
    # each branch and the final 'return print_line' are missing from this
    # view (the caller binds the return value). The four visible branches
    # are identical — a candidate for collapsing into one.
    for id in compute_ids:
        if out_plugin == 'Gnocchi':
            if (id, out_plugin, plugin, True) in results:
                print_line += ' PASS |'
            elif (id, out_plugin, plugin, False) in results:
                print_line += ' FAIL |'
                print_line += ' SKIP |'
        elif out_plugin == 'AODH':
            if (id, out_plugin, plugin, True) in results:
                print_line += ' PASS |'
            elif (id, out_plugin, plugin, False) in results:
                print_line += ' FAIL |'
                print_line += ' SKIP |'
        elif out_plugin == 'SNMP':
            if (id, out_plugin, plugin, True) in results:
                print_line += ' PASS |'
            elif (id, out_plugin, plugin, False) in results:
                print_line += ' FAIL |'
                print_line += ' SKIP |'
        elif out_plugin == 'CSV':
            if (id, out_plugin, plugin, True) in results:
                print_line += ' PASS |'
            elif (id, out_plugin, plugin, False) in results:
                print_line += ' FAIL |'
                print_line += ' SKIP |'
            print_line += ' SKIP |'
def print_overall_summary(
        compute_ids, tested_plugins, aodh_plugins, results, out_plugins):
    """Print overall summary table.

    Keyword arguments:
    compute_ids -- list of compute IDs
    tested_plugins -- list of plug-ins
    aodh_plugins -- plug-ins tested via AODH
    results -- results list
    out_plugins -- list of used out plug-ins
    """
    # NOTE(review): several lines are missing from this view (the end of the
    # 'range(' call, multiple 'logger.info(' openers, the branch assigning
    # each out_plugin_result and the 'else:' lines); statements are
    # transcribed as seen.
    compute_node_names = ['Node-{}'.format(i) for i in range(
    all_computes_in_line = ''
    for compute in compute_node_names:
        # 7-character wide column per compute node.
        all_computes_in_line += '| ' + compute + (' ' * (7 - len(compute)))
    line_of_nodes = '| Test ' + all_computes_in_line + '|'
    logger.info('=' * 70)
    logger.info('+' + ('-' * ((9 * len(compute_node_names))+16)) + '+')
        # NOTE(review): '/' below is Python 2 integer division; under
        # Python 3 it yields a float and breaks the ' ' * n repetition.
        '|' + ' ' * ((9*len(compute_node_names))/2)
        9*len(compute_node_names) - (9*len(compute_node_names))/2)
        '+' + ('-' * 16) + '+' + (('-' * 8) + '+') * len(compute_node_names))
    logger.info(line_of_nodes)
        '+' + ('-' * 16) + '+' + (('-' * 8) + '+') * len(compute_node_names))
    out_plugins_print = []
    out_plugins_print1 = []
    # Collect every out plug-in seen on any node.
    for key in out_plugins.keys():
        if 'Gnocchi' in out_plugins[key]:
            out_plugins_print1.append('Gnocchi')
        if 'AODH' in out_plugins[key]:
            out_plugins_print1.append('AODH')
        if 'SNMP' in out_plugins[key]:
            out_plugins_print1.append('SNMP')
        if 'CSV' in out_plugins[key]:
            out_plugins_print1.append('CSV')
    # De-duplicate while preserving first-seen order.
    for i in out_plugins_print1:
        if i not in out_plugins_print:
            out_plugins_print.append(i)
    for out_plugin in out_plugins_print:
        output_plugins_line = ''
        for id in compute_ids:
            out_plugin_result = '----'
            if out_plugin == 'Gnocchi':
                out_plugin_result = \
            elif out_plugin == 'AODH':
                out_plugin_result = \
            elif out_plugin == 'SNMP':
                out_plugin_result = \
            elif out_plugin == 'CSV':
                out_plugin_result = \
                    plugin for comp_id, out_pl, plugin, res in results
                    if comp_id == id and res] else 'FAIL'
                out_plugin_result = \
            output_plugins_line += '| ' + out_plugin_result + ' '
        '| OUT:{}'.format(out_plugin) + (' ' * (11 - len(out_plugin)))
        + output_plugins_line + '|')
        if out_plugin == 'AODH':
            for plugin in sorted(aodh_plugins.values()):
                line_plugin = _print_final_result_of_plugin(
                    plugin, compute_ids, results, out_plugins, out_plugin)
                '| IN:{}'.format(plugin) + (' ' * (11-len(plugin)))
        for plugin in sorted(tested_plugins.values()):
            line_plugin = _print_final_result_of_plugin(
                plugin, compute_ids, results, out_plugins, out_plugin)
            '| IN:{}'.format(plugin) + (' ' * (11-len(plugin)))
        '+' + ('-' * 16) + '+'
        + (('-' * 8) + '+') * len(compute_node_names))
    logger.info('=' * 70)
# NOTE(review): the enclosing 'def _exec_testcase(' header is missing from
# this view, as are many dictionary keys, call openers ('logger.error(',
# '_process_result(') and 'else:' lines; statements below are transcribed
# as seen.
        test_labels, name, out_plugin, controllers, compute_node,
        conf, results, error_plugins, out_plugins):
    """Execute the testcase.

    Keyword arguments:
    test_labels -- dictionary of plug-in IDs and their display names
    name -- plug-in ID, key of test_labels dictionary
    ceilometer_running -- boolean indicating whether Ceilometer is running
    compute_node -- compute node ID
    conf -- ConfigServer instance
    results -- results list
    error_plugins -- list of tuples with plug-in errors
        (plugin, error_description, is_critical):
        plugin -- plug-in ID, key of test_labels dictionary
        error_description -- description of the error
        is_critical -- boolean value indicating whether error is critical
    """
    # OVS interface/bridge discovery vs. what the collectd plug-ins have
    # configured — used for the prerequisite checks below.
    ovs_interfaces = conf.get_ovs_interfaces(compute_node)
    ovs_configured_interfaces = conf.get_plugin_config_values(
        compute_node, 'ovs_events', 'Interfaces')
    ovs_configured_bridges = conf.get_plugin_config_values(
        compute_node, 'ovs_stats', 'Bridges')
    ovs_existing_configured_int = [
        interface for interface in ovs_interfaces
        if interface in ovs_configured_interfaces]
    ovs_existing_configured_bridges = [
        bridge for bridge in ovs_interfaces
        if bridge in ovs_configured_bridges]
    # Per-plugin (passed?, description) prerequisite tuples; the dictionary
    # keys are missing from this view.
    plugin_prerequisites = {
            conf.is_mcelog_installed(compute_node, 'mcelog'),
            'mcelog must be installed.')],
            len(ovs_existing_configured_int) > 0 or len(ovs_interfaces) > 0,
            'Interfaces must be configured.')],
            len(ovs_existing_configured_bridges) > 0,
            'Bridges must be configured.')]}
    gnocchi_criteria_lists = {
        'hugepages': 'hugepages',
        'ovs_events': 'interface-ovs-system',
        'ovs_stats': 'ovs_stats-br0.br0'}
    aodh_criteria_lists = {
        'ovs_events': 'ovs_events'}
        'intel_rdt': '/usr/share/snmp/mibs/Intel-Rdt.txt',
        'hugepages': '/usr/share/snmp/mibs/Intel-Hugepages.txt',
        'mcelog': '/usr/share/snmp/mibs/Intel-Mcelog.txt'}
        'intel_rdt': 'INTEL-RDT-MIB::rdtLlc.1',
        'hugepages': 'INTEL-HUGEPAGES-MIB::hugepagesPageFree',
        'mcelog': 'INTEL-MCELOG-MIB::memoryCorrectedErrors.1'}
    # Pseudo-random page count so repeated runs change the hugepages metric.
    nr_hugepages = int(time.time()) % 10000
        'hugepages': 'echo {} > /sys/kernel/'.format(nr_hugepages)
        + 'mm/hugepages/hugepages-2048kB/nr_hugepages',
        'mcelog': '/root/mce-inject_df < /root/corrected'}
        'hugepages-mm-2048Kb', 'hugepages-node0-2048Kb',],
        'mcelog-SOCKET_0_CHANNEL_0_DIMM_any',
        'mcelog-SOCKET_0_CHANNEL_any_DIMM_any'],
        'ovs_stats-br0.br0'],
    # csv_meter_categories_ipmi = get_csv_categories_for_ipmi(conf,
    csv_meter_categories = {
        'hugepages': ['vmpage_number-free', 'vmpage_number-used'],
        # 'ipmi': csv_meter_categories_ipmi,
        'errors-corrected_memory_errors',
        'errors-uncorrected_memory_errors'],
        'if_dropped', 'if_errors', 'if_packets'],
        'ovs_events': ['gauge-link_status']}
        test_labels[name] if name in test_labels else name,
        compute_node.get_name())
    # Critical errors abort the testcase with an immediate FAIL result.
    plugin_critical_errors = [
        error for plugin, error, critical in error_plugins
        if plugin == name and critical]
    if plugin_critical_errors:
        # NOTE(review): .format(name) has no '{}' placeholder to fill here.
        logger.error('Following critical errors occurred:'.format(name))
        for error in plugin_critical_errors:
            logger.error(' * ' + error)
            compute_node.get_id(), out_plugin, test_labels[name], False,
            results, compute_node.get_name())
        error for plugin, error, critical in error_plugins
        if plugin == name and not critical]
        logger.warning('Following non-critical errors occured:')
        for error in plugin_errors:
            logger.warning(' * ' + error)
    failed_prerequisites = []
    if name in plugin_prerequisites:
        failed_prerequisites = [
            prerequisite_name for prerequisite_passed,
            prerequisite_name in plugin_prerequisites[name]
            if not prerequisite_passed]
    if failed_prerequisites:
            '{} test will not be executed, '.format(name)
            + 'following prerequisites failed:')
        for prerequisite in failed_prerequisites:
            logger.error(' * {}'.format(prerequisite))
    elif "intel_rdt" == name and not conf.is_rdt_available(compute_node):
        # TODO: print log message
        logger.info("RDT is not available on virtual nodes, skipping test.")
        print("Results for {}, pre-processing".format(str(test_labels[name])))
            compute_node.get_id(), out_plugin, test_labels[name],
            res, results, compute_node.get_name())
        print("Results for {}, post-processing".format(str(test_labels[name])))
        plugin_interval = conf.get_plugin_interval(compute_node, name)
        # Dispatch to the out-plugin-specific verification helper.
        if out_plugin == 'Gnocchi':
            res = conf.test_plugins_with_gnocchi(
                compute_node.get_name(), plugin_interval,
                logger, criteria_list=gnocchi_criteria_lists[name])
        if out_plugin == 'AODH':
            res = conf.test_plugins_with_aodh(
                compute_node.get_name(), plugin_interval,
                logger, criteria_list=aodh_criteria_lists[name])
        if out_plugin == 'SNMP':
                name in snmp_mib_files and name in snmp_mib_strings \
                and conf.test_plugins_with_snmp(
                    compute_node.get_name(), plugin_interval, logger, name,
                    snmp_mib_files[name], snmp_mib_strings[name],
                    snmp_in_commands[name])
        if out_plugin == 'CSV':
            res = tests.test_csv_handles_plugin_data(
                compute_node, conf.get_plugin_interval(compute_node, name),
                name, csv_subdirs[name], csv_meter_categories[name],
                logger, CSVClient(conf))

        if res and plugin_errors:
                'Test works, but will be reported as failure,'
                + 'because of non-critical errors.')
        print("Results for {}, pre-processing".format(str(test_labels[name])))
            compute_node.get_id(), out_plugin, test_labels[name],
            res, results, compute_node.get_name())
        print("Results for {}, post-processing".format(str(test_labels[name])))
def get_results_for_ovs_events(
        plugin_labels, plugin_name, gnocchi_running,
        compute_node, conf, results, error_plugins):
    """ Testing OVS Events with python plugin
    """
    plugin_label = 'OVS events'
    res = conf.enable_ovs_events(
        compute_node, plugin_label, error_plugins, create_backup=False)
    # NOTE(review): the call wrapping the arguments below is missing from
    # this view (presumably '_process_result(').
        compute_node.get_id(), plugin_label, res, results)
    logger.info("Results for OVS Events = {}" .format(results))
def create_ovs_bridge():
    """Create OVS brides on compute nodes"""
    # NOTE(review): the remaining get_handler(...) arguments and the
    # 'for node in nodes:' opener are missing from this view.
    handler = factory.Factory.get_handler('apex',
    nodes = handler.get_nodes()
    logger.info("Creating OVS bridges on computes nodes")
        if node.is_compute():
            # br0 plus an OVSDB manager endpoint used by the ovs plug-ins.
            node.run_cmd('sudo ovs-vsctl add-br br0')
            node.run_cmd('sudo ovs-vsctl set-manager ptcp:6640')
    logger.info('OVS Bridges created on compute nodes')
def mcelog_install():
    """Install mcelog on compute nodes."""
    # NOTE(review): several lines are missing from this view (get_handler
    # arguments, 'for node in nodes:', 'logger.info(' / 'logger.warning('
    # openers, 'else:' branches and parts of the echo commands); statements
    # below are transcribed as seen.
    _print_label('Enabling mcelog and OVS bridges on compute nodes')
    handler = factory.Factory.get_handler('apex',
    nodes = handler.get_nodes()
    # Binary shipped next to this script; copied to nodes that need it.
    mce_bin = os.path.dirname(os.path.realpath(__file__)) + '/mce-inject_ea'
        if node.is_compute():
            centos_release = node.run_cmd('uname -r')
            if version.LooseVersion(centos_release) < version.LooseVersion('3.10.0-514.26.2.el7.x86_64'):
                    # NOTE(review): .format() binds only to the second
                    # literal, so the first '{}' is never substituted.
                    'Mcelog will NOT be enabled on node-{}.'
                    + ' Unsupported CentOS release found ({}).'.format(
                        node.get_dict()['name'],centos_release))
                    'Checking if mcelog is enabled'
                    + ' on node-{}...'.format(node.get_dict()['name']))
                res = node.run_cmd('ls')
                # NOTE(review): "'mce-inject_ea' and 'corrected' in res" only
                # tests the second membership — the first operand is a
                # non-empty (always-truthy) string literal; presumably
                # "'mce-inject_ea' in res and 'corrected' in res" was meant.
                if 'mce-inject_ea' and 'corrected' in res:
                        'Mcelog seems to be already installed '
                        + 'on node-{}.'.format(node.get_dict()['name']))
                    node.run_cmd('sudo modprobe mce-inject')
                    node.run_cmd('sudo ./mce-inject_ea < corrected')
                        'Mcelog will be enabled '
                        + 'on node-{}...'.format(node.get_dict()['name']))
                    node.put_file(mce_bin, 'mce-inject_ea')
                    node.run_cmd('chmod a+x mce-inject_ea')
                    # Build a synthetic corrected-memory-error record.
                    node.run_cmd('echo "CPU 0 BANK 0" > corrected')
                        'echo "STATUS 0xcc00008000010090" >>'
                        'echo "ADDR 0x0010FFFFFFF" >> corrected')
                    node.run_cmd('sudo modprobe mce-inject')
                    node.run_cmd('sudo ./mce-inject_ea < corrected')
                        'Mcelog was installed '
                        + 'on node-{}.'.format(node.get_dict()['name']))
    # NOTE(review): the enclosing 'def' header (presumably mcelog_delete)
    # and the 'for node in nodes:' opener are missing from this view.
    """Uninstall mcelog from compute nodes."""
    handler = factory.Factory.get_handler(
        'apex', APEX_IP, APEX_USER, APEX_PKEY)
    nodes = handler.get_nodes()
        if node.is_compute():
            output = node.run_cmd('ls')
            # Remove the injector binary and the synthetic error record.
            if 'mce-inject_ea' in output:
                node.run_cmd('rm mce-inject_ea')
            if 'corrected' in output:
                node.run_cmd('rm corrected')
            node.run_cmd('sudo systemctl restart mcelog')
    logger.info('Mcelog is deleted from all compute nodes')
    # NOTE(review): the enclosing 'def' header, a 'logger.info(' opener and
    # the 'else:' line are missing from this view. APEX_USER is referenced
    # but its definition is not visible here — presumably defined with the
    # other module constants.
    if not os.path.isdir(ID_RSA_DST_DIR):
        os.makedirs(ID_RSA_DST_DIR)
    if not os.path.isfile(ID_RSA_DST):
            "RSA key file {} doesn't exist".format(ID_RSA_DST)
            + ", it will be downloaded from installer node.")
        # Fetch the key from the Apex installer node.
        handler = factory.Factory.get_handler(
            'apex', APEX_IP, APEX_USER, APEX_PKEY)
        apex = handler.get_installer_node()
        apex.get_file(ID_RSA_SRC, ID_RSA_DST)
        logger.info("RSA key file {} exists.".format(ID_RSA_DST))
    # NOTE(review): the enclosing 'def' header and a probable 'global logger'
    # statement are missing from this view. On the right-hand side below,
    # 'logger' appears to name a logging helper module (not the global being
    # defined) — verify against the file's imports.
    """Check whether there is global logger available and if not, define one."""
    if 'logger' not in globals():
        logger = logger.Logger("barometercollectd").getLogger()
def main(bt_logger=None):
    """Check each compute node sends gnocchi metrics.

    Keyword arguments:
    bt_logger -- logger instance
    """
    # NOTE(review): numerous source lines are missing from this view (the
    # body of 'if bt_logger is None:', 'return 1' early exits, loop/call
    # openers, the initialisation of compute_ids/results/error_plugins/
    # out_plugins/sleep_time/res_overall and the 'plugin_labels = {' and
    # gnocchi_running/aodh_running assignments); statements below are
    # transcribed as seen.
    logging.getLogger("paramiko").setLevel(logging.WARNING)
    logging.getLogger("stevedore").setLevel(logging.WARNING)
    logging.getLogger("opnfv.deployment.manager").setLevel(logging.WARNING)
    if bt_logger is None:
    _print_label("Starting barometer tests suite")
    conf = config_server.ConfigServer(APEX_IP, APEX_USER, logger)
    controllers = conf.get_controllers()
    if len(controllers) == 0:
        logger.error('No controller nodes found!')
    computes = conf.get_computes()
    if len(computes) == 0:
        logger.error('No compute nodes found!')
        'Display of Control and Compute nodes available in the set up')
    logger.info('controllers: {}'.format([('{0}: {1}'.format(
        node.get_name(), node.get_ip())) for node in controllers]))
    logger.info('computes: {}'.format([('{0}: {1}'.format(
        node.get_name(), node.get_ip())) for node in computes]))

    gnocchi_running_on_con = False
    aodh_running_on_con = False
    # Disabling SNMP write plug-in
    _print_label('Testing Gnocchi and AODH plugins on nodes')

    # Gnocchi/Aodh count as running if any controller runs them.
    for controller in controllers:
            gnocchi_running_on_con or conf.is_gnocchi_running(controller))
            aodh_running_on_con or conf.is_aodh_running(controller))

    compute_node_names = []
        'intel_rdt': 'Intel RDT',
        'hugepages': 'Hugepages',
        'ovs_stats': 'OVS stats',
        'ovs_events': 'OVS events'}
    aodh_plugin_labels = {
        'ovs_events': 'OVS events'}
    for compute_node in computes:
        node_id = compute_node.get_id()
        node_name = compute_node.get_name()
        out_plugins[node_id] = []
        compute_ids.append(node_id)
        compute_node_names.append(node_name)
        plugins_to_enable = []
        # Out plug-ins usable on this node, given controller-side services
        # and the node's collectd configuration.
        gnocchi_running_com = (
            gnocchi_running and conf.check_gnocchi_plugin_included(
            aodh_running and conf.check_aodh_plugin_included(compute_node))
        # logger.info("SNMP enabled on {}" .format(node_name))
        if gnocchi_running_com:
            out_plugins[node_id].append("Gnocchi")
            out_plugins[node_id].append("AODH")
            out_plugins[node_id].append("SNMP")
        if 'Gnocchi' in out_plugins[node_id]:
            plugins_to_enable.append('csv')
            out_plugins[node_id].append("CSV")
        if plugins_to_enable:
                'NODE {}: Enabling Test Plug-in '.format(node_name)
                + 'and Test case execution')
        if plugins_to_enable and not conf.enable_plugins(
                compute_node, plugins_to_enable, error_plugins,
                create_backup=False):
                'Failed to test plugins on node {}.'.format(node_id))
                'Testcases on node {} will not be executed'.format(
        # Run the testcases once per usable out plug-in.
        for i in out_plugins[node_id]:
            for plugin_name in sorted(aodh_plugin_labels.keys()):
                    aodh_plugin_labels, plugin_name, i,
                    controllers, compute_node, conf, results,
                    error_plugins, out_plugins[node_id])
            _print_label("Node {}: Executing CSV Testcases".format(
            logger.info("Restarting collectd for CSV tests")
            collectd_restarted, collectd_warnings = \
                conf.restart_collectd(compute_node)
                'Sleeping for {} seconds'.format(sleep_time)
                + ' after collectd restart...')
            time.sleep(sleep_time)
            if not collectd_restarted:
                for warning in collectd_warnings:
                    logger.warning(warning)
                    'Restart of collectd on node {} failed'.format(
                    'CSV Testcases on node {}'.format(compute_node)
                    + ' will not be executed.')
            for plugin_name in sorted(plugin_labels.keys()):
                    plugin_labels, plugin_name, i,
                    controllers, compute_node, conf, results,
                    error_plugins, out_plugins[node_id])
            for plugin_name in sorted(plugin_labels.keys()):
                    plugin_labels, plugin_name, i,
                    controllers, compute_node, conf, results,
                    error_plugins, out_plugins[node_id])

    print_overall_summary(
        compute_ids, plugin_labels, aodh_plugin_labels, results, out_plugins)

        logger.error('Some tests have failed or have not been executed')
        logger.error('Overall Result is Fail')

    _print_label('Testing DMA on compute nodes')
    res_agent = dma.dma_main(logger, conf, computes)

    # Overall exit code: 0 only if both the plug-in tests and the DMA agent
    # test succeeded.
    return 0 if res_overall == 0 and res_agent == 0 else 1
# NOTE(review): the guarded entry-point body is missing from this view
# (presumably something like sys.exit(main(...))).
if __name__ == '__main__':