1 # -*- coding: utf-8 -*-
3 # Licensed under the Apache License, Version 2.0 (the "License"); you may
4 # not use this file except in compliance with the License. You may obtain
5 # a copy of the License at
7 # http://www.apache.org/licenses/LICENSE-2.0
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
11 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
12 # License for the specific language governing permissions and limitations
15 """Executing test of plugins"""
18 from keystoneclient.v3 import client
26 from opnfv.deployment import factory
# Name under which Gnocchi registers itself in the Keystone service catalog.
GNOCCHI_NAME = 'gnocchi'
# Installer node's private RSA key: source path on the installer and the
# local destination it is copied to (see get_ssh_keys()).
ID_RSA_SRC = '/root/.ssh/id_rsa'
ID_RSA_DST_DIR = '/home/opnfv/.ssh'
ID_RSA_DST = ID_RSA_DST_DIR + '/id_rsa'
# NOTE(review): shelling out just to expand $INSTALLER_IP leaves a trailing
# newline in the value; os.environ.get('INSTALLER_IP') would be cleaner --
# confirm downstream users tolerate the current form before changing it.
APEX_IP = subprocess.check_output("echo $INSTALLER_IP", shell=True)
# Credentials used to reach the Apex installer / overcloud nodes.
APEX_USER_STACK = 'stack'
APEX_PKEY = '/root/.ssh/id_rsa'
class KeystoneException(Exception):
    """Keystone exception class.

    Base error type for failures while talking to Keystone/Gnocchi.
    """
    def __init__(self, message, exc=None, response=None):
        """
        Keyword arguments:
        message -- error message
        exc -- underlying exception, appended to the message when given
        response -- HTTP response that triggered the error (optional)
        """
        # Only append a reason when an underlying exception was supplied;
        # appending unconditionally would produce "Reason: None".
        if exc:
            message += "\nReason: %s" % exc
        super(KeystoneException, self).__init__(message)
        # Keep the raw response so callers can inspect what the server sent.
        self.response = response
class InvalidResponse(KeystoneException):
    """Raised when a Keystone/Gnocchi response cannot be interpreted."""
    def __init__(self, exc, response):
        """
        Keyword arguments:
        exc -- exception that made the response invalid
        response -- the offending HTTP response
        """
        super(InvalidResponse, self).__init__("Invalid response", exc, response)
class GnocchiClient(object):
    # Gnocchi Client to authenticate and request meters
    def __init__(self):
        # Lazily populated by auth_token() / _auth_server().
        self._auth_token = None
        self._gnocchi_url = None
        self._meter_list = None

    def auth_token(self):
        """Return an auth token, authenticating against Keystone first."""
        self._auth_server()
        return self._auth_token

    def get_gnocchi_url(self):
        """Return the Gnocchi endpoint URL found in the service catalog."""
        return self._gnocchi_url

    def get_gnocchi_metrics(self, criteria=None):
        """Return the meter list fetched from Gnocchi."""
        # Subject to change if metric gathering is different for gnocchi
        self._request_meters(criteria)
        return self._meter_list

    def _auth_server(self):
        """Request token in authentication server."""
        logger.debug('Connecting to the auth server {}'.format(
            os.environ['OS_AUTH_URL']))
        # NOTE(review): tenant_name is filled from OS_USERNAME -- confirm
        # this is intended (OS_TENANT_NAME is the usual source).
        keystone = client.Client(username=os.environ['OS_USERNAME'],
                                 password=os.environ['OS_PASSWORD'],
                                 tenant_name=os.environ['OS_USERNAME'],
                                 auth_url=os.environ['OS_AUTH_URL'])
        self._auth_token = keystone.auth_token
        # Look up the 'internal' Gnocchi endpoint in the service catalog.
        for service in keystone.service_catalog.get_data():
            if service['name'] == GNOCCHI_NAME:
                for service_type in service['endpoints']:
                    if service_type['interface'] == 'internal':
                        self._gnocchi_url = service_type['url']
        if self._gnocchi_url is None:
            logger.warning('Gnocchi is not registered in service catalog')

    def _request_meters(self, criteria):
        """Request meter list values from ceilometer

        Keyword arguments:
        criteria -- criteria for ceilometer meter list
        """
        # Without criteria request all resources; otherwise filter by the
        # given resource id.
        if criteria is None:
            url = self._gnocchi_url + ('/v3/resource?limit=400')
        else:
            url = self._gnocchi_url \
                + ('/v3/resource/%s?q.field=resource_id&limit=400' % criteria)
        headers = {'X-Auth-Token': self._auth_token}
        resp = requests.get(url, headers=headers)
        try:
            resp.raise_for_status()
            self._meter_list = resp.json()
        except (KeyError, ValueError, requests.exceptions.HTTPError) as err:
            raise InvalidResponse(err, resp)
class CSVClient(object):
    """Client to request CSV meters"""
    def __init__(self, conf):
        """
        Keyword arguments:
        conf -- ConfigServer instance
        """
        self.conf = conf

    # NOTE(review): the method header was missing from the review listing;
    # name restored as get_csv_metrics -- confirm against the tests module.
    def get_csv_metrics(
            self, compute_node, plugin_subdirectories, meter_categories):
        """Get CSV metrics.

        Keyword arguments:
        compute_node -- compute node instance
        plugin_subdirectories -- list of subdirectories of plug-in
        meter_categories -- categories which will be tested

        Return list of metrics.
        """
        # CSV files are suffixed with the node-local current date.
        stdout = self.conf.execute_command(
            "date '+%Y-%m-%d'", compute_node.get_ip())
        date = stdout[0].strip()
        metrics = []
        for plugin_subdir in plugin_subdirectories:
            for meter_category in meter_categories:
                stdout = self.conf.execute_command(
                    "tail -2 /var/lib/collectd/csv/"
                    + "{0}.jf.intel.com/{1}/{2}-{3}".format(
                        compute_node.get_name(), plugin_subdir, meter_category,
                        date),
                    compute_node.get_ip())
                # Storing last two values
                values = stdout
                if len(values) < 2:
                    logger.error(
                        'Getting last two CSV entries of meter category '
                        + '{0} in {1} subdir failed'.format(
                            meter_category, plugin_subdir))
                else:
                    # Each CSV line starts with an epoch timestamp; keep the
                    # integer part only.
                    old_value = int(values[0][0:values[0].index('.')])
                    new_value = int(values[1][0:values[1].index('.')])
                    metrics.append((
                        plugin_subdir, meter_category, old_value, new_value))
        return metrics
def get_csv_categories_for_ipmi(conf, compute_node):
    """Return the CSV meter categories produced by the ipmi plug-in.

    Keyword arguments:
    conf -- ConfigServer instance
    compute_node -- compute node instance

    Return list of categories.
    """
    today = conf.execute_command(
        "date '+%Y-%m-%d'", compute_node.get_ip())[0].strip()
    listing = conf.execute_command(
        "ls /var/lib/collectd/csv/{0}.jf.intel.com/ipmi | grep {1}".format(
            compute_node.get_name(), today), compute_node.get_ip())
    # Each entry ends in '-YYYY-MM-DD' (11 characters); strip it to keep
    # only the category name.
    return [entry.strip()[:-11] for entry in listing]
def _process_result(compute_node, test, result, results_list):
    """Print test result and append it to results list.

    Keyword arguments:
    compute_node -- compute node ID
    test -- testcase name
    result -- boolean test result
    results_list -- results list
    """
    if result:
        logger.info(
            'Compute node {0} test case {1} PASSED.'.format(
                compute_node, test))
    else:
        logger.error(
            'Compute node {0} test case {1} FAILED.'.format(
                compute_node, test))
    results_list.append((compute_node, test, result))
def _print_label(label):
    """Print label on the screen, centred in a 70-character '=' banner.

    Keyword arguments:
    label -- label string
    """
    label = label.strip()
    # Total banner width; matches the '=' * 70 separators used elsewhere.
    length = 70
    label = ' ' + label + ' '
    length_label = len(label)
    # '//' keeps the padding an int under both Python 2 and Python 3;
    # true division would make the '=' * length1 below fail on Python 3.
    length1 = (length - length_label) // 2
    length2 = length - length_label - length1
    # Always show at least a small '=' run on each side.
    length1 = max(3, length1)
    length2 = max(3, length2)
    logger.info(('=' * length1) + label + ('=' * length2))
def _print_plugin_label(plugin, node_name):
    """Print plug-in label.

    Keyword arguments:
    plugin -- plug-in name
    node_name -- node name
    """
    # NOTE(review): the wrapper around this message was missing from the
    # review listing; routed through _print_label to match the other
    # per-node banners -- confirm against the canonical file.
    _print_label(
        'Node {0}: Plug-in {1} Test case execution'.format(node_name, plugin))
238 def _print_final_result_of_plugin(
239 plugin, compute_ids, results, out_plugins, out_plugin):
240 """Print final results of plug-in.
243 plugin -- plug-in name
244 compute_ids -- list of compute node IDs
245 results -- results list
246 out_plugins -- list of out plug-ins
247 out_plugin -- used out plug-in
250 for id in compute_ids:
251 if out_plugins[id] == out_plugin:
252 if (id, plugin, True) in results:
253 print_line += ' PASS |'
254 elif (id, plugin, False) in results \
255 and out_plugins[id] == out_plugin:
256 print_line += ' FAIL |'
258 print_line += ' NOT EX |'
259 elif out_plugin == 'Gnocchi':
260 print_line += ' NOT EX |'
262 print_line += ' SKIP |'
def print_overall_summary(compute_ids, tested_plugins, results, out_plugins):
    """Print overall summary table.

    Keyword arguments:
    compute_ids -- list of compute IDs
    tested_plugins -- list of plug-ins
    results -- results list
    out_plugins -- list of used out plug-ins
    """
    # NOTE(review): several wrapper lines (logger.info openers, 'else:'
    # keywords, closing brackets) are missing from this listing; the
    # statements below are kept verbatim pending confirmation against the
    # canonical file.
    compute_node_names = ['Node-{}'.format(i) for i in range(
    # compute_node_names = ['Node-{}'.format(id) for id in compute_ids]
    all_computes_in_line = ''
    for compute in compute_node_names:
        # Each node column is 9 characters wide ('| ' + 7-char cell).
        all_computes_in_line += '| ' + compute + (' ' * (7 - len(compute)))
    line_of_nodes = '| Test ' + all_computes_in_line + '|'
    logger.info('=' * 70)
    logger.info('+' + ('-' * ((9 * len(compute_node_names))+16)) + '+')
    # Centred banner row ('/' is integer division -- Python 2 semantics).
        '|' + ' ' * ((9*len(compute_node_names))/2)
            9*len(compute_node_names) - (9*len(compute_node_names))/2)
        '+' + ('-' * 16) + '+' + (('-' * 8) + '+') * len(compute_node_names))
    logger.info(line_of_nodes)
        '+' + ('-' * 16) + '+' + (('-' * 8) + '+') * len(compute_node_names))
    # The Gnocchi row is always printed; SNMP/CSV rows only when some node
    # actually used that out plug-in.
    out_plugins_print = ['Gnocchi']
    if 'SNMP' in out_plugins.values():
        out_plugins_print.append('SNMP')
    if 'CSV' in out_plugins.values():
        out_plugins_print.append('CSV')
    for out_plugin in out_plugins_print:
        output_plugins_line = ''
        for id in compute_ids:
            out_plugin_result = '----'
            if out_plugin == 'Gnocchi':
                out_plugin_result = \
                    'PASS' if out_plugins[id] == out_plugin else 'FAIL'
            if out_plugin == 'SNMP':
                if out_plugins[id] == out_plugin:
                    # NOTE(review): this conditional is always true inside
                    # this branch, so it can never yield 'FAIL'.
                    out_plugin_result = \
                        'PASS' if out_plugins[id] == out_plugin else 'FAIL'
                    out_plugin_result = 'SKIP'
            if out_plugin == 'CSV':
                if out_plugins[id] == out_plugin:
                    out_plugin_result = \
                        plugin for comp_id, plugin, res in results
                        if comp_id == id and res] else 'FAIL'
                    out_plugin_result = 'SKIP'
            output_plugins_line += '| ' + out_plugin_result + ' '
        # Row label padded to the 16-character first column.
        '| OUT:{}'.format(out_plugin) + (' ' * (11 - len(out_plugin)))
        + output_plugins_line + '|')
        for plugin in sorted(tested_plugins.values()):
            line_plugin = _print_final_result_of_plugin(
                plugin, compute_ids, results, out_plugins, out_plugin)
            '| IN:{}'.format(plugin) + (' ' * (11-len(plugin)))
        '+' + ('-' * 16) + '+'
        + (('-' * 8) + '+') * len(compute_node_names))
    logger.info('=' * 70)
        test_labels, name, gnocchi_running, compute_node,
        conf, results, error_plugins):
    # NOTE(review): the 'def _exec_testcase(' header line is missing from
    # this listing; the body below is kept verbatim.
    """Execute the testcase.

    Keyword arguments:
    test_labels -- dictionary of plug-in IDs and their display names
    name -- plug-in ID, key of test_labels dictionary
    gnocchi_running -- boolean indicating whether Gnocchi is running
    compute_node -- compute node ID
    conf -- ConfigServer instance
    results -- results list
    error_plugins -- list of tuples with plug-in errors
        (plugin, error_description, is_critical):
        plugin -- plug-in ID, key of test_labels dictionary
        error_description -- description of the error
        is_critical -- boolean value indicating whether error is critical
    """
    # Discover which OVS interfaces/bridges are both present on the node
    # and configured for the collectd OVS plug-ins.
    ovs_interfaces = conf.get_ovs_interfaces(compute_node)
    ovs_configured_interfaces = conf.get_plugin_config_values(
        compute_node, 'ovs_events', 'Interfaces')
    ovs_configured_bridges = conf.get_plugin_config_values(
        compute_node, 'ovs_stats', 'Bridges')
    ovs_existing_configured_int = [
        interface for interface in ovs_interfaces
        if interface in ovs_configured_interfaces]
    ovs_existing_configured_bridges = [
        bridge for bridge in ovs_interfaces
        if bridge in ovs_configured_bridges]
    # Per-plug-in (passed, description) prerequisite checks.
    # NOTE(review): the dictionary keys / opening brackets for several
    # entries are missing from this listing; entries kept verbatim.
    plugin_prerequisites = {
            conf.is_libpqos_on_node(compute_node),
            'libpqos must be installed.')],
            conf.is_installed(compute_node, 'mcelog'),
            'mcelog must be installed.')],
            len(ovs_existing_configured_int) > 0 or len(ovs_interfaces) > 0,
            'Interfaces must be configured.')],
            len(ovs_existing_configured_bridges) > 0,
            'Bridges must be configured.')]}
    # Meter criteria used when verifying plug-ins through Gnocchi.
    ceilometer_criteria_lists = {
            'intel_rdt.ipc', 'intel_rdt.bytes',
            'intel_rdt.memory_bandwidth'],
        'hugepages': ['hugepages.vmpage_number'],
        'ipmi': ['ipmi.temperature', 'ipmi.voltage'],
        'mcelog': ['mcelog.errors'],
        'ovs_stats': ['interface.if_packets'],
        'ovs_events': ['ovs_events.gauge']}
    ceilometer_substr_lists = {
        'ovs_events': ovs_existing_configured_int if len(
            ovs_existing_configured_int) > 0 else ovs_interfaces}
    # CSV subdirectories to inspect per plug-in (csv_subdirs; the dict
    # opener and several keys are missing from this listing).
            'intel_rdt-{}'.format(core)
            for core in conf.get_plugin_config_values(
                compute_node, 'intel_rdt', 'Cores')],
            'hugepages-mm-2048Kb', 'hugepages-node0-2048Kb',
            'hugepages-node1-2048Kb', 'hugepages-mm-1048576Kb',
            'hugepages-node0-1048576Kb', 'hugepages-node1-1048576Kb'],
            'mcelog-SOCKET_0_CHANNEL_0_DIMM_any',
            'mcelog-SOCKET_0_CHANNEL_any_DIMM_any'],
            'ovs_stats-{0}.{0}'.format(interface)
            for interface in ovs_existing_configured_bridges],
            'ovs_events-{}'.format(interface)
                ovs_existing_configured_int
                if len(ovs_existing_configured_int) > 0 else ovs_interfaces)]}
    csv_meter_categories_ipmi = get_csv_categories_for_ipmi(conf, compute_node)
    csv_meter_categories = {
            'bytes-llc', 'ipc', 'memory_bandwidth-local',
            'memory_bandwidth-remote'],
        'hugepages': ['vmpage_number-free', 'vmpage_number-used'],
        'ipmi': csv_meter_categories_ipmi,
            'errors-corrected_memory_errors',
            'errors-uncorrected_memory_errors',
            'errors-corrected_memory_errors_in_24h',
            'errors-uncorrected_memory_errors_in_24h'],
            'if_collisions', 'if_dropped', 'if_errors', 'if_packets',
            'if_rx_errors-crc', 'if_rx_errors-frame', 'if_rx_errors-over',
            'if_rx_octets', 'if_tx_octets'],
        'ovs_events': ['gauge-link_status']}
        test_labels[name] if name in test_labels else name,
        compute_node.get_name())
    # Critical plug-in errors abort the testcase and record a failure.
    plugin_critical_errors = [
        error for plugin, error, critical in error_plugins
        if plugin == name and critical]
    if plugin_critical_errors:
        # NOTE(review): '.format(name)' has no placeholder in this message,
        # so the plug-in name is silently dropped.
        logger.error('Following critical errors occurred:'.format(name))
        for error in plugin_critical_errors:
            logger.error(' * ' + error)
            compute_node.get_id(), test_labels[name], False, results)
        error for plugin, error, critical in error_plugins
        if plugin == name and not critical]
        # NOTE(review): 'occured' typo in the runtime string below --
        # left untouched to preserve behavior.
        logger.warning('Following non-critical errors occured:')
        for error in plugin_errors:
            logger.warning(' * ' + error)
    failed_prerequisites = []
    if name in plugin_prerequisites:
        failed_prerequisites = [
            prerequisite_name for prerequisite_passed,
            prerequisite_name in plugin_prerequisites[name]
            if not prerequisite_passed]
    if failed_prerequisites:
            '{} test will not be executed, '.format(name)
            + 'following prerequisites failed:')
        for prerequisite in failed_prerequisites:
            logger.error(' * {}'.format(prerequisite))
        # Verify the plug-in either through Gnocchi, or via the CSV files
        # written on the node itself.
        res = conf.test_plugins_with_gnocchi(
            compute_node.get_id(),
            conf.get_plugin_interval(compute_node, name),
            logger, client=GnocchiClient(),
            criteria_list=ceilometer_criteria_lists[name],
            resource_id_substrings=(
                ceilometer_substr_lists[name]
                if name in ceilometer_substr_lists else ['']))
        res = tests.test_csv_handles_plugin_data(
            compute_node, conf.get_plugin_interval(compute_node, name),
            name, csv_subdirs[name], csv_meter_categories[name],
            logger, CSVClient(conf))
    if res and plugin_errors:
        # A plug-in that works but had non-critical errors is still
        # reported as a failure.
        'Test works, but will be reported as failure,'
        + 'because of non-critical errors.')
        compute_node.get_id(), test_labels[name], res, results)
def get_results_for_ovs_events(
        plugin_labels, plugin_name, gnocchi_running,
        compute_node, conf, results, error_plugins):
    """Testing OVS Events with python plugin.

    Keyword arguments:
    plugin_labels -- dictionary of plug-in IDs and their display names
    plugin_name -- plug-in ID
    gnocchi_running -- boolean indicating whether Gnocchi is running
    compute_node -- compute node instance
    conf -- ConfigServer instance
    results -- results list
    error_plugins -- list of tuples with plug-in errors
    """
    plugin_label = 'OVS events'
    res = conf.enable_ovs_events(
        compute_node, plugin_label, error_plugins, create_backup=False)
    # Record the outcome alongside the other testcases.
    _process_result(
        compute_node.get_id(), plugin_label, res, results)
    logger.info("Results for OVS Events = {}" .format(results))
def mcelog_install():
    """Install mcelog on compute nodes."""
    # NOTE(review): several wrapper lines (handler arguments, the
    # 'for node in nodes:' loop header, logger.info openers and a
    # node.put_file opener) are missing from this listing; statements
    # are kept verbatim.
    _print_label('Enabling mcelog on compute nodes')
    handler = factory.Factory.get_handler('apex',
    nodes = handler.get_nodes()
        if node.is_compute():
            centos_release = node.run_cmd('uname -r')
            # Only one specific CentOS kernel release is supported.
            # NOTE(review): the '{1}' index below has no matching second
            # format argument visible -- likely a format-string bug.
            if '3.10.0-514.26.2.el7.x86_64' not in centos_release:
                    'Mcelog will not be enabled '
                    + 'on node-{0}, '.format(node.get_dict()['id'])
                    + 'unsupported CentOS release found ({1}).'.format(
                    'Checking if mcelog is enabled'
                    + ' on node-{}...'.format(node.get_dict()['id']))
                res = node.run_cmd('ls')
                # NOTE(review): "'mce-inject_ea' and 'corrected' in res"
                # only tests the second operand (the first string is always
                # truthy); should read
                # "'mce-inject_ea' in res and 'corrected' in res".
                if 'mce-inject_ea' and 'corrected' in res:
                        'Mcelog seems to be already installed '
                        + 'on node-{}.'.format(node.get_dict()['id']))
                    node.run_cmd('modprobe mce-inject_ea')
                    node.run_cmd('mce-inject_ea < corrected')
                        'Mcelog will be enabled on node-{}...'.format(
                            node.get_dict()['id']))
                        '/usr/local/lib/python2.7/dist-packages/baro_tests/'
                        + 'mce-inject_ea', 'mce-inject_ea')
                    node.run_cmd('chmod a+x mce-inject_ea')
                    # Prepare a synthetic corrected-memory-error record and
                    # inject it so the plug-in has data to report.
                    node.run_cmd('echo "CPU 0 BANK 0" > corrected')
                        'echo "STATUS 0xcc00008000010090" >>'
                        'echo "ADDR 0x0010FFFFFFF" >> corrected')
                    node.run_cmd('modprobe mce-inject')
                    node.run_cmd('mce-inject_ea < corrected')
    logger.info('Mcelog is installed on all compute nodes')
# NOTE(review): the 'def mcelog_delete():' header line and the
# 'for node in nodes:' loop header are missing from this listing;
# statements are kept verbatim.
    """Uninstall mcelog from compute nodes."""
    # NOTE(review): APEX_USER is not among the constants visible in this
    # file (only APEX_USER_STACK is) -- confirm where it is defined.
    handler = factory.Factory.get_handler(
        'apex', APEX_IP, APEX_USER, APEX_PKEY)
    nodes = handler.get_nodes()
        if node.is_compute():
            output = node.run_cmd('ls')
            # Remove the injection tool and the synthetic error record
            # placed by mcelog_install().
            if 'mce-inject_ea' in output:
                node.run_cmd('rm mce-inject_ea')
            if 'corrected' in output:
                node.run_cmd('rm corrected')
            node.run_cmd('systemctl restart mcelog')
    logger.info('Mcelog is deleted from all compute nodes')
# NOTE(review): the enclosing 'def get_ssh_keys():' header, a logger.info
# opener and an 'else:' keyword are missing from this listing; statements
# are kept verbatim.
    # Ensure the local destination directory for the key exists.
    if not os.path.isdir(ID_RSA_DST_DIR):
        os.makedirs(ID_RSA_DST_DIR)
    if not os.path.isfile(ID_RSA_DST):
            "RSA key file {} doesn't exist".format(ID_RSA_DST)
            + ", it will be downloaded from installer node.")
        # Fetch the private key from the Apex installer node.
        handler = factory.Factory.get_handler(
            'apex', APEX_IP, APEX_USER, APEX_PKEY)
        apex = handler.get_installer_node()
        apex.get_file(ID_RSA_SRC, ID_RSA_DST)
        # (an 'else:' appears to be missing before this line)
        logger.info("RSA key file {} exists.".format(ID_RSA_DST))
# NOTE(review): the 'def _check_logger():' header and a 'global logger'
# statement appear to be missing from this listing.
    """Check whether there is global logger available and if not, define one."""
    if 'logger' not in globals():
        # NOTE(review): as written this reads a 'logger' name to build the
        # global logger -- confirm the intended source module.
        logger = logger.Logger("barometercollectd").getLogger()
def main(bt_logger=None):
    """Check each compute node sends gnocchi metrics.

    Keyword arguments:
    bt_logger -- logger instance
    """
    # NOTE(review): numerous wrapper lines (logger.info openers, 'else:'
    # keywords, dict openers, list initialisations) are missing from this
    # listing; statements below are kept verbatim.
    # Silence noisy third-party loggers.
    logging.getLogger("paramiko").setLevel(logging.WARNING)
    logging.getLogger("stevedore").setLevel(logging.WARNING)
    logging.getLogger("opnfv.deployment.manager").setLevel(logging.WARNING)
    # Use the provided logger, or fall back to the module-level one.
    if bt_logger is None:
    _print_label("Starting barometer tests suite")
    conf = config_server.ConfigServer(APEX_IP, APEX_USER, logger)
    controllers = conf.get_controllers()
    if len(controllers) == 0:
        logger.error('No controller nodes found!')
    computes = conf.get_computes()
    if len(computes) == 0:
        logger.error('No compute nodes found!')
        'Display of Control and Compute nodes available in the set up')
    logger.info('controllers: {}'.format([('{0}: {1} ({2})'.format(
        node.get_id(), node.get_name(),
        node.get_ip())) for node in controllers]))
    logger.info('computes: {}'.format([('{0}: {1} ({2})'.format(
        node.get_id(), node.get_name(), node.get_ip()))
        for node in computes]))
    # Probe Gnocchi on every controller; one running instance is enough.
    gnocchi_running_on_con = False
    _print_label('Test Gnocchi on controller nodes')
    for controller in controllers:
        logger.info("Controller = {}" .format(controller))
        gnocchi_client = GnocchiClient()
        gnocchi_client.auth_token()
        gnocchi_running_on_con = (
            gnocchi_running_on_con or conf.is_gnocchi_running(
    if gnocchi_running_on_con:
        logger.info("Gnocchi is running on controller.")
        logger.error("Gnocchi is not running on controller.")
        logger.info("CSV will be enabled on compute nodes.")
    compute_node_names = []
    # Plug-in IDs mapped to their display names (dict opener missing in
    # this listing).
        'intel_rdt': 'Intel RDT',
        'hugepages': 'Hugepages',
        'ovs_stats': 'OVS stats',
        'ovs_events': 'OVS events'}
    for compute_node in computes:
        node_id = compute_node.get_id()
        node_name = compute_node.get_name()
        # Default to CSV verification until Gnocchi is confirmed working.
        out_plugins[node_id] = 'CSV'
        compute_ids.append(node_id)
        compute_node_names.append(node_name)
        plugins_to_enable = []
        _print_label('NODE {}: Test Gnocchi Plug-in'.format(node_name))
        logger.info('Checking if gnocchi plug-in is included in compute nodes.')
        if not conf.check_gnocchi_plugin_included(compute_node):
            logger.error('Gnocchi plug-in is not included.')
                'Testcases on node {} will not be executed'.format(node_name))
        collectd_restarted, collectd_warnings = \
            conf.restart_collectd(compute_node)
            'Sleeping for {} seconds after collectd restart...'.format(
        time.sleep(sleep_time)
        if not collectd_restarted:
            for warning in collectd_warnings:
                logger.warning(warning)
                'Restart of collectd on node {} failed'.format(node_name))
                'Testcases on node {} will not be executed'.format(
            for warning in collectd_warnings:
                logger.warning(warning)
            gnocchi_running_on_con
            and conf.test_gnocchi_is_sending_data(
            out_plugins[node_id] = 'Gnocchi'
            logger.info("Gnocchi is active and collecting data")
            plugins_to_enable.append('csv')
            out_plugins[node_id] = 'CSV'
            logger.error("Gnocchi is inactive and not collecting data")
                "CSV will be enabled for verification "
                + "of test plugins.")
        if plugins_to_enable:
                'NODE {}: Enabling Test Plug-in '.format(node_name)
                + 'and Test case execution')
        if plugins_to_enable and not conf.enable_plugins(
                compute_node, plugins_to_enable, error_plugins,
                create_backup=False):
                'Failed to test plugins on node {}.'.format(node_id))
                'Testcases on node {} will not be executed'.format(
        if plugins_to_enable:
            # A second collectd restart is needed after enabling plug-ins.
            collectd_restarted, collectd_warnings = \
                conf.restart_collectd(compute_node)
                'Sleeping for {} seconds'.format(sleep_time)
                + ' after collectd restart...')
            time.sleep(sleep_time)
        if plugins_to_enable and not collectd_restarted:
            for warning in collectd_warnings:
                logger.warning(warning)
                'Restart of collectd on node {} failed'.format(
                'Testcases on node {}'.format(node_id)
                + ' will not be executed.')
            if collectd_warnings:
                for warning in collectd_warnings:
                    logger.warning(warning)
            # Run every plug-in testcase on this node.
            for plugin_name in sorted(plugin_labels.keys()):
                plugin_labels, plugin_name,
                compute_node, conf, results, error_plugins)
        _print_label('NODE {}: Restoring config file'.format(node_name))
        conf.restore_config(compute_node)
    print_overall_summary(compute_ids, plugin_labels, results, out_plugins)
    # Overall failure when any test failed or some testcases never ran.
    if ((len([res for res in results if not res[2]]) > 0)
            or (len(results) < len(computes) * len(plugin_labels))):
        logger.error('Some tests have failed or have not been executed')
747 if __name__ == '__main__':