3 # Copyright 2015-2017 Intel Corporation.
5 # Licensed under the Apache License, Version 2.0 (the "License");
6 # you may not use this file except in compliance with the License.
7 # You may obtain a copy of the License at
9 # http://www.apache.org/licenses/LICENSE-2.0
11 # Unless required by applicable law or agreed to in writing, software
12 # distributed under the License is distributed on an "AS IS" BASIS,
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 # See the License for the specific language governing permissions and
15 # limitations under the License.
17 """VSPERF main script.
36 from tabulate import tabulate
37 from conf import merge_spec
38 from conf import settings
39 import core.component_factory as component_factory
40 from core.loader import Loader
41 from testcases import PerformanceTestCase
42 from testcases import IntegrationTestCase
43 from testcases import K8sPerformanceTestCase
44 from tools import tasks
45 from tools import networkcard
46 from tools import functions
47 from tools.pkt_gen import trafficgen
48 from tools.opnfvdashboard import opnfvdashboard
49 from tools.os_deploy_tgen import osdt
# --- Module-level setup -----------------------------------------------------
# Prevent creation of .pyc files for this tool.
50 sys.dont_write_bytecode = True
# Map of CLI verbosity names to stdlib logging levels.
# NOTE(review): the dict's opening line (and presumably an 'info' entry) is
# missing from this extracted view — confirm against the full file.
53 'debug': logging.DEBUG,
55 'warning': logging.WARNING,
56 'error': logging.ERROR,
57 'critical': logging.CRITICAL
# Absolute directory of this script; used to locate conf/ and report templates.
60 _CURR_DIR = os.path.dirname(os.path.realpath(__file__))
# RST report template pieces: fixed head/foot files, a temporary caption file,
# and the basename of the final combined report.
62 _TEMPLATE_RST = {'head' : os.path.join(_CURR_DIR, 'tools/report/report_head.rst'),
63 'foot' : os.path.join(_CURR_DIR, 'tools/report/report_foot.rst'),
64 'final' : 'test_report.rst',
65 'tmp' : os.path.join(_CURR_DIR, 'tools/report/report_tmp_caption.rst')
# Header template for the performance-matrix RST section; formatted later with
# the comparison metric name and the tabulated results.
68 _TEMPLATE_MATRIX = "Performance Matrix\n------------------\n\n"\
69 "The following performance matrix was generated with the results of all the\n"\
70 "currently run tests. The metric used for comparison is {}.\n\n{}\n\n"
# Root logger used throughout this script; matplotlib chatter is silenced.
72 _LOGGER = logging.getLogger()
73 logging.getLogger('matplotlib').setLevel(logging.ERROR)
def parse_param_string(values):
    """
    Parse and split a single '--test-params' argument.

    This expects either 'x=y', 'x=y,z' or 'x' (implicit true)
    values. For multiple overrides use a ; separated list for
    e.g. --test-params 'x=z; y=(a,b)'

    :param values: string with semicolon separated ``key=value`` overrides
    :return: dict mapping parameter names to their parsed values; a key
        given without a value maps to ``True``
    """
    results = {}

    if values == '':
        return results

    for param, _, value in re.findall('([^;=]+)(=([^;]+))?', values):
        param = param.strip()
        value = value.strip()
        if not param:
            continue
        if value:
            # values are passed inside string from CLI, so we must retype them accordingly
            try:
                results[param] = ast.literal_eval(value)
            # BUGFIX: literal_eval() raises ValueError for unquoted strings
            # but SyntaxError for malformed expressions (e.g. 'x=[1,');
            # catch both so any non-literal value falls back to a plain string.
            except (ValueError, SyntaxError):
                # for backward compatibility, we have to accept strings without quotes
                _LOGGER.warning("Adding missing quotes around string value: %s = %s",
                                param, str(value))
                results[param] = str(value)
        else:
            # bare key without '=' acts as an implicit boolean flag
            results[param] = True
    return results
106 def parse_arguments():
108 Parse command line arguments.
# Custom argparse action: accepts either a single ';'-separated override
# string or a Python-literal list of such strings (one per test), and stores
# the parsed result under the '_PARAMS_LIST' key.
110 class _SplitTestParamsAction(argparse.Action):
112 Parse and split '--test-params' arguments.
114 This expects either a single list of ; separated overrides
115 as 'x=y', 'x=y,z' or 'x' (implicit true) values.
116 e.g. --test-params 'x=z; y=(a,b)'
117 Or a list of these ; separated lists with overrides for
119 e.g. --test-params "['x=z; y=(a,b)','x=z']"
121 def __call__(self, parser, namespace, values, option_string=None):
# First try to interpret the value as a Python list literal; each element
# is parsed separately by parse_param_string().
123 input_list = ast.literal_eval(values)
125 for test_params in input_list:
126 parameter_list.append(parse_param_string(test_params))
# NOTE(review): the try/except wrapping these two branches is missing from
# this view — presumably the literal_eval failure path falls back to
# parsing the whole value as a single override string. Confirm.
128 parameter_list = parse_param_string(values)
129 results = {'_PARAMS_LIST':parameter_list}
130 setattr(namespace, self.dest, results)
# Custom argparse action: rejects paths that are not existing readable files.
132 class _ValidateFileAction(argparse.Action):
133 """Validate a file can be read from before using it.
135 def __call__(self, parser, namespace, values, option_string=None):
136 if not os.path.isfile(values):
137 raise argparse.ArgumentTypeError(
138 'the path \'%s\' is not a valid path' % values)
139 elif not os.access(values, os.R_OK):
140 raise argparse.ArgumentTypeError(
141 'the path \'%s\' is not accessible' % values)
143 setattr(namespace, self.dest, values)
# Custom argparse action: rejects paths that are not existing writable dirs.
145 class _ValidateDirAction(argparse.Action):
146 """Validate a directory can be written to before using it.
148 def __call__(self, parser, namespace, values, option_string=None):
149 if not os.path.isdir(values):
150 raise argparse.ArgumentTypeError(
151 'the path \'%s\' is not a valid path' % values)
152 elif not os.access(values, os.W_OK):
153 raise argparse.ArgumentTypeError(
154 'the path \'%s\' is not accessible' % values)
156 setattr(namespace, self.dest, values)
# Helper used for the --verbosity choices list, ordered by severity.
158 def list_logging_levels():
159 """Give a summary of all available logging levels.
161 :return: List of verbosity level names in decreasing order of
164 return sorted(VERBOSITY_LEVELS.keys(),
165 key=lambda x: VERBOSITY_LEVELS[x])
# --- Parser definition -------------------------------------------------
167 parser = argparse.ArgumentParser(prog=__file__, formatter_class=
168 argparse.ArgumentDefaultsHelpFormatter)
169 parser.add_argument('--version', action='version', version='%(prog)s 0.2')
# '--list-*' options print the requested inventory and exit (handled later
# by handle_list_options()).
170 parser.add_argument('--list', '--list-tests', action='store_true',
171 help='list all tests and exit')
172 parser.add_argument('--list-trafficgens', action='store_true',
173 help='list all traffic generators and exit')
174 parser.add_argument('--list-collectors', action='store_true',
175 help='list all system metrics loggers and exit')
176 parser.add_argument('--list-vswitches', action='store_true',
177 help='list all system vswitches and exit')
178 parser.add_argument('--list-fwdapps', action='store_true',
179 help='list all system forwarding applications and exit')
180 parser.add_argument('--list-vnfs', action='store_true',
181 help='list all system vnfs and exit')
182 parser.add_argument('--list-loadgens', action='store_true',
183 help='list all background load generators')
184 parser.add_argument('--list-pods', action='store_true',
185 help='list all system pods')
186 parser.add_argument('--list-settings', action='store_true',
187 help='list effective settings configuration and exit')
188 parser.add_argument('exact_test_name', nargs='*', help='Exact names of\
189 tests to run. E.g "vsperf phy2phy_tput phy2phy_cont"\
190 runs only the two tests with those exact names.\
191 To run all tests omit both positional args and --tests arg.')
# --- Test selection options --------------------------------------------
193 group = parser.add_argument_group('test selection options')
194 group.add_argument('-m', '--mode', help='vsperf mode of operation;\
195 Values: "normal" - execute vSwitch, VNF and traffic generator;\
196 "trafficgen" - execute only traffic generator; "trafficgen-off" \
197 - execute vSwitch and VNF; trafficgen-pause - execute vSwitch \
198 and VNF but pause before traffic transmission ', default='normal')
200 group.add_argument('-f', '--test-spec', help='test specification file')
201 group.add_argument('-d', '--test-dir', help='directory containing tests')
202 group.add_argument('-t', '--tests', help='Comma-separated list of terms \
203 indicating tests to run. e.g. "RFC2544,!p2p" - run all tests whose\
204 name contains RFC2544 less those containing "p2p"; "!back2back" - \
205 run all tests except those containing back2back')
206 group.add_argument('--verbosity', choices=list_logging_levels(),
208 group.add_argument('--integration', action='store_true', help='execute integration tests')
209 group.add_argument('--k8s', action='store_true', help='execute Kubernetes tests')
210 group.add_argument('--openstack', action='store_true', help='Run VSPERF with openstack')
211 group.add_argument('--trafficgen', help='traffic generator to use')
212 group.add_argument('--vswitch', help='vswitch implementation to use')
213 group.add_argument('--fwdapp', help='packet forwarding application to use')
214 group.add_argument('--vnf', help='vnf to use')
215 group.add_argument('--loadgen', help='loadgen to use')
216 group.add_argument('--sysmetrics', help='system metrics logger to use')
# --- Test behavior options ---------------------------------------------
217 group = parser.add_argument_group('test behavior options')
218 group.add_argument('--xunit', action='store_true',
219 help='enable xUnit-formatted output')
220 group.add_argument('--xunit-dir', action=_ValidateDirAction,
221 help='output directory of xUnit-formatted output')
222 group.add_argument('--load-env', action='store_true',
223 help='enable loading of settings from the environment')
224 group.add_argument('--conf-file', action=_ValidateFileAction,
225 help='settings file')
226 group.add_argument('--test-params', action=_SplitTestParamsAction,
227 help='csv list of test parameters: key=val; e.g. '
228 'TRAFFICGEN_PKT_SIZES=(64,128);TRAFFICGEN_DURATION=30; '
229 'GUEST_LOOPBACK=["l2fwd"] ...'
230 ' or a list of csv lists of test parameters: key=val; e.g. '
231 '[\'TRAFFICGEN_DURATION=10;TRAFFICGEN_PKT_SIZES=(128,)\','
232 '\'TRAFFICGEN_DURATION=10;TRAFFICGEN_PKT_SIZES=(64,)\']')
233 group.add_argument('--opnfvpod', help='name of POD in opnfv')
234 group.add_argument('--matrix', help='enable performance matrix analysis',
235 action='store_true', default=False)
# Returned to the caller as a plain dict (NOTE(review): the 'return args'
# line is missing from this extracted view).
237 args = vars(parser.parse_args())
242 def configure_logging(level):
243 """Configure logging.
# Build per-run log file names by inserting a unique id (looks like a
# settings-provided uid/timestamp; the argument lines are gapped in this
# view — confirm which key is read) into the configured base names, and
# place them under RESULTS_PATH.
245 name, ext = os.path.splitext(settings.getValue('LOG_FILE_DEFAULT'))
246 rename_default = "{name}_{uid}{ex}".format(name=name,
247 uid=settings.getValue(
250 log_file_default = os.path.join(
251 settings.getValue('RESULTS_PATH'), rename_default)
252 name, ext = os.path.splitext(settings.getValue('LOG_FILE_HOST_CMDS'))
253 rename_hostcmd = "{name}_{uid}{ex}".format(name=name,
254 uid=settings.getValue(
257 log_file_host_cmds = os.path.join(
258 settings.getValue('RESULTS_PATH'), rename_hostcmd)
259 name, ext = os.path.splitext(settings.getValue('LOG_FILE_TRAFFIC_GEN'))
260 rename_traffic = "{name}_{uid}{ex}".format(name=name,
261 uid=settings.getValue(
264 log_file_traffic_gen = os.path.join(
265 settings.getValue('RESULTS_PATH'), rename_traffic)
# Infrastructure metrics log goes to LOG_DIR, suffixed with the run timestamp.
266 metrics_file = (settings.getValue('LOG_FILE_INFRA_METRICS_PFX') +
267 settings.getValue('LOG_TIMESTAMP') + '.log')
268 log_file_infra_metrics = os.path.join(settings.getValue('LOG_DIR'),
# Root logger captures everything; handlers below apply their own levels.
271 _LOGGER.setLevel(logging.DEBUG)
# Console handler honours the CLI --verbosity choice.
273 stream_logger = logging.StreamHandler(sys.stdout)
274 stream_logger.setLevel(VERBOSITY_LEVELS[level])
275 stream_logger.setFormatter(logging.Formatter(
276 '[%(levelname)-5s] %(asctime)s : (%(name)s) - %(message)s'))
277 _LOGGER.addHandler(stream_logger)
# Main log file records everything at DEBUG.
279 file_logger = logging.FileHandler(filename=log_file_default)
280 file_logger.setLevel(logging.DEBUG)
281 file_logger.setFormatter(logging.Formatter(
282 '%(asctime)s : %(message)s'))
283 _LOGGER.addHandler(file_logger)
# Filters route host-command, traffic-gen-command and collectd records into
# their dedicated log files based on a message prefix.
285 class CommandFilter(logging.Filter):
286 """Filter out strings beginning with 'cmd :'"""
287 def filter(self, record):
288 return record.getMessage().startswith(tasks.CMD_PREFIX)
290 class TrafficGenCommandFilter(logging.Filter):
291 """Filter out strings beginning with 'gencmd :'"""
292 def filter(self, record):
293 return record.getMessage().startswith(trafficgen.CMD_PREFIX)
295 class CollectdMetricsFilter(logging.Filter):
296 """Filter out strings beginning with 'COLLECTD' :'"""
297 def filter(self, record):
298 return record.getMessage().startswith('COLLECTD')
300 cmd_logger = logging.FileHandler(filename=log_file_host_cmds)
301 cmd_logger.setLevel(logging.DEBUG)
302 cmd_logger.addFilter(CommandFilter())
303 _LOGGER.addHandler(cmd_logger)
305 gen_logger = logging.FileHandler(filename=log_file_traffic_gen)
306 gen_logger.setLevel(logging.DEBUG)
307 gen_logger.addFilter(TrafficGenCommandFilter())
308 _LOGGER.addHandler(gen_logger)
# Metrics log file is only created when the Collectd collector is configured.
310 if settings.getValue('COLLECTOR') == 'Collectd':
311 met_logger = logging.FileHandler(filename=log_file_infra_metrics)
312 met_logger.setLevel(logging.DEBUG)
313 met_logger.addFilter(CollectdMetricsFilter())
314 _LOGGER.addHandler(met_logger)
def apply_filter(tests, tc_filter):
    """Allow a subset of tests to be conveniently selected

    :param tests: The list of Tests from which to select.
    :param tc_filter: A case-insensitive string of comma-separated terms
        indicating the Tests to select.
        e.g. 'RFC' - select all tests whose name contains 'RFC'
        e.g. 'RFC,burst' - select all tests whose name contains 'RFC' or
        'burst'
        e.g. 'RFC,burst,!p2p' - select all tests whose name contains 'RFC'
        or 'burst' and from these remove any containing 'p2p'.
        e.g. '' - empty string selects all tests.
    :return: A list of the selected Tests.
    """
    # BUGFIX: the None guard must come first; the original dereferenced
    # tc_filter.strip() before checking 'tc_filter is None', so passing
    # None raised AttributeError instead of returning all tests.
    if tc_filter is None:
        return tests

    stripped = tc_filter.strip()
    result = []
    # if negative filter is first we have to start with full list of tests
    # (copy, so a later positive term cannot mutate the caller's list);
    # the emptiness check also prevents IndexError on an empty filter.
    if stripped and stripped[0] == '!':
        result = list(tests)

    for term in [x.strip() for x in tc_filter.lower().split(",")]:
        if not term or term[0] != '!':
            # Add matching tests from 'tests' into results
            result.extend([test for test in tests
                           if test['Name'].lower().find(term) >= 0])
        else:
            # Term begins with '!' so we remove matching tests
            result = [test for test in result
                      if test['Name'].lower().find(term[1:]) < 0]
    return result
352 def check_and_set_locale():
353 """ Function will check locale settings. In case, that it isn't configured
354 properly, then default values specified by DEFAULT_LOCALE will be used.
# getdefaultlocale() returns a (lang, encoding) tuple; a None member means
# the locale is not fully configured.
357 system_locale = locale.getdefaultlocale()
358 if None in system_locale:
# Force the configured fallback locale for this process and its children.
359 os.environ['LC_ALL'] = settings.getValue('DEFAULT_LOCALE')
360 _LOGGER.warning("Locale was not properly configured. Default values were set. Old locale: %s, New locale: %s",
361 system_locale, locale.getdefaultlocale())
363 def get_vswitch_names(rst_files):
364 """ Function will return a list of vSwitches detected in given ``rst_files``.
# Set is used so each vSwitch name is reported only once.
366 vswitch_names = set()
# Grep the partial result files for their '* vSwitch:' lines; decode bytes
# output and split into lines. (NOTE(review): the enclosing try/for lines
# are missing from this extracted view.)
369 output = subprocess.check_output(['grep', '-h', '^* vSwitch'] + rst_files).decode().splitlines()
371 match = re.search(r'^\* vSwitch: ([^,]+)', str(line))
373 vswitch_names.add(match.group(1))
376 return list(vswitch_names)
# grep exits non-zero when nothing matches; treat that as "unknown vSwitch"
# and fall through to a default value.
378 except subprocess.CalledProcessError:
379 _LOGGER.warning('Cannot detect vSwitches used during testing.')
381 # fallback to the default value
385 """ Function will return a Jenkins job ID environment variable.
389 build_tag = os.environ['BUILD_TAG']
392 _LOGGER.warning('Cannot detect Jenkins job ID')
397 def generate_final_report():
398 """ Function will check if partial test results are available
399 and generates final report in rst format.
402 path = settings.getValue('RESULTS_PATH')
403 # check if there are any results in rst format
404 rst_results = glob.glob(os.path.join(path, 'result*rst'))
405 pkt_processors = get_vswitch_names(rst_results)
# Final report name is prefixed with the detected packet processors.
408 test_report = os.path.join(path, '{}_{}'.format('_'.join(pkt_processors), _TEMPLATE_RST['final']))
409 # create report caption directly - it is not worth to execute jinja machinery
410 report_caption = '{}\n{} {}\n{}\n\n'.format(
411 '============================================================',
412 'Performance report for',
413 ', '.join(pkt_processors),
414 '============================================================')
416 with open(_TEMPLATE_RST['tmp'], 'w') as file_:
417 file_.write(report_caption)
# Concatenate caption + head template + all partial results + foot template
# into the final report via the shell.
419 retval = subprocess.call('cat {} {} {} {} > {}'.format(_TEMPLATE_RST['tmp'], _TEMPLATE_RST['head'],
420 ' '.join(rst_results), _TEMPLATE_RST['foot'],
421 test_report), shell=True)
422 if retval == 0 and os.path.isfile(test_report):
423 _LOGGER.info('Overall test report written to "%s"', test_report)
425 _LOGGER.error('Generation of overall test report has failed.')
427 # remove temporary file
428 os.remove(_TEMPLATE_RST['tmp'])
430 except subprocess.CalledProcessError:
# NOTE(review): typo in the log message below — 'Generatrion' should be
# 'Generation'; fix in a code change, not here.
431 _LOGGER.error('Generatrion of overall test report has failed.')
434 def generate_performance_matrix(selected_tests, results_path):
436 Loads the results of all the currently run tests, compares them
437 based on the MATRIX_METRIC, outputs and saves the generated table.
438 :selected_tests: list of currently run test
439 :results_path: directory path to the results of current tests
441 _LOGGER.info('Performance Matrix:')
# Build one record per test; 'csv_data' stays False until the result CSV is
# successfully loaded below.
444 for test in selected_tests:
445 test_name = test.get('Name', '<Name not set>')
446 test_deployment = test.get('Deployment', '<Deployment not set>')
447 test_list.append({'test_name':test_name, 'test_deployment':test_deployment, 'csv_data':False})
# Re-apply the per-test parameter overrides so the matrix rows show the
# parameters each test actually ran with. When fewer override sets than
# tests exist, the last set is reused.
451 all_params = settings.getValue('_PARAMS_LIST')
452 for i in range(len(selected_tests)):
454 if isinstance(all_params, list):
456 if i >= len(all_params):
457 list_index = len(all_params) - 1
458 if settings.getValue('CUMULATIVE_PARAMS') and (i > 0):
# Cumulative mode merges each set on top of the previous ones.
459 test_params.update(all_params[list_index])
461 test_params = all_params[list_index]
463 test_params = all_params
464 settings.setValue('TEST_PARAMS', test_params)
465 test['test_params'] = copy.deepcopy(test_params)
# Load the first data row of each test's result CSV; a missing/broken file
# is logged and the test is later reported as failed in the matrix.
467 with open("{}/result_{}_{}_{}.csv".format(results_path, str(i),
468 test['test_name'], test['test_deployment'])) as csvfile:
469 reader = list(csv.DictReader(csvfile))
470 test['csv_data'] = reader[0]
471 # pylint: disable=broad-except
472 except (Exception) as ex:
473 _LOGGER.error("Result file not found: %s", ex)
474 metric = settings.getValue('MATRIX_METRIC')
477 output_header = ("ID", "Name", metric, "Change [%]", "Parameters, "\
478 "CUMULATIVE_PARAMS = {}".format(settings.getValue('CUMULATIVE_PARAMS')))
# The first test is the baseline; its metric must exist and be non-zero
# to avoid division by zero in the percentage-change computation.
479 if not test_list[0]['csv_data'] or float(test_list[0]['csv_data'][metric]) == 0:
480 _LOGGER.error("Incorrect format of test results")
482 for i, test in enumerate(test_list):
# Change [%] relative to the baseline (first) test.
484 change[i] = float(test['csv_data'][metric])/\
485 (float(test_list[0]['csv_data'][metric]) / 100) - 100
486 output.append([i, test['test_name'], float(test['csv_data'][metric]),
487 change[i], str(test['test_params'])[1:-1]])
# Tests without loaded CSV data are shown as failed with zero change.
490 output.append([i, test['test_name'], "Test Failed", 0, test['test_params']])
# Print the matrix to stdout and persist it as RST next to the results.
491 print(tabulate(output, headers=output_header, tablefmt="grid", floatfmt="0.3f"))
492 with open(results_path + '/result_performance_matrix.rst', 'w+') as output_file:
493 output_file.write(_TEMPLATE_MATRIX.format(metric, tabulate(output, headers=output_header,
494 tablefmt="rst", floatfmt="0.3f")))
495 _LOGGER.info('Performance matrix written to: "%s/result_performance_matrix.rst"', results_path)
497 def enable_sriov(nic_list):
498 """ Enable SRIOV for given enhanced PCI IDs
500 :param nic_list: A list of enhanced PCI IDs
502 # detect if sriov is required
# Enhanced PCI IDs use the form 'PCI_ADDR|vfN'; collect, per physical NIC,
# the highest VF index requested.
505 if networkcard.is_sriov_nic(nic):
506 tmp_nic = nic.split('|')
507 if tmp_nic[0] in sriov_nic:
508 if int(tmp_nic[1][2:]) > sriov_nic[tmp_nic[0]]:
509 sriov_nic[tmp_nic[0]] = int(tmp_nic[1][2:])
511 sriov_nic.update({tmp_nic[0] : int(tmp_nic[1][2:])})
513 # sriov is required for some NICs
515 for nic in sriov_nic:
516 # check if SRIOV is supported and enough virt interfaces are available
517 if not networkcard.is_sriov_supported(nic) \
518 or networkcard.get_sriov_numvfs(nic) <= sriov_nic[nic]:
519 # if not, enable and set appropriate number of VFs
520 if not networkcard.set_sriov_numvfs(nic, sriov_nic[nic] + 1):
521 raise RuntimeError('SRIOV cannot be enabled for NIC {}'.format(nic))
523 _LOGGER.debug("SRIOV enabled for NIC %s", nic)
525 # ensure that path to the bind tool is valid
526 functions.settings_update_paths()
528 # WORKAROUND: it has been observed with IXGBE(VF) driver,
529 # that NIC doesn't correclty dispatch traffic to VFs based
530 # on their MAC address. Unbind and bind to the same driver
532 networkcard.reinit_vfs(nic)
534 # After SRIOV is enabled it takes some time until network drivers
535 # properly initialize all cards.
536 # Wait also in case, that SRIOV was already configured as it can be
537 # configured automatically just before vsperf execution.
545 def disable_sriov(nic_list):
546 """ Disable SRIOV for given PCI IDs
548 :param nic_list: A list of enhanced PCI IDs
# For each SRIOV NIC, set the number of VFs back to 0; raise if the sysfs
# write fails. (NOTE(review): the 'for nic in nic_list:' header is missing
# from this extracted view.)
551 if networkcard.is_sriov_nic(nic):
552 if not networkcard.set_sriov_numvfs(nic.split('|')[0], 0):
553 raise RuntimeError('SRIOV cannot be disabled for NIC {}'.format(nic))
555 _LOGGER.debug("SRIOV disabled for NIC %s", nic.split('|')[0])
558 def handle_list_options(args):
559 """ Process --list cli arguments if needed
561 :param args: A dictionary with all CLI arguments
# Each '--list-*' option prints the corresponding Loader inventory; the
# exit statements after each branch are not visible in this extracted view,
# but each option is documented as "... and exit".
563 if args['list_trafficgens']:
564 print(Loader().get_trafficgens_printable())
567 if args['list_collectors']:
568 print(Loader().get_collectors_printable())
571 if args['list_vswitches']:
572 print(Loader().get_vswitches_printable())
575 if args['list_vnfs']:
576 print(Loader().get_vnfs_printable())
579 if args['list_fwdapps']:
580 print(Loader().get_pktfwds_printable())
583 if args['list_loadgens']:
584 print(Loader().get_loadgens_printable())
587 if args['list_pods']:
588 print(Loader().get_pods_printable())
591 if args['list_settings']:
600 def list_testcases(args):
601 """ Print list of testcases requested by --list CLI argument
603 :param args: A dictionary with all CLI arguments
# Pick the test suite matching the selected mode: integration, Kubernetes
# performance, or (default) performance tests.
606 if args['integration']:
607 testcases = settings.getValue('INTEGRATION_TESTS')
609 testcases = settings.getValue('K8SPERFORMANCE_TESTS')
611 testcases = settings.getValue('PERFORMANCE_TESTS')
613 print("Available Tests:")
614 print("================")
# Print each test as 'name: description', wrapping the description to 70
# columns; long names push the description onto the following lines.
616 for test in testcases:
617 description = functions.format_description(test['Description'], 70)
618 if len(test['Name']) < 40:
619 print('* {:40} {}'.format('{}:'.format(test['Name']), description[0]))
621 print('* {}'.format('{}:'.format(test['Name'])))
622 print(' {:40} {}'.format('', description[0]))
623 for i in range(1, len(description)):
624 print(' {:40} {}'.format('', description[i]))
627 def vsperf_finalize():
628 """ Clean up before exit
630 # remove directory if no result files were created
632 results_path = settings.getValue('RESULTS_PATH')
633 if os.path.exists(results_path):
634 files_list = os.listdir(results_path)
636 _LOGGER.info("Removing empty result directory: %s", results_path)
637 shutil.rmtree(results_path)
# RESULTS_PATH may not have been set yet if we exit early; ignore then.
638 except AttributeError:
639 # skip it if parameter doesn't exist
642 # disable SRIOV if needed
644 if settings.getValue('SRIOV_ENABLED'):
645 disable_sriov(settings.getValue('WHITELIST_NICS_ORIG'))
# Same early-exit guard: SRIOV settings may not exist at this point.
646 except AttributeError:
647 # skip it if parameter doesn't exist
651 class MockTestCase(unittest.TestCase):
652 """Allow use of xmlrunner to generate Jenkins compatible output without
653 using xmlrunner to actually run tests.
656 suite = unittest.TestSuite()
657 suite.addTest(MockTestCase('Test1 passed ', True, 'Test1'))
658 suite.addTest(MockTestCase('Test2 failed because...', False, 'Test2'))
659 xmlrunner.XMLTestRunner(...).run(suite)
# msg: text reported on failure; is_pass: recorded outcome to replay;
# test_name: name under which the result is reported.
# (NOTE(review): the 'self.msg = msg' assignment appears to be missing from
# this extracted view — confirm against the full file.)
662 def __init__(self, msg, is_pass, test_name):
665 self.is_pass = is_pass
667 #dynamically create a test method with the right name
668 #but point the method at our generic test method
669 setattr(MockTestCase, test_name, self.generic_test)
671 super(MockTestCase, self).__init__(test_name)
673 def generic_test(self):
674 """Provide a generic function that raises or not based
675 on how self.is_pass was set in the constructor"""
676 self.assertTrue(self.is_pass, self.msg)
678 # pylint: disable=too-many-locals, too-many-branches, too-many-statements
# --- main() body (NOTE(review): the 'def main():' line itself is not
# visible in this extracted view) -------------------------------------------
682 args = parse_arguments()
# Load default configuration shipped with the tool.
686 settings.load_from_dir(os.path.join(_CURR_DIR, 'conf'))
688 # define the timestamp to be used by logs and results
689 date = datetime.datetime.fromtimestamp(time.time())
690 timestamp = date.strftime('%Y-%m-%d_%H-%M-%S')
691 settings.setValue('LOG_TIMESTAMP', timestamp)
693 # generate results directory name
694 # integration test use vswitchd log in test step assertions, ensure that
695 # correct value will be set before loading integration test configuration
696 results_dir = "results_" + timestamp
697 results_path = os.path.join(settings.getValue('LOG_DIR'), results_dir)
698 settings.setValue('RESULTS_PATH', results_path)
699 # create results directory
700 if not os.path.exists(results_path):
701 os.makedirs(results_path)
703 # load non performance/integration tests
704 if args['integration']:
705 settings.load_from_dir(os.path.join(_CURR_DIR, 'conf/integration'))
707 settings.load_from_dir(os.path.join(_CURR_DIR, 'conf/kubernetes'))
709 # load command line parameters first in case there are settings files
711 settings.load_from_dict(args)
713 if args['conf_file']:
714 settings.load_from_file(args['conf_file'])
717 settings.load_from_env()
719 # reload command line parameters since these should take higher priority
720 # than both a settings file and environment variables
721 settings.load_from_dict(args)
723 settings.setValue('mode', args['mode'])
# Record whether Kubernetes mode is active (branch condition gapped in view).
726 settings.setValue('K8S', True)
728 settings.setValue('K8S', False)
# Openstack mode: deploy the test VNF first, then run as pure trafficgen.
730 if args['openstack']:
731 result = osdt.deploy_testvnf()
733 _LOGGER.info('TestVNF successfully deployed on Openstack')
734 settings.setValue('mode', 'trafficgen')
# NOTE(review): typo in the log message below — 'Openstac' should be
# 'Openstack'; fix in a code change, not here.
736 _LOGGER.error('Failed to deploy TestVNF in Openstac')
738 # update paths to trafficgens if required
739 if settings.getValue('mode') == 'trafficgen':
740 functions.settings_update_paths()
742 # if required, handle list-* operations
743 handle_list_options(args)
745 configure_logging(settings.getValue('VERBOSITY'))
748 _LOGGER.info("Creating result directory: %s", results_path)
750 # check and fix locale
751 check_and_set_locale()
753 # configure trafficgens
# Validate each component named on the CLI against the Loader inventory;
# the corresponding settings.setValue/exit lines are gapped in this view.
754 if args['trafficgen']:
755 trafficgens = Loader().get_trafficgens()
756 if args['trafficgen'] not in trafficgens:
757 _LOGGER.error('There are no trafficgens matching \'%s\' found in'
758 ' \'%s\'. Exiting...', args['trafficgen'],
759 settings.getValue('TRAFFICGEN_DIR'))
762 # configuration validity checks
764 vswitch_none = args['vswitch'].strip().lower() == 'none'
766 settings.setValue('VSWITCH', 'none')
768 vswitches = Loader().get_vswitches()
769 if args['vswitch'] not in vswitches:
770 _LOGGER.error('There are no vswitches matching \'%s\' found in'
771 ' \'%s\'. Exiting...', args['vswitch'],
772 settings.getValue('VSWITCH_DIR'))
776 settings.setValue('PKTFWD', args['fwdapp'])
777 fwdapps = Loader().get_pktfwds()
778 if args['fwdapp'] not in fwdapps:
779 _LOGGER.error('There are no forwarding application'
780 ' matching \'%s\' found in'
781 ' \'%s\'. Exiting...', args['fwdapp'],
782 settings.getValue('PKTFWD_DIR'))
786 vnfs = Loader().get_vnfs()
787 if args['vnf'] not in vnfs:
788 _LOGGER.error('there are no vnfs matching \'%s\' found in'
789 ' \'%s\'. exiting...', args['vnf'],
790 settings.getValue('VNF_DIR'))
794 loadgens = Loader().get_loadgens()
795 if args['loadgen'] not in loadgens:
796 _LOGGER.error('There are no loadgens matching \'%s\' found in'
797 ' \'%s\'. Exiting...', args['loadgen'],
798 settings.getValue('LOADGEN_DIR'))
801 if args['exact_test_name'] and args['tests']:
802 _LOGGER.error("Cannot specify tests with both positional args and --test.")
805 # modify NIC configuration to decode enhanced PCI IDs
806 wl_nics_orig = list(networkcard.check_pci(pci) for pci in settings.getValue('WHITELIST_NICS'))
807 settings.setValue('WHITELIST_NICS_ORIG', wl_nics_orig)
809 # sriov handling is performed on checked/expanded PCI IDs
810 settings.setValue('SRIOV_ENABLED', enable_sriov(wl_nics_orig))
# Build the NICS list with metadata per whitelisted card.
813 for nic in wl_nics_orig:
814 tmp_nic = networkcard.get_nic_info(nic)
816 nic_list.append({'pci' : tmp_nic,
817 'type' : 'vf' if networkcard.get_sriov_pf(tmp_nic) else 'pf',
818 'mac' : networkcard.get_mac(tmp_nic),
819 'driver' : networkcard.get_driver(tmp_nic),
820 'device' : networkcard.get_device_name(tmp_nic)})
823 raise RuntimeError("Invalid network card PCI ID: '{}'".format(nic))
825 settings.setValue('NICS', nic_list)
826 # for backward compatibility
827 settings.setValue('WHITELIST_NICS', list(nic['pci'] for nic in nic_list))
830 # pylint: disable=too-many-nested-blocks
831 if settings.getValue('mode') == 'trafficgen':
832 # execute only traffic generator
833 _LOGGER.debug("Executing traffic generator:")
835 # set traffic details, so they can be passed to traffic ctl
836 traffic = copy.deepcopy(settings.getValue('TRAFFIC'))
837 traffic = functions.check_traffic(traffic)
839 traffic_ctl = component_factory.create_traffic(
840 traffic['traffic_type'],
841 loader.get_trafficgen_class())
843 traffic_ctl.send_traffic(traffic)
844 _LOGGER.debug("Traffic Results:")
845 traffic_ctl.print_results()
847 # write results into CSV file
848 result_file = os.path.join(results_path, "result.csv")
849 PerformanceTestCase.write_result_to_file(traffic_ctl.get_results(), result_file)
# --- Normal mode: select and run test cases ------------------------------
852 if args['integration']:
853 testcases = settings.getValue('INTEGRATION_TESTS')
855 testcases = settings.getValue('K8SPERFORMANCE_TESTS')
857 testcases = settings.getValue('PERFORMANCE_TESTS')
859 if args['exact_test_name']:
860 exact_names = args['exact_test_name']
861 # positional args => exact matches only
863 for test_name in exact_names:
864 for test in testcases:
865 if test['Name'] == test_name:
866 selected_tests.append(test)
868 # --tests => apply filter to select requested tests
869 selected_tests = apply_filter(testcases, args['tests'])
871 # Default - run all tests
872 selected_tests = testcases
874 if not selected_tests:
875 _LOGGER.error("No tests matched --tests option or positional args. Done.")
# Run each selected test; settings are snapshotted so per-test parameter
# overrides do not leak into the next test (unless CUMULATIVE_PARAMS).
879 suite = unittest.TestSuite()
880 settings_snapshot = copy.deepcopy(settings.__dict__)
882 for i, cfg in enumerate(selected_tests):
883 settings.setValue('_TEST_INDEX', i)
884 test_name = cfg.get('Name', '<Name not set>')
886 test_params = settings.getValue('_PARAMS_LIST')
887 if isinstance(test_params, list):
# Fewer override sets than tests => reuse the last set.
889 if i >= len(test_params):
890 list_index = len(test_params) - 1
891 test_params = test_params[list_index]
892 if settings.getValue('CUMULATIVE_PARAMS'):
893 test_params = merge_spec(settings.getValue('TEST_PARAMS'), test_params)
894 settings.setValue('TEST_PARAMS', test_params)
896 if args['integration']:
897 test = IntegrationTestCase(cfg)
899 test = K8sPerformanceTestCase(cfg)
901 test = PerformanceTestCase(cfg)
# Record pass/fail via MockTestCase so xmlrunner can emit xUnit output
# without re-running the tests.
904 suite.addTest(MockTestCase('', True, test.name))
906 # pylint: disable=broad-except
907 except (Exception) as ex:
908 _LOGGER.exception("Failed to run test: %s", test_name)
909 suite.addTest(MockTestCase(str(ex), False, test_name))
910 _LOGGER.info("Continuing with next test...")
912 if not settings.getValue('CUMULATIVE_PARAMS'):
913 settings.restore_from_dict(settings_snapshot)
915 settings.restore_from_dict(settings_snapshot)
918 # Generate and printout Performance Matrix
920 generate_performance_matrix(selected_tests, results_path)
922 # generate final rst report with results of all executed TCs
923 generate_final_report()
# Optional xUnit output for Jenkins.
927 if settings.getValue('XUNIT'):
928 xmlrunner.XMLTestRunner(
929 output=settings.getValue('XUNIT_DIR'), outsuffix="",
930 verbosity=0).run(suite)
# Optional upload of results to the OPNFV dashboard.
932 if args['opnfvpod'] or settings.getValue('OPNFVPOD'):
933 pod_name = (args['opnfvpod'] if args['opnfvpod'] else
934 settings.getValue('OPNFVPOD'))
935 installer_name = str(settings.getValue('OPNFV_INSTALLER')).lower()
936 opnfv_url = settings.getValue('OPNFV_URL')
937 pkg_list = settings.getValue('PACKAGE_LIST')
939 int_data = {'pod': pod_name,
940 'build_tag': get_build_tag(),
941 'installer': installer_name,
942 'pkg_list': pkg_list,
944 # pass vswitch name from configuration to be used for failed
945 # TCs; In case of successful TCs it is safer to use vswitch
946 # name from CSV as TC can override global configuration
947 'vswitch': str(settings.getValue('VSWITCH')).lower()}
948 tc_names = [tc['Name'] for tc in selected_tests]
949 opnfvdashboard.results2opnfv_dashboard(tc_names, results_path, int_data)
951 # cleanup before exit
# Script entry point.
954 if __name__ == "__main__":