3 # Copyright 2015-2017 Intel Corporation.
5 # Licensed under the Apache License, Version 2.0 (the "License");
6 # you may not use this file except in compliance with the License.
7 # You may obtain a copy of the License at
9 # http://www.apache.org/licenses/LICENSE-2.0
11 # Unless required by applicable law or agreed to in writing, software
12 # distributed under the License is distributed on an "AS IS" BASIS,
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 # See the License for the specific language governing permissions and
15 # limitations under the License.
17 """VSPERF main script.
36 from tabulate import tabulate
37 from conf import merge_spec
38 from conf import settings
39 import core.component_factory as component_factory
40 from core.loader import Loader
41 from testcases import PerformanceTestCase
42 from testcases import IntegrationTestCase
43 from testcases import K8sPerformanceTestCase
44 from tools import tasks
45 from tools import networkcard
46 from tools import functions
47 from tools.pkt_gen import trafficgen
48 from tools.opnfvdashboard import opnfvdashboard
49 from tools.os_deploy_tgen import osdt
50 sys.dont_write_bytecode = True
53 'debug': logging.DEBUG,
55 'warning': logging.WARNING,
56 'error': logging.ERROR,
57 'critical': logging.CRITICAL
60 _CURR_DIR = os.path.dirname(os.path.realpath(__file__))
62 _TEMPLATE_RST = {'head' : os.path.join(_CURR_DIR, 'tools/report/report_head.rst'),
63 'foot' : os.path.join(_CURR_DIR, 'tools/report/report_foot.rst'),
64 'final' : 'test_report.rst',
65 'tmp' : os.path.join(_CURR_DIR, 'tools/report/report_tmp_caption.rst')
68 _TEMPLATE_MATRIX = "Performance Matrix\n------------------\n\n"\
69 "The following performance matrix was generated with the results of all the\n"\
70 "currently run tests. The metric used for comparison is {}.\n\n{}\n\n"
72 _LOGGER = logging.getLogger()
def parse_param_string(values):
    """
    Parse and split a single '--test-params' argument.

    This expects either 'x=y', 'x=y,z' or 'x' (implicit true)
    values. For multiple overrides use a ; separated list for
    e.g. --test-params 'x=z; y=(a,b)'

    :param values: raw '--test-params' string from the CLI
    :return: dict mapping parameter name to its parsed value
        (``True`` for bare names without '=')
    """
    results = {}
    if not values:
        return results

    for param, _, value in re.findall('([^;=]+)(=([^;]+))?', values):
        param = param.strip()
        value = value.strip()
        if not param:
            continue
        if not value:
            # bare 'x' means an implicit boolean True
            results[param] = True
            continue
        # values are passed inside a string from the CLI, so we must
        # retype them accordingly
        try:
            results[param] = ast.literal_eval(value)
        except (ValueError, SyntaxError):
            # for backward compatibility, we have to accept strings without
            # quotes; BUGFIX: literal_eval raises SyntaxError (not only
            # ValueError) for some unquoted strings, e.g. '1abc', so both
            # must fall back to plain str
            _LOGGER.warning("Adding missing quotes around string value: %s = %s",
                            param, str(value))
            results[param] = str(value)
    return results
# Parse command line arguments into a plain dict.
# NOTE(review): this block is a partially elided numbered listing; code lines
# are kept byte-identical (including the stray leading line numbers) and only
# comments were added.  Elided lines include docstring delimiters, list/dict
# initialisers, the try/except in _SplitTestParamsAction, the --verbosity
# help text and the final 'return args'.
105 def parse_arguments():
107 Parse command line arguments.
# Custom argparse action: parses '--test-params' into a dict stored on the
# namespace under the '_PARAMS_LIST' key - either one override dict, or a
# list of override dicts when a bracketed list of strings is supplied.
109 class _SplitTestParamsAction(argparse.Action):
111 Parse and split '--test-params' arguments.
113 This expects either a single list of ; separated overrides
114 as 'x=y', 'x=y,z' or 'x' (implicit true) values.
115 e.g. --test-params 'x=z; y=(a,b)'
116 Or a list of these ; separated lists with overrides for
118 e.g. --test-params "['x=z; y=(a,b)','x=z']"
120 def __call__(self, parser, namespace, values, option_string=None):
# literal_eval distinguishes the "list of lists" form from a plain string;
# NOTE(review): the surrounding try/except is elided from this listing.
122 input_list = ast.literal_eval(values)
124 for test_params in input_list:
125 parameter_list.append(parse_param_string(test_params))
# fallback branch: a single ';'-separated override string
127 parameter_list = parse_param_string(values)
128 results = {'_PARAMS_LIST':parameter_list}
129 setattr(namespace, self.dest, results)
# Custom argparse action: reject paths that are not readable files.
131 class _ValidateFileAction(argparse.Action):
132 """Validate a file can be read from before using it.
134 def __call__(self, parser, namespace, values, option_string=None):
135 if not os.path.isfile(values):
136 raise argparse.ArgumentTypeError(
137 'the path \'%s\' is not a valid path' % values)
138 elif not os.access(values, os.R_OK):
139 raise argparse.ArgumentTypeError(
140 'the path \'%s\' is not accessible' % values)
142 setattr(namespace, self.dest, values)
# Custom argparse action: reject paths that are not writable directories.
144 class _ValidateDirAction(argparse.Action):
145 """Validate a directory can be written to before using it.
147 def __call__(self, parser, namespace, values, option_string=None):
148 if not os.path.isdir(values):
149 raise argparse.ArgumentTypeError(
150 'the path \'%s\' is not a valid path' % values)
151 elif not os.access(values, os.W_OK):
152 raise argparse.ArgumentTypeError(
153 'the path \'%s\' is not accessible' % values)
155 setattr(namespace, self.dest, values)
# Helper: verbosity names sorted by ascending numeric logging level; used
# as the 'choices' list of the --verbosity option below.
157 def list_logging_levels():
158 """Give a summary of all available logging levels.
160 :return: List of verbosity level names in decreasing order of
163 return sorted(VERBOSITY_LEVELS.keys(),
164 key=lambda x: VERBOSITY_LEVELS[x])
# Build the parser: generic options, positional exact test names, then the
# 'test selection' and 'test behavior' option groups.
166 parser = argparse.ArgumentParser(prog=__file__, formatter_class=
167 argparse.ArgumentDefaultsHelpFormatter)
168 parser.add_argument('--version', action='version', version='%(prog)s 0.2')
169 parser.add_argument('--list', '--list-tests', action='store_true',
170 help='list all tests and exit')
171 parser.add_argument('--list-trafficgens', action='store_true',
172 help='list all traffic generators and exit')
173 parser.add_argument('--list-collectors', action='store_true',
174 help='list all system metrics loggers and exit')
175 parser.add_argument('--list-vswitches', action='store_true',
176 help='list all system vswitches and exit')
177 parser.add_argument('--list-fwdapps', action='store_true',
178 help='list all system forwarding applications and exit')
179 parser.add_argument('--list-vnfs', action='store_true',
180 help='list all system vnfs and exit')
181 parser.add_argument('--list-loadgens', action='store_true',
182 help='list all background load generators')
183 parser.add_argument('--list-pods', action='store_true',
184 help='list all system pods')
185 parser.add_argument('--list-settings', action='store_true',
186 help='list effective settings configuration and exit')
187 parser.add_argument('exact_test_name', nargs='*', help='Exact names of\
188 tests to run. E.g "vsperf phy2phy_tput phy2phy_cont"\
189 runs only the two tests with those exact names.\
190 To run all tests omit both positional args and --tests arg.')
192 group = parser.add_argument_group('test selection options')
193 group.add_argument('-m', '--mode', help='vsperf mode of operation;\
194 Values: "normal" - execute vSwitch, VNF and traffic generator;\
195 "trafficgen" - execute only traffic generator; "trafficgen-off" \
196 - execute vSwitch and VNF; trafficgen-pause - execute vSwitch \
197 and VNF but pause before traffic transmission ', default='normal')
199 group.add_argument('-f', '--test-spec', help='test specification file')
200 group.add_argument('-d', '--test-dir', help='directory containing tests')
201 group.add_argument('-t', '--tests', help='Comma-separated list of terms \
202 indicating tests to run. e.g. "RFC2544,!p2p" - run all tests whose\
203 name contains RFC2544 less those containing "p2p"; "!back2back" - \
204 run all tests except those containing back2back')
# NOTE(review): the help/default continuation line of --verbosity is elided.
205 group.add_argument('--verbosity', choices=list_logging_levels(),
207 group.add_argument('--integration', action='store_true', help='execute integration tests')
208 group.add_argument('--k8s', action='store_true', help='execute Kubernetes tests')
209 group.add_argument('--openstack', action='store_true', help='Run VSPERF with openstack')
210 group.add_argument('--trafficgen', help='traffic generator to use')
211 group.add_argument('--vswitch', help='vswitch implementation to use')
212 group.add_argument('--fwdapp', help='packet forwarding application to use')
213 group.add_argument('--vnf', help='vnf to use')
214 group.add_argument('--loadgen', help='loadgen to use')
215 group.add_argument('--sysmetrics', help='system metrics logger to use')
216 group = parser.add_argument_group('test behavior options')
217 group.add_argument('--xunit', action='store_true',
218 help='enable xUnit-formatted output')
219 group.add_argument('--xunit-dir', action=_ValidateDirAction,
220 help='output directory of xUnit-formatted output')
221 group.add_argument('--load-env', action='store_true',
222 help='enable loading of settings from the environment')
223 group.add_argument('--conf-file', action=_ValidateFileAction,
224 help='settings file')
225 group.add_argument('--test-params', action=_SplitTestParamsAction,
226 help='csv list of test parameters: key=val; e.g. '
227 'TRAFFICGEN_PKT_SIZES=(64,128);TRAFFICGEN_DURATION=30; '
228 'GUEST_LOOPBACK=["l2fwd"] ...'
229 ' or a list of csv lists of test parameters: key=val; e.g. '
230 '[\'TRAFFICGEN_DURATION=10;TRAFFICGEN_PKT_SIZES=(128,)\','
231 '\'TRAFFICGEN_DURATION=10;TRAFFICGEN_PKT_SIZES=(64,)\']')
232 group.add_argument('--opnfvpod', help='name of POD in opnfv')
233 group.add_argument('--matrix', help='enable performance matrix analysis',
234 action='store_true', default=False)
# Callers consume the arguments as a plain dict (vars());
# NOTE(review): the 'return args' line is elided from this listing.
236 args = vars(parser.parse_args())
# Configure logging: one stdout handler at the requested verbosity plus
# per-purpose file handlers (default log, host commands, traffic-generator
# commands and - when COLLECTOR is 'Collectd' - infra metrics), all attached
# to the root _LOGGER, which itself stays at DEBUG so file handlers see
# everything.
# NOTE(review): partially elided listing; code lines kept byte-identical,
# only comments added.  The '.format(' continuation arguments (uid=..., the
# closing 'ex=ext') are elided - presumably uid is LOG_TIMESTAMP; confirm
# against upstream.
241 def configure_logging(level):
242 """Configure logging.
# Derive per-run file names by inserting a uid between the configured log
# file's stem and extension, then root them under RESULTS_PATH.
244 name, ext = os.path.splitext(settings.getValue('LOG_FILE_DEFAULT'))
245 rename_default = "{name}_{uid}{ex}".format(name=name,
246 uid=settings.getValue(
249 log_file_default = os.path.join(
250 settings.getValue('RESULTS_PATH'), rename_default)
251 name, ext = os.path.splitext(settings.getValue('LOG_FILE_HOST_CMDS'))
252 rename_hostcmd = "{name}_{uid}{ex}".format(name=name,
253 uid=settings.getValue(
256 log_file_host_cmds = os.path.join(
257 settings.getValue('RESULTS_PATH'), rename_hostcmd)
258 name, ext = os.path.splitext(settings.getValue('LOG_FILE_TRAFFIC_GEN'))
259 rename_traffic = "{name}_{uid}{ex}".format(name=name,
260 uid=settings.getValue(
263 log_file_traffic_gen = os.path.join(
264 settings.getValue('RESULTS_PATH'), rename_traffic)
# Metrics log lives under LOG_DIR (not RESULTS_PATH) with a timestamp suffix.
265 metrics_file = (settings.getValue('LOG_FILE_INFRA_METRICS_PFX') +
266 settings.getValue('LOG_TIMESTAMP') + '.log')
267 log_file_infra_metrics = os.path.join(settings.getValue('LOG_DIR'),
# Root logger at DEBUG; individual handlers narrow the level as needed.
270 _LOGGER.setLevel(logging.DEBUG)
272 stream_logger = logging.StreamHandler(sys.stdout)
273 stream_logger.setLevel(VERBOSITY_LEVELS[level])
274 stream_logger.setFormatter(logging.Formatter(
275 '[%(levelname)-5s] %(asctime)s : (%(name)s) - %(message)s'))
276 _LOGGER.addHandler(stream_logger)
278 file_logger = logging.FileHandler(filename=log_file_default)
279 file_logger.setLevel(logging.DEBUG)
280 file_logger.setFormatter(logging.Formatter(
281 '%(asctime)s : %(message)s'))
282 _LOGGER.addHandler(file_logger)
# Filters route only the matching message prefixes into the dedicated files.
284 class CommandFilter(logging.Filter):
285 """Filter out strings beginning with 'cmd :'"""
286 def filter(self, record):
287 return record.getMessage().startswith(tasks.CMD_PREFIX)
289 class TrafficGenCommandFilter(logging.Filter):
290 """Filter out strings beginning with 'gencmd :'"""
291 def filter(self, record):
292 return record.getMessage().startswith(trafficgen.CMD_PREFIX)
294 class CollectdMetricsFilter(logging.Filter):
295 """Filter out strings beginning with 'COLLECTD' :'"""
296 def filter(self, record):
297 return record.getMessage().startswith('COLLECTD')
299 cmd_logger = logging.FileHandler(filename=log_file_host_cmds)
300 cmd_logger.setLevel(logging.DEBUG)
301 cmd_logger.addFilter(CommandFilter())
302 _LOGGER.addHandler(cmd_logger)
304 gen_logger = logging.FileHandler(filename=log_file_traffic_gen)
305 gen_logger.setLevel(logging.DEBUG)
306 gen_logger.addFilter(TrafficGenCommandFilter())
307 _LOGGER.addHandler(gen_logger)
# Metrics handler is only attached when the collectd collector is selected.
309 if settings.getValue('COLLECTOR') == 'Collectd':
310 met_logger = logging.FileHandler(filename=log_file_infra_metrics)
311 met_logger.setLevel(logging.DEBUG)
312 met_logger.addFilter(CollectdMetricsFilter())
313 _LOGGER.addHandler(met_logger)
def apply_filter(tests, tc_filter):
    """Allow a subset of tests to be conveniently selected

    :param tests: The list of Tests from which to select.
    :param tc_filter: A case-insensitive string of comma-separated terms
        indicating the Tests to select.
        e.g. 'RFC' - select all tests whose name contains 'RFC'
        e.g. 'RFC,burst' - select all tests whose name contains 'RFC' or
            'burst'
        e.g. 'RFC,burst,!p2p' - select all tests whose name contains 'RFC'
            or 'burst' and from these remove any containing 'p2p'.
        e.g. '' - empty string selects all tests.
    :return: A list of the selected Tests.
    """
    # BUGFIX: the None check must run before any string operation on the
    # filter (the original called tc_filter.strip() first, raising TypeError
    # for None), and indexing [0] raised IndexError for the documented
    # empty-string input; startswith() is safe for both.
    if tc_filter is None:
        return tests

    # if the first term is negative we have to start with the full list of
    # tests, otherwise start empty and accumulate matches
    if tc_filter.strip().startswith('!'):
        result = list(tests)
    else:
        result = []

    for term in [x.strip() for x in tc_filter.lower().split(",")]:
        if not term or term[0] != '!':
            # Add matching tests from 'tests' into results
            # (an empty term matches every test, so '' selects all)
            result.extend([test for test in tests
                           if test['Name'].lower().find(term) >= 0])
        else:
            # Term begins with '!' so we remove matching tests
            result = [test for test in result
                      if test['Name'].lower().find(term[1:]) < 0]

    return result
def check_and_set_locale():
    """ Function will check locale settings. In case, that it isn't configured
    properly, then default values specified by DEFAULT_LOCALE will be used.
    """
    system_locale = locale.getdefaultlocale()
    # getdefaultlocale() returns a (lang, encoding) tuple; a None member
    # means the locale is mis-configured, so force the configured default
    if None in system_locale:
        os.environ['LC_ALL'] = settings.getValue('DEFAULT_LOCALE')
        _LOGGER.warning("Locale was not properly configured. Default values were set. Old locale: %s, New locale: %s",
                        system_locale, locale.getdefaultlocale())
def get_vswitch_names(rst_files):
    """ Function will return a list of vSwitches detected in given ``rst_files``.

    :param rst_files: list of paths to partial result files in rst format
    :return: list of unique vSwitch names found in the files; falls back to
        ['vSwitch'] when nothing can be detected
    """
    vswitch_names = set()
    if rst_files:
        try:
            # grep '-h' suppresses file names, so each line is a raw
            # '* vSwitch: <name>, ...' entry from the rst results
            output = subprocess.check_output(
                ['grep', '-h', '^* vSwitch'] + rst_files).decode().splitlines()
            for line in output:
                match = re.search(r'^\* vSwitch: ([^,]+)', str(line))
                if match:
                    vswitch_names.add(match.group(1))

            if vswitch_names:
                return list(vswitch_names)
        except subprocess.CalledProcessError:
            _LOGGER.warning('Cannot detect vSwitches used during testing.')

    # fallback to the default value
    return ['vSwitch']
def get_build_tag():
    """ Function will return a Jenkins job ID environment variable.

    :return: value of $BUILD_TAG, or 'none' when not running under Jenkins.
        NOTE(review): the 'def' line and fallback value were elided from the
        mangled listing and were reconstructed - confirm against upstream.
    """
    try:
        build_tag = os.environ['BUILD_TAG']
    except KeyError:
        _LOGGER.warning('Cannot detect Jenkins job ID')
        build_tag = 'none'

    return build_tag
# Build the overall rst test report by concatenating: generated caption,
# report head, every partial 'result*rst' file, report foot.
# NOTE(review): partially elided listing; code lines kept byte-identical,
# only comments added (the 'if pkt_processors:' guard, 'try:' and 'else:'
# lines are among those elided).
396 def generate_final_report():
397 """ Function will check if partial test results are available
398 and generates final report in rst format.
401 path = settings.getValue('RESULTS_PATH')
402 # check if there are any results in rst format
403 rst_results = glob.glob(os.path.join(path, 'result*rst'))
# report file name is prefixed with the vSwitch name(s) used during testing
404 pkt_processors = get_vswitch_names(rst_results)
407 test_report = os.path.join(path, '{}_{}'.format('_'.join(pkt_processors), _TEMPLATE_RST['final']))
408 # create report caption directly - it is not worth to execute jinja machinery
409 report_caption = '{}\n{} {}\n{}\n\n'.format(
410 '============================================================',
411 'Performance report for',
412 ', '.join(pkt_processors),
413 '============================================================')
415 with open(_TEMPLATE_RST['tmp'], 'w') as file_:
416 file_.write(report_caption)
# concatenate caption + head + partial results + foot via the shell;
# retval is the shell's exit status (0 == success)
418 retval = subprocess.call('cat {} {} {} {} > {}'.format(_TEMPLATE_RST['tmp'], _TEMPLATE_RST['head'],
419 ' '.join(rst_results), _TEMPLATE_RST['foot'],
420 test_report), shell=True)
421 if retval == 0 and os.path.isfile(test_report):
422 _LOGGER.info('Overall test report written to "%s"', test_report)
424 _LOGGER.error('Generation of overall test report has failed.')
426 # remove temporary file
427 os.remove(_TEMPLATE_RST['tmp'])
429 except subprocess.CalledProcessError:
# NOTE(review): 'Generatrion' is a typo in this log string; left unchanged
# here because doc-only edits must not alter runtime strings.
430 _LOGGER.error('Generatrion of overall test report has failed.')
# NOTE(review): partially elided listing; code lines kept byte-identical,
# only comments added.  Elided lines include the 'test_list = []' /
# 'change = {}' / 'output = []' initialisers, the 'try:' lines and several
# 'else:' branches.
433 def generate_performance_matrix(selected_tests, results_path):
435 Loads the results of all the currently run tests, compares them
436 based on the MATRIX_METRIC, outputs and saves the generated table.
437 :selected_tests: list of currently run test
438 :results_path: directory path to the results of current tests
440 _LOGGER.info('Performance Matrix:')
# collect name/deployment per test; csv_data stays False until the
# matching result CSV is successfully read below
443 for test in selected_tests:
444 test_name = test.get('Name', '<Name not set>')
445 test_deployment = test.get('Deployment', '<Deployment not set>')
446 test_list.append({'test_name':test_name, 'test_deployment':test_deployment, 'csv_data':False})
# re-derive the per-test parameter overrides the same way main() applied
# them, so the matrix can display the parameters next to each result
450 all_params = settings.getValue('_PARAMS_LIST')
451 for i in range(len(selected_tests)):
453 if isinstance(all_params, list):
# tests beyond the overrides list reuse the last override entry
455 if i >= len(all_params):
456 list_index = len(all_params) - 1
457 if settings.getValue('CUMULATIVE_PARAMS') and (i > 0):
458 test_params.update(all_params[list_index])
460 test_params = all_params[list_index]
462 test_params = all_params
463 settings.setValue('TEST_PARAMS', test_params)
464 test['test_params'] = copy.deepcopy(test_params)
# read the first data row of each per-test result CSV
466 with open("{}/result_{}_{}_{}.csv".format(results_path, str(i),
467 test['test_name'], test['test_deployment'])) as csvfile:
468 reader = list(csv.DictReader(csvfile))
469 test['csv_data'] = reader[0]
470 # pylint: disable=broad-except
471 except (Exception) as ex:
472 _LOGGER.error("Result file not found: %s", ex)
473 metric = settings.getValue('MATRIX_METRIC')
# (listing shows 474; kept as-is)
474 metric = settings.getValue('MATRIX_METRIC')
# NOTE(review): partially elided listing; code lines kept byte-identical,
# only comments added.  Elided lines include the 'sriov_nic = {}'
# initialiser, the outer 'for nic in nic_list:' loop header, the
# 'if len(sriov_nic):' guard, the trailing time.sleep() and the
# 'return True' / 'return False' statements.
496 def enable_sriov(nic_list):
497 """ Enable SRIOV for given enhanced PCI IDs
499 :param nic_list: A list of enhanced PCI IDs
501 # detect if sriov is required
# enhanced PCI ID format is '<pci>|vf<N>'; track the highest VF index
# requested per physical NIC
504 if networkcard.is_sriov_nic(nic):
505 tmp_nic = nic.split('|')
506 if tmp_nic[0] in sriov_nic:
507 if int(tmp_nic[1][2:]) > sriov_nic[tmp_nic[0]]:
508 sriov_nic[tmp_nic[0]] = int(tmp_nic[1][2:])
510 sriov_nic.update({tmp_nic[0] : int(tmp_nic[1][2:])})
512 # sriov is required for some NICs
514 for nic in sriov_nic:
515 # check if SRIOV is supported and enough virt interfaces are available
516 if not networkcard.is_sriov_supported(nic) \
517 or networkcard.get_sriov_numvfs(nic) <= sriov_nic[nic]:
518 # if not, enable and set appropriate number of VFs
519 if not networkcard.set_sriov_numvfs(nic, sriov_nic[nic] + 1):
520 raise RuntimeError('SRIOV cannot be enabled for NIC {}'.format(nic))
522 _LOGGER.debug("SRIOV enabled for NIC %s", nic)
524 # ensure that path to the bind tool is valid
525 functions.settings_update_paths()
527 # WORKAROUND: it has been observed with IXGBE(VF) driver,
528 # that NIC doesn't correclty dispatch traffic to VFs based
529 # on their MAC address. Unbind and bind to the same driver
531 networkcard.reinit_vfs(nic)
533 # After SRIOV is enabled it takes some time until network drivers
534 # properly initialize all cards.
535 # Wait also in case, that SRIOV was already configured as it can be
536 # configured automatically just before vsperf execution.
def disable_sriov(nic_list):
    """ Disable SRIOV for given PCI IDs

    :param nic_list: A list of enhanced PCI IDs
    :raise RuntimeError: when the number of VFs cannot be reset to 0
    """
    # NOTE(review): the loop header was elided in the mangled listing and
    # was reconstructed; the body below references the loop variable 'nic'.
    for nic in nic_list:
        if networkcard.is_sriov_nic(nic):
            # enhanced PCI ID format is '<pci>|vf<N>'; the physical
            # device address is the part before '|'
            if not networkcard.set_sriov_numvfs(nic.split('|')[0], 0):
                raise RuntimeError('SRIOV cannot be disabled for NIC {}'.format(nic))
            else:
                _LOGGER.debug("SRIOV disabled for NIC %s", nic.split('|')[0])
def handle_list_options(args):
    """ Process --list cli arguments if needed

    :param args: A dictionary with all CLI arguments

    Each satisfied --list-* option prints its listing and terminates the
    process with exit code 0.
    NOTE(review): the sys.exit(0) lines and the bodies of the last two
    branches were elided in the mangled listing and were reconstructed.
    """
    if args['list_trafficgens']:
        print(Loader().get_trafficgens_printable())
        sys.exit(0)

    if args['list_collectors']:
        print(Loader().get_collectors_printable())
        sys.exit(0)

    if args['list_vswitches']:
        print(Loader().get_vswitches_printable())
        sys.exit(0)

    if args['list_vnfs']:
        print(Loader().get_vnfs_printable())
        sys.exit(0)

    if args['list_fwdapps']:
        print(Loader().get_pktfwds_printable())
        sys.exit(0)

    if args['list_loadgens']:
        print(Loader().get_loadgens_printable())
        sys.exit(0)

    if args['list_pods']:
        print(Loader().get_pods_printable())
        sys.exit(0)

    if args['list_settings']:
        print(str(settings))
        sys.exit(0)

    if args['list']:
        list_testcases(args)
        sys.exit(0)
def list_testcases(args):
    """ Print list of testcases requested by --list CLI argument

    :param args: A dictionary with all CLI arguments
    """
    # pick the test set matching the selected mode
    # NOTE(review): the elif/else lines were elided in the mangled listing
    # and were reconstructed from the visible settings keys.
    if args['integration']:
        testcases = settings.getValue('INTEGRATION_TESTS')
    elif args['k8s']:
        testcases = settings.getValue('K8SPERFORMANCE_TESTS')
    else:
        testcases = settings.getValue('PERFORMANCE_TESTS')

    print("Available Tests:")
    print("================")

    for test in testcases:
        description = functions.format_description(test['Description'], 70)
        # short names share the first description line; long names push the
        # description onto the following line
        if len(test['Name']) < 40:
            print('* {:40} {}'.format('{}:'.format(test['Name']), description[0]))
        else:
            print('* {}'.format('{}:'.format(test['Name'])))
            print('  {:40} {}'.format('', description[0]))
        # remaining wrapped description lines are indented under the name
        for i in range(1, len(description)):
            print('  {:40} {}'.format('', description[i]))
def vsperf_finalize():
    """ Clean up before exit
    """
    # remove result directory if no result files were created in it
    try:
        results_path = settings.getValue('RESULTS_PATH')
        if os.path.exists(results_path):
            files_list = os.listdir(results_path)
            if not files_list:
                _LOGGER.info("Removing empty result directory: %s", results_path)
                shutil.rmtree(results_path)
    except AttributeError:
        # skip it if parameter doesn't exist (early exit before
        # RESULTS_PATH was set)
        pass

    # disable SRIOV if needed
    try:
        if settings.getValue('SRIOV_ENABLED'):
            disable_sriov(settings.getValue('WHITELIST_NICS_ORIG'))
    except AttributeError:
        # skip it if parameter doesn't exist
        pass
class MockTestCase(unittest.TestCase):
    """Allow use of xmlrunner to generate Jenkins compatible output without
    using xmlrunner to actually run tests.

    Usage:
        suite = unittest.TestSuite()
        suite.addTest(MockTestCase('Test1 passed ', True, 'Test1'))
        suite.addTest(MockTestCase('Test2 failed because...', False, 'Test2'))
        xmlrunner.XMLTestRunner(...).run(suite)
    """

    def __init__(self, msg, is_pass, test_name):
        # assertion message reported when the recorded result was a failure
        self.msg = msg
        # recorded pass/fail outcome replayed by generic_test()
        self.is_pass = is_pass

        # dynamically create a test method with the right name
        # but point the method at our generic test method
        setattr(MockTestCase, test_name, self.generic_test)

        super(MockTestCase, self).__init__(test_name)

    def generic_test(self):
        """Provide a generic function that raises or not based
        on how self.is_pass was set in the constructor"""
        self.assertTrue(self.is_pass, self.msg)
# Body of the top-level main() entry point: load configuration, validate CLI
# selections, then either drive the traffic generator directly or build and
# run the selected test suite, publishing reports/dashboards afterwards.
# NOTE(review): the enclosing 'def main():' line and many interior lines
# (sys.exit calls, else branches, try headers, initialisers) are elided in
# this mangled listing; code lines are kept byte-identical, only comments
# were added.
677 # pylint: disable=too-many-locals, too-many-branches, too-many-statements
681 args = parse_arguments()
# configure settings from bundled defaults first
685 settings.load_from_dir(os.path.join(_CURR_DIR, 'conf'))
687 # define the timestamp to be used by logs and results
688 date = datetime.datetime.fromtimestamp(time.time())
689 timestamp = date.strftime('%Y-%m-%d_%H-%M-%S')
690 settings.setValue('LOG_TIMESTAMP', timestamp)
692 # generate results directory name
693 # integration test use vswitchd log in test step assertions, ensure that
694 # correct value will be set before loading integration test configuration
695 results_dir = "results_" + timestamp
696 results_path = os.path.join(settings.getValue('LOG_DIR'), results_dir)
697 settings.setValue('RESULTS_PATH', results_path)
698 # create results directory
699 if not os.path.exists(results_path):
700 os.makedirs(results_path)
702 # load non performance/integration tests
703 if args['integration']:
704 settings.load_from_dir(os.path.join(_CURR_DIR, 'conf/integration'))
706 settings.load_from_dir(os.path.join(_CURR_DIR, 'conf/kubernetes'))
708 # load command line parameters first in case there are settings files
710 settings.load_from_dict(args)
712 if args['conf_file']:
713 settings.load_from_file(args['conf_file'])
# environment overrides (guard line for args['load_env'] is elided)
716 settings.load_from_env()
718 # reload command line parameters since these should take higher priority
719 # than both a settings file and environment variables
720 settings.load_from_dict(args)
722 settings.setValue('mode', args['mode'])
# K8S flag mirrors the --k8s CLI switch
725 settings.setValue('K8S', True)
727 settings.setValue('K8S', False)
729 if args['openstack']:
730 result = osdt.deploy_testvnf()
732 _LOGGER.info('TestVNF successfully deployed on Openstack')
733 settings.setValue('mode', 'trafficgen')
# NOTE(review): 'Openstac' is a typo in this log string; unchanged here
# because doc-only edits must not alter runtime strings.
735 _LOGGER.error('Failed to deploy TestVNF in Openstac')
737 # update paths to trafficgens if required
738 if settings.getValue('mode') == 'trafficgen':
739 functions.settings_update_paths()
741 # if required, handle list-* operations
742 handle_list_options(args)
744 configure_logging(settings.getValue('VERBOSITY'))
747 _LOGGER.info("Creating result directory: %s", results_path)
749 # check and fix locale
750 check_and_set_locale()
752 # configure trafficgens
753 if args['trafficgen']:
754 trafficgens = Loader().get_trafficgens()
755 if args['trafficgen'] not in trafficgens:
756 _LOGGER.error('There are no trafficgens matching \'%s\' found in'
757 ' \'%s\'. Exiting...', args['trafficgen'],
758 settings.getValue('TRAFFICGEN_DIR'))
761 # configuration validity checks
# special value 'none' disables the vswitch entirely
763 vswitch_none = args['vswitch'].strip().lower() == 'none'
765 settings.setValue('VSWITCH', 'none')
767 vswitches = Loader().get_vswitches()
768 if args['vswitch'] not in vswitches:
769 _LOGGER.error('There are no vswitches matching \'%s\' found in'
770 ' \'%s\'. Exiting...', args['vswitch'],
771 settings.getValue('VSWITCH_DIR'))
775 settings.setValue('PKTFWD', args['fwdapp'])
776 fwdapps = Loader().get_pktfwds()
777 if args['fwdapp'] not in fwdapps:
778 _LOGGER.error('There are no forwarding application'
779 ' matching \'%s\' found in'
780 ' \'%s\'. Exiting...', args['fwdapp'],
781 settings.getValue('PKTFWD_DIR'))
785 vnfs = Loader().get_vnfs()
786 if args['vnf'] not in vnfs:
787 _LOGGER.error('there are no vnfs matching \'%s\' found in'
788 ' \'%s\'. exiting...', args['vnf'],
789 settings.getValue('VNF_DIR'))
793 loadgens = Loader().get_loadgens()
794 if args['loadgen'] not in loadgens:
795 _LOGGER.error('There are no loadgens matching \'%s\' found in'
796 ' \'%s\'. Exiting...', args['loadgen'],
797 settings.getValue('LOADGEN_DIR'))
# positional test names and --tests are mutually exclusive
800 if args['exact_test_name'] and args['tests']:
801 _LOGGER.error("Cannot specify tests with both positional args and --test.")
804 # modify NIC configuration to decode enhanced PCI IDs
805 wl_nics_orig = list(networkcard.check_pci(pci) for pci in settings.getValue('WHITELIST_NICS'))
806 settings.setValue('WHITELIST_NICS_ORIG', wl_nics_orig)
808 # sriov handling is performed on checked/expanded PCI IDs
809 settings.setValue('SRIOV_ENABLED', enable_sriov(wl_nics_orig))
# build the NICS detail list consumed by test cases
812 for nic in wl_nics_orig:
813 tmp_nic = networkcard.get_nic_info(nic)
815 nic_list.append({'pci' : tmp_nic,
816 'type' : 'vf' if networkcard.get_sriov_pf(tmp_nic) else 'pf',
817 'mac' : networkcard.get_mac(tmp_nic),
818 'driver' : networkcard.get_driver(tmp_nic),
819 'device' : networkcard.get_device_name(tmp_nic)})
822 raise RuntimeError("Invalid network card PCI ID: '{}'".format(nic))
824 settings.setValue('NICS', nic_list)
825 # for backward compatibility
826 settings.setValue('WHITELIST_NICS', list(nic['pci'] for nic in nic_list))
829 # pylint: disable=too-many-nested-blocks
830 if settings.getValue('mode') == 'trafficgen':
831 # execute only traffic generator
832 _LOGGER.debug("Executing traffic generator:")
834 # set traffic details, so they can be passed to traffic ctl
835 traffic = copy.deepcopy(settings.getValue('TRAFFIC'))
836 traffic = functions.check_traffic(traffic)
838 traffic_ctl = component_factory.create_traffic(
839 traffic['traffic_type'],
840 loader.get_trafficgen_class())
842 traffic_ctl.send_traffic(traffic)
843 _LOGGER.debug("Traffic Results:")
844 traffic_ctl.print_results()
846 # write results into CSV file
847 result_file = os.path.join(results_path, "result.csv")
848 PerformanceTestCase.write_result_to_file(traffic_ctl.get_results(), result_file)
# normal mode: pick the test set matching the selected mode
851 if args['integration']:
852 testcases = settings.getValue('INTEGRATION_TESTS')
854 testcases = settings.getValue('K8SPERFORMANCE_TESTS')
856 testcases = settings.getValue('PERFORMANCE_TESTS')
858 if args['exact_test_name']:
859 exact_names = args['exact_test_name']
860 # positional args => exact matches only
862 for test_name in exact_names:
863 for test in testcases:
864 if test['Name'] == test_name:
865 selected_tests.append(test)
867 # --tests => apply filter to select requested tests
868 selected_tests = apply_filter(testcases, args['tests'])
870 # Default - run all tests
871 selected_tests = testcases
873 if not selected_tests:
874 _LOGGER.error("No tests matched --tests option or positional args. Done.")
# run each selected test, recording pass/fail into a mock xUnit suite;
# settings are snapshotted so per-test overrides don't leak between tests
878 suite = unittest.TestSuite()
879 settings_snapshot = copy.deepcopy(settings.__dict__)
881 for i, cfg in enumerate(selected_tests):
882 settings.setValue('_TEST_INDEX', i)
883 test_name = cfg.get('Name', '<Name not set>')
885 test_params = settings.getValue('_PARAMS_LIST')
886 if isinstance(test_params, list):
# tests beyond the overrides list reuse the last override entry
888 if i >= len(test_params):
889 list_index = len(test_params) - 1
890 test_params = test_params[list_index]
891 if settings.getValue('CUMULATIVE_PARAMS'):
892 test_params = merge_spec(settings.getValue('TEST_PARAMS'), test_params)
893 settings.setValue('TEST_PARAMS', test_params)
895 if args['integration']:
896 test = IntegrationTestCase(cfg)
898 test = K8sPerformanceTestCase(cfg)
900 test = PerformanceTestCase(cfg)
903 suite.addTest(MockTestCase('', True, test.name))
905 # pylint: disable=broad-except
906 except (Exception) as ex:
907 _LOGGER.exception("Failed to run test: %s", test_name)
908 suite.addTest(MockTestCase(str(ex), False, test_name))
909 _LOGGER.info("Continuing with next test...")
911 if not settings.getValue('CUMULATIVE_PARAMS'):
912 settings.restore_from_dict(settings_snapshot)
914 settings.restore_from_dict(settings_snapshot)
917 # Generate and printout Performance Matrix
919 generate_performance_matrix(selected_tests, results_path)
921 # generate final rst report with results of all executed TCs
922 generate_final_report()
926 if settings.getValue('XUNIT'):
927 xmlrunner.XMLTestRunner(
928 output=settings.getValue('XUNIT_DIR'), outsuffix="",
929 verbosity=0).run(suite)
# optionally push results to the OPNFV dashboard
931 if args['opnfvpod'] or settings.getValue('OPNFVPOD'):
932 pod_name = (args['opnfvpod'] if args['opnfvpod'] else
933 settings.getValue('OPNFVPOD'))
934 installer_name = str(settings.getValue('OPNFV_INSTALLER')).lower()
935 opnfv_url = settings.getValue('OPNFV_URL')
936 pkg_list = settings.getValue('PACKAGE_LIST')
938 int_data = {'pod': pod_name,
939 'build_tag': get_build_tag(),
940 'installer': installer_name,
941 'pkg_list': pkg_list,
943 # pass vswitch name from configuration to be used for failed
944 # TCs; In case of successful TCs it is safer to use vswitch
945 # name from CSV as TC can override global configuration
946 'vswitch': str(settings.getValue('VSWITCH')).lower()}
947 tc_names = [tc['Name'] for tc in selected_tests]
948 opnfvdashboard.results2opnfv_dashboard(tc_names, results_path, int_data)
950 # cleanup before exit
# Script entry point; the guarded call was truncated in the mangled listing
# and is restored here.
if __name__ == "__main__":
    main()