X-Git-Url: https://gerrit.opnfv.org/gerrit/gitweb?a=blobdiff_plain;f=vsperf;h=003ca88bf4218d1a74a129d15ff50be6311b4512;hb=c5a075eafc322d9ae62322e90ab4e232df176743;hp=d205ad1f5a530c02ccd67ed0acafd0201024057e;hpb=a9655607f7618fd392cf719e8d9ea2dbf6c8dd1c;p=vswitchperf.git

diff --git a/vsperf b/vsperf
index d205ad1f..003ca88b 100755
--- a/vsperf
+++ b/vsperf
@@ -116,7 +116,6 @@ def parse_arguments():
             e.g. --test-params "['x=z; y=(a,b)','x=z']"
         """
         def __call__(self, parser, namespace, values, option_string=None):
-
             if values[0] == '[':
                 input_list = ast.literal_eval(values)
                 parameter_list = []
@@ -236,13 +235,31 @@ def parse_arguments():
 def configure_logging(level):
     """Configure logging.
     """
+    name, ext = os.path.splitext(settings.getValue('LOG_FILE_DEFAULT'))
+    rename_default = "{name}_{uid}{ex}".format(name=name,
+                                               uid=settings.getValue(
+                                                   'LOG_TIMESTAMP'),
+                                               ex=ext)
     log_file_default = os.path.join(
-        settings.getValue('LOG_DIR'), settings.getValue('LOG_FILE_DEFAULT'))
+        settings.getValue('RESULTS_PATH'), rename_default)
+    name, ext = os.path.splitext(settings.getValue('LOG_FILE_HOST_CMDS'))
+    rename_hostcmd = "{name}_{uid}{ex}".format(name=name,
+                                               uid=settings.getValue(
+                                                   'LOG_TIMESTAMP'),
+                                               ex=ext)
     log_file_host_cmds = os.path.join(
-        settings.getValue('LOG_DIR'), settings.getValue('LOG_FILE_HOST_CMDS'))
+        settings.getValue('RESULTS_PATH'), rename_hostcmd)
+    name, ext = os.path.splitext(settings.getValue('LOG_FILE_TRAFFIC_GEN'))
+    rename_traffic = "{name}_{uid}{ex}".format(name=name,
+                                               uid=settings.getValue(
+                                                   'LOG_TIMESTAMP'),
+                                               ex=ext)
     log_file_traffic_gen = os.path.join(
-        settings.getValue('LOG_DIR'),
-        settings.getValue('LOG_FILE_TRAFFIC_GEN'))
+        settings.getValue('RESULTS_PATH'), rename_traffic)
+    metrics_file = (settings.getValue('LOG_FILE_INFRA_METRICS_PFX') +
+                    settings.getValue('LOG_TIMESTAMP') + '.log')
+    log_file_infra_metrics = os.path.join(settings.getValue('LOG_DIR'),
+                                          metrics_file)
 
     _LOGGER.setLevel(logging.DEBUG)
 
@@ -254,6 +271,8 @@ def configure_logging(level):
 
     file_logger = logging.FileHandler(filename=log_file_default)
     file_logger.setLevel(logging.DEBUG)
+    file_logger.setFormatter(logging.Formatter(
+        '%(asctime)s : %(message)s'))
     _LOGGER.addHandler(file_logger)
 
     class CommandFilter(logging.Filter):
@@ -266,6 +285,11 @@ def configure_logging(level):
         def filter(self, record):
             return record.getMessage().startswith(trafficgen.CMD_PREFIX)
 
+    class CollectdMetricsFilter(logging.Filter):
+        """Filter out strings beginning with 'COLLECTD' :'"""
+        def filter(self, record):
+            return record.getMessage().startswith('COLLECTD')
+
     cmd_logger = logging.FileHandler(filename=log_file_host_cmds)
     cmd_logger.setLevel(logging.DEBUG)
     cmd_logger.addFilter(CommandFilter())
@@ -276,6 +300,12 @@ def configure_logging(level):
     gen_logger.addFilter(TrafficGenCommandFilter())
     _LOGGER.addHandler(gen_logger)
 
+    if settings.getValue('COLLECTOR') == 'Collectd':
+        met_logger = logging.FileHandler(filename=log_file_infra_metrics)
+        met_logger.setLevel(logging.DEBUG)
+        met_logger.addFilter(CollectdMetricsFilter())
+        _LOGGER.addHandler(met_logger)
+
 
 def apply_filter(tests, tc_filter):
     """Allow a subset of tests to be conveniently selected
@@ -327,7 +357,7 @@ def get_vswitch_names(rst_files):
     """ Function will return a list of vSwitches detected in given ``rst_files``.
     """
     vswitch_names = set()
-    if len(rst_files):
+    if rst_files:
         try:
             output = subprocess.check_output(['grep', '-h', '^* vSwitch'] + rst_files).decode().splitlines()
             for line in output:
@@ -335,7 +365,7 @@ def get_vswitch_names(rst_files):
                 if match:
                     vswitch_names.add(match.group(1))
 
-            if len(vswitch_names):
+            if vswitch_names:
                 return list(vswitch_names)
 
         except subprocess.CalledProcessError:
@@ -366,7 +396,7 @@ def generate_final_report():
     # check if there are any results in rst format
     rst_results = glob.glob(os.path.join(path, 'result*rst'))
     pkt_processors = get_vswitch_names(rst_results)
-    if len(rst_results):
+    if rst_results:
         try:
             test_report = os.path.join(path, '{}_{}'.format('_'.join(pkt_processors), _TEMPLATE_RST['final']))
             # create report caption directly - it is not worth to execute jinja machinery
@@ -474,7 +504,7 @@ def enable_sriov(nic_list):
                 sriov_nic.update({tmp_nic[0] : int(tmp_nic[1][2:])})
 
     # sriov is required for some NICs
-    if len(sriov_nic):
+    if sriov_nic:
         for nic in sriov_nic:
             # check if SRIOV is supported and enough virt interfaces are available
             if not networkcard.is_sriov_supported(nic) \
@@ -590,7 +620,7 @@ def vsperf_finalize():
         if os.path.exists(results_path):
             files_list = os.listdir(results_path)
             if files_list == []:
-                _LOGGER.info("Removing empty result directory: " + results_path)
+                _LOGGER.info("Removing empty result directory: %s", results_path)
                 shutil.rmtree(results_path)
     except AttributeError:
         # skip it if parameter doesn't exist
@@ -642,6 +672,11 @@ def main():
 
     settings.load_from_dir(os.path.join(_CURR_DIR, 'conf'))
 
+    # Define the timestamp to be used by logs and results
+    date = datetime.datetime.fromtimestamp(time.time())
+    timestamp = date.strftime('%Y-%m-%d_%H-%M-%S')
+    settings.setValue('LOG_TIMESTAMP', timestamp)
+
     # Load non performance/integration tests
     if args['integration']:
         settings.load_from_dir(os.path.join(_CURR_DIR, 'conf/integration'))
@@ -669,8 +704,20 @@ def main():
     # if required, handle list-* operations
     handle_list_options(args)
 
+    # generate results directory name
+    results_dir = "results_" + timestamp
+    results_path = os.path.join(settings.getValue('LOG_DIR'), results_dir)
+    settings.setValue('RESULTS_PATH', results_path)
+
+    # create results directory
+    if not os.path.exists(results_path):
+        os.makedirs(results_path)
+
     configure_logging(settings.getValue('VERBOSITY'))
 
+    # CI build support
+    _LOGGER.info("Creating result directory: %s", results_path)
+
     # check and fix locale
     check_and_set_locale()
 
@@ -750,16 +797,7 @@ def main():
     # for backward compatibility
     settings.setValue('WHITELIST_NICS', list(nic['pci'] for nic in nic_list))
 
-    # generate results directory name
-    date = datetime.datetime.fromtimestamp(time.time())
-    results_dir = "results_" + date.strftime('%Y-%m-%d_%H-%M-%S')
-    results_path = os.path.join(settings.getValue('LOG_DIR'), results_dir)
-    settings.setValue('RESULTS_PATH', results_path)
-    # create results directory
-    if not os.path.exists(results_path):
-        _LOGGER.info("Creating result directory: " + results_path)
-        os.makedirs(results_path)
 
     # pylint: disable=too-many-nested-blocks
     if settings.getValue('mode') == 'trafficgen':
         # execute only traffic generator
@@ -802,15 +840,11 @@ def main():
         # Default - run all tests
         selected_tests = testcases
 
-    if not len(selected_tests):
+    if not selected_tests:
        _LOGGER.error("No tests matched --tests option or positional args. "
                      "Done.")
        vsperf_finalize()
        sys.exit(1)
 
-    # run tests
-    # Add pylint exception: Redefinition of test type from
-    # testcases.integration.IntegrationTestCase to testcases.performance.PerformanceTestCase
-    # pylint: disable=redefined-variable-type
     suite = unittest.TestSuite()
     settings_snapshot = copy.deepcopy(settings.__dict__)