Tools: Dockerfile to run VSPERF in a Container.
[vswitchperf.git] / vsperf
diff --git a/vsperf b/vsperf
index a141742..f4104bc 100755 (executable)
--- a/vsperf
+++ b/vsperf
@@ -235,13 +235,31 @@ def parse_arguments():
 def configure_logging(level):
     """Configure logging.
     """
+    name, ext = os.path.splitext(settings.getValue('LOG_FILE_DEFAULT'))
+    rename_default = "{name}_{uid}{ex}".format(name=name,
+                                               uid=settings.getValue(
+                                                   'LOG_TIMESTAMP'),
+                                               ex=ext)
     log_file_default = os.path.join(
-        settings.getValue('LOG_DIR'), settings.getValue('LOG_FILE_DEFAULT'))
+        settings.getValue('RESULTS_PATH'), rename_default)
+    name, ext = os.path.splitext(settings.getValue('LOG_FILE_HOST_CMDS'))
+    rename_hostcmd = "{name}_{uid}{ex}".format(name=name,
+                                               uid=settings.getValue(
+                                                   'LOG_TIMESTAMP'),
+                                               ex=ext)
     log_file_host_cmds = os.path.join(
-        settings.getValue('LOG_DIR'), settings.getValue('LOG_FILE_HOST_CMDS'))
+        settings.getValue('RESULTS_PATH'), rename_hostcmd)
+    name, ext = os.path.splitext(settings.getValue('LOG_FILE_TRAFFIC_GEN'))
+    rename_traffic = "{name}_{uid}{ex}".format(name=name,
+                                               uid=settings.getValue(
+                                                   'LOG_TIMESTAMP'),
+                                               ex=ext)
     log_file_traffic_gen = os.path.join(
-        settings.getValue('LOG_DIR'),
-        settings.getValue('LOG_FILE_TRAFFIC_GEN'))
+        settings.getValue('RESULTS_PATH'), rename_traffic)
+    metrics_file = (settings.getValue('LOG_FILE_INFRA_METRICS_PFX') +
+                    settings.getValue('LOG_TIMESTAMP') + '.log')
+    log_file_infra_metrics = os.path.join(settings.getValue('LOG_DIR'),
+                                          metrics_file)
 
     _LOGGER.setLevel(logging.DEBUG)
 
@@ -253,6 +271,8 @@ def configure_logging(level):
 
     file_logger = logging.FileHandler(filename=log_file_default)
     file_logger.setLevel(logging.DEBUG)
+    file_logger.setFormatter(logging.Formatter(
+        '%(asctime)s : %(message)s'))
     _LOGGER.addHandler(file_logger)
 
     class CommandFilter(logging.Filter):
@@ -265,6 +285,11 @@ def configure_logging(level):
         def filter(self, record):
             return record.getMessage().startswith(trafficgen.CMD_PREFIX)
 
+    class CollectdMetricsFilter(logging.Filter):
+        """Keep only log records whose message starts with 'COLLECTD'."""
+        def filter(self, record):
+            return record.getMessage().startswith('COLLECTD')
+
     cmd_logger = logging.FileHandler(filename=log_file_host_cmds)
     cmd_logger.setLevel(logging.DEBUG)
     cmd_logger.addFilter(CommandFilter())
@@ -275,6 +300,12 @@ def configure_logging(level):
     gen_logger.addFilter(TrafficGenCommandFilter())
     _LOGGER.addHandler(gen_logger)
 
+    if settings.getValue('COLLECTOR') == 'Collectd':
+        met_logger = logging.FileHandler(filename=log_file_infra_metrics)
+        met_logger.setLevel(logging.DEBUG)
+        met_logger.addFilter(CollectdMetricsFilter())
+        _LOGGER.addHandler(met_logger)
+
 
 def apply_filter(tests, tc_filter):
     """Allow a subset of tests to be conveniently selected
@@ -641,7 +672,22 @@ def main():
 
     settings.load_from_dir(os.path.join(_CURR_DIR, 'conf'))
 
-    # Load non performance/integration tests
+    # define the timestamp to be used by logs and results
+    date = datetime.datetime.fromtimestamp(time.time())
+    timestamp = date.strftime('%Y-%m-%d_%H-%M-%S')
+    settings.setValue('LOG_TIMESTAMP', timestamp)
+
+    # generate results directory name
+    # integration tests use the vswitchd log in test step assertions; ensure that
+    # correct value will be set before loading integration test configuration
+    results_dir = "results_" + timestamp
+    results_path = os.path.join(settings.getValue('LOG_DIR'), results_dir)
+    settings.setValue('RESULTS_PATH', results_path)
+    # create results directory
+    if not os.path.exists(results_path):
+        os.makedirs(results_path)
+
+    # load non performance/integration tests
     if args['integration']:
         settings.load_from_dir(os.path.join(_CURR_DIR, 'conf/integration'))
 
@@ -670,6 +716,9 @@ def main():
 
     configure_logging(settings.getValue('VERBOSITY'))
 
+    # CI build support
+    _LOGGER.info("Creating result directory: %s", results_path)
+
     # check and fix locale
     check_and_set_locale()
 
@@ -749,16 +798,7 @@ def main():
     # for backward compatibility
     settings.setValue('WHITELIST_NICS', list(nic['pci'] for nic in nic_list))
 
-    # generate results directory name
-    date = datetime.datetime.fromtimestamp(time.time())
-    results_dir = "results_" + date.strftime('%Y-%m-%d_%H-%M-%S')
-    results_path = os.path.join(settings.getValue('LOG_DIR'), results_dir)
-    settings.setValue('RESULTS_PATH', results_path)
 
-    # create results directory
-    if not os.path.exists(results_path):
-        _LOGGER.info("Creating result directory: %s", results_path)
-        os.makedirs(results_path)
     # pylint: disable=too-many-nested-blocks
     if settings.getValue('mode') == 'trafficgen':
         # execute only traffic generator
@@ -857,8 +897,9 @@ def main():
                 output=settings.getValue('XUNIT_DIR'), outsuffix="",
                 verbosity=0).run(suite)
 
-        if args['opnfvpod']:
-            pod_name = args['opnfvpod']
+        if args['opnfvpod'] or settings.getValue('OPNFVPOD'):
+            pod_name = (args['opnfvpod'] if args['opnfvpod'] else
+                        settings.getValue('OPNFVPOD'))
             installer_name = str(settings.getValue('OPNFV_INSTALLER')).lower()
             opnfv_url = settings.getValue('OPNFV_URL')
             pkg_list = settings.getValue('PACKAGE_LIST')