Logging: Enhance logging with separate files and timestamps 31/59931/3
author    Sridhar K. N. Rao <sridhar.rao@spirent.com>
Wed, 18 Jul 2018 12:14:47 +0000 (17:44 +0530)
committer Sridhar K. N. Rao <sridhar.rao@spirent.com>
Fri, 27 Jul 2018 05:28:12 +0000 (10:58 +0530)
This patch adds the following:
1. The overall.log file is changed to vsperf-overall_<timestamp>.log files
   (the naming scheme is sketched below the sign-off).
2. The overall log files will include timestamps too.
3. Separate log files are created for infrastructure metrics.
4. All timestamps of the log files for a given test will be the same.
5. The timestamp is stored in 'LOG_TIMESTAMP'.
6. The metrics log is created only if 'COLLECTOR' is set to 'Collectd'.

JIRA: VSPERF-586

Change-Id: I35d373a66b1074f6ee1a440ffc82bc3db03f36e7
Signed-off-by: Sridhar K. N. Rao <sridhar.rao@spirent.com>
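For reference, a minimal standalone sketch of the timestamped naming scheme this patch introduces for the default log file. The constant values are illustrative stand-ins; the real configure_logging() reads them from vsperf's settings object:

    import datetime
    import os
    import time

    # Illustrative stand-ins for the configuration values (not the real settings object).
    LOG_DIR = '/tmp'
    LOG_FILE_DEFAULT = 'vsperf-overall.log'

    # One timestamp is taken when logging is configured and reused for every
    # log file of the run, so all files of a given test share the same suffix.
    timestamp = datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d_%H-%M-%S')

    # 'vsperf-overall.log' -> '/tmp/vsperf-overall_<timestamp>.log'
    name, ext = os.path.splitext(LOG_FILE_DEFAULT)
    log_file_default = os.path.join(LOG_DIR, "{}_{}{}".format(name, timestamp, ext))
    print(log_file_default)  # e.g. /tmp/vsperf-overall_2018-07-18_12-14-47.log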
conf/00_common.conf
vsperf

diff --git a/conf/00_common.conf b/conf/00_common.conf
index d54c8a5..a846fb5 100644
--- a/conf/00_common.conf
+++ b/conf/00_common.conf
@@ -98,11 +98,14 @@ SHELL_CMD = ['/bin/bash', '-c']
 LOG_DIR = '/tmp'
 
 # default log for all "small" executables
-LOG_FILE_DEFAULT = 'overall.log'
+LOG_FILE_DEFAULT = 'vsperf-overall.log'
 
 # log file for all commands executed on host
 LOG_FILE_HOST_CMDS = 'host-cmds.log'
 
+# log file prefix for infrastructure metrics
+LOG_FILE_INFRA_METRICS_PFX = 'collectd_'
+
 # ############################
 # Test configuration
 # ############################
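A short usage sketch of the new LOG_FILE_INFRA_METRICS_PFX constant, assuming the same run-wide timestamp as the default log; it mirrors the metrics_file construction in the vsperf diff below, with illustrative literals in place of settings lookups:

    import os

    LOG_DIR = '/tmp'                              # illustrative value
    LOG_FILE_INFRA_METRICS_PFX = 'collectd_'
    timestamp = '2018-07-18_12-14-47'             # the run-wide LOG_TIMESTAMP value

    metrics_file = LOG_FILE_INFRA_METRICS_PFX + timestamp + '.log'
    log_file_infra_metrics = os.path.join(LOG_DIR, metrics_file)
    print(log_file_infra_metrics)                 # /tmp/collectd_2018-07-18_12-14-47.log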
diff --git a/vsperf b/vsperf
index a141742..5c1d88a 100755
--- a/vsperf
+++ b/vsperf
@@ -235,13 +235,22 @@ def parse_arguments():
 def configure_logging(level):
     """Configure logging.
     """
+    date = datetime.datetime.fromtimestamp(time.time())
+    timestamp = date.strftime('%Y-%m-%d_%H-%M-%S')
+    settings.setValue('LOG_TIMESTAMP', timestamp)
+    name, ext = os.path.splitext(settings.getValue('LOG_FILE_DEFAULT'))
+    rename_default = "{name}_{uid}{ex}".format(name=name, uid=timestamp, ex=ext)
     log_file_default = os.path.join(
-        settings.getValue('LOG_DIR'), settings.getValue('LOG_FILE_DEFAULT'))
+        settings.getValue('LOG_DIR'), rename_default)
     log_file_host_cmds = os.path.join(
         settings.getValue('LOG_DIR'), settings.getValue('LOG_FILE_HOST_CMDS'))
     log_file_traffic_gen = os.path.join(
         settings.getValue('LOG_DIR'),
         settings.getValue('LOG_FILE_TRAFFIC_GEN'))
+    metrics_file = (settings.getValue('LOG_FILE_INFRA_METRICS_PFX') +
+                    timestamp + '.log')
+    log_file_infra_metrics = os.path.join(settings.getValue('LOG_DIR'),
+                                          metrics_file)
 
     _LOGGER.setLevel(logging.DEBUG)
 
@@ -253,6 +262,8 @@ def configure_logging(level):
 
     file_logger = logging.FileHandler(filename=log_file_default)
     file_logger.setLevel(logging.DEBUG)
+    file_logger.setFormatter(logging.Formatter(
+        '%(asctime)s : %(message)s'))
     _LOGGER.addHandler(file_logger)
 
     class CommandFilter(logging.Filter):
@@ -265,6 +276,11 @@ def configure_logging(level):
         def filter(self, record):
             return record.getMessage().startswith(trafficgen.CMD_PREFIX)
 
+    class CollectdMetricsFilter(logging.Filter):
+        """Filter out strings beginning with 'COLLECTD' :'"""
+        def filter(self, record):
+            return record.getMessage().startswith('COLLECTD')
+
     cmd_logger = logging.FileHandler(filename=log_file_host_cmds)
     cmd_logger.setLevel(logging.DEBUG)
     cmd_logger.addFilter(CommandFilter())
@@ -275,6 +291,12 @@ def configure_logging(level):
     gen_logger.addFilter(TrafficGenCommandFilter())
     _LOGGER.addHandler(gen_logger)
 
+    if settings.getValue('COLLECTOR') == 'Collectd':
+        met_logger = logging.FileHandler(filename=log_file_infra_metrics)
+        met_logger.setLevel(logging.DEBUG)
+        met_logger.addFilter(CollectdMetricsFilter())
+        _LOGGER.addHandler(met_logger)
+
 
 def apply_filter(tests, tc_filter):
     """Allow a subset of tests to be conveniently selected