NFVBENCH-193: Correct a critical bug introduced in commit NFVBENCH-192
[nfvbench.git] / nfvbench / summarizer.py
index 1eaa8d6..0759933 100644 (file)
 #    under the License.
 #
 
-import bitmath
 from contextlib import contextmanager
 from datetime import datetime
 import math
+
+import bitmath
 import pytz
-from specs import ChainType
 from tabulate import tabulate
 
+def _annotate_chain_stats(chain_stats, nodrop_marker='=>'):
+    """Transform a plain chain stats into an annotated one.
+
+    Example:
+    {
+         0: {'packets': [2000054, 1999996, 1999996, 1999996],
+             'lat_min_usec': 10,
+             'lat_max_usec': 187,
+             'lat_avg_usec': 45},
+         1: {...},
+         'total': {...}
+    }
+    should become:
+    {
+         0: {'packets': [2000054, '-58 (-0.0029%)', '=>', 1999996],
+             'lat_min_usec': 10,
+             'lat_max_usec': 187,
+             'lat_avg_usec': 45},
+         1: {...},
+         'total': {...}
+    }
+
+    In the case of shared net, some columns in packets array can have ''.
+    Some columns can also be None which means the data is not available.
+    """
+    for stats in list(chain_stats.values()):
+        packets = stats['packets']
+        count = len(packets)
+        if count > 1:
+            # keep the first counter
+            annotated_packets = [packets[0]]
+            # modify all remaining counters
+            prev_count = packets[0]
+            for index in range(1, count):
+                cur_count = packets[index]
+                if cur_count == '':
+                    # an empty string indicates an unknown counter for a shared interface
+                    # do not annotate those
+                    annotated_value = ''
+                elif cur_count is None:
+                    # Not available
+                    annotated_value = 'n/a'
+                else:
+                    drop = cur_count - prev_count
+                    if drop:
+                        dr = (drop * 100.0) / prev_count if prev_count else 0
+                        annotated_value = '{:+,} ({:+.4f}%)'.format(drop, dr)
+                    else:
+                        # no drop
+                        # if last column we display the value
+                        annotated_value = cur_count if index == count - 1 else nodrop_marker
+                    prev_count = cur_count
+                annotated_packets.append(annotated_value)
+
+            stats['packets'] = annotated_packets
 
 class Formatter(object):
-    """Collection of string formatter methods"""
+    """Collection of string formatter methods."""
 
     @staticmethod
     def fixed(data):
@@ -40,12 +95,11 @@ class Formatter(object):
 
     @staticmethod
     def standard(data):
-        if type(data) == int:
+        if isinstance(data, int):
             return Formatter.int(data)
-        elif type(data) == float:
+        if isinstance(data, float):
             return Formatter.float(4)(data)
-        else:
-            return Formatter.fixed(data)
+        return Formatter.fixed(data)
 
     @staticmethod
     def suffix(suffix_str):
@@ -70,29 +124,27 @@ class Formatter(object):
         bps = byte_to_bit_classes.get(bit.unit, bitmath.Bit).from_other(bit) / 8.0
         if bps.unit != 'Bit':
             return bps.format("{value:.4f} {unit}ps")
-        else:
-            return bps.format("{value:.4f} bps")
+        return bps.format("{value:.4f} bps")
 
     @staticmethod
     def percentage(data):
         if data is None:
             return ''
-        elif math.isnan(data):
+        if math.isnan(data):
             return '-'
-        else:
-            return Formatter.suffix('%')(Formatter.float(4)(data))
+        return Formatter.suffix('%')(Formatter.float(4)(data))
 
 
 class Table(object):
-    """ASCII readable table class"""
+    """ASCII readable table class."""
 
     def __init__(self, header):
-        header_row, self.formatters = zip(*header)
+        header_row, self.formatters = list(zip(*header))
         self.data = [header_row]
         self.columns = len(header_row)
 
     def add_row(self, row):
-        assert (self.columns == len(row))
+        assert self.columns == len(row)
         formatted_row = []
         for entry, formatter in zip(row, self.formatters):
             formatted_row.append(formatter(entry))
@@ -109,7 +161,7 @@ class Table(object):
 
 
 class Summarizer(object):
-    """Generic summarizer class"""
+    """Generic summarizer class."""
 
     indent_per_level = 2
 
@@ -123,7 +175,7 @@ class Summarizer(object):
         self.marker_stack.append(marker)
 
     def __unindent(self):
-        assert (self.indent_size >= self.indent_per_level)
+        assert self.indent_size >= self.indent_per_level
         self.indent_size -= self.indent_per_level
         self.marker_stack.pop()
 
@@ -135,7 +187,7 @@ class Summarizer(object):
 
     def _put(self, *args):
         self.str += self.__get_indent_string()
-        if len(args) and type(args[-1]) == dict:
+        if args and isinstance(args[-1], dict):
             self.str += ' '.join(map(str, args[:-1])) + '\n'
             self._put_dict(args[-1])
         else:
@@ -143,8 +195,8 @@ class Summarizer(object):
 
     def _put_dict(self, data):
         with self._create_block(False):
-            for key, value in data.iteritems():
-                if type(value) == dict:
+            for key, value in list(data.items()):
+                if isinstance(value, dict):
                     self._put(key + ':')
                     self._put_dict(value)
                 else:
@@ -165,7 +217,7 @@ class Summarizer(object):
 
 
 class NFVBenchSummarizer(Summarizer):
-    """Summarize nfvbench json result"""
+    """Summarize nfvbench json result."""
 
     ndr_pdr_header = [
         ('-', Formatter.fixed),
@@ -196,44 +248,47 @@ class NFVBenchSummarizer(Summarizer):
         ('RX Rate (pps)', Formatter.suffix(' pps'))
     ]
 
-    chain_analysis_header = [
-        ('Interface', Formatter.standard),
-        ('Device', Formatter.standard),
-        ('Packets (fwd)', Formatter.standard),
-        ('Drops (fwd)', Formatter.standard),
-        ('Drop% (fwd)', Formatter.percentage),
-        ('Packets (rev)', Formatter.standard),
-        ('Drops (rev)', Formatter.standard),
-        ('Drop% (rev)', Formatter.percentage)
-    ]
-
     direction_keys = ['direction-forward', 'direction-reverse', 'direction-total']
     direction_names = ['Forward', 'Reverse', 'Total']
 
     def __init__(self, result, sender):
+        """Create a summarizer instance."""
         Summarizer.__init__(self)
         self.result = result
         self.config = self.result['config']
         self.record_header = None
         self.record_data = None
         self.sender = sender
+
+        # add percentiles headers if hdrh enabled
+        if not self.config.disable_hdrh:
+            for percentile in self.config.lat_percentiles:
+                # 'append' expects a single parameter => double parentheses
+                self.ndr_pdr_header.append((str(percentile) + ' %ile lat.', Formatter.standard))
+                self.single_run_header.append((str(percentile) + ' %ile lat.', Formatter.standard))
         # if sender is available initialize record
         if self.sender:
             self.__record_init()
         self.__summarize()
 
+    def __get_openstack_spec(self, property):
+        try:
+            return self.result['openstack_spec'][property]
+        except KeyError:
+            return ''
+
     def __summarize(self):
         self._put()
         self._put('========== NFVBench Summary ==========')
         self._put('Date:', self.result['date'])
         self._put('NFVBench version', self.result['nfvbench_version'])
         self._put('Openstack Neutron:', {
-            'vSwitch': self.result['openstack_spec']['vswitch'],
-            'Encapsulation': self.result['openstack_spec']['encaps']
+            'vSwitch': self.__get_openstack_spec('vswitch'),
+            'Encapsulation': self.__get_openstack_spec('encaps')
         })
         self.__record_header_put('version', self.result['nfvbench_version'])
-        self.__record_header_put('vSwitch', self.result['openstack_spec']['vswitch'])
-        self.__record_header_put('Encapsulation', self.result['openstack_spec']['encaps'])
+        self.__record_header_put('vSwitch', self.__get_openstack_spec('vswitch'))
+        self.__record_header_put('Encapsulation', self.__get_openstack_spec('encaps'))
         self._put('Benchmarks:')
         with self._create_block():
             self._put('Networks:')
@@ -242,17 +297,14 @@ class NFVBenchSummarizer(Summarizer):
 
                 self._put('Components:')
                 with self._create_block():
-                    self._put('TOR:')
-                    with self._create_block(False):
-                        self._put('Type:', self.config['tor']['type'])
                     self._put('Traffic Generator:')
                     with self._create_block(False):
-                        self._put('Profile:', self.config['generator_config']['name'])
-                        self._put('Tool:', self.config['generator_config']['tool'])
+                        self._put('Profile:', self.config['tg-name'])
+                        self._put('Tool:', self.config['tg-tool'])
                     if network_benchmark['versions']:
                         self._put('Versions:')
                         with self._create_block():
-                            for component, version in network_benchmark['versions'].iteritems():
+                            for component, version in list(network_benchmark['versions'].items()):
                                 self._put(component + ':', version)
 
                 if self.config['ndr_run'] or self.config['pdr_run']:
@@ -263,15 +315,12 @@ class NFVBenchSummarizer(Summarizer):
                         if self.config['pdr_run']:
                             self._put('PDR:', self.config['measurement']['PDR'])
                 self._put('Service chain:')
-                for result in network_benchmark['service_chain'].iteritems():
+                for result in list(network_benchmark['service_chain'].items()):
                     with self._create_block():
                         self.__chain_summarize(*result)
 
     def __chain_summarize(self, chain_name, chain_benchmark):
         self._put(chain_name + ':')
-        if chain_name == ChainType.PVVP:
-            self._put('Mode:', chain_benchmark.get('mode'))
-            chain_name += "-" + chain_benchmark.get('mode')
         self.__record_header_put('service_chain', chain_name)
         with self._create_block():
             self._put('Traffic:')
@@ -283,13 +332,13 @@ class NFVBenchSummarizer(Summarizer):
         self._put('Bidirectional:', traffic_benchmark['bidirectional'])
         self._put('Flow count:', traffic_benchmark['flow_count'])
         self._put('Service chains count:', traffic_benchmark['service_chain_count'])
-        self._put('Compute nodes:', traffic_benchmark['compute_nodes'].keys())
+        self._put('Compute nodes:', list(traffic_benchmark['compute_nodes'].keys()))
 
         self.__record_header_put('profile', traffic_benchmark['profile'])
         self.__record_header_put('bidirectional', traffic_benchmark['bidirectional'])
         self.__record_header_put('flow_count', traffic_benchmark['flow_count'])
         self.__record_header_put('sc_count', traffic_benchmark['service_chain_count'])
-        self.__record_header_put('compute_nodes', traffic_benchmark['compute_nodes'].keys())
+        self.__record_header_put('compute_nodes', list(traffic_benchmark['compute_nodes'].keys()))
         with self._create_block(False):
             self._put()
             if not self.config['no_traffic']:
@@ -303,20 +352,15 @@ class NFVBenchSummarizer(Summarizer):
                     except KeyError:
                         pass
 
-            for entry in traffic_benchmark['result'].iteritems():
+            for entry in list(traffic_benchmark['result'].items()):
                 if 'warning' in entry:
                     continue
                 self.__chain_analysis_summarize(*entry)
-                self.__record_send()
+            self.__record_send()
 
     def __chain_analysis_summarize(self, frame_size, analysis):
         self._put()
         self._put('L2 frame size:', frame_size)
-        if 'analysis_duration_sec' in analysis:
-            self._put('Chain analysis duration:',
-                      Formatter.float(3)(analysis['analysis_duration_sec']), 'seconds')
-            self.__record_data_put(frame_size, {'chain_analysis_duration': Formatter.float(3)(
-                analysis['analysis_duration_sec'])})
         if self.config['ndr_run']:
             self._put('NDR search duration:', Formatter.float(0)(analysis['ndr']['time_taken_sec']),
                       'seconds')
@@ -339,12 +383,13 @@ class NFVBenchSummarizer(Summarizer):
                     self._put(analysis['run_config']['warning'])
                 self._put()
 
-        if 'packet_analysis' in analysis:
-            self._put('Chain Analysis:')
-            self._put()
-            with self._create_block(False):
-                self._put_table(self.__get_chain_analysis_table(analysis['packet_analysis']))
+        if 'packet_path_stats' in analysis:
+            for dir in ['Forward', 'Reverse']:
+                self._put(dir + ' Chain Packet Counters and Latency:')
                 self._put()
+                with self._create_block(False):
+                    self._put_table(self._get_chain_table(analysis['packet_path_stats'][dir]))
+                    self._put()
 
     def __get_summary_table(self, traffic_result):
         if self.config['single_run']:
@@ -353,10 +398,11 @@ class NFVBenchSummarizer(Summarizer):
             summary_table = Table(self.ndr_pdr_header)
 
         if self.config['ndr_run']:
-            for frame_size, analysis in traffic_result.iteritems():
+            for frame_size, analysis in list(traffic_result.items()):
                 if frame_size == 'warning':
                     continue
-                summary_table.add_row([
+
+                row_data = [
                     'NDR',
                     frame_size,
                     analysis['ndr']['rate_bps'],
@@ -365,21 +411,34 @@ class NFVBenchSummarizer(Summarizer):
                     analysis['ndr']['stats']['overall']['avg_delay_usec'],
                     analysis['ndr']['stats']['overall']['min_delay_usec'],
                     analysis['ndr']['stats']['overall']['max_delay_usec']
-                ])
-                self.__record_data_put(frame_size, {'ndr': {
+                ]
+                if not self.config.disable_hdrh:
+                    self.extract_hdrh_percentiles(
+                        analysis['ndr']['stats']['overall']['lat_percentile'], row_data)
+                summary_table.add_row(row_data)
+
+                ndr_data = {
                     'type': 'NDR',
                     'rate_bps': analysis['ndr']['rate_bps'],
                     'rate_pps': analysis['ndr']['rate_pps'],
-                    'drop_percantage': analysis['ndr']['stats']['overall']['drop_percentage'],
+                    'offered_tx_rate_bps': analysis['ndr']['stats']['offered_tx_rate_bps'],
+                    'theoretical_tx_rate_pps': analysis['ndr']['stats']['theoretical_tx_rate_pps'],
+                    'theoretical_tx_rate_bps': analysis['ndr']['stats']['theoretical_tx_rate_bps'],
+                    'drop_percentage': analysis['ndr']['stats']['overall']['drop_percentage'],
                     'avg_delay_usec': analysis['ndr']['stats']['overall']['avg_delay_usec'],
                     'min_delay_usec': analysis['ndr']['stats']['overall']['min_delay_usec'],
                     'max_delay_usec': analysis['ndr']['stats']['overall']['max_delay_usec']
-                }})
+                }
+                if not self.config.disable_hdrh:
+                    self.extract_hdrh_percentiles(
+                        analysis['ndr']['stats']['overall']['lat_percentile'], ndr_data, True)
+                self.__record_data_put(frame_size, {'ndr': ndr_data})
         if self.config['pdr_run']:
-            for frame_size, analysis in traffic_result.iteritems():
+            for frame_size, analysis in list(traffic_result.items()):
                 if frame_size == 'warning':
                     continue
-                summary_table.add_row([
+
+                row_data = [
                     'PDR',
                     frame_size,
                     analysis['pdr']['rate_bps'],
@@ -388,34 +447,67 @@ class NFVBenchSummarizer(Summarizer):
                     analysis['pdr']['stats']['overall']['avg_delay_usec'],
                     analysis['pdr']['stats']['overall']['min_delay_usec'],
                     analysis['pdr']['stats']['overall']['max_delay_usec']
-                ])
-                self.__record_data_put(frame_size, {'pdr': {
+                ]
+                if not self.config.disable_hdrh:
+                    self.extract_hdrh_percentiles(
+                        analysis['pdr']['stats']['overall']['lat_percentile'], row_data)
+                summary_table.add_row(row_data)
+
+                pdr_data = {
                     'type': 'PDR',
                     'rate_bps': analysis['pdr']['rate_bps'],
                     'rate_pps': analysis['pdr']['rate_pps'],
-                    'drop_percantage': analysis['pdr']['stats']['overall']['drop_percentage'],
+                    'offered_tx_rate_bps': analysis['pdr']['stats']['offered_tx_rate_bps'],
+                    'theoretical_tx_rate_pps': analysis['pdr']['stats']['theoretical_tx_rate_pps'],
+                    'theoretical_tx_rate_bps': analysis['pdr']['stats']['theoretical_tx_rate_bps'],
+                    'drop_percentage': analysis['pdr']['stats']['overall']['drop_percentage'],
                     'avg_delay_usec': analysis['pdr']['stats']['overall']['avg_delay_usec'],
                     'min_delay_usec': analysis['pdr']['stats']['overall']['min_delay_usec'],
                     'max_delay_usec': analysis['pdr']['stats']['overall']['max_delay_usec']
-                }})
+                }
+                if not self.config.disable_hdrh:
+                    self.extract_hdrh_percentiles(
+                        analysis['pdr']['stats']['overall']['lat_percentile'], pdr_data, True)
+                self.__record_data_put(frame_size, {'pdr': pdr_data})
         if self.config['single_run']:
-            for frame_size, analysis in traffic_result.iteritems():
-                summary_table.add_row([
+            for frame_size, analysis in list(traffic_result.items()):
+                row_data = [
                     frame_size,
                     analysis['stats']['overall']['drop_rate_percent'],
                     analysis['stats']['overall']['rx']['avg_delay_usec'],
                     analysis['stats']['overall']['rx']['min_delay_usec'],
                     analysis['stats']['overall']['rx']['max_delay_usec']
-                ])
-                self.__record_data_put(frame_size, {'single_run': {
+                ]
+                if not self.config.disable_hdrh:
+                    self.extract_hdrh_percentiles(
+                        analysis['stats']['overall']['rx']['lat_percentile'], row_data)
+                summary_table.add_row(row_data)
+
+                single_run_data = {
                     'type': 'single_run',
+                    'offered_tx_rate_bps': analysis['stats']['offered_tx_rate_bps'],
+                    'theoretical_tx_rate_pps': analysis['stats']['theoretical_tx_rate_pps'],
+                    'theoretical_tx_rate_bps': analysis['stats']['theoretical_tx_rate_bps'],
                     'drop_rate_percent': analysis['stats']['overall']['drop_rate_percent'],
                     'avg_delay_usec': analysis['stats']['overall']['rx']['avg_delay_usec'],
                     'min_delay_usec': analysis['stats']['overall']['rx']['min_delay_usec'],
                     'max_delay_usec': analysis['stats']['overall']['rx']['max_delay_usec']
-                }})
+                }
+                if not self.config.disable_hdrh:
+                    self.extract_hdrh_percentiles(
+                        analysis['stats']['overall']['rx']['lat_percentile'], single_run_data, True)
+                self.__record_data_put(frame_size, {'single_run': single_run_data})
         return summary_table
 
+    def extract_hdrh_percentiles(self, lat_percentile, data, add_key=False):
+        if add_key:
+            data['lat_percentile'] = {}
+        for percentile in self.config.lat_percentiles:
+            if add_key:
+                data['lat_percentile_' + str(percentile)] = lat_percentile[percentile]
+            else:
+                data.append(lat_percentile[percentile])
+
     def __get_config_table(self, run_config, frame_size):
         config_table = Table(self.config_header)
         for key, name in zip(self.direction_keys, self.direction_names):
@@ -441,23 +533,64 @@ class NFVBenchSummarizer(Summarizer):
             })
         return config_table
 
-    def __get_chain_analysis_table(self, packet_analysis):
-        chain_analysis_table = Table(self.chain_analysis_header)
-        forward_analysis = packet_analysis['direction-forward']
-        reverse_analysis = packet_analysis['direction-reverse']
-        reverse_analysis.reverse()
-        for fwd, rev in zip(forward_analysis, reverse_analysis):
-            chain_analysis_table.add_row([
-                fwd['interface'],
-                fwd['device'],
-                fwd['packet_count'],
-                fwd.get('packet_drop_count', None),
-                fwd.get('packet_drop_percentage', None),
-                rev['packet_count'],
-                rev.get('packet_drop_count', None),
-                rev.get('packet_drop_percentage', None),
-            ])
-        return chain_analysis_table
+    def _get_chain_table(self, chain_stats):
+        """Retrieve the table for a direction.
+
+        chain_stats: {
+         'interfaces': ['Port0', 'drop %', 'vhost0', 'Port1'],
+             'chains': {
+                 '0': {'packets': [2000054, '-0.023%', 1999996, 1999996],
+                     'lat_min_usec': 10,
+                     'lat_max_usec': 187,
+                     'lat_avg_usec': 45},
+                 '1': {...},
+                 'total': {...}
+             }
+        }
+        """
+        chains = chain_stats['chains']
+        _annotate_chain_stats(chains)
+        header = [('Chain', Formatter.standard)] + \
+                 [(ifname, Formatter.standard) for ifname in chain_stats['interfaces']]
+        # add latency columns if available Avg, Min, Max and percentiles
+        lat_keys = []
+        lat_map = {'lat_avg_usec': 'Avg lat.',
+                   'lat_min_usec': 'Min lat.',
+                   'lat_max_usec': 'Max lat.'}
+        if 'lat_avg_usec' in chains['0']:
+            lat_keys = ['lat_avg_usec', 'lat_min_usec', 'lat_max_usec']
+
+            if not self.config.disable_hdrh:
+                lat_keys.append('lat_percentile')
+                for percentile in self.config.lat_percentiles:
+                    lat_map['lat_' + str(percentile) + '_percentile'] = \
+                        str(percentile) + ' %ile lat.'
+
+            for key in lat_map:
+                # 'append' expects a single parameter => double parentheses
+                header.append((lat_map[key], Formatter.standard))
+
+        table = Table(header)
+        for chain in sorted(list(chains.keys()), key=str):
+            row = [chain] + chains[chain]['packets']
+            for lat_key in lat_keys:
+
+                if lat_key != 'lat_percentile':
+                    if chains[chain].get(lat_key, None):
+                        row.append(Formatter.standard(chains[chain][lat_key]))
+                    else:
+                        row.append('n/a')
+                else:
+                    if not self.config.disable_hdrh:
+                        if chains[chain].get(lat_key, None):
+                            for percentile in chains[chain][lat_key]:
+                                row.append(Formatter.standard(
+                                    chains[chain][lat_key][percentile]))
+                        else:
+                            for percentile in self.config.lat_percentiles:
+                                row.append('n/a')
+            table.add_row(row)
+        return table
 
     def __record_header_put(self, key, value):
         if self.sender:
@@ -472,8 +605,7 @@ class NFVBenchSummarizer(Summarizer):
     def __record_send(self):
         if self.sender:
             self.record_header["@timestamp"] = datetime.utcnow().replace(
-                tzinfo=pytz.utc).strftime(
-                "%Y-%m-%dT%H:%M:%S.%f%z")
+                tzinfo=pytz.utc).strftime("%Y-%m-%dT%H:%M:%S.%f%z")
             for frame_size in self.record_data:
                 data = self.record_header
                 data['frame_size'] = frame_size
@@ -500,5 +632,6 @@ class NFVBenchSummarizer(Summarizer):
         # init is called after checking for sender
         self.record_header = {
             "runlogdate": self.sender.runlogdate,
+            "user_label": self.config['user_label']
         }
         self.record_data = {}