NFVBENCH-193: Correct a critical bug introduced in commit NFVBENCH-192
[nfvbench.git] / nfvbench / summarizer.py
index 0d84ab2..0759933 100644 (file)
@@ -44,9 +44,10 @@ def _annotate_chain_stats(chain_stats, nodrop_marker='=>'):
          'total': {...}
     }
 
-    In the case of shared net, some columns in packets array can have ''
+    In the case of shared net, some columns in packets array can have ''.
+    Some columns can also be None which means the data is not available.
     """
-    for stats in chain_stats.values():
+    for stats in list(chain_stats.values()):
         packets = stats['packets']
         count = len(packets)
         if count > 1:
@@ -60,6 +61,9 @@ def _annotate_chain_stats(chain_stats, nodrop_marker='=>'):
                     # an empty string indicates an unknown counter for a shared interface
                     # do not annotate those
                     annotated_value = ''
+                elif cur_count is None:
+                    # Not available
+                    annotated_value = 'n/a'
                 else:
                     drop = cur_count - prev_count
                     if drop:
@@ -93,7 +97,7 @@ class Formatter(object):
     def standard(data):
         if isinstance(data, int):
             return Formatter.int(data)
-        elif isinstance(data, float):
+        if isinstance(data, float):
             return Formatter.float(4)(data)
         return Formatter.fixed(data)
 
@@ -126,7 +130,7 @@ class Formatter(object):
     def percentage(data):
         if data is None:
             return ''
-        elif math.isnan(data):
+        if math.isnan(data):
             return '-'
         return Formatter.suffix('%')(Formatter.float(4)(data))
 
@@ -135,7 +139,7 @@ class Table(object):
     """ASCII readable table class."""
 
     def __init__(self, header):
-        header_row, self.formatters = zip(*header)
+        header_row, self.formatters = list(zip(*header))
         self.data = [header_row]
         self.columns = len(header_row)
 
@@ -191,7 +195,7 @@ class Summarizer(object):
 
     def _put_dict(self, data):
         with self._create_block(False):
-            for key, value in data.iteritems():
+            for key, value in list(data.items()):
                 if isinstance(value, dict):
                     self._put(key + ':')
                     self._put_dict(value)
@@ -255,6 +259,13 @@ class NFVBenchSummarizer(Summarizer):
         self.record_header = None
         self.record_data = None
         self.sender = sender
+
+        # add percentiles headers if hdrh enabled
+        if not self.config.disable_hdrh:
+            for percentile in self.config.lat_percentiles:
+                # 'append' expects a single parameter => double parentheses
+                self.ndr_pdr_header.append((str(percentile) + ' %ile lat.', Formatter.standard))
+                self.single_run_header.append((str(percentile) + ' %ile lat.', Formatter.standard))
         # if sender is available initialize record
         if self.sender:
             self.__record_init()
@@ -293,7 +304,7 @@ class NFVBenchSummarizer(Summarizer):
                     if network_benchmark['versions']:
                         self._put('Versions:')
                         with self._create_block():
-                            for component, version in network_benchmark['versions'].iteritems():
+                            for component, version in list(network_benchmark['versions'].items()):
                                 self._put(component + ':', version)
 
                 if self.config['ndr_run'] or self.config['pdr_run']:
@@ -304,7 +315,7 @@ class NFVBenchSummarizer(Summarizer):
                         if self.config['pdr_run']:
                             self._put('PDR:', self.config['measurement']['PDR'])
                 self._put('Service chain:')
-                for result in network_benchmark['service_chain'].iteritems():
+                for result in list(network_benchmark['service_chain'].items()):
                     with self._create_block():
                         self.__chain_summarize(*result)
 
@@ -321,13 +332,13 @@ class NFVBenchSummarizer(Summarizer):
         self._put('Bidirectional:', traffic_benchmark['bidirectional'])
         self._put('Flow count:', traffic_benchmark['flow_count'])
         self._put('Service chains count:', traffic_benchmark['service_chain_count'])
-        self._put('Compute nodes:', traffic_benchmark['compute_nodes'].keys())
+        self._put('Compute nodes:', list(traffic_benchmark['compute_nodes'].keys()))
 
         self.__record_header_put('profile', traffic_benchmark['profile'])
         self.__record_header_put('bidirectional', traffic_benchmark['bidirectional'])
         self.__record_header_put('flow_count', traffic_benchmark['flow_count'])
         self.__record_header_put('sc_count', traffic_benchmark['service_chain_count'])
-        self.__record_header_put('compute_nodes', traffic_benchmark['compute_nodes'].keys())
+        self.__record_header_put('compute_nodes', list(traffic_benchmark['compute_nodes'].keys()))
         with self._create_block(False):
             self._put()
             if not self.config['no_traffic']:
@@ -341,7 +352,7 @@ class NFVBenchSummarizer(Summarizer):
                     except KeyError:
                         pass
 
-            for entry in traffic_benchmark['result'].iteritems():
+            for entry in list(traffic_benchmark['result'].items()):
                 if 'warning' in entry:
                     continue
                 self.__chain_analysis_summarize(*entry)
@@ -350,12 +361,6 @@ class NFVBenchSummarizer(Summarizer):
     def __chain_analysis_summarize(self, frame_size, analysis):
         self._put()
         self._put('L2 frame size:', frame_size)
-        if 'actual_l2frame_size' in analysis:
-            self._put('Actual l2 frame size:', analysis['actual_l2frame_size'])
-        elif self.config['ndr_run'] and 'actual_l2frame_size' in analysis['ndr']:
-            self._put('Actual l2 frame size:', analysis['ndr']['actual_l2frame_size'])
-        elif self.config['pdr_run'] and 'actual_l2frame_size' in analysis['pdr']:
-            self._put('Actual l2 frame size:', analysis['pdr']['actual_l2frame_size'])
         if self.config['ndr_run']:
             self._put('NDR search duration:', Formatter.float(0)(analysis['ndr']['time_taken_sec']),
                       'seconds')
@@ -393,10 +398,11 @@ class NFVBenchSummarizer(Summarizer):
             summary_table = Table(self.ndr_pdr_header)
 
         if self.config['ndr_run']:
-            for frame_size, analysis in traffic_result.iteritems():
+            for frame_size, analysis in list(traffic_result.items()):
                 if frame_size == 'warning':
                     continue
-                summary_table.add_row([
+
+                row_data = [
                     'NDR',
                     frame_size,
                     analysis['ndr']['rate_bps'],
@@ -405,21 +411,34 @@ class NFVBenchSummarizer(Summarizer):
                     analysis['ndr']['stats']['overall']['avg_delay_usec'],
                     analysis['ndr']['stats']['overall']['min_delay_usec'],
                     analysis['ndr']['stats']['overall']['max_delay_usec']
-                ])
-                self.__record_data_put(frame_size, {'ndr': {
+                ]
+                if not self.config.disable_hdrh:
+                    self.extract_hdrh_percentiles(
+                        analysis['ndr']['stats']['overall']['lat_percentile'], row_data)
+                summary_table.add_row(row_data)
+
+                ndr_data = {
                     'type': 'NDR',
                     'rate_bps': analysis['ndr']['rate_bps'],
                     'rate_pps': analysis['ndr']['rate_pps'],
+                    'offered_tx_rate_bps': analysis['ndr']['stats']['offered_tx_rate_bps'],
+                    'theoretical_tx_rate_pps': analysis['ndr']['stats']['theoretical_tx_rate_pps'],
+                    'theoretical_tx_rate_bps': analysis['ndr']['stats']['theoretical_tx_rate_bps'],
                     'drop_percentage': analysis['ndr']['stats']['overall']['drop_percentage'],
                     'avg_delay_usec': analysis['ndr']['stats']['overall']['avg_delay_usec'],
                     'min_delay_usec': analysis['ndr']['stats']['overall']['min_delay_usec'],
                     'max_delay_usec': analysis['ndr']['stats']['overall']['max_delay_usec']
-                }})
+                }
+                if not self.config.disable_hdrh:
+                    self.extract_hdrh_percentiles(
+                        analysis['ndr']['stats']['overall']['lat_percentile'], ndr_data, True)
+                self.__record_data_put(frame_size, {'ndr': ndr_data})
         if self.config['pdr_run']:
-            for frame_size, analysis in traffic_result.iteritems():
+            for frame_size, analysis in list(traffic_result.items()):
                 if frame_size == 'warning':
                     continue
-                summary_table.add_row([
+
+                row_data = [
                     'PDR',
                     frame_size,
                     analysis['pdr']['rate_bps'],
@@ -428,34 +447,67 @@ class NFVBenchSummarizer(Summarizer):
                     analysis['pdr']['stats']['overall']['avg_delay_usec'],
                     analysis['pdr']['stats']['overall']['min_delay_usec'],
                     analysis['pdr']['stats']['overall']['max_delay_usec']
-                ])
-                self.__record_data_put(frame_size, {'pdr': {
+                ]
+                if not self.config.disable_hdrh:
+                    self.extract_hdrh_percentiles(
+                        analysis['pdr']['stats']['overall']['lat_percentile'], row_data)
+                summary_table.add_row(row_data)
+
+                pdr_data = {
                     'type': 'PDR',
                     'rate_bps': analysis['pdr']['rate_bps'],
                     'rate_pps': analysis['pdr']['rate_pps'],
+                    'offered_tx_rate_bps': analysis['pdr']['stats']['offered_tx_rate_bps'],
+                    'theoretical_tx_rate_pps': analysis['pdr']['stats']['theoretical_tx_rate_pps'],
+                    'theoretical_tx_rate_bps': analysis['pdr']['stats']['theoretical_tx_rate_bps'],
                     'drop_percentage': analysis['pdr']['stats']['overall']['drop_percentage'],
                     'avg_delay_usec': analysis['pdr']['stats']['overall']['avg_delay_usec'],
                     'min_delay_usec': analysis['pdr']['stats']['overall']['min_delay_usec'],
                     'max_delay_usec': analysis['pdr']['stats']['overall']['max_delay_usec']
-                }})
+                }
+                if not self.config.disable_hdrh:
+                    self.extract_hdrh_percentiles(
+                        analysis['pdr']['stats']['overall']['lat_percentile'], pdr_data, True)
+                self.__record_data_put(frame_size, {'pdr': pdr_data})
         if self.config['single_run']:
-            for frame_size, analysis in traffic_result.iteritems():
-                summary_table.add_row([
+            for frame_size, analysis in list(traffic_result.items()):
+                row_data = [
                     frame_size,
                     analysis['stats']['overall']['drop_rate_percent'],
                     analysis['stats']['overall']['rx']['avg_delay_usec'],
                     analysis['stats']['overall']['rx']['min_delay_usec'],
                     analysis['stats']['overall']['rx']['max_delay_usec']
-                ])
-                self.__record_data_put(frame_size, {'single_run': {
+                ]
+                if not self.config.disable_hdrh:
+                    self.extract_hdrh_percentiles(
+                        analysis['stats']['overall']['rx']['lat_percentile'], row_data)
+                summary_table.add_row(row_data)
+
+                single_run_data = {
                     'type': 'single_run',
+                    'offered_tx_rate_bps': analysis['stats']['offered_tx_rate_bps'],
+                    'theoretical_tx_rate_pps': analysis['stats']['theoretical_tx_rate_pps'],
+                    'theoretical_tx_rate_bps': analysis['stats']['theoretical_tx_rate_bps'],
                     'drop_rate_percent': analysis['stats']['overall']['drop_rate_percent'],
                     'avg_delay_usec': analysis['stats']['overall']['rx']['avg_delay_usec'],
                     'min_delay_usec': analysis['stats']['overall']['rx']['min_delay_usec'],
                     'max_delay_usec': analysis['stats']['overall']['rx']['max_delay_usec']
-                }})
+                }
+                if not self.config.disable_hdrh:
+                    self.extract_hdrh_percentiles(
+                        analysis['stats']['overall']['rx']['lat_percentile'], single_run_data, True)
+                self.__record_data_put(frame_size, {'single_run': single_run_data})
         return summary_table
 
+    def extract_hdrh_percentiles(self, lat_percentile, data, add_key=False):
+        if add_key:
+            data['lat_percentile'] = {}
+        for percentile in self.config.lat_percentiles:
+            if add_key:
+                data['lat_percentile_' + str(percentile)] = lat_percentile[percentile]
+            else:
+                data.append(lat_percentile[percentile])
+
     def __get_config_table(self, run_config, frame_size):
         config_table = Table(self.config_header)
         for key, name in zip(self.direction_keys, self.direction_names):
@@ -487,11 +539,11 @@ class NFVBenchSummarizer(Summarizer):
         chain_stats: {
              'interfaces': ['Port0', 'drop %'', 'vhost0', 'Port1'],
              'chains': {
-                 0: {'packets': [2000054, '-0.023%', 1999996, 1999996],
+                 '0': {'packets': [2000054, '-0.023%', 1999996, 1999996],
                      'lat_min_usec': 10,
                      'lat_max_usec': 187,
                      'lat_avg_usec': 45},
-                 1: {...},
+                 '1': {...},
                  'total': {...}
              }
         }
@@ -500,21 +552,43 @@ class NFVBenchSummarizer(Summarizer):
         _annotate_chain_stats(chains)
         header = [('Chain', Formatter.standard)] + \
                  [(ifname, Formatter.standard) for ifname in chain_stats['interfaces']]
-        # add latency columns if available Avg, Min, Max
+        # add latency columns if available Avg, Min, Max and percentiles
         lat_keys = []
         lat_map = {'lat_avg_usec': 'Avg lat.',
                    'lat_min_usec': 'Min lat.',
                    'lat_max_usec': 'Max lat.'}
-        if 'lat_avg_usec' in chains[0]:
+        if 'lat_avg_usec' in chains['0']:
             lat_keys = ['lat_avg_usec', 'lat_min_usec', 'lat_max_usec']
-            for key in lat_keys:
+
+            if not self.config.disable_hdrh:
+                lat_keys.append('lat_percentile')
+                for percentile in self.config.lat_percentiles:
+                    lat_map['lat_' + str(percentile) + '_percentile'] = \
+                        str(percentile) + ' %ile lat.'
+
+            for key in lat_map:
+                # 'append' expects a single parameter => double parentheses
                 header.append((lat_map[key], Formatter.standard))
 
         table = Table(header)
-        for chain in sorted(chains.keys()):
+        for chain in sorted(list(chains.keys()), key=str):
             row = [chain] + chains[chain]['packets']
             for lat_key in lat_keys:
-                row.append('{:,} usec'.format(chains[chain][lat_key]))
+
+                if lat_key != 'lat_percentile':
+                    if chains[chain].get(lat_key, None):
+                        row.append(Formatter.standard(chains[chain][lat_key]))
+                    else:
+                        row.append('n/a')
+                else:
+                    if not self.config.disable_hdrh:
+                        if chains[chain].get(lat_key, None):
+                            for percentile in chains[chain][lat_key]:
+                                row.append(Formatter.standard(
+                                    chains[chain][lat_key][percentile]))
+                        else:
+                            for percentile in self.config.lat_percentiles:
+                                row.append('n/a')
             table.add_row(row)
         return table