NFVBENCH-153 Add support for python3
[nfvbench.git] / nfvbench / summarizer.py
1 #!/usr/bin/env python
2 # Copyright 2016 Cisco Systems, Inc.  All rights reserved.
3 #
4 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
5 #    not use this file except in compliance with the License. You may obtain
6 #    a copy of the License at
7 #
8 #         http://www.apache.org/licenses/LICENSE-2.0
9 #
10 #    Unless required by applicable law or agreed to in writing, software
11 #    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
12 #    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13 #    License for the specific language governing permissions and limitations
14 #    under the License.
15 #
16
17 from contextlib import contextmanager
18 from datetime import datetime
19 import math
20
21 import bitmath
22 import pytz
23 from tabulate import tabulate
24
25 def _annotate_chain_stats(chain_stats, nodrop_marker='=>'):
26     """Transform a plain chain stats into an annotated one.
27
28     Example:
29     {
30          0: {'packets': [2000054, 1999996, 1999996, 1999996],
31              'lat_min_usec': 10,
32              'lat_max_usec': 187,
33              'lat_avg_usec': 45},
34          1: {...},
35          'total': {...}
36     }
37     should become:
38     {
39          0: {'packets': [2000054, -58 (-0.034%), '=>', 1999996],
40              'lat_min_usec': 10,
41              'lat_max_usec': 187,
42              'lat_avg_usec': 45},
43          1: {...},
44          'total': {...}
45     }
46
47     In the case of shared net, some columns in packets array can have ''.
48     Some columns cab also be None which means the data is not available.
49     """
50     for stats in list(chain_stats.values()):
51         packets = stats['packets']
52         count = len(packets)
53         if count > 1:
54             # keep the first counter
55             annotated_packets = [packets[0]]
56             # modify all remaining counters
57             prev_count = packets[0]
58             for index in range(1, count):
59                 cur_count = packets[index]
60                 if cur_count == '':
61                     # an empty string indicates an unknown counter for a shared interface
62                     # do not annotate those
63                     annotated_value = ''
64                 elif cur_count is None:
65                     # Not available
66                     annotated_value = 'n/a'
67                 else:
68                     drop = cur_count - prev_count
69                     if drop:
70                         dr = (drop * 100.0) / prev_count if prev_count else 0
71                         annotated_value = '{:+,} ({:+.4f}%)'.format(drop, dr)
72                     else:
73                         # no drop
74                         # if last column we display the value
75                         annotated_value = cur_count if index == count - 1 else nodrop_marker
76                     prev_count = cur_count
77                 annotated_packets.append(annotated_value)
78
79             stats['packets'] = annotated_packets
80
class Formatter(object):
    """Collection of string formatter methods."""

    @staticmethod
    def fixed(data):
        """Return the value unchanged."""
        return data

    @staticmethod
    def int(data):
        """Render an integer with thousands separators."""
        return '{:,}'.format(data)

    @staticmethod
    def float(decimal):
        """Return a formatter that renders floats with `decimal` digits."""
        fmt = '%.{}f'.format(decimal)
        return lambda data: fmt % (data)

    @staticmethod
    def standard(data):
        """Pick int/float/fixed formatting based on the value's type."""
        for kind, render in ((int, Formatter.int), (float, Formatter.float(4))):
            if isinstance(data, kind):
                return render(data)
        return Formatter.fixed(data)

    @staticmethod
    def suffix(suffix_str):
        """Return a formatter appending `suffix_str` to the standard rendering."""
        def _with_suffix(data):
            return Formatter.standard(data) + suffix_str
        return _with_suffix

    @staticmethod
    def bits(data):
        """Render a bit rate with the best SI prefix (e.g. '1.2340 Gbps')."""
        # `best_prefix` selects a prefix for a byte value; pre-multiplying
        # by 8.0 makes the selection happen on the bit value instead.
        scaled = (8.0 * bitmath.Bit(float(data))).best_prefix(bitmath.SI)
        si_bit_classes = {
            'kB': bitmath.kb,
            'MB': bitmath.Mb,
            'GB': bitmath.Gb,
            'TB': bitmath.Tb,
            'PB': bitmath.Pb,
            'EB': bitmath.Eb,
            'ZB': bitmath.Zb,
            'YB': bitmath.Yb,
        }
        bps = si_bit_classes.get(scaled.unit, bitmath.Bit).from_other(scaled) / 8.0
        if bps.unit == 'Bit':
            return bps.format("{value:.4f} bps")
        return bps.format("{value:.4f} {unit}ps")

    @staticmethod
    def percentage(data):
        """Render a percentage; '' for None and '-' for NaN."""
        if data is None:
            return ''
        return '-' if math.isnan(data) else Formatter.suffix('%')(Formatter.float(4)(data))
136
137
class Table(object):
    """ASCII readable table class."""

    def __init__(self, header):
        """Build a table from a header: an iterable of (title, formatter) pairs."""
        titles, self.formatters = zip(*header)
        # first row of `data` is always the header titles
        self.data = [titles]
        self.columns = len(titles)

    def add_row(self, row):
        """Format one row with the per-column formatters and append it."""
        assert self.columns == len(row)
        self.data.append(
            [render(cell) for cell, render in zip(row, self.formatters)])

    def get_string(self, indent=0):
        """Render the table as an ASCII grid, indenting continuation lines."""
        rendered = tabulate(self.data,
                            headers='firstrow',
                            tablefmt='grid',
                            stralign='center',
                            floatfmt='.2f')
        return rendered.replace('\n', '\n' + ' ' * indent)
161
162
class Summarizer(object):
    """Generic summarizer class."""

    # number of spaces added per nesting level
    indent_per_level = 2

    def __init__(self):
        """Start with zero indentation and an empty output buffer."""
        self.indent_size = 0
        self.marker_stack = [False]
        self.str = ''

    def __indent(self, marker):
        """Push one nesting level, remembering whether it is marked."""
        self.indent_size += self.indent_per_level
        self.marker_stack.append(marker)

    def __unindent(self):
        """Pop one nesting level."""
        assert self.indent_size >= self.indent_per_level
        self.indent_size -= self.indent_per_level
        self.marker_stack.pop()

    def __get_indent_string(self):
        """Return leading spaces, replacing the last two with '> ' when marked."""
        pad = ' ' * self.indent_size
        return pad[:-2] + '> ' if self.marker_stack[-1] else pad

    def _put(self, *args):
        """Append one indented line; a trailing dict argument is expanded below it."""
        self.str += self.__get_indent_string()
        if args and isinstance(args[-1], dict):
            self.str += ' '.join(str(arg) for arg in args[:-1]) + '\n'
            self._put_dict(args[-1])
        else:
            self.str += ' '.join(str(arg) for arg in args) + '\n'

    def _put_dict(self, data):
        """Append a dict as an indented 'key: value' listing, recursing into dicts."""
        with self._create_block(False):
            for key, value in data.items():
                if isinstance(value, dict):
                    self._put(key + ':')
                    self._put_dict(value)
                else:
                    self._put(key + ':', value)

    def _put_table(self, table):
        """Append a rendered table at the current indentation."""
        self.str += self.__get_indent_string()
        self.str += table.get_string(self.indent_size) + '\n'

    def __str__(self):
        return self.str

    @contextmanager
    def _create_block(self, marker=True):
        """Context manager opening one indented block for its body."""
        self.__indent(marker)
        yield
        self.__unindent()
217
218
class NFVBenchSummarizer(Summarizer):
    """Summarize nfvbench json result."""

    # Column layout for NDR/PDR result tables; the first column carries
    # the row label ('NDR' or 'PDR').
    ndr_pdr_header = [
        ('-', Formatter.fixed),
        ('L2 Frame Size', Formatter.standard),
        ('Rate (fwd+rev)', Formatter.bits),
        ('Rate (fwd+rev)', Formatter.suffix(' pps')),
        ('Avg Drop Rate', Formatter.suffix('%')),
        ('Avg Latency (usec)', Formatter.standard),
        ('Min Latency (usec)', Formatter.standard),
        ('Max Latency (usec)', Formatter.standard)
    ]

    # Column layout for fixed-rate (single run) result tables.
    single_run_header = [
        ('L2 Frame Size', Formatter.standard),
        ('Drop Rate', Formatter.suffix('%')),
        ('Avg Latency (usec)', Formatter.standard),
        ('Min Latency (usec)', Formatter.standard),
        ('Max Latency (usec)', Formatter.standard)
    ]

    # Column layout for the per-direction run configuration table.
    config_header = [
        ('Direction', Formatter.standard),
        ('Requested TX Rate (bps)', Formatter.bits),
        ('Actual TX Rate (bps)', Formatter.bits),
        ('RX Rate (bps)', Formatter.bits),
        ('Requested TX Rate (pps)', Formatter.suffix(' pps')),
        ('Actual TX Rate (pps)', Formatter.suffix(' pps')),
        ('RX Rate (pps)', Formatter.suffix(' pps'))
    ]

    # run_config lookup keys and their human-readable names, kept in
    # the same order so they can be zipped together.
    direction_keys = ['direction-forward', 'direction-reverse', 'direction-total']
    direction_names = ['Forward', 'Reverse', 'Total']

    def __init__(self, result, sender):
        """Create a summarizer instance.

        result: nfvbench result dict; must contain at least 'config'
        sender: optional record sender; when falsy, no record is built or sent
        """
        Summarizer.__init__(self)
        self.result = result
        self.config = self.result['config']
        self.record_header = None
        self.record_data = None
        self.sender = sender
        # if sender is available initialize record
        if self.sender:
            self.__record_init()
        self.__summarize()

    def __get_openstack_spec(self, property):
        """Return the named field from result['openstack_spec'], or '' if absent."""
        try:
            return self.result['openstack_spec'][property]
        except KeyError:
            return ''

    def __summarize(self):
        """Build the complete text summary (and record data) from self.result."""
        self._put()
        self._put('========== NFVBench Summary ==========')
        self._put('Date:', self.result['date'])
        self._put('NFVBench version', self.result['nfvbench_version'])
        self._put('Openstack Neutron:', {
            'vSwitch': self.__get_openstack_spec('vswitch'),
            'Encapsulation': self.__get_openstack_spec('encaps')
        })
        self.__record_header_put('version', self.result['nfvbench_version'])
        self.__record_header_put('vSwitch', self.__get_openstack_spec('vswitch'))
        self.__record_header_put('Encapsulation', self.__get_openstack_spec('encaps'))
        self._put('Benchmarks:')
        with self._create_block():
            self._put('Networks:')
            with self._create_block():
                network_benchmark = self.result['benchmarks']['network']

                self._put('Components:')
                with self._create_block():
                    self._put('Traffic Generator:')
                    with self._create_block(False):
                        self._put('Profile:', self.config['tg-name'])
                        self._put('Tool:', self.config['tg-tool'])
                    if network_benchmark['versions']:
                        self._put('Versions:')
                        with self._create_block():
                            for component, version in list(network_benchmark['versions'].items()):
                                self._put(component + ':', version)

                if self.config['ndr_run'] or self.config['pdr_run']:
                    self._put('Measurement Parameters:')
                    with self._create_block(False):
                        if self.config['ndr_run']:
                            self._put('NDR:', self.config['measurement']['NDR'])
                        if self.config['pdr_run']:
                            self._put('PDR:', self.config['measurement']['PDR'])
                self._put('Service chain:')
                # each item is a (chain_name, chain_benchmark) pair
                for result in list(network_benchmark['service_chain'].items()):
                    with self._create_block():
                        self.__chain_summarize(*result)

    def __chain_summarize(self, chain_name, chain_benchmark):
        """Summarize one service chain's benchmark section."""
        self._put(chain_name + ':')
        self.__record_header_put('service_chain', chain_name)
        with self._create_block():
            self._put('Traffic:')
            with self._create_block(False):
                self.__traffic_summarize(chain_benchmark['result'])

    def __traffic_summarize(self, traffic_benchmark):
        """Summarize the traffic section of one chain and send the record."""
        self._put('Profile:', traffic_benchmark['profile'])
        self._put('Bidirectional:', traffic_benchmark['bidirectional'])
        self._put('Flow count:', traffic_benchmark['flow_count'])
        self._put('Service chains count:', traffic_benchmark['service_chain_count'])
        self._put('Compute nodes:', list(traffic_benchmark['compute_nodes'].keys()))

        self.__record_header_put('profile', traffic_benchmark['profile'])
        self.__record_header_put('bidirectional', traffic_benchmark['bidirectional'])
        self.__record_header_put('flow_count', traffic_benchmark['flow_count'])
        self.__record_header_put('sc_count', traffic_benchmark['service_chain_count'])
        self.__record_header_put('compute_nodes', list(traffic_benchmark['compute_nodes'].keys()))
        with self._create_block(False):
            self._put()
            if not self.config['no_traffic']:
                self._put('Run Summary:')
                self._put()
                with self._create_block(False):
                    self._put_table(self.__get_summary_table(traffic_benchmark['result']))
                    # a 'warning' key is optional in the result dict
                    try:
                        self._put()
                        self._put(traffic_benchmark['result']['warning'])
                    except KeyError:
                        pass

            # entries are (frame_size, analysis) pairs; the membership
            # test skips the entry whose key is 'warning'
            for entry in list(traffic_benchmark['result'].items()):
                if 'warning' in entry:
                    continue
                self.__chain_analysis_summarize(*entry)
            self.__record_send()

    def __chain_analysis_summarize(self, frame_size, analysis):
        """Summarize the per-frame-size analysis (search times, config, counters)."""
        self._put()
        self._put('L2 frame size:', frame_size)
        if self.config['ndr_run']:
            self._put('NDR search duration:', Formatter.float(0)(analysis['ndr']['time_taken_sec']),
                      'seconds')
            self.__record_data_put(frame_size, {'ndr_search_duration': Formatter.float(0)(
                analysis['ndr']['time_taken_sec'])})
        if self.config['pdr_run']:
            self._put('PDR search duration:', Formatter.float(0)(analysis['pdr']['time_taken_sec']),
                      'seconds')
            self.__record_data_put(frame_size, {'pdr_search_duration': Formatter.float(0)(
                analysis['pdr']['time_taken_sec'])})
        self._put()

        # run config table only applies to fixed rate runs with actual traffic
        if not self.config['no_traffic'] and self.config['single_run']:
            self._put('Run Config:')
            self._put()
            with self._create_block(False):
                self._put_table(self.__get_config_table(analysis['run_config'], frame_size))
                if 'warning' in analysis['run_config'] and analysis['run_config']['warning']:
                    self._put()
                    self._put(analysis['run_config']['warning'])
                self._put()

        if 'packet_path_stats' in analysis:
            # NOTE(review): 'dir' shadows the builtin of the same name
            for dir in ['Forward', 'Reverse']:
                self._put(dir + ' Chain Packet Counters and Latency:')
                self._put()
                with self._create_block(False):
                    self._put_table(self._get_chain_table(analysis['packet_path_stats'][dir]))
                    self._put()

    def __get_summary_table(self, traffic_result):
        """Build the Run Summary table and record per-frame-size results."""
        if self.config['single_run']:
            summary_table = Table(self.single_run_header)
        else:
            summary_table = Table(self.ndr_pdr_header)

        if self.config['ndr_run']:
            for frame_size, analysis in list(traffic_result.items()):
                # 'warning' is not a frame size entry
                if frame_size == 'warning':
                    continue
                summary_table.add_row([
                    'NDR',
                    frame_size,
                    analysis['ndr']['rate_bps'],
                    analysis['ndr']['rate_pps'],
                    analysis['ndr']['stats']['overall']['drop_percentage'],
                    analysis['ndr']['stats']['overall']['avg_delay_usec'],
                    analysis['ndr']['stats']['overall']['min_delay_usec'],
                    analysis['ndr']['stats']['overall']['max_delay_usec']
                ])
                self.__record_data_put(frame_size, {'ndr': {
                    'type': 'NDR',
                    'rate_bps': analysis['ndr']['rate_bps'],
                    'rate_pps': analysis['ndr']['rate_pps'],
                    'drop_percentage': analysis['ndr']['stats']['overall']['drop_percentage'],
                    'avg_delay_usec': analysis['ndr']['stats']['overall']['avg_delay_usec'],
                    'min_delay_usec': analysis['ndr']['stats']['overall']['min_delay_usec'],
                    'max_delay_usec': analysis['ndr']['stats']['overall']['max_delay_usec']
                }})
        if self.config['pdr_run']:
            for frame_size, analysis in list(traffic_result.items()):
                if frame_size == 'warning':
                    continue
                summary_table.add_row([
                    'PDR',
                    frame_size,
                    analysis['pdr']['rate_bps'],
                    analysis['pdr']['rate_pps'],
                    analysis['pdr']['stats']['overall']['drop_percentage'],
                    analysis['pdr']['stats']['overall']['avg_delay_usec'],
                    analysis['pdr']['stats']['overall']['min_delay_usec'],
                    analysis['pdr']['stats']['overall']['max_delay_usec']
                ])
                self.__record_data_put(frame_size, {'pdr': {
                    'type': 'PDR',
                    'rate_bps': analysis['pdr']['rate_bps'],
                    'rate_pps': analysis['pdr']['rate_pps'],
                    'drop_percentage': analysis['pdr']['stats']['overall']['drop_percentage'],
                    'avg_delay_usec': analysis['pdr']['stats']['overall']['avg_delay_usec'],
                    'min_delay_usec': analysis['pdr']['stats']['overall']['min_delay_usec'],
                    'max_delay_usec': analysis['pdr']['stats']['overall']['max_delay_usec']
                }})
        if self.config['single_run']:
            for frame_size, analysis in list(traffic_result.items()):
                summary_table.add_row([
                    frame_size,
                    analysis['stats']['overall']['drop_rate_percent'],
                    analysis['stats']['overall']['rx']['avg_delay_usec'],
                    analysis['stats']['overall']['rx']['min_delay_usec'],
                    analysis['stats']['overall']['rx']['max_delay_usec']
                ])
                self.__record_data_put(frame_size, {'single_run': {
                    'type': 'single_run',
                    'drop_rate_percent': analysis['stats']['overall']['drop_rate_percent'],
                    'avg_delay_usec': analysis['stats']['overall']['rx']['avg_delay_usec'],
                    'min_delay_usec': analysis['stats']['overall']['rx']['min_delay_usec'],
                    'max_delay_usec': analysis['stats']['overall']['rx']['max_delay_usec']
                }})
        return summary_table

    def __get_config_table(self, run_config, frame_size):
        """Build the per-direction Run Config table and record the rates."""
        config_table = Table(self.config_header)
        for key, name in zip(self.direction_keys, self.direction_names):
            if key not in run_config:
                continue
            config_table.add_row([
                name,
                run_config[key]['orig']['rate_bps'],
                run_config[key]['tx']['rate_bps'],
                run_config[key]['rx']['rate_bps'],
                int(run_config[key]['orig']['rate_pps']),
                int(run_config[key]['tx']['rate_pps']),
                int(run_config[key]['rx']['rate_pps']),
            ])
            self.__record_data_put(frame_size, {
                name.lower() + "_orig_rate_bps": int(run_config[key]['orig']['rate_bps']),
                name.lower() + "_tx_rate_bps": int(run_config[key]['tx']['rate_bps']),
                name.lower() + "_rx_rate_bps": int(run_config[key]['rx']['rate_bps']),
                name.lower() + "_orig_rate_pps": int(run_config[key]['orig']['rate_pps']),
                name.lower() + "_tx_rate_pps": int(run_config[key]['tx']['rate_pps']),
                name.lower() + "_rx_rate_pps": int(run_config[key]['rx']['rate_pps']),

            })
        return config_table

    def _get_chain_table(self, chain_stats):
        """Retrieve the table for a direction.

        chain_stats: {
             'interfaces': ['Port0', 'drop %'', 'vhost0', 'Port1'],
             'chains': {
                 '0': {'packets': [2000054, '-0.023%', 1999996, 1999996],
                     'lat_min_usec': 10,
                     'lat_max_usec': 187,
                     'lat_avg_usec': 45},
                 '1': {...},
                 'total': {...}
             }
        }
        """
        chains = chain_stats['chains']
        # annotate in place: inserts drop %, markers and 'n/a' entries
        _annotate_chain_stats(chains)
        header = [('Chain', Formatter.standard)] + \
                 [(ifname, Formatter.standard) for ifname in chain_stats['interfaces']]
        # add latency columns if available Avg, Min, Max
        lat_keys = []
        lat_map = {'lat_avg_usec': 'Avg lat.',
                   'lat_min_usec': 'Min lat.',
                   'lat_max_usec': 'Max lat.'}
        # chain keys are strings here ('0', '1', ..., 'total')
        if 'lat_avg_usec' in chains['0']:
            lat_keys = ['lat_avg_usec', 'lat_min_usec', 'lat_max_usec']
            for key in lat_keys:
                header.append((lat_map[key], Formatter.standard))

        table = Table(header)
        # key=str gives a stable ordering even with mixed key types
        for chain in sorted(list(chains.keys()), key=str):
            row = [chain] + chains[chain]['packets']
            for lat_key in lat_keys:
                row.append('{:,} usec'.format(chains[chain][lat_key]))
            table.add_row(row)
        return table

    def __record_header_put(self, key, value):
        """Store a header field for the record, only when a sender exists."""
        if self.sender:
            self.record_header[key] = value

    def __record_data_put(self, key, data):
        """Merge per-frame-size data into the record, only when a sender exists."""
        if self.sender:
            if key not in self.record_data:
                self.record_data[key] = {}
            self.record_data[key].update(data)

    def __record_send(self):
        """Send one record per run type per frame size, then reset the record."""
        if self.sender:
            # NOTE(review): utcnow() is naive; tzinfo is patched in so the
            # strftime %z renders a UTC offset
            self.record_header["@timestamp"] = datetime.utcnow().replace(
                tzinfo=pytz.utc).strftime("%Y-%m-%dT%H:%M:%S.%f%z")
            for frame_size in self.record_data:
                # NOTE(review): `data` aliases record_header (no copy), so the
                # update below accumulates into record_header across frame
                # sizes — confirm this carry-over is intended
                data = self.record_header
                data['frame_size'] = frame_size
                data.update(self.record_data[frame_size])
                run_specific_data = {}
                if 'single_run' in data:
                    run_specific_data['single_run'] = data['single_run']
                    del data['single_run']
                if 'ndr' in data:
                    run_specific_data['ndr'] = data['ndr']
                    run_specific_data['ndr']['drop_limit'] = self.config['measurement']['NDR']
                    del data['ndr']
                if 'pdr' in data:
                    run_specific_data['pdr'] = data['pdr']
                    run_specific_data['pdr']['drop_limit'] = self.config['measurement']['PDR']
                    del data['pdr']
                # one record per run type, sharing the common fields
                for key in run_specific_data:
                    data_to_send = data.copy()
                    data_to_send.update(run_specific_data[key])
                    self.sender.record_send(data_to_send)
            self.__record_init()

    def __record_init(self):
        """Reset record header/data; only called after checking for sender."""
        # init is called after checking for sender
        self.record_header = {
            "runlogdate": self.sender.runlogdate,
            "user_label": self.config['user_label']
        }
        self.record_data = {}