Add + or - sign to drop packets and drop rates
[nfvbench.git] / nfvbench / summarizer.py
1 #!/usr/bin/env python
2 # Copyright 2016 Cisco Systems, Inc.  All rights reserved.
3 #
4 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
5 #    not use this file except in compliance with the License. You may obtain
6 #    a copy of the License at
7 #
8 #         http://www.apache.org/licenses/LICENSE-2.0
9 #
10 #    Unless required by applicable law or agreed to in writing, software
11 #    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
12 #    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13 #    License for the specific language governing permissions and limitations
14 #    under the License.
15 #
16
17 from contextlib import contextmanager
18 from datetime import datetime
19 import math
20
21 import bitmath
22 import pytz
23 from tabulate import tabulate
24
25 def _annotate_chain_stats(chain_stats, nodrop_marker='=>'):
26     """Transform a plain chain stats into an annotated one.
27
28     Example:
29     {
30          0: {'packets': [2000054, 1999996, 1999996, 1999996],
31              'lat_min_usec': 10,
32              'lat_max_usec': 187,
33              'lat_avg_usec': 45},
34          1: {...},
35          'total': {...}
36     }
37     should become:
38     {
39          0: {'packets': [2000054, -58 (-0.034%), '=>', 1999996],
40              'lat_min_usec': 10,
41              'lat_max_usec': 187,
42              'lat_avg_usec': 45},
43          1: {...},
44          'total': {...}
45     }
46
47     In the case of shared net, some columns in packets array can have ''
48     """
49     for stats in chain_stats.values():
50         packets = stats['packets']
51         count = len(packets)
52         if count > 1:
53             # keep the first counter
54             annotated_packets = [packets[0]]
55             # modify all remaining counters
56             prev_count = packets[0]
57             for index in range(1, count):
58                 cur_count = packets[index]
59                 if cur_count == '':
60                     # an empty string indicates an unknown counter for a shared interface
61                     # do not annotate those
62                     annotated_value = ''
63                 else:
64                     drop = cur_count - prev_count
65                     if drop:
66                         dr = (drop * 100.0) / prev_count if prev_count else 0
67                         annotated_value = '{:+,} ({:+.4f}%)'.format(drop, dr)
68                     else:
69                         # no drop
70                         # if last column we display the value
71                         annotated_value = cur_count if index == count - 1 else nodrop_marker
72                     prev_count = cur_count
73                 annotated_packets.append(annotated_value)
74
75             stats['packets'] = annotated_packets
76
class Formatter(object):
    """Collection of string formatter methods."""

    @staticmethod
    def fixed(data):
        # identity formatter: return the value unchanged
        return data

    @staticmethod
    def int(data):
        # thousands-separated integer, e.g. 1234567 -> '1,234,567'
        return '{:,}'.format(data)

    @staticmethod
    def float(decimal):
        # return a formatter rendering floats with `decimal` fraction digits
        pattern = '%.{}f'.format(decimal)
        return lambda data: pattern % (data)

    @staticmethod
    def standard(data):
        # dispatch on the value type: int -> grouped, float -> 4 decimals,
        # anything else -> unchanged
        if isinstance(data, int):
            return Formatter.int(data)
        if isinstance(data, float):
            return Formatter.float(4)(data)
        return Formatter.fixed(data)

    @staticmethod
    def suffix(suffix_str):
        # return a formatter that appends suffix_str to the standard rendering
        def _append_suffix(data):
            return Formatter.standard(data) + suffix_str
        return _append_suffix

    @staticmethod
    def bits(data):
        # By default, `best_prefix` returns a value in byte format, this hack
        # (multiply by 8.0) will convert it into bit format.
        as_bits = 8.0 * bitmath.Bit(float(data))
        as_bits = as_bits.best_prefix(bitmath.SI)
        # map the byte-based unit chosen by best_prefix to its bit equivalent
        byte_to_bit_classes = {
            'kB': bitmath.kb,
            'MB': bitmath.Mb,
            'GB': bitmath.Gb,
            'TB': bitmath.Tb,
            'PB': bitmath.Pb,
            'EB': bitmath.Eb,
            'ZB': bitmath.Zb,
            'YB': bitmath.Yb,
        }
        bit_cls = byte_to_bit_classes.get(as_bits.unit, bitmath.Bit)
        bps = bit_cls.from_other(as_bits) / 8.0
        if bps.unit != 'Bit':
            return bps.format("{value:.4f} {unit}ps")
        return bps.format("{value:.4f} bps")

    @staticmethod
    def percentage(data):
        # '' for missing values, '-' for NaN, otherwise 4-decimal percent
        if data is None:
            return ''
        if math.isnan(data):
            return '-'
        return Formatter.suffix('%')(Formatter.float(4)(data))
132
133
class Table(object):
    """ASCII readable table class."""

    def __init__(self, header):
        # header is a list of (title, formatter) pairs; keep titles as the
        # first data row and the formatters for later rows
        titles, self.formatters = zip(*header)
        self.data = [titles]
        self.columns = len(titles)

    def add_row(self, row):
        # each cell is rendered by the formatter of its column
        assert self.columns == len(row)
        self.data.append(
            [render(cell) for cell, render in zip(row, self.formatters)])

    def get_string(self, indent=0):
        # render with tabulate then shift every line right by `indent` spaces
        rendered = tabulate(self.data,
                            headers='firstrow',
                            tablefmt='grid',
                            stralign='center',
                            floatfmt='.2f')
        return rendered.replace('\n', '\n' + ' ' * indent)
157
158
class Summarizer(object):
    """Generic summarizer class.

    Accumulates an indented, human-readable text report through the _put*
    methods; the final text is obtained with str(summarizer).
    """

    # number of spaces added per nesting level
    indent_per_level = 2

    def __init__(self):
        """Create an empty summarizer with no indentation."""
        self.indent_size = 0
        # one flag per nesting level; True means prefix lines with '> '
        self.marker_stack = [False]
        self.str = ''

    def __indent(self, marker):
        self.indent_size += self.indent_per_level
        self.marker_stack.append(marker)

    def __unindent(self):
        assert self.indent_size >= self.indent_per_level
        self.indent_size -= self.indent_per_level
        self.marker_stack.pop()

    def __get_indent_string(self):
        current_str = ' ' * self.indent_size
        if self.marker_stack[-1]:
            # replace the last 2 spaces with the block marker
            current_str = current_str[:-2] + '> '
        return current_str

    def _put(self, *args):
        """Append one line made of the space-joined args.

        If the last argument is a dict, it is expanded as indented
        'key: value' lines below the current line instead of inline.
        """
        self.str += self.__get_indent_string()
        if args and isinstance(args[-1], dict):
            self.str += ' '.join(map(str, args[:-1])) + '\n'
            self._put_dict(args[-1])
        else:
            self.str += ' '.join(map(str, args)) + '\n'

    def _put_dict(self, data):
        """Recursively append a dict as 'key: value' lines, one level deeper."""
        with self._create_block(False):
            # items() behaves the same on Python 2 and 3; iteritems() would
            # break under Python 3
            for key, value in data.items():
                if isinstance(value, dict):
                    self._put(key + ':')
                    self._put_dict(value)
                else:
                    self._put(key + ':', value)

    def _put_table(self, table):
        """Append a pre-built Table, shifted to the current indentation."""
        self.str += self.__get_indent_string()
        self.str += table.get_string(self.indent_size) + '\n'

    def __str__(self):
        return self.str

    @contextmanager
    def _create_block(self, marker=True):
        """Indent one level for the duration of the with-block.

        The try/finally guarantees the indent level is restored even if the
        body raises, so a failing section cannot corrupt later output.
        """
        self.__indent(marker)
        try:
            yield
        finally:
            self.__unindent()
213
214
class NFVBenchSummarizer(Summarizer):
    """Summarize nfvbench json result.

    Walks the nfvbench result dict, emits a human-readable report (inherited
    Summarizer text buffer) and, when a sender is provided, ships flattened
    per-frame-size records through it.
    """

    # table header for NDR/PDR search results: (title, formatter) pairs
    ndr_pdr_header = [
        ('-', Formatter.fixed),
        ('L2 Frame Size', Formatter.standard),
        ('Rate (fwd+rev)', Formatter.bits),
        ('Rate (fwd+rev)', Formatter.suffix(' pps')),
        ('Avg Drop Rate', Formatter.suffix('%')),
        ('Avg Latency (usec)', Formatter.standard),
        ('Min Latency (usec)', Formatter.standard),
        ('Max Latency (usec)', Formatter.standard)
    ]

    # table header for fixed-rate (single run) results
    single_run_header = [
        ('L2 Frame Size', Formatter.standard),
        ('Drop Rate', Formatter.suffix('%')),
        ('Avg Latency (usec)', Formatter.standard),
        ('Min Latency (usec)', Formatter.standard),
        ('Max Latency (usec)', Formatter.standard)
    ]

    # table header for the per-direction run configuration
    config_header = [
        ('Direction', Formatter.standard),
        ('Requested TX Rate (bps)', Formatter.bits),
        ('Actual TX Rate (bps)', Formatter.bits),
        ('RX Rate (bps)', Formatter.bits),
        ('Requested TX Rate (pps)', Formatter.suffix(' pps')),
        ('Actual TX Rate (pps)', Formatter.suffix(' pps')),
        ('RX Rate (pps)', Formatter.suffix(' pps'))
    ]

    # run_config keys and their display names, kept in matching order
    direction_keys = ['direction-forward', 'direction-reverse', 'direction-total']
    direction_names = ['Forward', 'Reverse', 'Total']

    def __init__(self, result, sender):
        """Create a summarizer instance.

        result: nfvbench json result dict (must contain a 'config' key)
        sender: optional record sender; if falsy, no records are kept/sent
        """
        Summarizer.__init__(self)
        self.result = result
        self.config = self.result['config']
        self.record_header = None
        self.record_data = None
        self.sender = sender
        # if sender is available initialize record
        if self.sender:
            self.__record_init()
        self.__summarize()

    def __get_openstack_spec(self, property):
        # return the requested openstack_spec entry or '' when absent
        # (note: 'property' shadows the builtin; left as-is for compatibility)
        try:
            return self.result['openstack_spec'][property]
        except KeyError:
            return ''

    def __summarize(self):
        """Emit the full report and trigger record sending per service chain."""
        self._put()
        self._put('========== NFVBench Summary ==========')
        self._put('Date:', self.result['date'])
        self._put('NFVBench version', self.result['nfvbench_version'])
        self._put('Openstack Neutron:', {
            'vSwitch': self.__get_openstack_spec('vswitch'),
            'Encapsulation': self.__get_openstack_spec('encaps')
        })
        self.__record_header_put('version', self.result['nfvbench_version'])
        self.__record_header_put('vSwitch', self.__get_openstack_spec('vswitch'))
        self.__record_header_put('Encapsulation', self.__get_openstack_spec('encaps'))
        self._put('Benchmarks:')
        with self._create_block():
            self._put('Networks:')
            with self._create_block():
                network_benchmark = self.result['benchmarks']['network']

                self._put('Components:')
                with self._create_block():
                    self._put('Traffic Generator:')
                    with self._create_block(False):
                        self._put('Profile:', self.config['tg-name'])
                        self._put('Tool:', self.config['tg-tool'])
                    if network_benchmark['versions']:
                        self._put('Versions:')
                        with self._create_block():
                            for component, version in network_benchmark['versions'].iteritems():
                                self._put(component + ':', version)

                if self.config['ndr_run'] or self.config['pdr_run']:
                    self._put('Measurement Parameters:')
                    with self._create_block(False):
                        if self.config['ndr_run']:
                            self._put('NDR:', self.config['measurement']['NDR'])
                        if self.config['pdr_run']:
                            self._put('PDR:', self.config['measurement']['PDR'])
                self._put('Service chain:')
                # each entry is a (chain_name, chain_benchmark) pair
                for result in network_benchmark['service_chain'].iteritems():
                    with self._create_block():
                        self.__chain_summarize(*result)

    def __chain_summarize(self, chain_name, chain_benchmark):
        """Emit the section for one service chain."""
        self._put(chain_name + ':')
        self.__record_header_put('service_chain', chain_name)
        with self._create_block():
            self._put('Traffic:')
            with self._create_block(False):
                self.__traffic_summarize(chain_benchmark['result'])

    def __traffic_summarize(self, traffic_benchmark):
        """Emit traffic profile info, the summary table and per-frame-size
        analysis, then flush accumulated records through the sender."""
        self._put('Profile:', traffic_benchmark['profile'])
        self._put('Bidirectional:', traffic_benchmark['bidirectional'])
        self._put('Flow count:', traffic_benchmark['flow_count'])
        self._put('Service chains count:', traffic_benchmark['service_chain_count'])
        self._put('Compute nodes:', traffic_benchmark['compute_nodes'].keys())

        self.__record_header_put('profile', traffic_benchmark['profile'])
        self.__record_header_put('bidirectional', traffic_benchmark['bidirectional'])
        self.__record_header_put('flow_count', traffic_benchmark['flow_count'])
        self.__record_header_put('sc_count', traffic_benchmark['service_chain_count'])
        self.__record_header_put('compute_nodes', traffic_benchmark['compute_nodes'].keys())
        with self._create_block(False):
            self._put()
            if not self.config['no_traffic']:
                self._put('Run Summary:')
                self._put()
                with self._create_block(False):
                    self._put_table(self.__get_summary_table(traffic_benchmark['result']))
                    try:
                        self._put()
                        self._put(traffic_benchmark['result']['warning'])
                    except KeyError:
                        # no warning attached to this result
                        pass

            # entry is a (frame_size, analysis) tuple; skip the 'warning' key
            for entry in traffic_benchmark['result'].iteritems():
                if 'warning' in entry:
                    continue
                self.__chain_analysis_summarize(*entry)
            self.__record_send()

    def __chain_analysis_summarize(self, frame_size, analysis):
        """Emit the detailed analysis section for one L2 frame size."""
        self._put()
        self._put('L2 frame size:', frame_size)
        if 'actual_l2frame_size' in analysis:
            self._put('Actual l2 frame size:', analysis['actual_l2frame_size'])
        elif self.config['ndr_run'] and 'actual_l2frame_size' in analysis['ndr']:
            self._put('Actual l2 frame size:', analysis['ndr']['actual_l2frame_size'])
        elif self.config['pdr_run'] and 'actual_l2frame_size' in analysis['pdr']:
            self._put('Actual l2 frame size:', analysis['pdr']['actual_l2frame_size'])
        if self.config['ndr_run']:
            self._put('NDR search duration:', Formatter.float(0)(analysis['ndr']['time_taken_sec']),
                      'seconds')
            self.__record_data_put(frame_size, {'ndr_search_duration': Formatter.float(0)(
                analysis['ndr']['time_taken_sec'])})
        if self.config['pdr_run']:
            self._put('PDR search duration:', Formatter.float(0)(analysis['pdr']['time_taken_sec']),
                      'seconds')
            self.__record_data_put(frame_size, {'pdr_search_duration': Formatter.float(0)(
                analysis['pdr']['time_taken_sec'])})
        self._put()

        if not self.config['no_traffic'] and self.config['single_run']:
            self._put('Run Config:')
            self._put()
            with self._create_block(False):
                self._put_table(self.__get_config_table(analysis['run_config'], frame_size))
                if 'warning' in analysis['run_config'] and analysis['run_config']['warning']:
                    self._put()
                    self._put(analysis['run_config']['warning'])
                self._put()

        if 'packet_path_stats' in analysis:
            # 'dir' shadows the builtin; left as-is for compatibility
            for dir in ['Forward', 'Reverse']:
                self._put(dir + ' Chain Packet Counters and Latency:')
                self._put()
                with self._create_block(False):
                    self._put_table(self._get_chain_table(analysis['packet_path_stats'][dir]))
                    self._put()

    def __get_summary_table(self, traffic_result):
        """Build the summary Table (NDR/PDR or single-run layout) and record
        the same values per frame size."""
        if self.config['single_run']:
            summary_table = Table(self.single_run_header)
        else:
            summary_table = Table(self.ndr_pdr_header)

        if self.config['ndr_run']:
            for frame_size, analysis in traffic_result.iteritems():
                if frame_size == 'warning':
                    continue
                summary_table.add_row([
                    'NDR',
                    frame_size,
                    analysis['ndr']['rate_bps'],
                    analysis['ndr']['rate_pps'],
                    analysis['ndr']['stats']['overall']['drop_percentage'],
                    analysis['ndr']['stats']['overall']['avg_delay_usec'],
                    analysis['ndr']['stats']['overall']['min_delay_usec'],
                    analysis['ndr']['stats']['overall']['max_delay_usec']
                ])
                self.__record_data_put(frame_size, {'ndr': {
                    'type': 'NDR',
                    'rate_bps': analysis['ndr']['rate_bps'],
                    'rate_pps': analysis['ndr']['rate_pps'],
                    'drop_percentage': analysis['ndr']['stats']['overall']['drop_percentage'],
                    'avg_delay_usec': analysis['ndr']['stats']['overall']['avg_delay_usec'],
                    'min_delay_usec': analysis['ndr']['stats']['overall']['min_delay_usec'],
                    'max_delay_usec': analysis['ndr']['stats']['overall']['max_delay_usec']
                }})
        if self.config['pdr_run']:
            for frame_size, analysis in traffic_result.iteritems():
                if frame_size == 'warning':
                    continue
                summary_table.add_row([
                    'PDR',
                    frame_size,
                    analysis['pdr']['rate_bps'],
                    analysis['pdr']['rate_pps'],
                    analysis['pdr']['stats']['overall']['drop_percentage'],
                    analysis['pdr']['stats']['overall']['avg_delay_usec'],
                    analysis['pdr']['stats']['overall']['min_delay_usec'],
                    analysis['pdr']['stats']['overall']['max_delay_usec']
                ])
                self.__record_data_put(frame_size, {'pdr': {
                    'type': 'PDR',
                    'rate_bps': analysis['pdr']['rate_bps'],
                    'rate_pps': analysis['pdr']['rate_pps'],
                    'drop_percentage': analysis['pdr']['stats']['overall']['drop_percentage'],
                    'avg_delay_usec': analysis['pdr']['stats']['overall']['avg_delay_usec'],
                    'min_delay_usec': analysis['pdr']['stats']['overall']['min_delay_usec'],
                    'max_delay_usec': analysis['pdr']['stats']['overall']['max_delay_usec']
                }})
        if self.config['single_run']:
            for frame_size, analysis in traffic_result.iteritems():
                summary_table.add_row([
                    frame_size,
                    analysis['stats']['overall']['drop_rate_percent'],
                    analysis['stats']['overall']['rx']['avg_delay_usec'],
                    analysis['stats']['overall']['rx']['min_delay_usec'],
                    analysis['stats']['overall']['rx']['max_delay_usec']
                ])
                self.__record_data_put(frame_size, {'single_run': {
                    'type': 'single_run',
                    'drop_rate_percent': analysis['stats']['overall']['drop_rate_percent'],
                    'avg_delay_usec': analysis['stats']['overall']['rx']['avg_delay_usec'],
                    'min_delay_usec': analysis['stats']['overall']['rx']['min_delay_usec'],
                    'max_delay_usec': analysis['stats']['overall']['rx']['max_delay_usec']
                }})
        return summary_table

    def __get_config_table(self, run_config, frame_size):
        """Build the per-direction run-config Table and record the rates."""
        config_table = Table(self.config_header)
        for key, name in zip(self.direction_keys, self.direction_names):
            if key not in run_config:
                continue
            config_table.add_row([
                name,
                run_config[key]['orig']['rate_bps'],
                run_config[key]['tx']['rate_bps'],
                run_config[key]['rx']['rate_bps'],
                int(run_config[key]['orig']['rate_pps']),
                int(run_config[key]['tx']['rate_pps']),
                int(run_config[key]['rx']['rate_pps']),
            ])
            self.__record_data_put(frame_size, {
                name.lower() + "_orig_rate_bps": int(run_config[key]['orig']['rate_bps']),
                name.lower() + "_tx_rate_bps": int(run_config[key]['tx']['rate_bps']),
                name.lower() + "_rx_rate_bps": int(run_config[key]['rx']['rate_bps']),
                name.lower() + "_orig_rate_pps": int(run_config[key]['orig']['rate_pps']),
                name.lower() + "_tx_rate_pps": int(run_config[key]['tx']['rate_pps']),
                name.lower() + "_rx_rate_pps": int(run_config[key]['rx']['rate_pps']),

            })
        return config_table

    def _get_chain_table(self, chain_stats):
        """Retrieve the table for a direction.

        chain_stats: {
             'interfaces': ['Port0', 'drop %'', 'vhost0', 'Port1'],
             'chains': {
                 0: {'packets': [2000054, '-0.023%', 1999996, 1999996],
                     'lat_min_usec': 10,
                     'lat_max_usec': 187,
                     'lat_avg_usec': 45},
                 1: {...},
                 'total': {...}
             }
        }
        """
        chains = chain_stats['chains']
        # annotate drop counts/rates in-place before building the table
        _annotate_chain_stats(chains)
        header = [('Chain', Formatter.standard)] + \
                 [(ifname, Formatter.standard) for ifname in chain_stats['interfaces']]
        # add latency columns if available Avg, Min, Max
        lat_keys = []
        lat_map = {'lat_avg_usec': 'Avg lat.',
                   'lat_min_usec': 'Min lat.',
                   'lat_max_usec': 'Max lat.'}
        # chain 0 is used as the reference for latency availability
        if 'lat_avg_usec' in chains[0]:
            lat_keys = ['lat_avg_usec', 'lat_min_usec', 'lat_max_usec']
            for key in lat_keys:
                header.append((lat_map[key], Formatter.standard))

        table = Table(header)
        for chain in sorted(chains.keys()):
            row = [chain] + chains[chain]['packets']
            for lat_key in lat_keys:
                row.append('{:,} usec'.format(chains[chain][lat_key]))
            table.add_row(row)
        return table

    def __record_header_put(self, key, value):
        # record a header field only when a sender is configured
        if self.sender:
            self.record_header[key] = value

    def __record_data_put(self, key, data):
        # merge data into the per-frame-size record dict (sender required)
        if self.sender:
            if key not in self.record_data:
                self.record_data[key] = {}
            self.record_data[key].update(data)

    def __record_send(self):
        """Send one record per frame size and run type, then reset state."""
        if self.sender:
            self.record_header["@timestamp"] = datetime.utcnow().replace(
                tzinfo=pytz.utc).strftime("%Y-%m-%dT%H:%M:%S.%f%z")
            for frame_size in self.record_data:
                # NOTE(review): 'data' aliases record_header (no copy), so
                # frame_size and per-run fields accumulate into the header
                # across loop iterations — confirm this is intended
                data = self.record_header
                data['frame_size'] = frame_size
                data.update(self.record_data[frame_size])
                # split out run-type-specific sections so each is sent as a
                # separate record combined with the common fields
                run_specific_data = {}
                if 'single_run' in data:
                    run_specific_data['single_run'] = data['single_run']
                    del data['single_run']
                if 'ndr' in data:
                    run_specific_data['ndr'] = data['ndr']
                    run_specific_data['ndr']['drop_limit'] = self.config['measurement']['NDR']
                    del data['ndr']
                if 'pdr' in data:
                    run_specific_data['pdr'] = data['pdr']
                    run_specific_data['pdr']['drop_limit'] = self.config['measurement']['PDR']
                    del data['pdr']
                for key in run_specific_data:
                    data_to_send = data.copy()
                    data_to_send.update(run_specific_data[key])
                    self.sender.record_send(data_to_send)
            self.__record_init()

    def __record_init(self):
        # init is called after checking for sender
        self.record_header = {
            "runlogdate": self.sender.runlogdate,
            "user_label": self.config['user_label']
        }
        self.record_data = {}