NFVBENCH-108 Adjust exact frame size to take the FCS field into account
[nfvbench.git] / nfvbench / summarizer.py
1 #!/usr/bin/env python
2 # Copyright 2016 Cisco Systems, Inc.  All rights reserved.
3 #
4 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
5 #    not use this file except in compliance with the License. You may obtain
6 #    a copy of the License at
7 #
8 #         http://www.apache.org/licenses/LICENSE-2.0
9 #
10 #    Unless required by applicable law or agreed to in writing, software
11 #    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
12 #    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13 #    License for the specific language governing permissions and limitations
14 #    under the License.
15 #
16
17 from contextlib import contextmanager
18 from datetime import datetime
19 import math
20
21 import bitmath
22 import pytz
23 from tabulate import tabulate
24
25 def _annotate_chain_stats(chain_stats, nodrop_marker='=>'):
26     """Transform a plain chain stats into an annotated one.
27
28     Example:
29     {
30          0: {'packets': [2000054, 1999996, 1999996, 1999996],
31              'lat_min_usec': 10,
32              'lat_max_usec': 187,
33              'lat_avg_usec': 45},
34          1: {...},
35          'total': {...}
36     }
37     should become:
38     {
39          0: {'packets': [2000054, -58 (-0.034%), '=>', 1999996],
40              'lat_min_usec': 10,
41              'lat_max_usec': 187,
42              'lat_avg_usec': 45},
43          1: {...},
44          'total': {...}
45     }
46
47     In the case of shared net, some columns in packets array can have ''
48     """
49     for stats in chain_stats.values():
50         packets = stats['packets']
51         count = len(packets)
52         if count > 1:
53             # keep the first counter
54             annotated_packets = [packets[0]]
55             # modify all remaining counters
56             prev_count = packets[0]
57             for index in range(1, count):
58                 cur_count = packets[index]
59                 if cur_count == '':
60                     # an empty string indicates an unknown counter for a shared interface
61                     # do not annotate those
62                     annotated_value = ''
63                 else:
64                     drop = cur_count - prev_count
65                     if drop:
66                         dr = (drop * 100.0) / prev_count if prev_count else 0
67                         annotated_value = '{:+,} ({:+.4f}%)'.format(drop, dr)
68                     else:
69                         # no drop
70                         # if last column we display the value
71                         annotated_value = cur_count if index == count - 1 else nodrop_marker
72                     prev_count = cur_count
73                 annotated_packets.append(annotated_value)
74
75             stats['packets'] = annotated_packets
76
class Formatter(object):
    """Collection of string formatter methods used to render result cells."""

    @staticmethod
    def fixed(data):
        """Return the value unchanged (already display-ready)."""
        return data

    @staticmethod
    def int(data):
        """Render an integer with thousands separators."""
        return '{:,}'.format(data)

    @staticmethod
    def float(decimal):
        """Return a formatter that renders floats with `decimal` digits."""
        def _format(data):
            return '%.{}f'.format(decimal) % (data)
        return _format

    @staticmethod
    def standard(data):
        """Dispatch to int/float/fixed formatting based on the value type."""
        if isinstance(data, int):
            return Formatter.int(data)
        if isinstance(data, float):
            return Formatter.float(4)(data)
        return Formatter.fixed(data)

    @staticmethod
    def suffix(suffix_str):
        """Return a formatter appending `suffix_str` to the standard form."""
        def _append_suffix(data):
            return Formatter.standard(data) + suffix_str
        return _append_suffix

    @staticmethod
    def bits(data):
        """Render a bits-per-second value with the best SI prefix."""
        # By default, `best_prefix` returns a value in byte format, this hack
        # (multiply by 8.0) will convert it into bit format.
        scaled = (8.0 * bitmath.Bit(float(data))).best_prefix(bitmath.SI)
        # map the byte-prefixed unit picked by best_prefix back to the
        # corresponding bit class
        byte_to_bit = {
            'kB': bitmath.kb,
            'MB': bitmath.Mb,
            'GB': bitmath.Gb,
            'TB': bitmath.Tb,
            'PB': bitmath.Pb,
            'EB': bitmath.Eb,
            'ZB': bitmath.Zb,
            'YB': bitmath.Yb,
        }
        bps = byte_to_bit.get(scaled.unit, bitmath.Bit).from_other(scaled) / 8.0
        if bps.unit == 'Bit':
            return bps.format("{value:.4f} bps")
        return bps.format("{value:.4f} {unit}ps")

    @staticmethod
    def percentage(data):
        """Render a percentage; '' for None and '-' for NaN."""
        if data is None:
            return ''
        if math.isnan(data):
            return '-'
        return Formatter.suffix('%')(Formatter.float(4)(data))
133
class Table(object):
    """ASCII readable table built from (title, formatter) column specs."""

    def __init__(self, header):
        # header: iterable of (column title, formatter callable) pairs
        titles, self.formatters = zip(*header)
        self.data = [titles]
        self.columns = len(titles)

    def add_row(self, row):
        """Append one row, passing each cell through its column formatter."""
        assert self.columns == len(row)
        self.data.append([fmt(cell) for cell, fmt in zip(row, self.formatters)])

    def get_string(self, indent=0):
        """Render the table as a grid, indenting continuation lines."""
        rendered = tabulate(self.data,
                            headers='firstrow',
                            tablefmt='grid',
                            stralign='center',
                            floatfmt='.2f')
        return rendered.replace('\n', '\n' + ' ' * indent)
157
158
class Summarizer(object):
    """Generic indented text summarizer.

    Builds a text report incrementally: _put() appends one line at the
    current indentation level and _create_block() nests writes one level
    deeper (optionally prefixing the first line column with a '> ' marker).
    The final report is retrieved with str(summarizer).
    """

    # number of spaces added per nesting level
    indent_per_level = 2

    def __init__(self):
        self.indent_size = 0
        # one marker flag per nesting level; the top level has no marker
        self.marker_stack = [False]
        self.str = ''

    def __indent(self, marker):
        # enter one nesting level; marker selects the '> ' prefix style
        self.indent_size += self.indent_per_level
        self.marker_stack.append(marker)

    def __unindent(self):
        assert self.indent_size >= self.indent_per_level
        self.indent_size -= self.indent_per_level
        self.marker_stack.pop()

    def __get_indent_string(self):
        current_str = ' ' * self.indent_size
        if self.marker_stack[-1]:
            # replace the last 2 spaces with the block marker
            current_str = current_str[:-2] + '> '
        return current_str

    def _put(self, *args):
        """Append one line made of args joined by spaces.

        If the last argument is a dict, it is expanded on the following
        lines as 'key: value' pairs, indented one extra level.
        """
        self.str += self.__get_indent_string()
        if args and isinstance(args[-1], dict):
            self.str += ' '.join(map(str, args[:-1])) + '\n'
            self._put_dict(args[-1])
        else:
            self.str += ' '.join(map(str, args)) + '\n'

    def _put_dict(self, data):
        """Append a dict as nested 'key: value' lines (recursive)."""
        with self._create_block(False):
            # items() iterates identically on Python 2 and 3
            # (iteritems() was Python 2 only and breaks under Python 3)
            for key, value in data.items():
                if isinstance(value, dict):
                    self._put(key + ':')
                    self._put_dict(value)
                else:
                    self._put(key + ':', value)

    def _put_table(self, table):
        """Append a pre-formatted Table at the current indentation."""
        self.str += self.__get_indent_string()
        self.str += table.get_string(self.indent_size) + '\n'

    def __str__(self):
        return self.str

    @contextmanager
    def _create_block(self, marker=True):
        """Context manager nesting all enclosed writes one level deeper."""
        self.__indent(marker)
        yield
        self.__unindent()
213
214
class NFVBenchSummarizer(Summarizer):
    """Summarize nfvbench json result.

    Walks the nfvbench result dictionary, renders a human-readable text
    summary using the inherited Summarizer helpers and, when a sender is
    provided, records flattened per-frame-size data and sends it through
    the sender.

    NOTE(review): this class uses dict.iteritems() in several places and
    is therefore Python 2 only.
    """

    # columns of the NDR/PDR result table: (title, formatter) pairs
    ndr_pdr_header = [
        ('-', Formatter.fixed),
        ('L2 Frame Size', Formatter.standard),
        ('Rate (fwd+rev)', Formatter.bits),
        ('Rate (fwd+rev)', Formatter.suffix(' pps')),
        ('Avg Drop Rate', Formatter.suffix('%')),
        ('Avg Latency (usec)', Formatter.standard),
        ('Min Latency (usec)', Formatter.standard),
        ('Max Latency (usec)', Formatter.standard)
    ]

    # columns of the fixed-rate (single run) result table
    single_run_header = [
        ('L2 Frame Size', Formatter.standard),
        ('Drop Rate', Formatter.suffix('%')),
        ('Avg Latency (usec)', Formatter.standard),
        ('Min Latency (usec)', Formatter.standard),
        ('Max Latency (usec)', Formatter.standard)
    ]

    # columns of the per-direction run configuration table
    config_header = [
        ('Direction', Formatter.standard),
        ('Requested TX Rate (bps)', Formatter.bits),
        ('Actual TX Rate (bps)', Formatter.bits),
        ('RX Rate (bps)', Formatter.bits),
        ('Requested TX Rate (pps)', Formatter.suffix(' pps')),
        ('Actual TX Rate (pps)', Formatter.suffix(' pps')),
        ('RX Rate (pps)', Formatter.suffix(' pps'))
    ]

    # run_config keys and their display names, kept in lockstep by index
    direction_keys = ['direction-forward', 'direction-reverse', 'direction-total']
    direction_names = ['Forward', 'Reverse', 'Total']

    def __init__(self, result, sender):
        """Create a summarizer instance.

        result: nfvbench result dictionary; must contain a 'config' key
                (and the benchmark sections consumed by __summarize)
        sender: optional record sender; when set, summarized data is also
                recorded and sent via __record_send
        """
        Summarizer.__init__(self)
        self.result = result
        self.config = self.result['config']
        self.record_header = None
        self.record_data = None
        self.sender = sender
        # if sender is available initialize record
        if self.sender:
            self.__record_init()
        self.__summarize()

    def __get_openstack_spec(self, property):
        """Return result['openstack_spec'][property] or '' when absent.

        NOTE: the parameter name shadows the 'property' builtin
        (harmless here, local scope only).
        """
        try:
            return self.result['openstack_spec'][property]
        except KeyError:
            # the spec section or the requested field is missing
            return ''

    def __summarize(self):
        """Build the full text summary of self.result (and record headers)."""
        self._put()
        self._put('========== NFVBench Summary ==========')
        self._put('Date:', self.result['date'])
        self._put('NFVBench version', self.result['nfvbench_version'])
        self._put('Openstack Neutron:', {
            'vSwitch': self.__get_openstack_spec('vswitch'),
            'Encapsulation': self.__get_openstack_spec('encaps')
        })
        self.__record_header_put('version', self.result['nfvbench_version'])
        self.__record_header_put('vSwitch', self.__get_openstack_spec('vswitch'))
        self.__record_header_put('Encapsulation', self.__get_openstack_spec('encaps'))
        self._put('Benchmarks:')
        with self._create_block():
            self._put('Networks:')
            with self._create_block():
                network_benchmark = self.result['benchmarks']['network']

                self._put('Components:')
                with self._create_block():
                    self._put('Traffic Generator:')
                    with self._create_block(False):
                        self._put('Profile:', self.config['tg-name'])
                        self._put('Tool:', self.config['tg-tool'])
                    if network_benchmark['versions']:
                        self._put('Versions:')
                        with self._create_block():
                            # iteritems(): Python 2 only
                            for component, version in network_benchmark['versions'].iteritems():
                                self._put(component + ':', version)

                if self.config['ndr_run'] or self.config['pdr_run']:
                    self._put('Measurement Parameters:')
                    with self._create_block(False):
                        if self.config['ndr_run']:
                            self._put('NDR:', self.config['measurement']['NDR'])
                        if self.config['pdr_run']:
                            self._put('PDR:', self.config['measurement']['PDR'])
                self._put('Service chain:')
                # result is a (chain_name, chain_benchmark) tuple
                for result in network_benchmark['service_chain'].iteritems():
                    with self._create_block():
                        self.__chain_summarize(*result)

    def __chain_summarize(self, chain_name, chain_benchmark):
        """Summarize one service chain section and record its name."""
        self._put(chain_name + ':')
        self.__record_header_put('service_chain', chain_name)
        with self._create_block():
            self._put('Traffic:')
            with self._create_block(False):
                self.__traffic_summarize(chain_benchmark['result'])

    def __traffic_summarize(self, traffic_benchmark):
        """Summarize the traffic section of one chain, then send records."""
        self._put('Profile:', traffic_benchmark['profile'])
        self._put('Bidirectional:', traffic_benchmark['bidirectional'])
        self._put('Flow count:', traffic_benchmark['flow_count'])
        self._put('Service chains count:', traffic_benchmark['service_chain_count'])
        self._put('Compute nodes:', traffic_benchmark['compute_nodes'].keys())

        self.__record_header_put('profile', traffic_benchmark['profile'])
        self.__record_header_put('bidirectional', traffic_benchmark['bidirectional'])
        self.__record_header_put('flow_count', traffic_benchmark['flow_count'])
        self.__record_header_put('sc_count', traffic_benchmark['service_chain_count'])
        self.__record_header_put('compute_nodes', traffic_benchmark['compute_nodes'].keys())
        with self._create_block(False):
            self._put()
            if not self.config['no_traffic']:
                self._put('Run Summary:')
                self._put()
                with self._create_block(False):
                    self._put_table(self.__get_summary_table(traffic_benchmark['result']))
                    try:
                        self._put()
                        self._put(traffic_benchmark['result']['warning'])
                    except KeyError:
                        # no warning attached to this result
                        pass

            # entry is a (frame_size, analysis) tuple; the membership test
            # matches (and skips) the entry whose key is 'warning'
            for entry in traffic_benchmark['result'].iteritems():
                if 'warning' in entry:
                    continue
                self.__chain_analysis_summarize(*entry)
            self.__record_send()

    def __chain_analysis_summarize(self, frame_size, analysis):
        """Summarize the per-frame-size analysis (durations, config, stats)."""
        self._put()
        self._put('L2 frame size:', frame_size)
        if self.config['ndr_run']:
            self._put('NDR search duration:', Formatter.float(0)(analysis['ndr']['time_taken_sec']),
                      'seconds')
            self.__record_data_put(frame_size, {'ndr_search_duration': Formatter.float(0)(
                analysis['ndr']['time_taken_sec'])})
        if self.config['pdr_run']:
            self._put('PDR search duration:', Formatter.float(0)(analysis['pdr']['time_taken_sec']),
                      'seconds')
            self.__record_data_put(frame_size, {'pdr_search_duration': Formatter.float(0)(
                analysis['pdr']['time_taken_sec'])})
        self._put()

        if not self.config['no_traffic'] and self.config['single_run']:
            self._put('Run Config:')
            self._put()
            with self._create_block(False):
                self._put_table(self.__get_config_table(analysis['run_config'], frame_size))
                if 'warning' in analysis['run_config'] and analysis['run_config']['warning']:
                    self._put()
                    self._put(analysis['run_config']['warning'])
                self._put()

        if 'packet_path_stats' in analysis:
            # NOTE: 'dir' shadows the builtin (local loop variable only)
            for dir in ['Forward', 'Reverse']:
                self._put(dir + ' Chain Packet Counters and Latency:')
                self._put()
                with self._create_block(False):
                    self._put_table(self._get_chain_table(analysis['packet_path_stats'][dir]))
                    self._put()

    def __get_summary_table(self, traffic_result):
        """Build the Run Summary table (NDR/PDR rows or single-run rows).

        Also records the same values per frame size for the sender.
        """
        if self.config['single_run']:
            summary_table = Table(self.single_run_header)
        else:
            summary_table = Table(self.ndr_pdr_header)

        if self.config['ndr_run']:
            for frame_size, analysis in traffic_result.iteritems():
                # 'warning' is a pseudo-entry, not a frame size
                if frame_size == 'warning':
                    continue
                summary_table.add_row([
                    'NDR',
                    frame_size,
                    analysis['ndr']['rate_bps'],
                    analysis['ndr']['rate_pps'],
                    analysis['ndr']['stats']['overall']['drop_percentage'],
                    analysis['ndr']['stats']['overall']['avg_delay_usec'],
                    analysis['ndr']['stats']['overall']['min_delay_usec'],
                    analysis['ndr']['stats']['overall']['max_delay_usec']
                ])
                self.__record_data_put(frame_size, {'ndr': {
                    'type': 'NDR',
                    'rate_bps': analysis['ndr']['rate_bps'],
                    'rate_pps': analysis['ndr']['rate_pps'],
                    'drop_percentage': analysis['ndr']['stats']['overall']['drop_percentage'],
                    'avg_delay_usec': analysis['ndr']['stats']['overall']['avg_delay_usec'],
                    'min_delay_usec': analysis['ndr']['stats']['overall']['min_delay_usec'],
                    'max_delay_usec': analysis['ndr']['stats']['overall']['max_delay_usec']
                }})
        if self.config['pdr_run']:
            for frame_size, analysis in traffic_result.iteritems():
                if frame_size == 'warning':
                    continue
                summary_table.add_row([
                    'PDR',
                    frame_size,
                    analysis['pdr']['rate_bps'],
                    analysis['pdr']['rate_pps'],
                    analysis['pdr']['stats']['overall']['drop_percentage'],
                    analysis['pdr']['stats']['overall']['avg_delay_usec'],
                    analysis['pdr']['stats']['overall']['min_delay_usec'],
                    analysis['pdr']['stats']['overall']['max_delay_usec']
                ])
                self.__record_data_put(frame_size, {'pdr': {
                    'type': 'PDR',
                    'rate_bps': analysis['pdr']['rate_bps'],
                    'rate_pps': analysis['pdr']['rate_pps'],
                    'drop_percentage': analysis['pdr']['stats']['overall']['drop_percentage'],
                    'avg_delay_usec': analysis['pdr']['stats']['overall']['avg_delay_usec'],
                    'min_delay_usec': analysis['pdr']['stats']['overall']['min_delay_usec'],
                    'max_delay_usec': analysis['pdr']['stats']['overall']['max_delay_usec']
                }})
        if self.config['single_run']:
            for frame_size, analysis in traffic_result.iteritems():
                summary_table.add_row([
                    frame_size,
                    analysis['stats']['overall']['drop_rate_percent'],
                    analysis['stats']['overall']['rx']['avg_delay_usec'],
                    analysis['stats']['overall']['rx']['min_delay_usec'],
                    analysis['stats']['overall']['rx']['max_delay_usec']
                ])
                self.__record_data_put(frame_size, {'single_run': {
                    'type': 'single_run',
                    'drop_rate_percent': analysis['stats']['overall']['drop_rate_percent'],
                    'avg_delay_usec': analysis['stats']['overall']['rx']['avg_delay_usec'],
                    'min_delay_usec': analysis['stats']['overall']['rx']['min_delay_usec'],
                    'max_delay_usec': analysis['stats']['overall']['rx']['max_delay_usec']
                }})
        return summary_table

    def __get_config_table(self, run_config, frame_size):
        """Build the Run Config table (one row per traffic direction).

        Also records the same rates per frame size for the sender.
        """
        config_table = Table(self.config_header)
        for key, name in zip(self.direction_keys, self.direction_names):
            if key not in run_config:
                continue
            config_table.add_row([
                name,
                run_config[key]['orig']['rate_bps'],
                run_config[key]['tx']['rate_bps'],
                run_config[key]['rx']['rate_bps'],
                int(run_config[key]['orig']['rate_pps']),
                int(run_config[key]['tx']['rate_pps']),
                int(run_config[key]['rx']['rate_pps']),
            ])
            self.__record_data_put(frame_size, {
                name.lower() + "_orig_rate_bps": int(run_config[key]['orig']['rate_bps']),
                name.lower() + "_tx_rate_bps": int(run_config[key]['tx']['rate_bps']),
                name.lower() + "_rx_rate_bps": int(run_config[key]['rx']['rate_bps']),
                name.lower() + "_orig_rate_pps": int(run_config[key]['orig']['rate_pps']),
                name.lower() + "_tx_rate_pps": int(run_config[key]['tx']['rate_pps']),
                name.lower() + "_rx_rate_pps": int(run_config[key]['rx']['rate_pps']),

            })
        return config_table

    def _get_chain_table(self, chain_stats):
        """Retrieve the table for a direction.

        chain_stats: {
             'interfaces': ['Port0', 'drop %'', 'vhost0', 'Port1'],
             'chains': {
                 0: {'packets': [2000054, '-0.023%', 1999996, 1999996],
                     'lat_min_usec': 10,
                     'lat_max_usec': 187,
                     'lat_avg_usec': 45},
                 1: {...},
                 'total': {...}
             }
        }
        """
        chains = chain_stats['chains']
        # annotate packet counters in place (drop deltas, '=>' markers)
        _annotate_chain_stats(chains)
        header = [('Chain', Formatter.standard)] + \
                 [(ifname, Formatter.standard) for ifname in chain_stats['interfaces']]
        # add latency columns if available Avg, Min, Max
        lat_keys = []
        lat_map = {'lat_avg_usec': 'Avg lat.',
                   'lat_min_usec': 'Min lat.',
                   'lat_max_usec': 'Max lat.'}
        # chain 0 is used as the witness: latency keys are assumed present
        # on all chains when present on chain 0
        if 'lat_avg_usec' in chains[0]:
            lat_keys = ['lat_avg_usec', 'lat_min_usec', 'lat_max_usec']
            for key in lat_keys:
                header.append((lat_map[key], Formatter.standard))

        table = Table(header)
        for chain in sorted(chains.keys()):
            row = [chain] + chains[chain]['packets']
            for lat_key in lat_keys:
                row.append('{:,} usec'.format(chains[chain][lat_key]))
            table.add_row(row)
        return table

    def __record_header_put(self, key, value):
        """Store a header field for the sender record (no-op without sender)."""
        if self.sender:
            self.record_header[key] = value

    def __record_data_put(self, key, data):
        """Merge data into the per-frame-size record (no-op without sender)."""
        if self.sender:
            if key not in self.record_data:
                self.record_data[key] = {}
            self.record_data[key].update(data)

    def __record_send(self):
        """Flatten and send one record per run type per frame size, then reset."""
        if self.sender:
            self.record_header["@timestamp"] = datetime.utcnow().replace(
                tzinfo=pytz.utc).strftime("%Y-%m-%dT%H:%M:%S.%f%z")
            for frame_size in self.record_data:
                # NOTE(review): 'data' aliases self.record_header (no copy),
                # so frame_size and per-frame fields accumulate in the header
                # across loop iterations — verify this cross-contamination
                # between frame sizes is intended
                data = self.record_header
                data['frame_size'] = frame_size
                data.update(self.record_data[frame_size])
                run_specific_data = {}
                if 'single_run' in data:
                    run_specific_data['single_run'] = data['single_run']
                    del data['single_run']
                if 'ndr' in data:
                    run_specific_data['ndr'] = data['ndr']
                    run_specific_data['ndr']['drop_limit'] = self.config['measurement']['NDR']
                    del data['ndr']
                if 'pdr' in data:
                    run_specific_data['pdr'] = data['pdr']
                    run_specific_data['pdr']['drop_limit'] = self.config['measurement']['PDR']
                    del data['pdr']
                # one record per run type (single_run/ndr/pdr) sharing the
                # common flattened fields
                for key in run_specific_data:
                    data_to_send = data.copy()
                    data_to_send.update(run_specific_data[key])
                    self.sender.record_send(data_to_send)
            self.__record_init()

    def __record_init(self):
        """Reset record header/data for a new batch of records."""
        # init is called after checking for sender
        self.record_header = {
            "runlogdate": self.sender.runlogdate,
            "user_label": self.config['user_label']
        }
        self.record_data = {}