Merge "Auto Generated INFO.yaml file"
[nfvbench.git] / nfvbench / summarizer.py
#!/usr/bin/env python
# Copyright 2016 Cisco Systems, Inc.  All rights reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.
#

from contextlib import contextmanager
from datetime import datetime
import math

import bitmath
import pytz
from tabulate import tabulate

from specs import ChainType


class Formatter(object):
    """Collection of string formatter methods"""

    @staticmethod
    def fixed(data):
        return data

    @staticmethod
    def int(data):
        return '{:,}'.format(data)

    @staticmethod
    def float(decimal):
        return lambda data: '%.{}f'.format(decimal) % (data)

    @staticmethod
    def standard(data):
        if isinstance(data, int):
            return Formatter.int(data)
        elif isinstance(data, float):
            return Formatter.float(4)(data)
        return Formatter.fixed(data)

    @staticmethod
    def suffix(suffix_str):
        return lambda data: Formatter.standard(data) + suffix_str

    @staticmethod
    def bits(data):
        # By default, `best_prefix` returns a value in byte format; this hack (multiply by 8.0)
        # converts it into bit format.
        bit = 8.0 * bitmath.Bit(float(data))
        bit = bit.best_prefix(bitmath.SI)
        byte_to_bit_classes = {
            'kB': bitmath.kb,
            'MB': bitmath.Mb,
            'GB': bitmath.Gb,
            'TB': bitmath.Tb,
            'PB': bitmath.Pb,
            'EB': bitmath.Eb,
            'ZB': bitmath.Zb,
            'YB': bitmath.Yb,
        }
        bps = byte_to_bit_classes.get(bit.unit, bitmath.Bit).from_other(bit) / 8.0
        if bps.unit != 'Bit':
            return bps.format("{value:.4f} {unit}ps")
        return bps.format("{value:.4f} bps")

    @staticmethod
    def percentage(data):
        if data is None:
            return ''
        elif math.isnan(data):
            return '-'
        return Formatter.suffix('%')(Formatter.float(4)(data))


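# Illustrative, commented-out sketch of how the Formatter helpers behave (not part
# of the module, not executed); the exact prefix and rounding shown for bits()
# assume bitmath's SI prefix selection.
#
#   Formatter.int(1234567)              # -> '1,234,567'
#   Formatter.float(2)(3.14159)         # -> '3.14'
#   Formatter.suffix(' pps')(2000000)   # -> '2,000,000 pps'
#   Formatter.percentage(0.1)           # -> '0.1000%'
#   Formatter.bits(1500000000)          # -> '1.5000 Gbps' (assuming bitmath picks the Gb prefix)

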
class Table(object):
    """ASCII readable table class"""

    def __init__(self, header):
        header_row, self.formatters = zip(*header)
        self.data = [header_row]
        self.columns = len(header_row)

    def add_row(self, row):
        assert self.columns == len(row)
        formatted_row = []
        for entry, formatter in zip(row, self.formatters):
            formatted_row.append(formatter(entry))
        self.data.append(formatted_row)

    def get_string(self, indent=0):
        spaces = ' ' * indent
        table = tabulate(self.data,
                         headers='firstrow',
                         tablefmt='grid',
                         stralign='center',
                         floatfmt='.2f')
        return table.replace('\n', '\n' + spaces)


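# Illustrative, commented-out sketch of Table usage (not executed): each header
# entry pairs a column title with one of the Formatter callables above, and
# add_row() applies the matching formatter to each cell. The column names and
# values below are arbitrary examples.
#
#   table = Table([('L2 Frame Size', Formatter.standard),
#                  ('Drop Rate', Formatter.suffix('%'))])
#   table.add_row([64, 0.0])
#   print table.get_string(indent=4)

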
class Summarizer(object):
    """Generic summarizer class"""

    indent_per_level = 2

    def __init__(self):
        self.indent_size = 0
        self.marker_stack = [False]
        self.str = ''

    def __indent(self, marker):
        self.indent_size += self.indent_per_level
        self.marker_stack.append(marker)

    def __unindent(self):
        assert self.indent_size >= self.indent_per_level
        self.indent_size -= self.indent_per_level
        self.marker_stack.pop()

    def __get_indent_string(self):
        current_str = ' ' * self.indent_size
        if self.marker_stack[-1]:
            # blocks created with marker=True replace the last two indent spaces with '> '
            current_str = current_str[:-2] + '> '
        return current_str

    def _put(self, *args):
        self.str += self.__get_indent_string()
        if args and isinstance(args[-1], dict):
            # when the last argument is a dict, print the other arguments as a label
            # and render the dict as an indented key/value block
            self.str += ' '.join(map(str, args[:-1])) + '\n'
            self._put_dict(args[-1])
        else:
            self.str += ' '.join(map(str, args)) + '\n'

    def _put_dict(self, data):
        with self._create_block(False):
            for key, value in data.iteritems():
                if isinstance(value, dict):
                    self._put(key + ':')
                    self._put_dict(value)
                else:
                    self._put(key + ':', value)

    def _put_table(self, table):
        self.str += self.__get_indent_string()
        self.str += table.get_string(self.indent_size) + '\n'

    def __str__(self):
        return self.str

    @contextmanager
    def _create_block(self, marker=True):
        self.__indent(marker)
        yield
        self.__unindent()


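# Illustrative, commented-out sketch of the indentation helpers (not executed):
# each nested _create_block() adds two spaces of indentation, and blocks created
# with marker=True prefix their lines with '> '. The labels below are arbitrary.
#
#   s = Summarizer()
#   s._put('Benchmarks:')
#   with s._create_block():
#       s._put('Networks:')            # rendered as '> Networks:'
#       with s._create_block(False):
#           s._put('Type:', 'PVP')     # rendered as '    Type: PVP'
#   print str(s)

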
class NFVBenchSummarizer(Summarizer):
    """Summarize nfvbench json result"""

    ndr_pdr_header = [
        ('-', Formatter.fixed),
        ('L2 Frame Size', Formatter.standard),
        ('Rate (fwd+rev)', Formatter.bits),
        ('Rate (fwd+rev)', Formatter.suffix(' pps')),
        ('Avg Drop Rate', Formatter.suffix('%')),
        ('Avg Latency (usec)', Formatter.standard),
        ('Min Latency (usec)', Formatter.standard),
        ('Max Latency (usec)', Formatter.standard)
    ]

    single_run_header = [
        ('L2 Frame Size', Formatter.standard),
        ('Drop Rate', Formatter.suffix('%')),
        ('Avg Latency (usec)', Formatter.standard),
        ('Min Latency (usec)', Formatter.standard),
        ('Max Latency (usec)', Formatter.standard)
    ]

    config_header = [
        ('Direction', Formatter.standard),
        ('Requested TX Rate (bps)', Formatter.bits),
        ('Actual TX Rate (bps)', Formatter.bits),
        ('RX Rate (bps)', Formatter.bits),
        ('Requested TX Rate (pps)', Formatter.suffix(' pps')),
        ('Actual TX Rate (pps)', Formatter.suffix(' pps')),
        ('RX Rate (pps)', Formatter.suffix(' pps'))
    ]

    chain_analysis_header = [
        ('Interface', Formatter.standard),
        ('Device', Formatter.standard),
        ('Packets (fwd)', Formatter.standard),
        ('Drops (fwd)', Formatter.standard),
        ('Drop% (fwd)', Formatter.percentage),
        ('Packets (rev)', Formatter.standard),
        ('Drops (rev)', Formatter.standard),
        ('Drop% (rev)', Formatter.percentage)
    ]

    direction_keys = ['direction-forward', 'direction-reverse', 'direction-total']
    direction_names = ['Forward', 'Reverse', 'Total']

    def __init__(self, result, sender):
        Summarizer.__init__(self)
        self.result = result
        self.config = self.result['config']
        self.record_header = None
        self.record_data = None
        self.sender = sender
        # if sender is available initialize record
        if self.sender:
            self.__record_init()
        self.__summarize()

    def __get_openstack_spec(self, property):
        try:
            return self.result['openstack_spec'][property]
        except KeyError:
            return ''

    def __summarize(self):
        self._put()
        self._put('========== NFVBench Summary ==========')
        self._put('Date:', self.result['date'])
        self._put('NFVBench version', self.result['nfvbench_version'])
        self._put('Openstack Neutron:', {
            'vSwitch': self.__get_openstack_spec('vswitch'),
            'Encapsulation': self.__get_openstack_spec('encaps')
        })
        self.__record_header_put('version', self.result['nfvbench_version'])
        self.__record_header_put('vSwitch', self.__get_openstack_spec('vswitch'))
        self.__record_header_put('Encapsulation', self.__get_openstack_spec('encaps'))
        self._put('Benchmarks:')
        with self._create_block():
            self._put('Networks:')
            with self._create_block():
                network_benchmark = self.result['benchmarks']['network']

                self._put('Components:')
                with self._create_block():
                    self._put('TOR:')
                    with self._create_block(False):
                        self._put('Type:', self.config['tor']['type'])
                    self._put('Traffic Generator:')
                    with self._create_block(False):
                        self._put('Profile:', self.config['generator_config']['name'])
                        self._put('Tool:', self.config['generator_config']['tool'])
                    if network_benchmark['versions']:
                        self._put('Versions:')
                        with self._create_block():
                            for component, version in network_benchmark['versions'].iteritems():
                                self._put(component + ':', version)

                if self.config['ndr_run'] or self.config['pdr_run']:
                    self._put('Measurement Parameters:')
                    with self._create_block(False):
                        if self.config['ndr_run']:
                            self._put('NDR:', self.config['measurement']['NDR'])
                        if self.config['pdr_run']:
                            self._put('PDR:', self.config['measurement']['PDR'])
                self._put('Service chain:')
                for result in network_benchmark['service_chain'].iteritems():
                    with self._create_block():
                        self.__chain_summarize(*result)

    def __chain_summarize(self, chain_name, chain_benchmark):
        self._put(chain_name + ':')
        if chain_name == ChainType.PVVP:
            self._put('Mode:', chain_benchmark.get('mode'))
            chain_name += "-" + chain_benchmark.get('mode')
        self.__record_header_put('service_chain', chain_name)
        with self._create_block():
            self._put('Traffic:')
            with self._create_block(False):
                self.__traffic_summarize(chain_benchmark['result'])

    def __traffic_summarize(self, traffic_benchmark):
        self._put('Profile:', traffic_benchmark['profile'])
        self._put('Bidirectional:', traffic_benchmark['bidirectional'])
        self._put('Flow count:', traffic_benchmark['flow_count'])
        self._put('Service chains count:', traffic_benchmark['service_chain_count'])
        self._put('Compute nodes:', traffic_benchmark['compute_nodes'].keys())

        self.__record_header_put('profile', traffic_benchmark['profile'])
        self.__record_header_put('bidirectional', traffic_benchmark['bidirectional'])
        self.__record_header_put('flow_count', traffic_benchmark['flow_count'])
        self.__record_header_put('sc_count', traffic_benchmark['service_chain_count'])
        self.__record_header_put('compute_nodes', traffic_benchmark['compute_nodes'].keys())
        with self._create_block(False):
            self._put()
            if not self.config['no_traffic']:
                self._put('Run Summary:')
                self._put()
                with self._create_block(False):
                    self._put_table(self.__get_summary_table(traffic_benchmark['result']))
                    try:
                        self._put()
                        self._put(traffic_benchmark['result']['warning'])
                    except KeyError:
                        pass

            # each remaining entry is (frame_size, analysis); skip the top-level 'warning' key
            for entry in traffic_benchmark['result'].iteritems():
                if 'warning' in entry:
                    continue
                self.__chain_analysis_summarize(*entry)
            self.__record_send()

    def __chain_analysis_summarize(self, frame_size, analysis):
        self._put()
        self._put('L2 frame size:', frame_size)
        if 'actual_l2frame_size' in analysis:
            self._put('Actual l2 frame size:', analysis['actual_l2frame_size'])
        elif self.config['ndr_run'] and 'actual_l2frame_size' in analysis['ndr']:
            self._put('Actual l2 frame size:', analysis['ndr']['actual_l2frame_size'])
        elif self.config['pdr_run'] and 'actual_l2frame_size' in analysis['pdr']:
            self._put('Actual l2 frame size:', analysis['pdr']['actual_l2frame_size'])
        if 'analysis_duration_sec' in analysis:
            self._put('Chain analysis duration:',
                      Formatter.float(3)(analysis['analysis_duration_sec']), 'seconds')
            self.__record_data_put(frame_size, {'chain_analysis_duration': Formatter.float(3)(
                analysis['analysis_duration_sec'])})
        if self.config['ndr_run']:
            self._put('NDR search duration:', Formatter.float(0)(analysis['ndr']['time_taken_sec']),
                      'seconds')
            self.__record_data_put(frame_size, {'ndr_search_duration': Formatter.float(0)(
                analysis['ndr']['time_taken_sec'])})
        if self.config['pdr_run']:
            self._put('PDR search duration:', Formatter.float(0)(analysis['pdr']['time_taken_sec']),
                      'seconds')
            self.__record_data_put(frame_size, {'pdr_search_duration': Formatter.float(0)(
                analysis['pdr']['time_taken_sec'])})
        self._put()

        if not self.config['no_traffic'] and self.config['single_run']:
            self._put('Run Config:')
            self._put()
            with self._create_block(False):
                self._put_table(self.__get_config_table(analysis['run_config'], frame_size))
                if 'warning' in analysis['run_config'] and analysis['run_config']['warning']:
                    self._put()
                    self._put(analysis['run_config']['warning'])
                self._put()

        if 'packet_analysis' in analysis:
            self._put('Chain Analysis:')
            self._put()
            with self._create_block(False):
                self._put_table(self.__get_chain_analysis_table(analysis['packet_analysis']))
                self._put()

    def __get_summary_table(self, traffic_result):
        if self.config['single_run']:
            summary_table = Table(self.single_run_header)
        else:
            summary_table = Table(self.ndr_pdr_header)

        if self.config['ndr_run']:
            for frame_size, analysis in traffic_result.iteritems():
                if frame_size == 'warning':
                    continue
                summary_table.add_row([
                    'NDR',
                    frame_size,
                    analysis['ndr']['rate_bps'],
                    analysis['ndr']['rate_pps'],
                    analysis['ndr']['stats']['overall']['drop_percentage'],
                    analysis['ndr']['stats']['overall']['avg_delay_usec'],
                    analysis['ndr']['stats']['overall']['min_delay_usec'],
                    analysis['ndr']['stats']['overall']['max_delay_usec']
                ])
                self.__record_data_put(frame_size, {'ndr': {
                    'type': 'NDR',
                    'rate_bps': analysis['ndr']['rate_bps'],
                    'rate_pps': analysis['ndr']['rate_pps'],
                    'drop_percentage': analysis['ndr']['stats']['overall']['drop_percentage'],
                    'avg_delay_usec': analysis['ndr']['stats']['overall']['avg_delay_usec'],
                    'min_delay_usec': analysis['ndr']['stats']['overall']['min_delay_usec'],
                    'max_delay_usec': analysis['ndr']['stats']['overall']['max_delay_usec']
                }})
        if self.config['pdr_run']:
            for frame_size, analysis in traffic_result.iteritems():
                if frame_size == 'warning':
                    continue
                summary_table.add_row([
                    'PDR',
                    frame_size,
                    analysis['pdr']['rate_bps'],
                    analysis['pdr']['rate_pps'],
                    analysis['pdr']['stats']['overall']['drop_percentage'],
                    analysis['pdr']['stats']['overall']['avg_delay_usec'],
                    analysis['pdr']['stats']['overall']['min_delay_usec'],
                    analysis['pdr']['stats']['overall']['max_delay_usec']
                ])
                self.__record_data_put(frame_size, {'pdr': {
                    'type': 'PDR',
                    'rate_bps': analysis['pdr']['rate_bps'],
                    'rate_pps': analysis['pdr']['rate_pps'],
                    'drop_percentage': analysis['pdr']['stats']['overall']['drop_percentage'],
                    'avg_delay_usec': analysis['pdr']['stats']['overall']['avg_delay_usec'],
                    'min_delay_usec': analysis['pdr']['stats']['overall']['min_delay_usec'],
                    'max_delay_usec': analysis['pdr']['stats']['overall']['max_delay_usec']
                }})
        if self.config['single_run']:
            for frame_size, analysis in traffic_result.iteritems():
                summary_table.add_row([
                    frame_size,
                    analysis['stats']['overall']['drop_rate_percent'],
                    analysis['stats']['overall']['rx']['avg_delay_usec'],
                    analysis['stats']['overall']['rx']['min_delay_usec'],
                    analysis['stats']['overall']['rx']['max_delay_usec']
                ])
                self.__record_data_put(frame_size, {'single_run': {
                    'type': 'single_run',
                    'drop_rate_percent': analysis['stats']['overall']['drop_rate_percent'],
                    'avg_delay_usec': analysis['stats']['overall']['rx']['avg_delay_usec'],
                    'min_delay_usec': analysis['stats']['overall']['rx']['min_delay_usec'],
                    'max_delay_usec': analysis['stats']['overall']['rx']['max_delay_usec']
                }})
        return summary_table

    def __get_config_table(self, run_config, frame_size):
        config_table = Table(self.config_header)
        for key, name in zip(self.direction_keys, self.direction_names):
            if key not in run_config:
                continue
            config_table.add_row([
                name,
                run_config[key]['orig']['rate_bps'],
                run_config[key]['tx']['rate_bps'],
                run_config[key]['rx']['rate_bps'],
                int(run_config[key]['orig']['rate_pps']),
                int(run_config[key]['tx']['rate_pps']),
                int(run_config[key]['rx']['rate_pps']),
            ])
            self.__record_data_put(frame_size, {
                name.lower() + "_orig_rate_bps": int(run_config[key]['orig']['rate_bps']),
                name.lower() + "_tx_rate_bps": int(run_config[key]['tx']['rate_bps']),
                name.lower() + "_rx_rate_bps": int(run_config[key]['rx']['rate_bps']),
                name.lower() + "_orig_rate_pps": int(run_config[key]['orig']['rate_pps']),
                name.lower() + "_tx_rate_pps": int(run_config[key]['tx']['rate_pps']),
                name.lower() + "_rx_rate_pps": int(run_config[key]['rx']['rate_pps']),
            })
        return config_table

    def __get_chain_analysis_table(self, packet_analysis):
        chain_analysis_table = Table(self.chain_analysis_header)
        forward_analysis = packet_analysis['direction-forward']
        reverse_analysis = packet_analysis['direction-reverse']
        # reverse the reverse-direction list so that each row pairs the same interface
        # in the forward and reverse directions
        reverse_analysis.reverse()
        for fwd, rev in zip(forward_analysis, reverse_analysis):
            chain_analysis_table.add_row([
                fwd['interface'],
                fwd['device'],
                fwd['packet_count'],
                fwd.get('packet_drop_count', None),
                fwd.get('packet_drop_percentage', None),
                rev['packet_count'],
                rev.get('packet_drop_count', None),
                rev.get('packet_drop_percentage', None),
            ])
        return chain_analysis_table

    def __record_header_put(self, key, value):
        if self.sender:
            self.record_header[key] = value

    def __record_data_put(self, key, data):
        if self.sender:
            if key not in self.record_data:
                self.record_data[key] = {}
            self.record_data[key].update(data)

    def __record_send(self):
        if self.sender:
            self.record_header["@timestamp"] = datetime.utcnow().replace(
                tzinfo=pytz.utc).strftime("%Y-%m-%dT%H:%M:%S.%f%z")
            for frame_size in self.record_data:
                data = self.record_header
                data['frame_size'] = frame_size
                data.update(self.record_data[frame_size])
                # split out the per-run-type sections so that one record is sent
                # per run type (single_run/ndr/pdr) and per frame size
                run_specific_data = {}
                if 'single_run' in data:
                    run_specific_data['single_run'] = data['single_run']
                    del data['single_run']
                if 'ndr' in data:
                    run_specific_data['ndr'] = data['ndr']
                    run_specific_data['ndr']['drop_limit'] = self.config['measurement']['NDR']
                    del data['ndr']
                if 'pdr' in data:
                    run_specific_data['pdr'] = data['pdr']
                    run_specific_data['pdr']['drop_limit'] = self.config['measurement']['PDR']
                    del data['pdr']
                for key in run_specific_data:
                    data_to_send = data.copy()
                    data_to_send.update(run_specific_data[key])
                    self.sender.record_send(data_to_send)
            self.__record_init()

    def __record_init(self):
        # init is called after checking for sender
        self.record_header = {
            "runlogdate": self.sender.runlogdate,
            "user_label": self.config['user_label']
        }
        self.record_data = {}
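

# Illustrative, commented-out sketch of one way this summarizer can be driven
# (not executed here): `result` stands for the nfvbench json result dict of a run,
# and `sender` may be None when no record sender is configured (any object exposing
# `runlogdate` and `record_send()` would work).
#
#   summarizer = NFVBenchSummarizer(result, sender=None)
#   print str(summarizer)    # ASCII summary built by __summarize() at construction time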