NFVBENCH-40 Add pylint to tox
[nfvbench.git] nfvbench/summarizer.py
#!/usr/bin/env python
# Copyright 2016 Cisco Systems, Inc.  All rights reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.
#

from contextlib import contextmanager
from datetime import datetime
import math

import bitmath
import pytz
from specs import ChainType
from tabulate import tabulate


class Formatter(object):
    """Collection of string formatter methods"""

    @staticmethod
    def fixed(data):
        return data

    @staticmethod
    def int(data):
        return '{:,}'.format(data)

    @staticmethod
    def float(decimal):
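        """Return a formatter that renders a float with the given number of decimal digits"""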
        return lambda data: '%.{}f'.format(decimal) % (data)

    @staticmethod
    def standard(data):
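        """Format data based on its type: int, float or pass-through"""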
        if isinstance(data, int):
            return Formatter.int(data)
        elif isinstance(data, float):
            return Formatter.float(4)(data)
        return Formatter.fixed(data)

    @staticmethod
    def suffix(suffix_str):
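        """Return a formatter that appends suffix_str to the standard format of its input"""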
        return lambda data: Formatter.standard(data) + suffix_str

    @staticmethod
    def bits(data):
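        """Format a value in bit/s into a human readable string with an SI prefix (e.g. Gbps)"""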
        # By default, `best_prefix` returns a value in byte format; this hack (multiply by 8.0)
        # will convert it into bit format.
        bit = 8.0 * bitmath.Bit(float(data))
        bit = bit.best_prefix(bitmath.SI)
        byte_to_bit_classes = {
            'kB': bitmath.kb,
            'MB': bitmath.Mb,
            'GB': bitmath.Gb,
            'TB': bitmath.Tb,
            'PB': bitmath.Pb,
            'EB': bitmath.Eb,
            'ZB': bitmath.Zb,
            'YB': bitmath.Yb,
        }
        bps = byte_to_bit_classes.get(bit.unit, bitmath.Bit).from_other(bit) / 8.0
        if bps.unit != 'Bit':
            return bps.format("{value:.4f} {unit}ps")
        return bps.format("{value:.4f} bps")

    @staticmethod
    def percentage(data):
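        """Format a percentage value; empty string for None and '-' for NaN"""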
        if data is None:
            return ''
        elif math.isnan(data):
            return '-'
        return Formatter.suffix('%')(Formatter.float(4)(data))


class Table(object):
    """ASCII readable table class"""

    def __init__(self, header):
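        """Create a table from a header: a list of (column name, formatter) tuples"""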
        header_row, self.formatters = zip(*header)
        self.data = [header_row]
        self.columns = len(header_row)

    def add_row(self, row):
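        """Format each entry of the row with its column formatter and append it to the table"""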
        assert self.columns == len(row)
        formatted_row = []
        for entry, formatter in zip(row, self.formatters):
            formatted_row.append(formatter(entry))
        self.data.append(formatted_row)

    def get_string(self, indent=0):
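        """Render the table as an ASCII grid; continuation lines are indented by indent spaces"""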
        spaces = ' ' * indent
        table = tabulate(self.data,
                         headers='firstrow',
                         tablefmt='grid',
                         stralign='center',
                         floatfmt='.2f')
        return table.replace('\n', '\n' + spaces)


class Summarizer(object):
    """Generic summarizer class"""

    indent_per_level = 2

    def __init__(self):
        self.indent_size = 0
        self.marker_stack = [False]
        self.str = ''

    def __indent(self, marker):
        self.indent_size += self.indent_per_level
        self.marker_stack.append(marker)

    def __unindent(self):
        assert self.indent_size >= self.indent_per_level
        self.indent_size -= self.indent_per_level
        self.marker_stack.pop()

    def __get_indent_string(self):
        current_str = ' ' * self.indent_size
        if self.marker_stack[-1]:
            current_str = current_str[:-2] + '> '
        return current_str

    def _put(self, *args):
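        """Append args as one indented line; a trailing dict is expanded as a nested block"""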
        self.str += self.__get_indent_string()
        if args and isinstance(args[-1], dict):
            self.str += ' '.join(map(str, args[:-1])) + '\n'
            self._put_dict(args[-1])
        else:
            self.str += ' '.join(map(str, args)) + '\n'

    def _put_dict(self, data):
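        """Recursively append a dict as an indented block of 'key: value' lines"""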
        with self._create_block(False):
            for key, value in data.iteritems():
                if isinstance(value, dict):
                    self._put(key + ':')
                    self._put_dict(value)
                else:
                    self._put(key + ':', value)

    def _put_table(self, table):
        self.str += self.__get_indent_string()
        self.str += table.get_string(self.indent_size) + '\n'

    def __str__(self):
        return self.str

    @contextmanager
    def _create_block(self, marker=True):
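        """Indent one level for the block; marker=True prefixes its lines with '> '"""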
        self.__indent(marker)
        yield
        self.__unindent()


class NFVBenchSummarizer(Summarizer):
    """Summarize nfvbench json result"""

    ndr_pdr_header = [
        ('-', Formatter.fixed),
        ('L2 Frame Size', Formatter.standard),
        ('Rate (fwd+rev)', Formatter.bits),
        ('Rate (fwd+rev)', Formatter.suffix(' pps')),
        ('Avg Drop Rate', Formatter.suffix('%')),
        ('Avg Latency (usec)', Formatter.standard),
        ('Min Latency (usec)', Formatter.standard),
        ('Max Latency (usec)', Formatter.standard)
    ]

    single_run_header = [
        ('L2 Frame Size', Formatter.standard),
        ('Drop Rate', Formatter.suffix('%')),
        ('Avg Latency (usec)', Formatter.standard),
        ('Min Latency (usec)', Formatter.standard),
        ('Max Latency (usec)', Formatter.standard)
    ]

    config_header = [
        ('Direction', Formatter.standard),
        ('Requested TX Rate (bps)', Formatter.bits),
        ('Actual TX Rate (bps)', Formatter.bits),
        ('RX Rate (bps)', Formatter.bits),
        ('Requested TX Rate (pps)', Formatter.suffix(' pps')),
        ('Actual TX Rate (pps)', Formatter.suffix(' pps')),
        ('RX Rate (pps)', Formatter.suffix(' pps'))
    ]

    chain_analysis_header = [
        ('Interface', Formatter.standard),
        ('Device', Formatter.standard),
        ('Packets (fwd)', Formatter.standard),
        ('Drops (fwd)', Formatter.standard),
        ('Drop% (fwd)', Formatter.percentage),
        ('Packets (rev)', Formatter.standard),
        ('Drops (rev)', Formatter.standard),
        ('Drop% (rev)', Formatter.percentage)
    ]

    direction_keys = ['direction-forward', 'direction-reverse', 'direction-total']
    direction_names = ['Forward', 'Reverse', 'Total']

    def __init__(self, result, sender):
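        """Summarize a result dict; records are sent through sender when one is provided"""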
        Summarizer.__init__(self)
        self.result = result
        self.config = self.result['config']
        self.record_header = None
        self.record_data = None
        self.sender = sender
        # if sender is available initialize record
        if self.sender:
            self.__record_init()
        self.__summarize()

    def __summarize(self):
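        """Emit the full summary: metadata, benchmark configuration and per chain results"""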
        self._put()
        self._put('========== NFVBench Summary ==========')
        self._put('Date:', self.result['date'])
        self._put('NFVBench version:', self.result['nfvbench_version'])
        self._put('Openstack Neutron:', {
            'vSwitch': self.result['openstack_spec']['vswitch'],
            'Encapsulation': self.result['openstack_spec']['encaps']
        })
        self.__record_header_put('version', self.result['nfvbench_version'])
        self.__record_header_put('vSwitch', self.result['openstack_spec']['vswitch'])
        self.__record_header_put('Encapsulation', self.result['openstack_spec']['encaps'])
        self._put('Benchmarks:')
        with self._create_block():
            self._put('Networks:')
            with self._create_block():
                network_benchmark = self.result['benchmarks']['network']

                self._put('Components:')
                with self._create_block():
                    self._put('TOR:')
                    with self._create_block(False):
                        self._put('Type:', self.config['tor']['type'])
                    self._put('Traffic Generator:')
                    with self._create_block(False):
                        self._put('Profile:', self.config['generator_config']['name'])
                        self._put('Tool:', self.config['generator_config']['tool'])
                    if network_benchmark['versions']:
                        self._put('Versions:')
                        with self._create_block():
                            for component, version in network_benchmark['versions'].iteritems():
                                self._put(component + ':', version)

                if self.config['ndr_run'] or self.config['pdr_run']:
                    self._put('Measurement Parameters:')
                    with self._create_block(False):
                        if self.config['ndr_run']:
                            self._put('NDR:', self.config['measurement']['NDR'])
                        if self.config['pdr_run']:
                            self._put('PDR:', self.config['measurement']['PDR'])
                self._put('Service chain:')
                for result in network_benchmark['service_chain'].iteritems():
                    with self._create_block():
                        self.__chain_summarize(*result)

    def __chain_summarize(self, chain_name, chain_benchmark):
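        """Summarize the results of one service chain"""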
        self._put(chain_name + ':')
        if chain_name == ChainType.PVVP:
            self._put('Mode:', chain_benchmark.get('mode'))
            chain_name += "-" + chain_benchmark.get('mode')
        self.__record_header_put('service_chain', chain_name)
        with self._create_block():
            self._put('Traffic:')
            with self._create_block(False):
                self.__traffic_summarize(chain_benchmark['result'])

    def __traffic_summarize(self, traffic_benchmark):
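        """Summarize the traffic profile and per frame size results of a chain benchmark"""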
        self._put('Profile:', traffic_benchmark['profile'])
        self._put('Bidirectional:', traffic_benchmark['bidirectional'])
        self._put('Flow count:', traffic_benchmark['flow_count'])
        self._put('Service chains count:', traffic_benchmark['service_chain_count'])
        self._put('Compute nodes:', traffic_benchmark['compute_nodes'].keys())

        self.__record_header_put('profile', traffic_benchmark['profile'])
        self.__record_header_put('bidirectional', traffic_benchmark['bidirectional'])
        self.__record_header_put('flow_count', traffic_benchmark['flow_count'])
        self.__record_header_put('sc_count', traffic_benchmark['service_chain_count'])
        self.__record_header_put('compute_nodes', traffic_benchmark['compute_nodes'].keys())
        with self._create_block(False):
            self._put()
            if not self.config['no_traffic']:
                self._put('Run Summary:')
                self._put()
                with self._create_block(False):
                    self._put_table(self.__get_summary_table(traffic_benchmark['result']))
                    try:
                        self._put()
                        self._put(traffic_benchmark['result']['warning'])
                    except KeyError:
                        pass

            for entry in traffic_benchmark['result'].iteritems():
                if 'warning' in entry:
                    continue
                self.__chain_analysis_summarize(*entry)
                self.__record_send()

    def __chain_analysis_summarize(self, frame_size, analysis):
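        """Summarize the analysis results for one L2 frame size"""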
        self._put()
        self._put('L2 frame size:', frame_size)
        if 'analysis_duration_sec' in analysis:
            self._put('Chain analysis duration:',
                      Formatter.float(3)(analysis['analysis_duration_sec']), 'seconds')
            self.__record_data_put(frame_size, {'chain_analysis_duration': Formatter.float(3)(
                analysis['analysis_duration_sec'])})
        if self.config['ndr_run']:
            self._put('NDR search duration:', Formatter.float(0)(analysis['ndr']['time_taken_sec']),
                      'seconds')
            self.__record_data_put(frame_size, {'ndr_search_duration': Formatter.float(0)(
                analysis['ndr']['time_taken_sec'])})
        if self.config['pdr_run']:
            self._put('PDR search duration:', Formatter.float(0)(analysis['pdr']['time_taken_sec']),
                      'seconds')
            self.__record_data_put(frame_size, {'pdr_search_duration': Formatter.float(0)(
                analysis['pdr']['time_taken_sec'])})
        self._put()

        if not self.config['no_traffic'] and self.config['single_run']:
            self._put('Run Config:')
            self._put()
            with self._create_block(False):
                self._put_table(self.__get_config_table(analysis['run_config'], frame_size))
                if 'warning' in analysis['run_config'] and analysis['run_config']['warning']:
                    self._put()
                    self._put(analysis['run_config']['warning'])
                self._put()

        if 'packet_analysis' in analysis:
            self._put('Chain Analysis:')
            self._put()
            with self._create_block(False):
                self._put_table(self.__get_chain_analysis_table(analysis['packet_analysis']))
                self._put()

    def __get_summary_table(self, traffic_result):
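        """Build the NDR/PDR or single run summary table from per frame size results"""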
        if self.config['single_run']:
            summary_table = Table(self.single_run_header)
        else:
            summary_table = Table(self.ndr_pdr_header)

        if self.config['ndr_run']:
            for frame_size, analysis in traffic_result.iteritems():
                if frame_size == 'warning':
                    continue
                summary_table.add_row([
                    'NDR',
                    frame_size,
                    analysis['ndr']['rate_bps'],
                    analysis['ndr']['rate_pps'],
                    analysis['ndr']['stats']['overall']['drop_percentage'],
                    analysis['ndr']['stats']['overall']['avg_delay_usec'],
                    analysis['ndr']['stats']['overall']['min_delay_usec'],
                    analysis['ndr']['stats']['overall']['max_delay_usec']
                ])
                self.__record_data_put(frame_size, {'ndr': {
                    'type': 'NDR',
                    'rate_bps': analysis['ndr']['rate_bps'],
                    'rate_pps': analysis['ndr']['rate_pps'],
                    'drop_percentage': analysis['ndr']['stats']['overall']['drop_percentage'],
                    'avg_delay_usec': analysis['ndr']['stats']['overall']['avg_delay_usec'],
                    'min_delay_usec': analysis['ndr']['stats']['overall']['min_delay_usec'],
                    'max_delay_usec': analysis['ndr']['stats']['overall']['max_delay_usec']
                }})
        if self.config['pdr_run']:
            for frame_size, analysis in traffic_result.iteritems():
                if frame_size == 'warning':
                    continue
                summary_table.add_row([
                    'PDR',
                    frame_size,
                    analysis['pdr']['rate_bps'],
                    analysis['pdr']['rate_pps'],
                    analysis['pdr']['stats']['overall']['drop_percentage'],
                    analysis['pdr']['stats']['overall']['avg_delay_usec'],
                    analysis['pdr']['stats']['overall']['min_delay_usec'],
                    analysis['pdr']['stats']['overall']['max_delay_usec']
                ])
                self.__record_data_put(frame_size, {'pdr': {
                    'type': 'PDR',
                    'rate_bps': analysis['pdr']['rate_bps'],
                    'rate_pps': analysis['pdr']['rate_pps'],
                    'drop_percentage': analysis['pdr']['stats']['overall']['drop_percentage'],
                    'avg_delay_usec': analysis['pdr']['stats']['overall']['avg_delay_usec'],
                    'min_delay_usec': analysis['pdr']['stats']['overall']['min_delay_usec'],
                    'max_delay_usec': analysis['pdr']['stats']['overall']['max_delay_usec']
                }})
        if self.config['single_run']:
            for frame_size, analysis in traffic_result.iteritems():
                summary_table.add_row([
                    frame_size,
                    analysis['stats']['overall']['drop_rate_percent'],
                    analysis['stats']['overall']['rx']['avg_delay_usec'],
                    analysis['stats']['overall']['rx']['min_delay_usec'],
                    analysis['stats']['overall']['rx']['max_delay_usec']
                ])
                self.__record_data_put(frame_size, {'single_run': {
                    'type': 'single_run',
                    'drop_rate_percent': analysis['stats']['overall']['drop_rate_percent'],
                    'avg_delay_usec': analysis['stats']['overall']['rx']['avg_delay_usec'],
                    'min_delay_usec': analysis['stats']['overall']['rx']['min_delay_usec'],
                    'max_delay_usec': analysis['stats']['overall']['rx']['max_delay_usec']
                }})
        return summary_table

    def __get_config_table(self, run_config, frame_size):
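        """Build the run config table: requested/actual TX and RX rates per direction"""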
        config_table = Table(self.config_header)
        for key, name in zip(self.direction_keys, self.direction_names):
            if key not in run_config:
                continue
            config_table.add_row([
                name,
                run_config[key]['orig']['rate_bps'],
                run_config[key]['tx']['rate_bps'],
                run_config[key]['rx']['rate_bps'],
                int(run_config[key]['orig']['rate_pps']),
                int(run_config[key]['tx']['rate_pps']),
                int(run_config[key]['rx']['rate_pps']),
            ])
            self.__record_data_put(frame_size, {
                name.lower() + "_orig_rate_bps": int(run_config[key]['orig']['rate_bps']),
                name.lower() + "_tx_rate_bps": int(run_config[key]['tx']['rate_bps']),
                name.lower() + "_rx_rate_bps": int(run_config[key]['rx']['rate_bps']),
                name.lower() + "_orig_rate_pps": int(run_config[key]['orig']['rate_pps']),
                name.lower() + "_tx_rate_pps": int(run_config[key]['tx']['rate_pps']),
                name.lower() + "_rx_rate_pps": int(run_config[key]['rx']['rate_pps']),
            })
        return config_table

    def __get_chain_analysis_table(self, packet_analysis):
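        """Build the per-interface chain analysis table (packets and drops, both directions)"""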
        chain_analysis_table = Table(self.chain_analysis_header)
        forward_analysis = packet_analysis['direction-forward']
        reverse_analysis = packet_analysis['direction-reverse']
        reverse_analysis.reverse()
        for fwd, rev in zip(forward_analysis, reverse_analysis):
            chain_analysis_table.add_row([
                fwd['interface'],
                fwd['device'],
                fwd['packet_count'],
                fwd.get('packet_drop_count', None),
                fwd.get('packet_drop_percentage', None),
                rev['packet_count'],
                rev.get('packet_drop_count', None),
                rev.get('packet_drop_percentage', None),
            ])
        return chain_analysis_table

    def __record_header_put(self, key, value):
        if self.sender:
            self.record_header[key] = value

    def __record_data_put(self, key, data):
        if self.sender:
            if key not in self.record_data:
                self.record_data[key] = {}
            self.record_data[key].update(data)

    def __record_send(self):
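        """Send one record per frame size and run type through the sender, then reset"""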
        if self.sender:
            self.record_header["@timestamp"] = datetime.utcnow().replace(
                tzinfo=pytz.utc).strftime("%Y-%m-%dT%H:%M:%S.%f%z")
            for frame_size in self.record_data:
                data = self.record_header
                data['frame_size'] = frame_size
                data.update(self.record_data[frame_size])
                run_specific_data = {}
                if 'single_run' in data:
                    run_specific_data['single_run'] = data['single_run']
                    del data['single_run']
                if 'ndr' in data:
                    run_specific_data['ndr'] = data['ndr']
                    run_specific_data['ndr']['drop_limit'] = self.config['measurement']['NDR']
                    del data['ndr']
                if 'pdr' in data:
                    run_specific_data['pdr'] = data['pdr']
                    run_specific_data['pdr']['drop_limit'] = self.config['measurement']['PDR']
                    del data['pdr']
                for key in run_specific_data:
                    data_to_send = data.copy()
                    data_to_send.update(run_specific_data[key])
                    self.sender.record_send(data_to_send)
            self.__record_init()

    def __record_init(self):
        # init is called after checking for sender
        self.record_header = {
            "runlogdate": self.sender.runlogdate,
            "user_label": self.config['user_label']
        }
        self.record_data = {}