#!/usr/bin/env python
# Copyright 2016 Cisco Systems, Inc.  All rights reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.
#

from contextlib import contextmanager
from datetime import datetime
import math

import bitmath
import pytz
from tabulate import tabulate

from specs import ChainType


class Formatter(object):
    """Collection of string formatter methods"""

    @staticmethod
    def fixed(data):
        return data

    @staticmethod
    def int(data):
        return '{:,}'.format(data)

    @staticmethod
    def float(decimal):
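        # Returns a formatter closure, e.g. Formatter.float(3)(1.23456) -> '1.235'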
        return lambda data: '%.{}f'.format(decimal) % (data)

    @staticmethod
    def standard(data):
        if isinstance(data, int):
            return Formatter.int(data)
        elif isinstance(data, float):
            return Formatter.float(4)(data)
        return Formatter.fixed(data)

    @staticmethod
    def suffix(suffix_str):
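        # Returns a formatter closure, e.g. Formatter.suffix(' pps')(1234567) -> '1,234,567 pps'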
        return lambda data: Formatter.standard(data) + suffix_str

    @staticmethod
    def bits(data):
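        # e.g. Formatter.bits(10000000) -> '10.0000 Mbps' (SI prefixes, via bitmath)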
        # By default `best_prefix` reports the value with a byte-based prefix, so multiply
        # by 8.0 here (and divide by 8.0 below, after mapping to the matching bit class)
        # to keep the result in bit format.
        bit = 8.0 * bitmath.Bit(float(data))
        bit = bit.best_prefix(bitmath.SI)
        byte_to_bit_classes = {
            'kB': bitmath.kb,
            'MB': bitmath.Mb,
            'GB': bitmath.Gb,
            'TB': bitmath.Tb,
            'PB': bitmath.Pb,
            'EB': bitmath.Eb,
            'ZB': bitmath.Zb,
            'YB': bitmath.Yb,
        }
        bps = byte_to_bit_classes.get(bit.unit, bitmath.Bit).from_other(bit) / 8.0
        if bps.unit != 'Bit':
            return bps.format("{value:.4f} {unit}ps")
        return bps.format("{value:.4f} bps")

    @staticmethod
    def percentage(data):
        if data is None:
            return ''
        elif math.isnan(data):
            return '-'
        return Formatter.suffix('%')(Formatter.float(4)(data))


class Table(object):
    """ASCII readable table class"""
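    # Illustrative use (hypothetical columns):
    #   table = Table([('L2 Frame Size', Formatter.standard), ('Rate', Formatter.bits)])
    #   table.add_row([64, 1000000000])
    #   print table.get_string(indent=4)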

    def __init__(self, header):
        header_row, self.formatters = zip(*header)
        self.data = [header_row]
        self.columns = len(header_row)

    def add_row(self, row):
        assert self.columns == len(row)
        formatted_row = []
        for entry, formatter in zip(row, self.formatters):
            formatted_row.append(formatter(entry))
        self.data.append(formatted_row)

    def get_string(self, indent=0):
        spaces = ' ' * indent
        table = tabulate(self.data,
                         headers='firstrow',
                         tablefmt='grid',
                         stralign='center',
                         floatfmt='.2f')
        return table.replace('\n', '\n' + spaces)


class Summarizer(object):
    """Generic summarizer class"""

    indent_per_level = 2

    def __init__(self):
        self.indent_size = 0
        self.marker_stack = [False]
        self.str = ''

    def __indent(self, marker):
        self.indent_size += self.indent_per_level
        self.marker_stack.append(marker)

    def __unindent(self):
        assert self.indent_size >= self.indent_per_level
        self.indent_size -= self.indent_per_level
        self.marker_stack.pop()

    def __get_indent_string(self):
        current_str = ' ' * self.indent_size
        if self.marker_stack[-1]:
            current_str = current_str[:-2] + '> '
        return current_str

    def _put(self, *args):
        self.str += self.__get_indent_string()
        if args and isinstance(args[-1], dict):
            self.str += ' '.join(map(str, args[:-1])) + '\n'
            self._put_dict(args[-1])
        else:
            self.str += ' '.join(map(str, args)) + '\n'

    def _put_dict(self, data):
        with self._create_block(False):
            for key, value in data.iteritems():
                if isinstance(value, dict):
                    self._put(key + ':')
                    self._put_dict(value)
                else:
                    self._put(key + ':', value)

    def _put_table(self, table):
        self.str += self.__get_indent_string()
        self.str += table.get_string(self.indent_size) + '\n'

    def __str__(self):
        return self.str

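    # Usage pattern (see NFVBenchSummarizer below): each nested block adds one
    # level of indentation; marker=True renders a '> ' marker on the nested lines.
    #   with self._create_block():
    #       self._put('Networks:')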
    @contextmanager
    def _create_block(self, marker=True):
        self.__indent(marker)
        yield
        self.__unindent()


class NFVBenchSummarizer(Summarizer):
    """Summarize nfvbench json result"""

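    # Note: the '-' column below holds the run type ('NDR' or 'PDR') for the rows
    # built in __get_summary_table.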
    ndr_pdr_header = [
        ('-', Formatter.fixed),
        ('L2 Frame Size', Formatter.standard),
        ('Rate (fwd+rev)', Formatter.bits),
        ('Rate (fwd+rev)', Formatter.suffix(' pps')),
        ('Avg Drop Rate', Formatter.suffix('%')),
        ('Avg Latency (usec)', Formatter.standard),
        ('Min Latency (usec)', Formatter.standard),
        ('Max Latency (usec)', Formatter.standard)
    ]

    single_run_header = [
        ('L2 Frame Size', Formatter.standard),
        ('Drop Rate', Formatter.suffix('%')),
        ('Avg Latency (usec)', Formatter.standard),
        ('Min Latency (usec)', Formatter.standard),
        ('Max Latency (usec)', Formatter.standard)
    ]

    config_header = [
        ('Direction', Formatter.standard),
        ('Requested TX Rate (bps)', Formatter.bits),
        ('Actual TX Rate (bps)', Formatter.bits),
        ('RX Rate (bps)', Formatter.bits),
        ('Requested TX Rate (pps)', Formatter.suffix(' pps')),
        ('Actual TX Rate (pps)', Formatter.suffix(' pps')),
        ('RX Rate (pps)', Formatter.suffix(' pps'))
    ]

    chain_analysis_header = [
        ('Interface', Formatter.standard),
        ('Device', Formatter.standard),
        ('Packets (fwd)', Formatter.standard),
        ('Drops (fwd)', Formatter.standard),
        ('Drop% (fwd)', Formatter.percentage),
        ('Packets (rev)', Formatter.standard),
        ('Drops (rev)', Formatter.standard),
        ('Drop% (rev)', Formatter.percentage)
    ]

    direction_keys = ['direction-forward', 'direction-reverse', 'direction-total']
    direction_names = ['Forward', 'Reverse', 'Total']

    def __init__(self, result, sender):
        Summarizer.__init__(self)
        self.result = result
        self.config = self.result['config']
        self.record_header = None
        self.record_data = None
        self.sender = sender
        # if a sender is available, initialize the record
        if self.sender:
            self.__record_init()
        self.__summarize()

    def __summarize(self):
        self._put()
        self._put('========== NFVBench Summary ==========')
        self._put('Date:', self.result['date'])
        self._put('NFVBench version', self.result['nfvbench_version'])
        self._put('Openstack Neutron:', {
            'vSwitch': self.result['openstack_spec']['vswitch'],
            'Encapsulation': self.result['openstack_spec']['encaps']
        })
        self.__record_header_put('version', self.result['nfvbench_version'])
        self.__record_header_put('vSwitch', self.result['openstack_spec']['vswitch'])
        self.__record_header_put('Encapsulation', self.result['openstack_spec']['encaps'])
        self._put('Benchmarks:')
        with self._create_block():
            self._put('Networks:')
            with self._create_block():
                network_benchmark = self.result['benchmarks']['network']

                self._put('Components:')
                with self._create_block():
                    self._put('TOR:')
                    with self._create_block(False):
                        self._put('Type:', self.config['tor']['type'])
                    self._put('Traffic Generator:')
                    with self._create_block(False):
                        self._put('Profile:', self.config['generator_config']['name'])
                        self._put('Tool:', self.config['generator_config']['tool'])
                    if network_benchmark['versions']:
                        self._put('Versions:')
                        with self._create_block():
                            for component, version in network_benchmark['versions'].iteritems():
                                self._put(component + ':', version)

                if self.config['ndr_run'] or self.config['pdr_run']:
                    self._put('Measurement Parameters:')
                    with self._create_block(False):
                        if self.config['ndr_run']:
                            self._put('NDR:', self.config['measurement']['NDR'])
                        if self.config['pdr_run']:
                            self._put('PDR:', self.config['measurement']['PDR'])
                self._put('Service chain:')
                for result in network_benchmark['service_chain'].iteritems():
                    with self._create_block():
                        self.__chain_summarize(*result)

    def __chain_summarize(self, chain_name, chain_benchmark):
        self._put(chain_name + ':')
        if chain_name == ChainType.PVVP:
            self._put('Mode:', chain_benchmark.get('mode'))
            chain_name += "-" + chain_benchmark.get('mode')
        self.__record_header_put('service_chain', chain_name)
        with self._create_block():
            self._put('Traffic:')
            with self._create_block(False):
                self.__traffic_summarize(chain_benchmark['result'])

    def __traffic_summarize(self, traffic_benchmark):
        self._put('Profile:', traffic_benchmark['profile'])
        self._put('Bidirectional:', traffic_benchmark['bidirectional'])
        self._put('Flow count:', traffic_benchmark['flow_count'])
        self._put('Service chains count:', traffic_benchmark['service_chain_count'])
        self._put('Compute nodes:', traffic_benchmark['compute_nodes'].keys())

        self.__record_header_put('profile', traffic_benchmark['profile'])
        self.__record_header_put('bidirectional', traffic_benchmark['bidirectional'])
        self.__record_header_put('flow_count', traffic_benchmark['flow_count'])
        self.__record_header_put('sc_count', traffic_benchmark['service_chain_count'])
        self.__record_header_put('compute_nodes', traffic_benchmark['compute_nodes'].keys())
        with self._create_block(False):
            self._put()
            if not self.config['no_traffic']:
                self._put('Run Summary:')
                self._put()
                with self._create_block(False):
                    self._put_table(self.__get_summary_table(traffic_benchmark['result']))
                    try:
                        self._put()
                        self._put(traffic_benchmark['result']['warning'])
                    except KeyError:
                        pass

            for entry in traffic_benchmark['result'].iteritems():
                if 'warning' in entry:
                    continue
                self.__chain_analysis_summarize(*entry)
                self.__record_send()

    def __chain_analysis_summarize(self, frame_size, analysis):
        self._put()
        self._put('L2 frame size:', frame_size)
        if 'analysis_duration_sec' in analysis:
            self._put('Chain analysis duration:',
                      Formatter.float(3)(analysis['analysis_duration_sec']), 'seconds')
            self.__record_data_put(frame_size, {'chain_analysis_duration': Formatter.float(3)(
                analysis['analysis_duration_sec'])})
        if self.config['ndr_run']:
            self._put('NDR search duration:', Formatter.float(0)(analysis['ndr']['time_taken_sec']),
                      'seconds')
            self.__record_data_put(frame_size, {'ndr_search_duration': Formatter.float(0)(
                analysis['ndr']['time_taken_sec'])})
        if self.config['pdr_run']:
            self._put('PDR search duration:', Formatter.float(0)(analysis['pdr']['time_taken_sec']),
                      'seconds')
            self.__record_data_put(frame_size, {'pdr_search_duration': Formatter.float(0)(
                analysis['pdr']['time_taken_sec'])})
        self._put()

        if not self.config['no_traffic'] and self.config['single_run']:
            self._put('Run Config:')
            self._put()
            with self._create_block(False):
                self._put_table(self.__get_config_table(analysis['run_config'], frame_size))
                if 'warning' in analysis['run_config'] and analysis['run_config']['warning']:
                    self._put()
                    self._put(analysis['run_config']['warning'])
                self._put()

        if 'packet_analysis' in analysis:
            self._put('Chain Analysis:')
            self._put()
            with self._create_block(False):
                self._put_table(self.__get_chain_analysis_table(analysis['packet_analysis']))
                self._put()

    def __get_summary_table(self, traffic_result):
        if self.config['single_run']:
            summary_table = Table(self.single_run_header)
        else:
            summary_table = Table(self.ndr_pdr_header)

        if self.config['ndr_run']:
            for frame_size, analysis in traffic_result.iteritems():
                if frame_size == 'warning':
                    continue
                summary_table.add_row([
                    'NDR',
                    frame_size,
                    analysis['ndr']['rate_bps'],
                    analysis['ndr']['rate_pps'],
                    analysis['ndr']['stats']['overall']['drop_percentage'],
                    analysis['ndr']['stats']['overall']['avg_delay_usec'],
                    analysis['ndr']['stats']['overall']['min_delay_usec'],
                    analysis['ndr']['stats']['overall']['max_delay_usec']
                ])
                self.__record_data_put(frame_size, {'ndr': {
                    'type': 'NDR',
                    'rate_bps': analysis['ndr']['rate_bps'],
                    'rate_pps': analysis['ndr']['rate_pps'],
                    'drop_percentage': analysis['ndr']['stats']['overall']['drop_percentage'],
                    'avg_delay_usec': analysis['ndr']['stats']['overall']['avg_delay_usec'],
                    'min_delay_usec': analysis['ndr']['stats']['overall']['min_delay_usec'],
                    'max_delay_usec': analysis['ndr']['stats']['overall']['max_delay_usec']
                }})
        if self.config['pdr_run']:
            for frame_size, analysis in traffic_result.iteritems():
                if frame_size == 'warning':
                    continue
                summary_table.add_row([
                    'PDR',
                    frame_size,
                    analysis['pdr']['rate_bps'],
                    analysis['pdr']['rate_pps'],
                    analysis['pdr']['stats']['overall']['drop_percentage'],
                    analysis['pdr']['stats']['overall']['avg_delay_usec'],
                    analysis['pdr']['stats']['overall']['min_delay_usec'],
                    analysis['pdr']['stats']['overall']['max_delay_usec']
                ])
                self.__record_data_put(frame_size, {'pdr': {
                    'type': 'PDR',
                    'rate_bps': analysis['pdr']['rate_bps'],
                    'rate_pps': analysis['pdr']['rate_pps'],
                    'drop_percentage': analysis['pdr']['stats']['overall']['drop_percentage'],
                    'avg_delay_usec': analysis['pdr']['stats']['overall']['avg_delay_usec'],
                    'min_delay_usec': analysis['pdr']['stats']['overall']['min_delay_usec'],
                    'max_delay_usec': analysis['pdr']['stats']['overall']['max_delay_usec']
                }})
        if self.config['single_run']:
            for frame_size, analysis in traffic_result.iteritems():
                summary_table.add_row([
                    frame_size,
                    analysis['stats']['overall']['drop_rate_percent'],
                    analysis['stats']['overall']['rx']['avg_delay_usec'],
                    analysis['stats']['overall']['rx']['min_delay_usec'],
                    analysis['stats']['overall']['rx']['max_delay_usec']
                ])
                self.__record_data_put(frame_size, {'single_run': {
                    'type': 'single_run',
                    'drop_rate_percent': analysis['stats']['overall']['drop_rate_percent'],
                    'avg_delay_usec': analysis['stats']['overall']['rx']['avg_delay_usec'],
                    'min_delay_usec': analysis['stats']['overall']['rx']['min_delay_usec'],
                    'max_delay_usec': analysis['stats']['overall']['rx']['max_delay_usec']
                }})
        return summary_table

    def __get_config_table(self, run_config, frame_size):
        config_table = Table(self.config_header)
        for key, name in zip(self.direction_keys, self.direction_names):
            if key not in run_config:
                continue
            config_table.add_row([
                name,
                run_config[key]['orig']['rate_bps'],
                run_config[key]['tx']['rate_bps'],
                run_config[key]['rx']['rate_bps'],
                int(run_config[key]['orig']['rate_pps']),
                int(run_config[key]['tx']['rate_pps']),
                int(run_config[key]['rx']['rate_pps']),
            ])
            self.__record_data_put(frame_size, {
                name.lower() + "_orig_rate_bps": int(run_config[key]['orig']['rate_bps']),
                name.lower() + "_tx_rate_bps": int(run_config[key]['tx']['rate_bps']),
                name.lower() + "_rx_rate_bps": int(run_config[key]['rx']['rate_bps']),
                name.lower() + "_orig_rate_pps": int(run_config[key]['orig']['rate_pps']),
                name.lower() + "_tx_rate_pps": int(run_config[key]['tx']['rate_pps']),
                name.lower() + "_rx_rate_pps": int(run_config[key]['rx']['rate_pps']),
            })
        return config_table

    def __get_chain_analysis_table(self, packet_analysis):
        chain_analysis_table = Table(self.chain_analysis_header)
        forward_analysis = packet_analysis['direction-forward']
        reverse_analysis = packet_analysis['direction-reverse']
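        # reverse-direction entries are listed from the far end of the chain,
        # so flip them to pair each row with its forward-direction counterpart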
        reverse_analysis.reverse()
        for fwd, rev in zip(forward_analysis, reverse_analysis):
            chain_analysis_table.add_row([
                fwd['interface'],
                fwd['device'],
                fwd['packet_count'],
                fwd.get('packet_drop_count', None),
                fwd.get('packet_drop_percentage', None),
                rev['packet_count'],
                rev.get('packet_drop_count', None),
                rev.get('packet_drop_percentage', None),
            ])
        return chain_analysis_table

    def __record_header_put(self, key, value):
        if self.sender:
            self.record_header[key] = value

    def __record_data_put(self, key, data):
        if self.sender:
            if key not in self.record_data:
                self.record_data[key] = {}
            self.record_data[key].update(data)

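    # One record is sent per frame size and per run type present (single_run, ndr,
    # pdr); shared header fields are merged with the per-run data before sending.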
    def __record_send(self):
        if self.sender:
            self.record_header["@timestamp"] = datetime.utcnow().replace(
                tzinfo=pytz.utc).strftime("%Y-%m-%dT%H:%M:%S.%f%z")
            for frame_size in self.record_data:
                data = self.record_header
                data['frame_size'] = frame_size
                data.update(self.record_data[frame_size])
                run_specific_data = {}
                if 'single_run' in data:
                    run_specific_data['single_run'] = data['single_run']
                    del data['single_run']
                if 'ndr' in data:
                    run_specific_data['ndr'] = data['ndr']
                    run_specific_data['ndr']['drop_limit'] = self.config['measurement']['NDR']
                    del data['ndr']
                if 'pdr' in data:
                    run_specific_data['pdr'] = data['pdr']
                    run_specific_data['pdr']['drop_limit'] = self.config['measurement']['PDR']
                    del data['pdr']
                for key in run_specific_data:
                    data_to_send = data.copy()
                    data_to_send.update(run_specific_data[key])
                    self.sender.record_send(data_to_send)
            self.__record_init()

    def __record_init(self):
        # init is called after checking for sender
        self.record_header = {
            "runlogdate": self.sender.runlogdate,
            "user_label": self.config['user_label']
        }
        self.record_data = {}
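
# Typical usage (illustrative sketch): `result` is the nfvbench result dict and
# `sender` an optional record sender, or None to skip record export:
#     print NFVBenchSummarizer(result, None)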