#!/usr/bin/env python
# Copyright 2016 Cisco Systems, Inc.  All rights reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.
#

import bitmath
from contextlib import contextmanager
from datetime import datetime
import math
import pytz
from specs import ChainType
from tabulate import tabulate


class Formatter(object):
    """Collection of string formatter methods"""

    @staticmethod
    def fixed(data):
        return data

    @staticmethod
    def int(data):
        return '{:,}'.format(data)

    @staticmethod
    def float(decimal):
        return lambda data: '%.{}f'.format(decimal) % (data)

    @staticmethod
    def standard(data):
        if type(data) == int:
            return Formatter.int(data)
        elif type(data) == float:
            return Formatter.float(4)(data)
        else:
            return Formatter.fixed(data)

    @staticmethod
    def suffix(suffix_str):
        return lambda data: Formatter.standard(data) + suffix_str

    @staticmethod
    def bits(data):
        # By default, `best_prefix` returns a value in byte format; this hack
        # (multiply by 8.0) converts it into bit format.
        bit = 8.0 * bitmath.Bit(float(data))
        bit = bit.best_prefix(bitmath.SI)
        byte_to_bit_classes = {
            'kB': bitmath.kb,
            'MB': bitmath.Mb,
            'GB': bitmath.Gb,
            'TB': bitmath.Tb,
            'PB': bitmath.Pb,
            'EB': bitmath.Eb,
            'ZB': bitmath.Zb,
            'YB': bitmath.Yb,
        }
        bps = byte_to_bit_classes.get(bit.unit, bitmath.Bit).from_other(bit) / 8.0
        if bps.unit != 'Bit':
            return bps.format("{value:.4f} {unit}ps")
        else:
            return bps.format("{value:.4f} bps")

    @staticmethod
    def percentage(data):
        if data is None:
            return ''
        elif math.isnan(data):
            return '-'
        else:
            return Formatter.suffix('%')(Formatter.float(4)(data))

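# Usage sketch for the Formatter helpers above (illustrative only, not part of
# the module API): helpers either format a value directly or return a
# formatting callable. Rendered strings depend on the installed bitmath
# version and locale, so the outputs below are indicative, not guaranteed:
#
#   Formatter.int(1234567)               # -> '1,234,567'
#   Formatter.float(2)(3.14159)          # -> '3.14'
#   Formatter.suffix(' pps')(1000)       # -> '1,000 pps'
#   Formatter.bits(10000000)             # -> roughly '10.0000 Mbps'
#   Formatter.percentage(float('nan'))   # -> '-'

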
class Table(object):
    """ASCII readable table class"""

    def __init__(self, header):
        header_row, self.formatters = zip(*header)
        self.data = [header_row]
        self.columns = len(header_row)

    def add_row(self, row):
        assert (self.columns == len(row))
        formatted_row = []
        for entry, formatter in zip(row, self.formatters):
            formatted_row.append(formatter(entry))
        self.data.append(formatted_row)

    def get_string(self, indent=0):
        spaces = ' ' * indent
        table = tabulate(self.data,
                         headers='firstrow',
                         tablefmt='grid',
                         stralign='center',
                         floatfmt='.2f')
        return table.replace('\n', '\n' + spaces)

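# Usage sketch for Table (illustrative only): each header entry pairs a column
# title with the Formatter callable applied to that column's cells.
#
#   t = Table([('Name', Formatter.fixed), ('Rate', Formatter.suffix(' pps'))])
#   t.add_row(['forward', 1000000])
#   print t.get_string(indent=4)   # grid-style ASCII table, indented 4 spaces

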
class Summarizer(object):
    """Generic summarizer class"""

    indent_per_level = 2

    def __init__(self):
        self.indent_size = 0
        self.marker_stack = [False]
        self.str = ''

    def __indent(self, marker):
        self.indent_size += self.indent_per_level
        self.marker_stack.append(marker)

    def __unindent(self):
        assert (self.indent_size >= self.indent_per_level)
        self.indent_size -= self.indent_per_level
        self.marker_stack.pop()

    def __get_indent_string(self):
        current_str = ' ' * self.indent_size
        if self.marker_stack[-1]:
            # replace the innermost indent with a '> ' marker for this block
            current_str = current_str[:-2] + '> '
        return current_str

    def _put(self, *args):
        self.str += self.__get_indent_string()
        if len(args) and type(args[-1]) == dict:
            self.str += ' '.join(map(str, args[:-1])) + '\n'
            self._put_dict(args[-1])
        else:
            self.str += ' '.join(map(str, args)) + '\n'

    def _put_dict(self, data):
        with self._create_block(False):
            for key, value in data.iteritems():
                if type(value) == dict:
                    self._put(key + ':')
                    self._put_dict(value)
                else:
                    self._put(key + ':', value)

    def _put_table(self, table):
        self.str += self.__get_indent_string()
        self.str += table.get_string(self.indent_size) + '\n'

    def __str__(self):
        return self.str

    @contextmanager
    def _create_block(self, marker=True):
        self.__indent(marker)
        yield
        self.__unindent()

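# Usage sketch for Summarizer (illustrative only): subclasses build an
# indented text report by nesting _create_block() contexts and calling
# _put()/_put_table(), then str() the instance to retrieve the whole report.
#
#   s = Summarizer()
#   s._put('Benchmarks:')
#   with s._create_block():
#       s._put('Networks:', {'encap': 'vlan'})
#   print s

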
class NFVBenchSummarizer(Summarizer):
    """Summarize the nfvbench JSON result"""

    ndr_pdr_header = [
        ('-', Formatter.fixed),
        ('L2 Frame Size', Formatter.standard),
        ('Rate (fwd+rev)', Formatter.bits),
        ('Rate (fwd+rev)', Formatter.suffix(' pps')),
        ('Avg Drop Rate', Formatter.suffix('%')),
        ('Avg Latency (usec)', Formatter.standard),
        ('Min Latency (usec)', Formatter.standard),
        ('Max Latency (usec)', Formatter.standard)
    ]

    single_run_header = [
        ('L2 Frame Size', Formatter.standard),
        ('Drop Rate', Formatter.suffix('%')),
        ('Avg Latency (usec)', Formatter.standard),
        ('Min Latency (usec)', Formatter.standard),
        ('Max Latency (usec)', Formatter.standard)
    ]

    config_header = [
        ('Direction', Formatter.standard),
        ('Requested TX Rate (bps)', Formatter.bits),
        ('Actual TX Rate (bps)', Formatter.bits),
        ('RX Rate (bps)', Formatter.bits),
        ('Requested TX Rate (pps)', Formatter.suffix(' pps')),
        ('Actual TX Rate (pps)', Formatter.suffix(' pps')),
        ('RX Rate (pps)', Formatter.suffix(' pps'))
    ]

    chain_analysis_header = [
        ('Interface', Formatter.standard),
        ('Device', Formatter.standard),
        ('Packets (fwd)', Formatter.standard),
        ('Drops (fwd)', Formatter.standard),
        ('Drop% (fwd)', Formatter.percentage),
        ('Packets (rev)', Formatter.standard),
        ('Drops (rev)', Formatter.standard),
        ('Drop% (rev)', Formatter.percentage)
    ]

    direction_keys = ['direction-forward', 'direction-reverse', 'direction-total']
    direction_names = ['Forward', 'Reverse', 'Total']

    def __init__(self, result, sender):
        Summarizer.__init__(self)
        self.result = result
        self.config = self.result['config']
        self.record_header = None
        self.record_data = None
        self.sender = sender
        # if a sender is available, initialize the record
        if self.sender:
            self.__record_init()
        self.__summarize()

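    # Usage note (illustrative): the full summary is built at construction
    # time by __summarize(), so a caller typically just builds the instance
    # and prints it, e.g. `print NFVBenchSummarizer(result, sender)` where
    # `result` is the nfvbench result dict and `sender` may be None to skip
    # record sending.
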
    def __summarize(self):
        self._put()
        self._put('========== NFVBench Summary ==========')
        self._put('Date:', self.result['date'])
        self._put('NFVBench version:', self.result['nfvbench_version'])
        self._put('Openstack Neutron:', {
            'vSwitch': self.result['openstack_spec']['vswitch'],
            'Encapsulation': self.result['openstack_spec']['encaps']
        })
        self.__record_header_put('version', self.result['nfvbench_version'])
        self.__record_header_put('vSwitch', self.result['openstack_spec']['vswitch'])
        self.__record_header_put('Encapsulation', self.result['openstack_spec']['encaps'])
        self._put('Benchmarks:')
        with self._create_block():
            self._put('Networks:')
            with self._create_block():
                network_benchmark = self.result['benchmarks']['network']

                self._put('Components:')
                with self._create_block():
                    self._put('TOR:')
                    with self._create_block(False):
                        self._put('Type:', self.config['tor']['type'])
                    self._put('Traffic Generator:')
                    with self._create_block(False):
                        self._put('Profile:', self.config['generator_config']['name'])
                        self._put('Tool:', self.config['generator_config']['tool'])
                    if network_benchmark['versions']:
                        self._put('Versions:')
                        with self._create_block():
                            for component, version in network_benchmark['versions'].iteritems():
                                self._put(component + ':', version)

                if self.config['ndr_run'] or self.config['pdr_run']:
                    self._put('Measurement Parameters:')
                    with self._create_block(False):
                        if self.config['ndr_run']:
                            self._put('NDR:', self.config['measurement']['NDR'])
                        if self.config['pdr_run']:
                            self._put('PDR:', self.config['measurement']['PDR'])
                self._put('Service chain:')
                for result in network_benchmark['service_chain'].iteritems():
                    with self._create_block():
                        self.__chain_summarize(*result)

    def __chain_summarize(self, chain_name, chain_benchmark):
        self._put(chain_name + ':')
        if chain_name == ChainType.PVVP:
            self._put('Mode:', chain_benchmark.get('mode'))
            chain_name += "-" + chain_benchmark.get('mode')
        self.__record_header_put('service_chain', chain_name)
        with self._create_block():
            self._put('Traffic:')
            with self._create_block(False):
                self.__traffic_summarize(chain_benchmark['result'])

    def __traffic_summarize(self, traffic_benchmark):
        self._put('Profile:', traffic_benchmark['profile'])
        self._put('Bidirectional:', traffic_benchmark['bidirectional'])
        self._put('Flow count:', traffic_benchmark['flow_count'])
        self._put('Service chains count:', traffic_benchmark['service_chain_count'])
        self._put('Compute nodes:', traffic_benchmark['compute_nodes'].keys())

        self.__record_header_put('profile', traffic_benchmark['profile'])
        self.__record_header_put('bidirectional', traffic_benchmark['bidirectional'])
        self.__record_header_put('flow_count', traffic_benchmark['flow_count'])
        self.__record_header_put('sc_count', traffic_benchmark['service_chain_count'])
        self.__record_header_put('compute_nodes', traffic_benchmark['compute_nodes'].keys())
        with self._create_block(False):
            self._put()
            if not self.config['no_traffic']:
                self._put('Run Summary:')
                self._put()
                with self._create_block(False):
                    self._put_table(self.__get_summary_table(traffic_benchmark['result']))
                    try:
                        self._put()
                        self._put(traffic_benchmark['result']['warning'])
                    except KeyError:
                        pass

            for entry in traffic_benchmark['result'].iteritems():
                if 'warning' in entry:
                    continue
                self.__chain_analysis_summarize(*entry)
                self.__record_send()

    def __chain_analysis_summarize(self, frame_size, analysis):
        self._put()
        self._put('L2 frame size:', frame_size)
        if 'analysis_duration_sec' in analysis:
            self._put('Chain analysis duration:',
                      Formatter.float(3)(analysis['analysis_duration_sec']), 'seconds')
            self.__record_data_put(frame_size, {'chain_analysis_duration': Formatter.float(3)(
                analysis['analysis_duration_sec'])})
        if self.config['ndr_run']:
            self._put('NDR search duration:', Formatter.float(0)(analysis['ndr']['time_taken_sec']),
                      'seconds')
            self.__record_data_put(frame_size, {'ndr_search_duration': Formatter.float(0)(
                analysis['ndr']['time_taken_sec'])})
        if self.config['pdr_run']:
            self._put('PDR search duration:', Formatter.float(0)(analysis['pdr']['time_taken_sec']),
                      'seconds')
            self.__record_data_put(frame_size, {'pdr_search_duration': Formatter.float(0)(
                analysis['pdr']['time_taken_sec'])})
        self._put()

        if not self.config['no_traffic'] and self.config['single_run']:
            self._put('Run Config:')
            self._put()
            with self._create_block(False):
                self._put_table(self.__get_config_table(analysis['run_config'], frame_size))
                if 'warning' in analysis['run_config'] and analysis['run_config']['warning']:
                    self._put()
                    self._put(analysis['run_config']['warning'])
                self._put()

        if 'packet_analysis' in analysis:
            self._put('Chain Analysis:')
            self._put()
            with self._create_block(False):
                self._put_table(self.__get_chain_analysis_table(analysis['packet_analysis']))
                self._put()

    def __get_summary_table(self, traffic_result):
        if self.config['single_run']:
            summary_table = Table(self.single_run_header)
        else:
            summary_table = Table(self.ndr_pdr_header)

        if self.config['ndr_run']:
            for frame_size, analysis in traffic_result.iteritems():
                if frame_size == 'warning':
                    continue
                summary_table.add_row([
                    'NDR',
                    frame_size,
                    analysis['ndr']['rate_bps'],
                    analysis['ndr']['rate_pps'],
                    analysis['ndr']['stats']['overall']['drop_percentage'],
                    analysis['ndr']['stats']['overall']['avg_delay_usec'],
                    analysis['ndr']['stats']['overall']['min_delay_usec'],
                    analysis['ndr']['stats']['overall']['max_delay_usec']
                ])
                self.__record_data_put(frame_size, {'ndr': {
                    'type': 'NDR',
                    'rate_bps': analysis['ndr']['rate_bps'],
                    'rate_pps': analysis['ndr']['rate_pps'],
                    'drop_percantage': analysis['ndr']['stats']['overall']['drop_percentage'],
                    'avg_delay_usec': analysis['ndr']['stats']['overall']['avg_delay_usec'],
                    'min_delay_usec': analysis['ndr']['stats']['overall']['min_delay_usec'],
                    'max_delay_usec': analysis['ndr']['stats']['overall']['max_delay_usec']
                }})
        if self.config['pdr_run']:
            for frame_size, analysis in traffic_result.iteritems():
                if frame_size == 'warning':
                    continue
                summary_table.add_row([
                    'PDR',
                    frame_size,
                    analysis['pdr']['rate_bps'],
                    analysis['pdr']['rate_pps'],
                    analysis['pdr']['stats']['overall']['drop_percentage'],
                    analysis['pdr']['stats']['overall']['avg_delay_usec'],
                    analysis['pdr']['stats']['overall']['min_delay_usec'],
                    analysis['pdr']['stats']['overall']['max_delay_usec']
                ])
                self.__record_data_put(frame_size, {'pdr': {
                    'type': 'PDR',
                    'rate_bps': analysis['pdr']['rate_bps'],
                    'rate_pps': analysis['pdr']['rate_pps'],
                    'drop_percantage': analysis['pdr']['stats']['overall']['drop_percentage'],
                    'avg_delay_usec': analysis['pdr']['stats']['overall']['avg_delay_usec'],
                    'min_delay_usec': analysis['pdr']['stats']['overall']['min_delay_usec'],
                    'max_delay_usec': analysis['pdr']['stats']['overall']['max_delay_usec']
                }})
        if self.config['single_run']:
            for frame_size, analysis in traffic_result.iteritems():
                summary_table.add_row([
                    frame_size,
                    analysis['stats']['overall']['drop_rate_percent'],
                    analysis['stats']['overall']['rx']['avg_delay_usec'],
                    analysis['stats']['overall']['rx']['min_delay_usec'],
                    analysis['stats']['overall']['rx']['max_delay_usec']
                ])
                self.__record_data_put(frame_size, {'single_run': {
                    'type': 'single_run',
                    'drop_rate_percent': analysis['stats']['overall']['drop_rate_percent'],
                    'avg_delay_usec': analysis['stats']['overall']['rx']['avg_delay_usec'],
                    'min_delay_usec': analysis['stats']['overall']['rx']['min_delay_usec'],
                    'max_delay_usec': analysis['stats']['overall']['rx']['max_delay_usec']
                }})
        return summary_table

    def __get_config_table(self, run_config, frame_size):
        config_table = Table(self.config_header)
        for key, name in zip(self.direction_keys, self.direction_names):
            if key not in run_config:
                continue
            config_table.add_row([
                name,
                run_config[key]['orig']['rate_bps'],
                run_config[key]['tx']['rate_bps'],
                run_config[key]['rx']['rate_bps'],
                int(run_config[key]['orig']['rate_pps']),
                int(run_config[key]['tx']['rate_pps']),
                int(run_config[key]['rx']['rate_pps']),
            ])
            self.__record_data_put(frame_size, {
                name.lower() + "_orig_rate_bps": int(run_config[key]['orig']['rate_bps']),
                name.lower() + "_tx_rate_bps": int(run_config[key]['tx']['rate_bps']),
                name.lower() + "_rx_rate_bps": int(run_config[key]['rx']['rate_bps']),
                name.lower() + "_orig_rate_pps": int(run_config[key]['orig']['rate_pps']),
                name.lower() + "_tx_rate_pps": int(run_config[key]['tx']['rate_pps']),
                name.lower() + "_rx_rate_pps": int(run_config[key]['rx']['rate_pps']),
            })
        return config_table

    def __get_chain_analysis_table(self, packet_analysis):
        chain_analysis_table = Table(self.chain_analysis_header)
        forward_analysis = packet_analysis['direction-forward']
        reverse_analysis = packet_analysis['direction-reverse']
        reverse_analysis.reverse()
        for fwd, rev in zip(forward_analysis, reverse_analysis):
            chain_analysis_table.add_row([
                fwd['interface'],
                fwd['device'],
                fwd['packet_count'],
                fwd.get('packet_drop_count', None),
                fwd.get('packet_drop_percentage', None),
                rev['packet_count'],
                rev.get('packet_drop_count', None),
                rev.get('packet_drop_percentage', None),
            ])
        return chain_analysis_table

    def __record_header_put(self, key, value):
        if self.sender:
            self.record_header[key] = value

    def __record_data_put(self, key, data):
        if self.sender:
            if key not in self.record_data:
                self.record_data[key] = {}
            self.record_data[key].update(data)

    def __record_send(self):
        if self.sender:
            self.record_header["@timestamp"] = datetime.utcnow().replace(
                tzinfo=pytz.utc).strftime(
                "%Y-%m-%dT%H:%M:%S.%f%z")
            for frame_size in self.record_data:
                data = self.record_header
                data['frame_size'] = frame_size
                data.update(self.record_data[frame_size])
                run_specific_data = {}
                if 'single_run' in data:
                    run_specific_data['single_run'] = data['single_run']
                    del data['single_run']
                if 'ndr' in data:
                    run_specific_data['ndr'] = data['ndr']
                    run_specific_data['ndr']['drop_limit'] = self.config['measurement']['NDR']
                    del data['ndr']
                if 'pdr' in data:
                    run_specific_data['pdr'] = data['pdr']
                    run_specific_data['pdr']['drop_limit'] = self.config['measurement']['PDR']
                    del data['pdr']
                for key in run_specific_data:
                    data_to_send = data.copy()
                    data_to_send.update(run_specific_data[key])
                    self.sender.record_send(data_to_send)
            self.__record_init()

    def __record_init(self):
        # only called after checking that a sender is available
        self.record_header = {
            "runlogdate": self.sender.runlogdate,
        }
        self.record_data = {}
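

# Illustrative sketch only (not part of the module): any object exposing a
# `runlogdate` attribute and a `record_send(record)` method can act as the
# `sender`; the names below are hypothetical and exist only for this example.
#
#   class PrintSender(object):
#       runlogdate = '2017-01-01T00:00:00Z'
#
#       def record_send(self, record):
#           print record
#
#   print NFVBenchSummarizer(result, PrintSender())   # `result` is an nfvbench result dict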