NFVBENCH-66 Fixed rate run with multiple frame sizes sends same tx/rx
nfvbench/summarizer.py
#!/usr/bin/env python
# Copyright 2016 Cisco Systems, Inc.  All rights reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.
#

from contextlib import contextmanager
from datetime import datetime
import math

import bitmath
import pytz
from tabulate import tabulate

from specs import ChainType


class Formatter(object):
    """Collection of string formatter methods"""

    @staticmethod
    def fixed(data):
        return data

    @staticmethod
    def int(data):
        return '{:,}'.format(data)

    @staticmethod
    def float(decimal):
        return lambda data: '%.{}f'.format(decimal) % (data)

    @staticmethod
    def standard(data):
        if isinstance(data, int):
            return Formatter.int(data)
        elif isinstance(data, float):
            return Formatter.float(4)(data)
        return Formatter.fixed(data)

    @staticmethod
    def suffix(suffix_str):
        return lambda data: Formatter.standard(data) + suffix_str

    @staticmethod
    def bits(data):
        # By default, `best_prefix` returns a value in byte format; this hack (multiply by 8.0)
        # converts it into bit format.
        bit = 8.0 * bitmath.Bit(float(data))
        bit = bit.best_prefix(bitmath.SI)
        byte_to_bit_classes = {
            'kB': bitmath.kb,
            'MB': bitmath.Mb,
            'GB': bitmath.Gb,
            'TB': bitmath.Tb,
            'PB': bitmath.Pb,
            'EB': bitmath.Eb,
            'ZB': bitmath.Zb,
            'YB': bitmath.Yb,
        }
        bps = byte_to_bit_classes.get(bit.unit, bitmath.Bit).from_other(bit) / 8.0
        if bps.unit != 'Bit':
            return bps.format("{value:.4f} {unit}ps")
        return bps.format("{value:.4f} bps")

    @staticmethod
    def percentage(data):
        if data is None:
            return ''
        elif math.isnan(data):
            return '-'
        return Formatter.suffix('%')(Formatter.float(4)(data))

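# Illustrative usage of the Formatter helpers (a sketch for readers, not used at
# runtime; the exact bitmath rounding of Formatter.bits may differ slightly):
#
#   Formatter.int(1234567)             -> '1,234,567'
#   Formatter.float(2)(3.14159)        -> '3.14'
#   Formatter.suffix(' pps')(1000)     -> '1,000 pps'
#   Formatter.bits(10 * 10**9)         -> roughly '10.0000 Gbps'
#   Formatter.percentage(float('nan')) -> '-'
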

class Table(object):
    """ASCII readable table class"""

    def __init__(self, header):
        header_row, self.formatters = zip(*header)
        self.data = [header_row]
        self.columns = len(header_row)

    def add_row(self, row):
        assert self.columns == len(row)
        formatted_row = []
        for entry, formatter in zip(row, self.formatters):
            formatted_row.append(formatter(entry))
        self.data.append(formatted_row)

    def get_string(self, indent=0):
        spaces = ' ' * indent
        table = tabulate(self.data,
                         headers='firstrow',
                         tablefmt='grid',
                         stralign='center',
                         floatfmt='.2f')
        return table.replace('\n', '\n' + spaces)

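# Illustrative Table usage (a sketch; each header entry is a (column title,
# formatter callable) pair, as in the NFVBenchSummarizer headers below):
#
#   t = Table([('L2 Frame Size', Formatter.standard),
#              ('Drop Rate', Formatter.suffix('%'))])
#   t.add_row([64, 0.0])
#   print t.get_string(indent=4)
#
# which prints a tabulate 'grid' table with the formatted row '64' / '0.0000%'.
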

class Summarizer(object):
    """Generic summarizer class"""

    indent_per_level = 2

    def __init__(self):
        self.indent_size = 0
        self.marker_stack = [False]
        self.str = ''

    def __indent(self, marker):
        self.indent_size += self.indent_per_level
        self.marker_stack.append(marker)

    def __unindent(self):
        assert self.indent_size >= self.indent_per_level
        self.indent_size -= self.indent_per_level
        self.marker_stack.pop()

    def __get_indent_string(self):
        current_str = ' ' * self.indent_size
        if self.marker_stack[-1]:
            current_str = current_str[:-2] + '> '
        return current_str

    def _put(self, *args):
        self.str += self.__get_indent_string()
        if args and isinstance(args[-1], dict):
            self.str += ' '.join(map(str, args[:-1])) + '\n'
            self._put_dict(args[-1])
        else:
            self.str += ' '.join(map(str, args)) + '\n'

    def _put_dict(self, data):
        with self._create_block(False):
            for key, value in data.iteritems():
                if isinstance(value, dict):
                    self._put(key + ':')
                    self._put_dict(value)
                else:
                    self._put(key + ':', value)

    def _put_table(self, table):
        self.str += self.__get_indent_string()
        self.str += table.get_string(self.indent_size) + '\n'

    def __str__(self):
        return self.str

    @contextmanager
    def _create_block(self, marker=True):
        self.__indent(marker)
        yield
        self.__unindent()

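# Illustrative use of the Summarizer primitives (a sketch; NFVBenchSummarizer
# below drives _put/_put_table/_create_block in exactly this way):
#
#   s = Summarizer()
#   s._put('Benchmarks:')
#   with s._create_block():       # indent by 2; marker=True prefixes lines with '> '
#       s._put('Networks:')
#   print s
#
# produces:
#
#   Benchmarks:
#   > Networks:
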

class NFVBenchSummarizer(Summarizer):
    """Summarize nfvbench json result"""

    ndr_pdr_header = [
        ('-', Formatter.fixed),
        ('L2 Frame Size', Formatter.standard),
        ('Rate (fwd+rev)', Formatter.bits),
        ('Rate (fwd+rev)', Formatter.suffix(' pps')),
        ('Avg Drop Rate', Formatter.suffix('%')),
        ('Avg Latency (usec)', Formatter.standard),
        ('Min Latency (usec)', Formatter.standard),
        ('Max Latency (usec)', Formatter.standard)
    ]

    single_run_header = [
        ('L2 Frame Size', Formatter.standard),
        ('Drop Rate', Formatter.suffix('%')),
        ('Avg Latency (usec)', Formatter.standard),
        ('Min Latency (usec)', Formatter.standard),
        ('Max Latency (usec)', Formatter.standard)
    ]

    config_header = [
        ('Direction', Formatter.standard),
        ('Requested TX Rate (bps)', Formatter.bits),
        ('Actual TX Rate (bps)', Formatter.bits),
        ('RX Rate (bps)', Formatter.bits),
        ('Requested TX Rate (pps)', Formatter.suffix(' pps')),
        ('Actual TX Rate (pps)', Formatter.suffix(' pps')),
        ('RX Rate (pps)', Formatter.suffix(' pps'))
    ]

    chain_analysis_header = [
        ('Interface', Formatter.standard),
        ('Device', Formatter.standard),
        ('Packets (fwd)', Formatter.standard),
        ('Drops (fwd)', Formatter.standard),
        ('Drop% (fwd)', Formatter.percentage),
        ('Packets (rev)', Formatter.standard),
        ('Drops (rev)', Formatter.standard),
        ('Drop% (rev)', Formatter.percentage)
    ]
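
    # Each header above is a list of (column title, formatter) pairs consumed by
    # Table: the title becomes the column heading and the formatter renders every
    # value later passed to Table.add_row() for that column.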

    direction_keys = ['direction-forward', 'direction-reverse', 'direction-total']
    direction_names = ['Forward', 'Reverse', 'Total']

    def __init__(self, result, sender):
        Summarizer.__init__(self)
        self.result = result
        self.config = self.result['config']
        self.record_header = None
        self.record_data = None
        self.sender = sender
        # if sender is available initialize record
        if self.sender:
            self.__record_init()
        self.__summarize()

    def __get_openstack_spec(self, property):
        try:
            return self.result['openstack_spec'][property]
        except KeyError:
            return ''

    def __summarize(self):
        self._put()
        self._put('========== NFVBench Summary ==========')
        self._put('Date:', self.result['date'])
        self._put('NFVBench version', self.result['nfvbench_version'])
        self._put('Openstack Neutron:', {
            'vSwitch': self.__get_openstack_spec('vswitch'),
            'Encapsulation': self.__get_openstack_spec('encaps')
        })
        self.__record_header_put('version', self.result['nfvbench_version'])
        self.__record_header_put('vSwitch', self.__get_openstack_spec('vswitch'))
        self.__record_header_put('Encapsulation', self.__get_openstack_spec('encaps'))
        self._put('Benchmarks:')
        with self._create_block():
            self._put('Networks:')
            with self._create_block():
                network_benchmark = self.result['benchmarks']['network']

                self._put('Components:')
                with self._create_block():
                    self._put('TOR:')
                    with self._create_block(False):
                        self._put('Type:', self.config['tor']['type'])
                    self._put('Traffic Generator:')
                    with self._create_block(False):
                        self._put('Profile:', self.config['generator_config']['name'])
                        self._put('Tool:', self.config['generator_config']['tool'])
                    if network_benchmark['versions']:
                        self._put('Versions:')
                        with self._create_block():
                            for component, version in network_benchmark['versions'].iteritems():
                                self._put(component + ':', version)

                if self.config['ndr_run'] or self.config['pdr_run']:
                    self._put('Measurement Parameters:')
                    with self._create_block(False):
                        if self.config['ndr_run']:
                            self._put('NDR:', self.config['measurement']['NDR'])
                        if self.config['pdr_run']:
                            self._put('PDR:', self.config['measurement']['PDR'])
                self._put('Service chain:')
                for result in network_benchmark['service_chain'].iteritems():
                    with self._create_block():
                        self.__chain_summarize(*result)

    def __chain_summarize(self, chain_name, chain_benchmark):
        self._put(chain_name + ':')
        if chain_name == ChainType.PVVP:
            self._put('Mode:', chain_benchmark.get('mode'))
            chain_name += "-" + chain_benchmark.get('mode')
        self.__record_header_put('service_chain', chain_name)
        with self._create_block():
            self._put('Traffic:')
            with self._create_block(False):
                self.__traffic_summarize(chain_benchmark['result'])

    def __traffic_summarize(self, traffic_benchmark):
        self._put('Profile:', traffic_benchmark['profile'])
        self._put('Bidirectional:', traffic_benchmark['bidirectional'])
        self._put('Flow count:', traffic_benchmark['flow_count'])
        self._put('Service chains count:', traffic_benchmark['service_chain_count'])
        self._put('Compute nodes:', traffic_benchmark['compute_nodes'].keys())

        self.__record_header_put('profile', traffic_benchmark['profile'])
        self.__record_header_put('bidirectional', traffic_benchmark['bidirectional'])
        self.__record_header_put('flow_count', traffic_benchmark['flow_count'])
        self.__record_header_put('sc_count', traffic_benchmark['service_chain_count'])
        self.__record_header_put('compute_nodes', traffic_benchmark['compute_nodes'].keys())
        with self._create_block(False):
            self._put()
            if not self.config['no_traffic']:
                self._put('Run Summary:')
                self._put()
                with self._create_block(False):
                    self._put_table(self.__get_summary_table(traffic_benchmark['result']))
                    try:
                        self._put()
                        self._put(traffic_benchmark['result']['warning'])
                    except KeyError:
                        pass

            # iterate over (frame size, analysis) pairs, skipping the top-level
            # 'warning' entry which is not a per-frame-size result
            for entry in traffic_benchmark['result'].iteritems():
                if 'warning' in entry:
                    continue
                self.__chain_analysis_summarize(*entry)
            self.__record_send()

    def __chain_analysis_summarize(self, frame_size, analysis):
        self._put()
        self._put('L2 frame size:', frame_size)
        if 'analysis_duration_sec' in analysis:
            self._put('Chain analysis duration:',
                      Formatter.float(3)(analysis['analysis_duration_sec']), 'seconds')
            self.__record_data_put(frame_size, {'chain_analysis_duration': Formatter.float(3)(
                analysis['analysis_duration_sec'])})
        if self.config['ndr_run']:
            self._put('NDR search duration:', Formatter.float(0)(analysis['ndr']['time_taken_sec']),
                      'seconds')
            self.__record_data_put(frame_size, {'ndr_search_duration': Formatter.float(0)(
                analysis['ndr']['time_taken_sec'])})
        if self.config['pdr_run']:
            self._put('PDR search duration:', Formatter.float(0)(analysis['pdr']['time_taken_sec']),
                      'seconds')
            self.__record_data_put(frame_size, {'pdr_search_duration': Formatter.float(0)(
                analysis['pdr']['time_taken_sec'])})
        self._put()

        if not self.config['no_traffic'] and self.config['single_run']:
            self._put('Run Config:')
            self._put()
            with self._create_block(False):
                self._put_table(self.__get_config_table(analysis['run_config'], frame_size))
                if 'warning' in analysis['run_config'] and analysis['run_config']['warning']:
                    self._put()
                    self._put(analysis['run_config']['warning'])
                self._put()

        if 'packet_analysis' in analysis:
            self._put('Chain Analysis:')
            self._put()
            with self._create_block(False):
                self._put_table(self.__get_chain_analysis_table(analysis['packet_analysis']))
                self._put()

    def __get_summary_table(self, traffic_result):
        if self.config['single_run']:
            summary_table = Table(self.single_run_header)
        else:
            summary_table = Table(self.ndr_pdr_header)

        if self.config['ndr_run']:
            for frame_size, analysis in traffic_result.iteritems():
                if frame_size == 'warning':
                    continue
                summary_table.add_row([
                    'NDR',
                    frame_size,
                    analysis['ndr']['rate_bps'],
                    analysis['ndr']['rate_pps'],
                    analysis['ndr']['stats']['overall']['drop_percentage'],
                    analysis['ndr']['stats']['overall']['avg_delay_usec'],
                    analysis['ndr']['stats']['overall']['min_delay_usec'],
                    analysis['ndr']['stats']['overall']['max_delay_usec']
                ])
                self.__record_data_put(frame_size, {'ndr': {
                    'type': 'NDR',
                    'rate_bps': analysis['ndr']['rate_bps'],
                    'rate_pps': analysis['ndr']['rate_pps'],
                    'drop_percentage': analysis['ndr']['stats']['overall']['drop_percentage'],
                    'avg_delay_usec': analysis['ndr']['stats']['overall']['avg_delay_usec'],
                    'min_delay_usec': analysis['ndr']['stats']['overall']['min_delay_usec'],
                    'max_delay_usec': analysis['ndr']['stats']['overall']['max_delay_usec']
                }})
        if self.config['pdr_run']:
            for frame_size, analysis in traffic_result.iteritems():
                if frame_size == 'warning':
                    continue
                summary_table.add_row([
                    'PDR',
                    frame_size,
                    analysis['pdr']['rate_bps'],
                    analysis['pdr']['rate_pps'],
                    analysis['pdr']['stats']['overall']['drop_percentage'],
                    analysis['pdr']['stats']['overall']['avg_delay_usec'],
                    analysis['pdr']['stats']['overall']['min_delay_usec'],
                    analysis['pdr']['stats']['overall']['max_delay_usec']
                ])
                self.__record_data_put(frame_size, {'pdr': {
                    'type': 'PDR',
                    'rate_bps': analysis['pdr']['rate_bps'],
                    'rate_pps': analysis['pdr']['rate_pps'],
                    'drop_percentage': analysis['pdr']['stats']['overall']['drop_percentage'],
                    'avg_delay_usec': analysis['pdr']['stats']['overall']['avg_delay_usec'],
                    'min_delay_usec': analysis['pdr']['stats']['overall']['min_delay_usec'],
                    'max_delay_usec': analysis['pdr']['stats']['overall']['max_delay_usec']
                }})
        if self.config['single_run']:
            for frame_size, analysis in traffic_result.iteritems():
                summary_table.add_row([
                    frame_size,
                    analysis['stats']['overall']['drop_rate_percent'],
                    analysis['stats']['overall']['rx']['avg_delay_usec'],
                    analysis['stats']['overall']['rx']['min_delay_usec'],
                    analysis['stats']['overall']['rx']['max_delay_usec']
                ])
                self.__record_data_put(frame_size, {'single_run': {
                    'type': 'single_run',
                    'drop_rate_percent': analysis['stats']['overall']['drop_rate_percent'],
                    'avg_delay_usec': analysis['stats']['overall']['rx']['avg_delay_usec'],
                    'min_delay_usec': analysis['stats']['overall']['rx']['min_delay_usec'],
                    'max_delay_usec': analysis['stats']['overall']['rx']['max_delay_usec']
                }})
        return summary_table

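    # Shape of the traffic_result dict consumed by __get_summary_table above
    # (as implied by the lookups in that method): it is keyed by L2 frame size,
    # with an optional top-level 'warning' entry; NDR/PDR entries carry
    # 'ndr'/'pdr' sub-dicts ('rate_bps', 'rate_pps', 'stats' -> 'overall'),
    # while fixed (single) run entries carry 'stats' -> 'overall' with an 'rx'
    # sub-dict holding the latency fields.
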
    def __get_config_table(self, run_config, frame_size):
        config_table = Table(self.config_header)
        for key, name in zip(self.direction_keys, self.direction_names):
            if key not in run_config:
                continue
            config_table.add_row([
                name,
                run_config[key]['orig']['rate_bps'],
                run_config[key]['tx']['rate_bps'],
                run_config[key]['rx']['rate_bps'],
                int(run_config[key]['orig']['rate_pps']),
                int(run_config[key]['tx']['rate_pps']),
                int(run_config[key]['rx']['rate_pps']),
            ])
            self.__record_data_put(frame_size, {
                name.lower() + "_orig_rate_bps": int(run_config[key]['orig']['rate_bps']),
                name.lower() + "_tx_rate_bps": int(run_config[key]['tx']['rate_bps']),
                name.lower() + "_rx_rate_bps": int(run_config[key]['rx']['rate_bps']),
                name.lower() + "_orig_rate_pps": int(run_config[key]['orig']['rate_pps']),
                name.lower() + "_tx_rate_pps": int(run_config[key]['tx']['rate_pps']),
                name.lower() + "_rx_rate_pps": int(run_config[key]['rx']['rate_pps']),
            })
        return config_table

    def __get_chain_analysis_table(self, packet_analysis):
        chain_analysis_table = Table(self.chain_analysis_header)
        forward_analysis = packet_analysis['direction-forward']
        reverse_analysis = packet_analysis['direction-reverse']
        # flip the reverse-direction list so that zip() pairs the forward and
        # reverse entries of the same interface on a single table row
        reverse_analysis.reverse()
        for fwd, rev in zip(forward_analysis, reverse_analysis):
            chain_analysis_table.add_row([
                fwd['interface'],
                fwd['device'],
                fwd['packet_count'],
                fwd.get('packet_drop_count', None),
                fwd.get('packet_drop_percentage', None),
                rev['packet_count'],
                rev.get('packet_drop_count', None),
                rev.get('packet_drop_percentage', None),
            ])
        return chain_analysis_table

    def __record_header_put(self, key, value):
        if self.sender:
            self.record_header[key] = value

    def __record_data_put(self, key, data):
        if self.sender:
            if key not in self.record_data:
                self.record_data[key] = {}
            self.record_data[key].update(data)

    def __record_send(self):
        if self.sender:
            self.record_header["@timestamp"] = datetime.utcnow().replace(
                tzinfo=pytz.utc).strftime("%Y-%m-%dT%H:%M:%S.%f%z")
            for frame_size in self.record_data:
                data = self.record_header
                data['frame_size'] = frame_size
                data.update(self.record_data[frame_size])
                run_specific_data = {}
                if 'single_run' in data:
                    run_specific_data['single_run'] = data['single_run']
                    del data['single_run']
                if 'ndr' in data:
                    run_specific_data['ndr'] = data['ndr']
                    run_specific_data['ndr']['drop_limit'] = self.config['measurement']['NDR']
                    del data['ndr']
                if 'pdr' in data:
                    run_specific_data['pdr'] = data['pdr']
                    run_specific_data['pdr']['drop_limit'] = self.config['measurement']['PDR']
                    del data['pdr']
                for key in run_specific_data:
                    data_to_send = data.copy()
                    data_to_send.update(run_specific_data[key])
                    self.sender.record_send(data_to_send)
            self.__record_init()

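    # Note: the sender given to __init__ is expected to provide a runlogdate
    # attribute (read in __record_init below) and a record_send(dict) method
    # (called in __record_send above). When no sender is supplied, the record
    # put/send helpers above are no-ops.
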
    def __record_init(self):
        # init is called after checking for sender
        self.record_header = {
            "runlogdate": self.sender.runlogdate,
            "user_label": self.config['user_label']
        }
        self.record_data = {}
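

# Illustrative usage (a sketch, not executed by this module): `result` is
# assumed to be the JSON result dict produced by an nfvbench run, containing
# the keys read in __summarize().
#
#   summarizer = NFVBenchSummarizer(result, sender=None)  # no record is sent
#   print summarizer                                       # ASCII summary via __str__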