2 # Copyright 2016 Cisco Systems, Inc. All rights reserved.
4 # Licensed under the Apache License, Version 2.0 (the "License"); you may
5 # not use this file except in compliance with the License. You may obtain
6 # a copy of the License at
8 # http://www.apache.org/licenses/LICENSE-2.0
10 # Unless required by applicable law or agreed to in writing, software
11 # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
12 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13 # License for the specific language governing permissions and limitations
from contextlib import contextmanager
from datetime import datetime
import math

import bitmath
import pytz
from tabulate import tabulate
25 def _annotate_chain_stats(chain_stats, nodrop_marker='=>'):
26 """Transform a plain chain stats into an annotated one.
30 0: {'packets': [2000054, 1999996, 1999996, 1999996],
39 0: {'packets': [2000054, -58 (-0.034%), '=>', 1999996],
47 In the case of shared net, some columns in packets array can have ''.
48 Some columns cab also be None which means the data is not available.
50 for stats in list(chain_stats.values()):
51 packets = stats['packets']
54 # keep the first counter
55 annotated_packets = [packets[0]]
56 # modify all remaining counters
57 prev_count = packets[0]
58 for index in range(1, count):
59 cur_count = packets[index]
61 # an empty string indicates an unknown counter for a shared interface
62 # do not annotate those
64 elif cur_count is None:
66 annotated_value = 'n/a'
68 drop = cur_count - prev_count
70 dr = (drop * 100.0) / prev_count if prev_count else 0
71 annotated_value = '{:+,} ({:+.4f}%)'.format(drop, dr)
74 # if last column we display the value
75 annotated_value = cur_count if index == count - 1 else nodrop_marker
76 prev_count = cur_count
77 annotated_packets.append(annotated_value)
79 stats['packets'] = annotated_packets
81 class Formatter(object):
82 """Collection of string formatter methods."""
90 return '{:,}'.format(data)
94 return lambda data: '%.{}f'.format(decimal) % (data)
98 if isinstance(data, int):
99 return Formatter.int(data)
100 if isinstance(data, float):
101 return Formatter.float(4)(data)
102 return Formatter.fixed(data)
105 def suffix(suffix_str):
106 return lambda data: Formatter.standard(data) + suffix_str
110 # By default, `best_prefix` returns a value in byte format, this hack (multiply by 8.0)
111 # will convert it into bit format.
112 bit = 8.0 * bitmath.Bit(float(data))
113 bit = bit.best_prefix(bitmath.SI)
114 byte_to_bit_classes = {
124 bps = byte_to_bit_classes.get(bit.unit, bitmath.Bit).from_other(bit) / 8.0
125 if bps.unit != 'Bit':
126 return bps.format("{value:.4f} {unit}ps")
127 return bps.format("{value:.4f} bps")
130 def percentage(data):
135 return Formatter.suffix('%')(Formatter.float(4)(data))
139 """ASCII readable table class."""
141 def __init__(self, header):
142 header_row, self.formatters = list(zip(*header))
143 self.data = [header_row]
144 self.columns = len(header_row)
146 def add_row(self, row):
147 assert self.columns == len(row)
149 for entry, formatter in zip(row, self.formatters):
150 formatted_row.append(formatter(entry))
151 self.data.append(formatted_row)
153 def get_string(self, indent=0):
154 spaces = ' ' * indent
155 table = tabulate(self.data,
160 return table.replace('\n', '\n' + spaces)
163 class Summarizer(object):
164 """Generic summarizer class."""
170 self.marker_stack = [False]
173 def __indent(self, marker):
174 self.indent_size += self.indent_per_level
175 self.marker_stack.append(marker)
177 def __unindent(self):
178 assert self.indent_size >= self.indent_per_level
179 self.indent_size -= self.indent_per_level
180 self.marker_stack.pop()
182 def __get_indent_string(self):
183 current_str = ' ' * self.indent_size
184 if self.marker_stack[-1]:
185 current_str = current_str[:-2] + '> '
188 def _put(self, *args):
189 self.str += self.__get_indent_string()
190 if args and isinstance(args[-1], dict):
191 self.str += ' '.join(map(str, args[:-1])) + '\n'
192 self._put_dict(args[-1])
194 self.str += ' '.join(map(str, args)) + '\n'
196 def _put_dict(self, data):
197 with self._create_block(False):
198 for key, value in list(data.items()):
199 if isinstance(value, dict):
201 self._put_dict(value)
203 self._put(key + ':', value)
205 def _put_table(self, table):
206 self.str += self.__get_indent_string()
207 self.str += table.get_string(self.indent_size) + '\n'
213 def _create_block(self, marker=True):
214 self.__indent(marker)
219 class NFVBenchSummarizer(Summarizer):
220 """Summarize nfvbench json result."""
223 ('-', Formatter.fixed),
224 ('L2 Frame Size', Formatter.standard),
225 ('Rate (fwd+rev)', Formatter.bits),
226 ('Rate (fwd+rev)', Formatter.suffix(' pps')),
227 ('Avg Drop Rate', Formatter.suffix('%')),
228 ('Avg Latency (usec)', Formatter.standard),
229 ('Min Latency (usec)', Formatter.standard),
230 ('Max Latency (usec)', Formatter.standard)
233 single_run_header = [
234 ('L2 Frame Size', Formatter.standard),
235 ('Drop Rate', Formatter.suffix('%')),
236 ('Avg Latency (usec)', Formatter.standard),
237 ('Min Latency (usec)', Formatter.standard),
238 ('Max Latency (usec)', Formatter.standard)
242 ('Direction', Formatter.standard),
243 ('Requested TX Rate (bps)', Formatter.bits),
244 ('Actual TX Rate (bps)', Formatter.bits),
245 ('RX Rate (bps)', Formatter.bits),
246 ('Requested TX Rate (pps)', Formatter.suffix(' pps')),
247 ('Actual TX Rate (pps)', Formatter.suffix(' pps')),
248 ('RX Rate (pps)', Formatter.suffix(' pps'))
251 direction_keys = ['direction-forward', 'direction-reverse', 'direction-total']
252 direction_names = ['Forward', 'Reverse', 'Total']
254 def __init__(self, result, sender):
255 """Create a summarizer instance."""
256 Summarizer.__init__(self)
258 self.config = self.result['config']
259 self.record_header = None
260 self.record_data = None
263 # add percentiles headers if hdrh enabled
264 if not self.config.disable_hdrh:
265 for percentile in self.config.lat_percentiles:
266 self.ndr_pdr_header.append(str(percentile) + ' %ile lat.', Formatter.standard)
267 self.single_run_header.append(str(percentile) + ' %ile lat.', Formatter.standard)
268 # if sender is available initialize record
273 def __get_openstack_spec(self, property):
275 return self.result['openstack_spec'][property]
279 def __summarize(self):
281 self._put('========== NFVBench Summary ==========')
282 self._put('Date:', self.result['date'])
283 self._put('NFVBench version', self.result['nfvbench_version'])
284 self._put('Openstack Neutron:', {
285 'vSwitch': self.__get_openstack_spec('vswitch'),
286 'Encapsulation': self.__get_openstack_spec('encaps')
288 self.__record_header_put('version', self.result['nfvbench_version'])
289 self.__record_header_put('vSwitch', self.__get_openstack_spec('vswitch'))
290 self.__record_header_put('Encapsulation', self.__get_openstack_spec('encaps'))
291 self._put('Benchmarks:')
292 with self._create_block():
293 self._put('Networks:')
294 with self._create_block():
295 network_benchmark = self.result['benchmarks']['network']
297 self._put('Components:')
298 with self._create_block():
299 self._put('Traffic Generator:')
300 with self._create_block(False):
301 self._put('Profile:', self.config['tg-name'])
302 self._put('Tool:', self.config['tg-tool'])
303 if network_benchmark['versions']:
304 self._put('Versions:')
305 with self._create_block():
306 for component, version in list(network_benchmark['versions'].items()):
307 self._put(component + ':', version)
309 if self.config['ndr_run'] or self.config['pdr_run']:
310 self._put('Measurement Parameters:')
311 with self._create_block(False):
312 if self.config['ndr_run']:
313 self._put('NDR:', self.config['measurement']['NDR'])
314 if self.config['pdr_run']:
315 self._put('PDR:', self.config['measurement']['PDR'])
316 self._put('Service chain:')
317 for result in list(network_benchmark['service_chain'].items()):
318 with self._create_block():
319 self.__chain_summarize(*result)
321 def __chain_summarize(self, chain_name, chain_benchmark):
322 self._put(chain_name + ':')
323 self.__record_header_put('service_chain', chain_name)
324 with self._create_block():
325 self._put('Traffic:')
326 with self._create_block(False):
327 self.__traffic_summarize(chain_benchmark['result'])
329 def __traffic_summarize(self, traffic_benchmark):
330 self._put('Profile:', traffic_benchmark['profile'])
331 self._put('Bidirectional:', traffic_benchmark['bidirectional'])
332 self._put('Flow count:', traffic_benchmark['flow_count'])
333 self._put('Service chains count:', traffic_benchmark['service_chain_count'])
334 self._put('Compute nodes:', list(traffic_benchmark['compute_nodes'].keys()))
336 self.__record_header_put('profile', traffic_benchmark['profile'])
337 self.__record_header_put('bidirectional', traffic_benchmark['bidirectional'])
338 self.__record_header_put('flow_count', traffic_benchmark['flow_count'])
339 self.__record_header_put('sc_count', traffic_benchmark['service_chain_count'])
340 self.__record_header_put('compute_nodes', list(traffic_benchmark['compute_nodes'].keys()))
341 with self._create_block(False):
343 if not self.config['no_traffic']:
344 self._put('Run Summary:')
346 with self._create_block(False):
347 self._put_table(self.__get_summary_table(traffic_benchmark['result']))
350 self._put(traffic_benchmark['result']['warning'])
354 for entry in list(traffic_benchmark['result'].items()):
355 if 'warning' in entry:
357 self.__chain_analysis_summarize(*entry)
360 def __chain_analysis_summarize(self, frame_size, analysis):
362 self._put('L2 frame size:', frame_size)
363 if self.config['ndr_run']:
364 self._put('NDR search duration:', Formatter.float(0)(analysis['ndr']['time_taken_sec']),
366 self.__record_data_put(frame_size, {'ndr_search_duration': Formatter.float(0)(
367 analysis['ndr']['time_taken_sec'])})
368 if self.config['pdr_run']:
369 self._put('PDR search duration:', Formatter.float(0)(analysis['pdr']['time_taken_sec']),
371 self.__record_data_put(frame_size, {'pdr_search_duration': Formatter.float(0)(
372 analysis['pdr']['time_taken_sec'])})
375 if not self.config['no_traffic'] and self.config['single_run']:
376 self._put('Run Config:')
378 with self._create_block(False):
379 self._put_table(self.__get_config_table(analysis['run_config'], frame_size))
380 if 'warning' in analysis['run_config'] and analysis['run_config']['warning']:
382 self._put(analysis['run_config']['warning'])
385 if 'packet_path_stats' in analysis:
386 for dir in ['Forward', 'Reverse']:
387 self._put(dir + ' Chain Packet Counters and Latency:')
389 with self._create_block(False):
390 self._put_table(self._get_chain_table(analysis['packet_path_stats'][dir]))
393 def __get_summary_table(self, traffic_result):
394 if self.config['single_run']:
395 summary_table = Table(self.single_run_header)
397 summary_table = Table(self.ndr_pdr_header)
399 if self.config['ndr_run']:
400 for frame_size, analysis in list(traffic_result.items()):
401 if frame_size == 'warning':
407 analysis['ndr']['rate_bps'],
408 analysis['ndr']['rate_pps'],
409 analysis['ndr']['stats']['overall']['drop_percentage'],
410 analysis['ndr']['stats']['overall']['avg_delay_usec'],
411 analysis['ndr']['stats']['overall']['min_delay_usec'],
412 analysis['ndr']['stats']['overall']['max_delay_usec']
414 if not self.config.disable_hdrh:
415 self.extract_hdrh_percentiles(
416 analysis['ndr']['stats']['overall']['lat_percentile'], row_data)
417 summary_table.add_row(row_data)
421 'rate_bps': analysis['ndr']['rate_bps'],
422 'rate_pps': analysis['ndr']['rate_pps'],
423 'offered_tx_rate_bps': analysis['ndr']['stats']['offered_tx_rate_bps'],
424 'theoretical_tx_rate_pps': analysis['ndr']['stats']['theoretical_tx_rate_pps'],
425 'theoretical_tx_rate_bps': analysis['ndr']['stats']['theoretical_tx_rate_bps'],
426 'drop_percentage': analysis['ndr']['stats']['overall']['drop_percentage'],
427 'avg_delay_usec': analysis['ndr']['stats']['overall']['avg_delay_usec'],
428 'min_delay_usec': analysis['ndr']['stats']['overall']['min_delay_usec'],
429 'max_delay_usec': analysis['ndr']['stats']['overall']['max_delay_usec']
431 if not self.config.disable_hdrh:
432 self.extract_hdrh_percentiles(
433 analysis['ndr']['stats']['overall']['lat_percentile'], ndr_data, True)
434 self.__record_data_put(frame_size, {'ndr': ndr_data})
435 if self.config['pdr_run']:
436 for frame_size, analysis in list(traffic_result.items()):
437 if frame_size == 'warning':
443 analysis['pdr']['rate_bps'],
444 analysis['pdr']['rate_pps'],
445 analysis['pdr']['stats']['overall']['drop_percentage'],
446 analysis['pdr']['stats']['overall']['avg_delay_usec'],
447 analysis['pdr']['stats']['overall']['min_delay_usec'],
448 analysis['pdr']['stats']['overall']['max_delay_usec']
450 if not self.config.disable_hdrh:
451 self.extract_hdrh_percentiles(
452 analysis['pdr']['stats']['overall']['lat_percentile'], row_data)
453 summary_table.add_row(row_data)
457 'rate_bps': analysis['pdr']['rate_bps'],
458 'rate_pps': analysis['pdr']['rate_pps'],
459 'offered_tx_rate_bps': analysis['pdr']['stats']['offered_tx_rate_bps'],
460 'theoretical_tx_rate_pps': analysis['pdr']['stats']['theoretical_tx_rate_pps'],
461 'theoretical_tx_rate_bps': analysis['pdr']['stats']['theoretical_tx_rate_bps'],
462 'drop_percentage': analysis['pdr']['stats']['overall']['drop_percentage'],
463 'avg_delay_usec': analysis['pdr']['stats']['overall']['avg_delay_usec'],
464 'min_delay_usec': analysis['pdr']['stats']['overall']['min_delay_usec'],
465 'max_delay_usec': analysis['pdr']['stats']['overall']['max_delay_usec']
467 if not self.config.disable_hdrh:
468 self.extract_hdrh_percentiles(
469 analysis['pdr']['stats']['overall']['lat_percentile'], pdr_data, True)
470 self.__record_data_put(frame_size, {'pdr': pdr_data})
471 if self.config['single_run']:
472 for frame_size, analysis in list(traffic_result.items()):
475 analysis['stats']['overall']['drop_rate_percent'],
476 analysis['stats']['overall']['rx']['avg_delay_usec'],
477 analysis['stats']['overall']['rx']['min_delay_usec'],
478 analysis['stats']['overall']['rx']['max_delay_usec']
480 if not self.config.disable_hdrh:
481 self.extract_hdrh_percentiles(
482 analysis['stats']['overall']['rx']['lat_percentile'], row_data)
483 summary_table.add_row(row_data)
486 'type': 'single_run',
487 'offered_tx_rate_bps': analysis['stats']['offered_tx_rate_bps'],
488 'theoretical_tx_rate_pps': analysis['stats']['theoretical_tx_rate_pps'],
489 'theoretical_tx_rate_bps': analysis['stats']['theoretical_tx_rate_bps'],
490 'drop_rate_percent': analysis['stats']['overall']['drop_rate_percent'],
491 'avg_delay_usec': analysis['stats']['overall']['rx']['avg_delay_usec'],
492 'min_delay_usec': analysis['stats']['overall']['rx']['min_delay_usec'],
493 'max_delay_usec': analysis['stats']['overall']['rx']['max_delay_usec']
495 if not self.config.disable_hdrh:
496 self.extract_hdrh_percentiles(
497 analysis['stats']['overall']['rx']['lat_percentile'], single_run_data, True)
498 self.__record_data_put(frame_size, {'single_run': single_run_data})
501 def extract_hdrh_percentiles(self, lat_percentile, data, add_key=False):
503 data['lat_percentile'] = {}
504 for percentile in self.config.lat_percentiles:
506 data['lat_percentile_' + str(percentile)] = lat_percentile[percentile]
508 data.append(lat_percentile[percentile])
510 def __get_config_table(self, run_config, frame_size):
511 config_table = Table(self.config_header)
512 for key, name in zip(self.direction_keys, self.direction_names):
513 if key not in run_config:
515 config_table.add_row([
517 run_config[key]['orig']['rate_bps'],
518 run_config[key]['tx']['rate_bps'],
519 run_config[key]['rx']['rate_bps'],
520 int(run_config[key]['orig']['rate_pps']),
521 int(run_config[key]['tx']['rate_pps']),
522 int(run_config[key]['rx']['rate_pps']),
524 self.__record_data_put(frame_size, {
525 name.lower() + "_orig_rate_bps": int(run_config[key]['orig']['rate_bps']),
526 name.lower() + "_tx_rate_bps": int(run_config[key]['tx']['rate_bps']),
527 name.lower() + "_rx_rate_bps": int(run_config[key]['rx']['rate_bps']),
528 name.lower() + "_orig_rate_pps": int(run_config[key]['orig']['rate_pps']),
529 name.lower() + "_tx_rate_pps": int(run_config[key]['tx']['rate_pps']),
530 name.lower() + "_rx_rate_pps": int(run_config[key]['rx']['rate_pps']),
535 def _get_chain_table(self, chain_stats):
536 """Retrieve the table for a direction.
539 'interfaces': ['Port0', 'drop %'', 'vhost0', 'Port1'],
541 '0': {'packets': [2000054, '-0.023%', 1999996, 1999996],
550 chains = chain_stats['chains']
551 _annotate_chain_stats(chains)
552 header = [('Chain', Formatter.standard)] + \
553 [(ifname, Formatter.standard) for ifname in chain_stats['interfaces']]
554 # add latency columns if available Avg, Min, Max and percentiles
556 lat_map = {'lat_avg_usec': 'Avg lat.',
557 'lat_min_usec': 'Min lat.',
558 'lat_max_usec': 'Max lat.'}
559 if 'lat_avg_usec' in chains['0']:
560 lat_keys = ['lat_avg_usec', 'lat_min_usec', 'lat_max_usec']
562 if not self.config.disable_hdrh:
563 lat_keys.append('lat_percentile')
564 for percentile in self.config.lat_percentiles:
565 lat_map['lat_' + str(percentile) + '_percentile'] = \
566 str(percentile) + ' %ile lat.'
569 header.append(lat_map[key], Formatter.standard)
571 table = Table(header)
572 for chain in sorted(list(chains.keys()), key=str):
573 row = [chain] + chains[chain]['packets']
574 for lat_key in lat_keys:
576 if lat_key != 'lat_percentile':
577 if chains[chain].get(lat_key, None):
578 row.append(Formatter.standard(chains[chain][lat_key]))
582 if not self.config.disable_hdrh:
583 if chains[chain].get(lat_key, None):
584 for percentile in chains[chain][lat_key]:
585 row.append(Formatter.standard(
586 chains[chain][lat_key][percentile]))
588 for percentile in self.config.lat_percentiles:
593 def __record_header_put(self, key, value):
595 self.record_header[key] = value
597 def __record_data_put(self, key, data):
599 if key not in self.record_data:
600 self.record_data[key] = {}
601 self.record_data[key].update(data)
603 def __record_send(self):
605 self.record_header["@timestamp"] = datetime.utcnow().replace(
606 tzinfo=pytz.utc).strftime("%Y-%m-%dT%H:%M:%S.%f%z")
607 for frame_size in self.record_data:
608 data = self.record_header
609 data['frame_size'] = frame_size
610 data.update(self.record_data[frame_size])
611 run_specific_data = {}
612 if 'single_run' in data:
613 run_specific_data['single_run'] = data['single_run']
614 del data['single_run']
616 run_specific_data['ndr'] = data['ndr']
617 run_specific_data['ndr']['drop_limit'] = self.config['measurement']['NDR']
620 run_specific_data['pdr'] = data['pdr']
621 run_specific_data['pdr']['drop_limit'] = self.config['measurement']['PDR']
623 for key in run_specific_data:
624 data_to_send = data.copy()
625 data_to_send.update(run_specific_data[key])
626 self.sender.record_send(data_to_send)
629 def __record_init(self):
630 # init is called after checking for sender
631 self.record_header = {
632 "runlogdate": self.sender.runlogdate,
633 "user_label": self.config['user_label']
635 self.record_data = {}