# Copyright 2016 Cisco Systems, Inc.  All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import math
from contextlib import contextmanager
from datetime import datetime

import bitmath
import pytz
from tabulate import tabulate
25 def _annotate_chain_stats(chain_stats, nodrop_marker='=>'):
26 """Transform a plain chain stats into an annotated one.
30 0: {'packets': [2000054, 1999996, 1999996, 1999996],
39 0: {'packets': [2000054, -58 (-0.034%), '=>', 1999996],
47 In the case of shared net, some columns in packets array can have ''.
48 Some columns cab also be None which means the data is not available.
50 for stats in list(chain_stats.values()):
51 packets = stats['packets']
54 # keep the first counter
55 annotated_packets = [packets[0]]
56 # modify all remaining counters
57 prev_count = packets[0]
58 for index in range(1, count):
59 cur_count = packets[index]
61 # an empty string indicates an unknown counter for a shared interface
62 # do not annotate those
64 elif cur_count is None:
66 annotated_value = 'n/a'
68 drop = cur_count - prev_count
70 dr = (drop * 100.0) / prev_count if prev_count else 0
71 annotated_value = '{:+,} ({:+.4f}%)'.format(drop, dr)
74 # if last column we display the value
75 annotated_value = cur_count if index == count - 1 else nodrop_marker
76 prev_count = cur_count
77 annotated_packets.append(annotated_value)
79 stats['packets'] = annotated_packets
class Formatter(object):
    """Collection of string formatter methods."""

    @staticmethod
    def fixed(data):
        """Pass-through formatter: return the data unchanged."""
        return data

    @staticmethod
    def int(data):
        """Format an integer with thousands separators (e.g. 1,234,567)."""
        return '{:,}'.format(data)

    @staticmethod
    def float(decimal):
        """Return a formatter rendering a float with `decimal` digits."""
        return lambda data: '%.{}f'.format(decimal) % (data)

    @staticmethod
    def standard(data):
        """Format ints with separators, floats with 4 decimals, pass through the rest."""
        if isinstance(data, int):
            return Formatter.int(data)
        if isinstance(data, float):
            return Formatter.float(4)(data)
        return Formatter.fixed(data)

    @staticmethod
    def suffix(suffix_str):
        """Return a formatter appending suffix_str to the standard format."""
        return lambda data: Formatter.standard(data) + suffix_str

    @staticmethod
    def bits(data):
        """Format a bit-per-second value using the best SI prefix (bps, Kbps...)."""
        # By default, `best_prefix` returns a value in byte format, this hack (multiply by 8.0)
        # will convert it into bit format.
        bit = 8.0 * bitmath.Bit(float(data))
        bit = bit.best_prefix(bitmath.SI)
        # map byte units back to the corresponding bitmath bit class
        byte_to_bit_classes = {
            'kB': bitmath.kb,
            'MB': bitmath.Mb,
            'GB': bitmath.Gb,
            'TB': bitmath.Tb,
            'PB': bitmath.Pb,
            'EB': bitmath.Eb,
            'ZB': bitmath.Zb,
            'YB': bitmath.Yb,
        }
        bps = byte_to_bit_classes.get(bit.unit, bitmath.Bit).from_other(bit) / 8.0
        if bps.unit != 'Bit':
            return bps.format("{value:.4f} {unit}ps")
        return bps.format("{value:.4f} bps")

    @staticmethod
    def percentage(data):
        """Format a percentage value; '' for None and '-' for NaN."""
        if data is None:
            return ''
        if math.isnan(data):
            return '-'
        return Formatter.suffix('%')(Formatter.float(4)(data))
class Table(object):
    """ASCII readable table class.

    Built from a header of (column name, formatter) pairs; each added row
    is formatted column-by-column with the matching formatter.
    """

    def __init__(self, header):
        # split [(name, formatter), ...] into the name row and the formatters
        header_row, self.formatters = list(zip(*header))
        self.data = [header_row]
        self.columns = len(header_row)

    def add_row(self, row):
        """Format and append one row; row length must match the header."""
        assert self.columns == len(row)
        formatted_row = []
        for entry, formatter in zip(row, self.formatters):
            formatted_row.append(formatter(entry))
        self.data.append(formatted_row)

    def get_string(self, indent=0):
        """Render the table as an ASCII grid, indenting every line by `indent`."""
        spaces = ' ' * indent
        table = tabulate(self.data,
                         headers='firstrow',
                         tablefmt='grid',
                         stralign='center',
                         floatfmt='.2f')
        return table.replace('\n', '\n' + spaces)
class Summarizer(object):
    """Generic summarizer class.

    Accumulates indented text lines (and tables) into self.str; nesting is
    handled with the _create_block context manager.
    """

    # number of spaces added per nesting level
    indent_per_level = 2

    def __init__(self):
        self.indent_size = 0
        # one marker flag per nesting level; True prints a '> ' marker
        self.marker_stack = [False]
        self.str = ''

    def __indent(self, marker):
        self.indent_size += self.indent_per_level
        self.marker_stack.append(marker)

    def __unindent(self):
        assert self.indent_size >= self.indent_per_level
        self.indent_size -= self.indent_per_level
        self.marker_stack.pop()

    def __get_indent_string(self):
        current_str = ' ' * self.indent_size
        if self.marker_stack[-1]:
            # replace the last 2 spaces with the block marker
            current_str = current_str[:-2] + '> '
        return current_str

    def _put(self, *args):
        """Append one indented line; a trailing dict arg is expanded as a sub-block."""
        self.str += self.__get_indent_string()
        if args and isinstance(args[-1], dict):
            self.str += ' '.join(map(str, args[:-1])) + '\n'
            self._put_dict(args[-1])
        else:
            self.str += ' '.join(map(str, args)) + '\n'

    def _put_dict(self, data):
        """Append a dict as an indented key/value block, recursing on nested dicts."""
        with self._create_block(False):
            for key, value in list(data.items()):
                if isinstance(value, dict):
                    self._put(key + ':')
                    self._put_dict(value)
                else:
                    self._put(key + ':', value)

    def _put_table(self, table):
        """Append a Table rendered at the current indentation."""
        self.str += self.__get_indent_string()
        self.str += table.get_string(self.indent_size) + '\n'

    def __str__(self):
        return self.str

    @contextmanager
    def _create_block(self, marker=True):
        """Indent one level for the duration of the with-block."""
        self.__indent(marker)
        yield
        self.__unindent()
class NFVBenchSummarizer(Summarizer):
    """Summarize nfvbench json result."""

    # header columns for the NDR/PDR results table
    ndr_pdr_header = [
        ('-', Formatter.fixed),
        ('L2 Frame Size', Formatter.standard),
        ('Rate (fwd+rev)', Formatter.bits),
        ('Rate (fwd+rev)', Formatter.suffix(' pps')),
        ('Avg Drop Rate', Formatter.suffix('%')),
        ('Avg Latency (usec)', Formatter.standard),
        ('Min Latency (usec)', Formatter.standard),
        ('Max Latency (usec)', Formatter.standard)
    ]

    # header columns for the fixed rate (single run) results table
    single_run_header = [
        ('L2 Frame Size', Formatter.standard),
        ('Drop Rate', Formatter.suffix('%')),
        ('Avg Latency (usec)', Formatter.standard),
        ('Min Latency (usec)', Formatter.standard),
        ('Max Latency (usec)', Formatter.standard)
    ]

    # header columns for the per-direction run config table
    config_header = [
        ('Direction', Formatter.standard),
        ('Requested TX Rate (bps)', Formatter.bits),
        ('Actual TX Rate (bps)', Formatter.bits),
        ('RX Rate (bps)', Formatter.bits),
        ('Requested TX Rate (pps)', Formatter.suffix(' pps')),
        ('Actual TX Rate (pps)', Formatter.suffix(' pps')),
        ('RX Rate (pps)', Formatter.suffix(' pps'))
    ]

    direction_keys = ['direction-forward', 'direction-reverse', 'direction-total']
    direction_names = ['Forward', 'Reverse', 'Total']

    def __init__(self, result, sender):
        """Create a summarizer instance.

        result: the nfvbench json result dict
        sender: optional record sender (fluentd-like); when set, per-run
                records are built and sent in addition to the text summary
        """
        Summarizer.__init__(self)
        self.result = result
        self.config = self.result['config']
        self.record_header = None
        self.record_data = None
        self.sender = sender

        # add percentiles headers if hdrh enabled
        if not self.config.disable_hdrh:
            # copy the class-level header lists before extending them so that
            # creating several summarizer instances does not keep appending
            # percentile columns to the shared class attributes
            self.ndr_pdr_header = list(self.ndr_pdr_header)
            self.single_run_header = list(self.single_run_header)
            for percentile in self.config.lat_percentiles:
                self.ndr_pdr_header.append((str(percentile) + ' %ile lat.',
                                            Formatter.standard))
                self.single_run_header.append((str(percentile) + ' %ile lat.',
                                               Formatter.standard))
        # if sender is available initialize record
        if self.sender:
            self.__record_init()
        self.__summarize()

    def __get_openstack_spec(self, spec_name):
        # best effort: openstack specs may be absent (e.g. ext-chain runs)
        try:
            return self.result['openstack_spec'][spec_name]
        except KeyError:
            return ''

    def __summarize(self):
        """Build the full text summary from the json result."""
        self._put()
        self._put('========== NFVBench Summary ==========')
        self._put('Date:', self.result['date'])
        self._put('NFVBench version', self.result['nfvbench_version'])
        self._put('Openstack Neutron:', {
            'vSwitch': self.__get_openstack_spec('vswitch'),
            'Encapsulation': self.__get_openstack_spec('encaps')
        })
        self.__record_header_put('version', self.result['nfvbench_version'])
        self.__record_header_put('vSwitch', self.__get_openstack_spec('vswitch'))
        self.__record_header_put('Encapsulation', self.__get_openstack_spec('encaps'))
        self._put('Benchmarks:')
        with self._create_block():
            self._put('Networks:')
            with self._create_block():
                network_benchmark = self.result['benchmarks']['network']

                self._put('Components:')
                with self._create_block():
                    self._put('Traffic Generator:')
                    with self._create_block(False):
                        self._put('Profile:', self.config['tg-name'])
                        self._put('Tool:', self.config['tg-tool'])
                    if network_benchmark['versions']:
                        self._put('Versions:')
                        with self._create_block():
                            for component, version in list(network_benchmark['versions'].items()):
                                self._put(component + ':', version)

                if self.config['ndr_run'] or self.config['pdr_run']:
                    self._put('Measurement Parameters:')
                    with self._create_block(False):
                        if self.config['ndr_run']:
                            self._put('NDR:', self.config['measurement']['NDR'])
                        if self.config['pdr_run']:
                            self._put('PDR:', self.config['measurement']['PDR'])
                self._put('Service chain:')
                for result in list(network_benchmark['service_chain'].items()):
                    with self._create_block():
                        self.__chain_summarize(*result)

    def __chain_summarize(self, chain_name, chain_benchmark):
        """Summarize one service chain benchmark."""
        self._put(chain_name + ':')
        self.__record_header_put('service_chain', chain_name)
        with self._create_block():
            self._put('Traffic:')
            with self._create_block(False):
                self.__traffic_summarize(chain_benchmark['result'])

    def __traffic_summarize(self, traffic_benchmark):
        """Summarize the traffic section of one chain benchmark."""
        self._put('Profile:', traffic_benchmark['profile'])
        self._put('Bidirectional:', traffic_benchmark['bidirectional'])
        self._put('Flow count:', traffic_benchmark['flow_count'])
        self._put('Service chains count:', traffic_benchmark['service_chain_count'])
        self._put('Compute nodes:', list(traffic_benchmark['compute_nodes'].keys()))

        self.__record_header_put('profile', traffic_benchmark['profile'])
        self.__record_header_put('bidirectional', traffic_benchmark['bidirectional'])
        self.__record_header_put('flow_count', traffic_benchmark['flow_count'])
        self.__record_header_put('sc_count', traffic_benchmark['service_chain_count'])
        self.__record_header_put('compute_nodes', list(traffic_benchmark['compute_nodes'].keys()))
        with self._create_block(False):
            self._put()
            if not self.config['no_traffic']:
                self._put('Run Summary:')
                self._put()
                with self._create_block(False):
                    self._put_table(self.__get_summary_table(traffic_benchmark['result']))
                    # a warning is optional in the result
                    try:
                        self._put()
                        self._put(traffic_benchmark['result']['warning'])
                    except KeyError:
                        pass

            for entry in list(traffic_benchmark['result'].items()):
                if 'warning' in entry:
                    continue
                self.__chain_analysis_summarize(*entry)
            self.__record_send()

    def __chain_analysis_summarize(self, frame_size, analysis):
        """Summarize the analysis of one frame size."""
        self._put()
        self._put('L2 frame size:', frame_size)
        if self.config['ndr_run']:
            self._put('NDR search duration:', Formatter.float(0)(analysis['ndr']['time_taken_sec']),
                      'seconds')
            self.__record_data_put(frame_size, {'ndr_search_duration': Formatter.float(0)(
                analysis['ndr']['time_taken_sec'])})
        if self.config['pdr_run']:
            self._put('PDR search duration:', Formatter.float(0)(analysis['pdr']['time_taken_sec']),
                      'seconds')
            self.__record_data_put(frame_size, {'pdr_search_duration': Formatter.float(0)(
                analysis['pdr']['time_taken_sec'])})
        self._put()

        if not self.config['no_traffic'] and self.config['single_run']:
            self._put('Run Config:')
            self._put()
            with self._create_block(False):
                self._put_table(self.__get_config_table(analysis['run_config'], frame_size))
                if 'warning' in analysis['run_config'] and analysis['run_config']['warning']:
                    self._put()
                    self._put(analysis['run_config']['warning'])
                self._put()

        if 'packet_path_stats' in analysis:
            for direction in ['Forward', 'Reverse']:
                self._put(direction + ' Chain Packet Counters and Latency:')
                self._put()
                with self._create_block(False):
                    self._put_table(self._get_chain_table(analysis['packet_path_stats'][direction]))
                    self._put()

    def __get_summary_table(self, traffic_result):
        """Build the NDR/PDR or single-run summary table and record its data."""
        if self.config['single_run']:
            summary_table = Table(self.single_run_header)
        else:
            summary_table = Table(self.ndr_pdr_header)

        if self.config['ndr_run']:
            for frame_size, analysis in list(traffic_result.items()):
                if frame_size == 'warning':
                    continue
                row_data = [
                    'NDR',
                    frame_size,
                    analysis['ndr']['rate_bps'],
                    analysis['ndr']['rate_pps'],
                    analysis['ndr']['stats']['overall']['drop_percentage'],
                    analysis['ndr']['stats']['overall']['avg_delay_usec'],
                    analysis['ndr']['stats']['overall']['min_delay_usec'],
                    analysis['ndr']['stats']['overall']['max_delay_usec']
                ]
                if not self.config.disable_hdrh:
                    self.extract_hdrh_percentiles(
                        analysis['ndr']['stats']['overall']['lat_percentile'], row_data)
                summary_table.add_row(row_data)

                ndr_data = {
                    'type': 'NDR',
                    'rate_bps': analysis['ndr']['rate_bps'],
                    'rate_pps': analysis['ndr']['rate_pps'],
                    'offered_tx_rate_bps': analysis['ndr']['stats']['offered_tx_rate_bps'],
                    'drop_percentage': analysis['ndr']['stats']['overall']['drop_percentage'],
                    'avg_delay_usec': analysis['ndr']['stats']['overall']['avg_delay_usec'],
                    'min_delay_usec': analysis['ndr']['stats']['overall']['min_delay_usec'],
                    'max_delay_usec': analysis['ndr']['stats']['overall']['max_delay_usec']
                }
                if not self.config.disable_hdrh:
                    self.extract_hdrh_percentiles(
                        analysis['ndr']['stats']['overall']['lat_percentile'], ndr_data, True)
                self.__record_data_put(frame_size, {'ndr': ndr_data})
        if self.config['pdr_run']:
            for frame_size, analysis in list(traffic_result.items()):
                if frame_size == 'warning':
                    continue
                row_data = [
                    'PDR',
                    frame_size,
                    analysis['pdr']['rate_bps'],
                    analysis['pdr']['rate_pps'],
                    analysis['pdr']['stats']['overall']['drop_percentage'],
                    analysis['pdr']['stats']['overall']['avg_delay_usec'],
                    analysis['pdr']['stats']['overall']['min_delay_usec'],
                    analysis['pdr']['stats']['overall']['max_delay_usec']
                ]
                if not self.config.disable_hdrh:
                    self.extract_hdrh_percentiles(
                        analysis['pdr']['stats']['overall']['lat_percentile'], row_data)
                summary_table.add_row(row_data)

                pdr_data = {
                    'type': 'PDR',
                    'rate_bps': analysis['pdr']['rate_bps'],
                    'rate_pps': analysis['pdr']['rate_pps'],
                    'offered_tx_rate_bps': analysis['pdr']['stats']['offered_tx_rate_bps'],
                    'drop_percentage': analysis['pdr']['stats']['overall']['drop_percentage'],
                    'avg_delay_usec': analysis['pdr']['stats']['overall']['avg_delay_usec'],
                    'min_delay_usec': analysis['pdr']['stats']['overall']['min_delay_usec'],
                    'max_delay_usec': analysis['pdr']['stats']['overall']['max_delay_usec']
                }
                if not self.config.disable_hdrh:
                    self.extract_hdrh_percentiles(
                        analysis['pdr']['stats']['overall']['lat_percentile'], pdr_data, True)
                self.__record_data_put(frame_size, {'pdr': pdr_data})
        if self.config['single_run']:
            for frame_size, analysis in list(traffic_result.items()):
                row_data = [
                    frame_size,
                    analysis['stats']['overall']['drop_rate_percent'],
                    analysis['stats']['overall']['rx']['avg_delay_usec'],
                    analysis['stats']['overall']['rx']['min_delay_usec'],
                    analysis['stats']['overall']['rx']['max_delay_usec']
                ]
                if not self.config.disable_hdrh:
                    self.extract_hdrh_percentiles(
                        analysis['stats']['overall']['rx']['lat_percentile'], row_data)
                summary_table.add_row(row_data)

                single_run_data = {
                    'type': 'single_run',
                    'offered_tx_rate_bps': analysis['stats']['offered_tx_rate_bps'],
                    'drop_rate_percent': analysis['stats']['overall']['drop_rate_percent'],
                    'avg_delay_usec': analysis['stats']['overall']['rx']['avg_delay_usec'],
                    'min_delay_usec': analysis['stats']['overall']['rx']['min_delay_usec'],
                    'max_delay_usec': analysis['stats']['overall']['rx']['max_delay_usec']
                }
                if not self.config.disable_hdrh:
                    self.extract_hdrh_percentiles(
                        analysis['stats']['overall']['rx']['lat_percentile'], single_run_data, True)
                self.__record_data_put(frame_size, {'single_run': single_run_data})
        return summary_table

    def extract_hdrh_percentiles(self, lat_percentile, data, add_key=False):
        """Append/insert the configured latency percentiles into data.

        data is a dict when add_key is True (record data), a row list otherwise.
        """
        if add_key:
            data['lat_percentile'] = {}
        for percentile in self.config.lat_percentiles:
            if add_key:
                data['lat_percentile_' + str(percentile)] = lat_percentile[percentile]
            else:
                data.append(lat_percentile[percentile])

    def __get_config_table(self, run_config, frame_size):
        """Build the per-direction run config table and record its data."""
        config_table = Table(self.config_header)
        for key, name in zip(self.direction_keys, self.direction_names):
            if key not in run_config:
                continue
            config_table.add_row([
                name,
                run_config[key]['orig']['rate_bps'],
                run_config[key]['tx']['rate_bps'],
                run_config[key]['rx']['rate_bps'],
                int(run_config[key]['orig']['rate_pps']),
                int(run_config[key]['tx']['rate_pps']),
                int(run_config[key]['rx']['rate_pps']),
            ])
            self.__record_data_put(frame_size, {
                name.lower() + "_orig_rate_bps": int(run_config[key]['orig']['rate_bps']),
                name.lower() + "_tx_rate_bps": int(run_config[key]['tx']['rate_bps']),
                name.lower() + "_rx_rate_bps": int(run_config[key]['rx']['rate_bps']),
                name.lower() + "_orig_rate_pps": int(run_config[key]['orig']['rate_pps']),
                name.lower() + "_tx_rate_pps": int(run_config[key]['tx']['rate_pps']),
                name.lower() + "_rx_rate_pps": int(run_config[key]['rx']['rate_pps']),
            })
        return config_table

    def _get_chain_table(self, chain_stats):
        """Retrieve the table for a direction.

        chain_stats: {
            'interfaces': ['Port0', 'drop %', 'vhost0', 'Port1'],
            'chains': {
                '0': {'packets': [2000054, '-0.023%', 1999996, 1999996],
                      'lat_avg_usec': 10,
                      'lat_min_usec': 1,
                      'lat_max_usec': 100},
                ...
            }
        }
        """
        chains = chain_stats['chains']
        _annotate_chain_stats(chains)
        header = [('Chain', Formatter.standard)] + \
            [(ifname, Formatter.standard) for ifname in chain_stats['interfaces']]
        # add latency columns if available Avg, Min, Max and percentiles
        lat_keys = []
        lat_map = {'lat_avg_usec': 'Avg lat.',
                   'lat_min_usec': 'Min lat.',
                   'lat_max_usec': 'Max lat.'}
        if 'lat_avg_usec' in chains['0']:
            lat_keys = ['lat_avg_usec', 'lat_min_usec', 'lat_max_usec', 'lat_percentile']

            if not self.config.disable_hdrh:
                for percentile in self.config.lat_percentiles:
                    lat_map['lat_' + str(percentile) + '_percentile'] = str(
                        percentile) + ' %ile lat.'

            for key in lat_map:
                header.append((lat_map[key], Formatter.standard))

        table = Table(header)
        for chain in sorted(list(chains.keys()), key=str):
            row = [chain] + chains[chain]['packets']
            for lat_key in lat_keys:
                if chains[chain].get(lat_key, None):
                    if lat_key == 'lat_percentile':
                        if not self.config.disable_hdrh:
                            for percentile in chains[chain][lat_key]:
                                row.append(Formatter.standard(chains[chain][lat_key][percentile]))
                    else:
                        row.append(Formatter.standard(chains[chain][lat_key]))
                else:
                    # latency value not available for this chain
                    row.append('n/a')
            table.add_row(row)
        return table

    def __record_header_put(self, key, value):
        # record fields are only collected when a sender is configured
        if self.sender:
            self.record_header[key] = value

    def __record_data_put(self, key, data):
        if self.sender:
            if key not in self.record_data:
                self.record_data[key] = {}
            self.record_data[key].update(data)

    def __record_send(self):
        """Send one record per frame size and per run type, then reset."""
        if self.sender:
            self.record_header["@timestamp"] = datetime.utcnow().replace(
                tzinfo=pytz.utc).strftime("%Y-%m-%dT%H:%M:%S.%f%z")
            for frame_size in self.record_data:
                data = self.record_header
                data['frame_size'] = frame_size
                data.update(self.record_data[frame_size])
                run_specific_data = {}
                if 'single_run' in data:
                    run_specific_data['single_run'] = data['single_run']
                    del data['single_run']
                if 'ndr' in data:
                    run_specific_data['ndr'] = data['ndr']
                    run_specific_data['ndr']['drop_limit'] = self.config['measurement']['NDR']
                    del data['ndr']
                if 'pdr' in data:
                    run_specific_data['pdr'] = data['pdr']
                    run_specific_data['pdr']['drop_limit'] = self.config['measurement']['PDR']
                    del data['pdr']
                for key in run_specific_data:
                    data_to_send = data.copy()
                    data_to_send.update(run_specific_data[key])
                    self.sender.record_send(data_to_send)
            self.__record_init()

    def __record_init(self):
        # init is called after checking for sender
        self.record_header = {
            "runlogdate": self.sender.runlogdate,
            "user_label": self.config['user_label']
        }
        self.record_data = {}