# Copyright 2016 Cisco Systems, Inc.  All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from contextlib import contextmanager
from datetime import datetime
import math

import bitmath
import pytz
from tabulate import tabulate

from specs import ChainType
class Formatter(object):
    """Collection of string formatter methods"""

    @staticmethod
    def fixed(data):
        # Pass-through formatter: the value is used as-is.
        return data

    @staticmethod
    def int(data):
        # Thousands-separated integer, e.g. 1234567 -> '1,234,567'.
        return '{:,}'.format(data)

    @staticmethod
    def float(decimal):
        # Return a formatter that renders a float with `decimal` digits.
        return lambda data: '%.{}f'.format(decimal) % (data)

    @staticmethod
    def standard(data):
        """Format ints with thousand separators, floats with 4 decimals,
        and leave any other type untouched."""
        if isinstance(data, int):
            return Formatter.int(data)
        elif isinstance(data, float):
            return Formatter.float(4)(data)
        return Formatter.fixed(data)

    @staticmethod
    def suffix(suffix_str):
        # Return a formatter that appends `suffix_str` to the standard format.
        return lambda data: Formatter.standard(data) + suffix_str

    @staticmethod
    def bits(data):
        """Format a bit rate using the closest SI prefix, e.g. '1.2000 Gbps'."""
        # By default, `best_prefix` returns a value in byte format, this hack (multiply by 8.0)
        # will convert it into bit format.
        bit = 8.0 * bitmath.Bit(float(data))
        bit = bit.best_prefix(bitmath.SI)
        byte_to_bit_classes = {
            'kB': bitmath.kb,
            'MB': bitmath.Mb,
            'GB': bitmath.Gb,
            'TB': bitmath.Tb,
            'PB': bitmath.Pb,
            'EB': bitmath.Eb,
            'ZB': bitmath.Zb,
            'YB': bitmath.Yb,
        }
        # Map the byte prefix back to the matching bit class, then undo the x8.
        bps = byte_to_bit_classes.get(bit.unit, bitmath.Bit).from_other(bit) / 8.0
        if bit.unit != 'Bit':
            return bps.format("{value:.4f} {unit}ps")
        return bps.format("{value:.4f} bps")

    @staticmethod
    def percentage(data):
        """Format a percentage with 4 decimals; '' for None, '-' for NaN."""
        if data is None:
            return ''
        elif math.isnan(data):
            return '-'
        return Formatter.suffix('%')(Formatter.float(4)(data))
class Table(object):
    """ASCII readable table class"""

    def __init__(self, header):
        """Create a table from a header definition.

        header: list of (column name, formatter) pairs; the formatter is
                applied to every value added in that column.
        """
        header_row, self.formatters = zip(*header)
        self.data = [header_row]
        self.columns = len(header_row)

    def add_row(self, row):
        """Format and append one row; must have one entry per column."""
        assert self.columns == len(row)
        formatted_row = []
        for entry, formatter in zip(row, self.formatters):
            formatted_row.append(formatter(entry))
        self.data.append(formatted_row)

    def get_string(self, indent=0):
        """Render the table as a grid, shifting every line right by `indent`."""
        spaces = ' ' * indent
        table = tabulate(self.data,
                         headers='firstrow',
                         tablefmt='grid',
                         stralign='center',
                         floatfmt='.2f')
        return table.replace('\n', '\n' + spaces)
class Summarizer(object):
    """Generic summarizer class"""

    indent_per_level = 2

    def __init__(self):
        self.indent_size = 0
        # Stack of booleans, one per open block: True means the block is
        # rendered with a '> ' marker in place of the last two indent spaces.
        self.marker_stack = [False]
        self.str = ''

    def __indent(self, marker):
        self.indent_size += self.indent_per_level
        self.marker_stack.append(marker)

    def __unindent(self):
        assert self.indent_size >= self.indent_per_level
        self.indent_size -= self.indent_per_level
        self.marker_stack.pop()

    def __get_indent_string(self):
        current_str = ' ' * self.indent_size
        if self.marker_stack[-1]:
            current_str = current_str[:-2] + '> '
        return current_str

    def _put(self, *args):
        """Append one line made of the space-joined args; when the last arg
        is a dict, it is expanded on the following lines, one key per line."""
        self.str += self.__get_indent_string()
        if args and isinstance(args[-1], dict):
            self.str += ' '.join(map(str, args[:-1])) + '\n'
            self._put_dict(args[-1])
        else:
            self.str += ' '.join(map(str, args)) + '\n'

    def _put_dict(self, data):
        """Append a dict as an indented 'key: value' list, recursing into
        nested dicts."""
        with self._create_block(False):
            # .items() (not .iteritems()) keeps this working on both py2/py3
            for key, value in data.items():
                if isinstance(value, dict):
                    self._put(key + ':')
                    self._put_dict(value)
                else:
                    self._put(key + ':', value)

    def _put_table(self, table):
        """Append a Table rendered at the current indent level."""
        self.str += self.__get_indent_string()
        self.str += table.get_string(self.indent_size) + '\n'

    def __str__(self):
        return self.str

    @contextmanager
    def _create_block(self, marker=True):
        """Context manager: indent one level for the duration of the block."""
        self.__indent(marker)
        yield
        self.__unindent()
class NFVBenchSummarizer(Summarizer):
    """Summarize nfvbench json result"""

    # Each header entry is (column title, formatter applied to the cells).
    ndr_pdr_header = [
        ('-', Formatter.fixed),
        ('L2 Frame Size', Formatter.standard),
        ('Rate (fwd+rev)', Formatter.bits),
        ('Rate (fwd+rev)', Formatter.suffix(' pps')),
        ('Avg Drop Rate', Formatter.suffix('%')),
        ('Avg Latency (usec)', Formatter.standard),
        ('Min Latency (usec)', Formatter.standard),
        ('Max Latency (usec)', Formatter.standard)
    ]

    single_run_header = [
        ('L2 Frame Size', Formatter.standard),
        ('Drop Rate', Formatter.suffix('%')),
        ('Avg Latency (usec)', Formatter.standard),
        ('Min Latency (usec)', Formatter.standard),
        ('Max Latency (usec)', Formatter.standard)
    ]

    config_header = [
        ('Direction', Formatter.standard),
        ('Requested TX Rate (bps)', Formatter.bits),
        ('Actual TX Rate (bps)', Formatter.bits),
        ('RX Rate (bps)', Formatter.bits),
        ('Requested TX Rate (pps)', Formatter.suffix(' pps')),
        ('Actual TX Rate (pps)', Formatter.suffix(' pps')),
        ('RX Rate (pps)', Formatter.suffix(' pps'))
    ]

    chain_analysis_header = [
        ('Interface', Formatter.standard),
        ('Device', Formatter.standard),
        ('Packets (fwd)', Formatter.standard),
        ('Drops (fwd)', Formatter.standard),
        ('Drop% (fwd)', Formatter.percentage),
        ('Packets (rev)', Formatter.standard),
        ('Drops (rev)', Formatter.standard),
        ('Drop% (rev)', Formatter.percentage)
    ]

    direction_keys = ['direction-forward', 'direction-reverse', 'direction-total']
    direction_names = ['Forward', 'Reverse', 'Total']

    def __init__(self, result, sender):
        """Build the text summary of a nfvbench json result.

        result: nfvbench result dict (must contain at least 'config',
                'date', 'nfvbench_version' and 'benchmarks' keys)
        sender: optional record sender; when set, per-frame-size records
                are accumulated and sent while the summary is generated
        """
        Summarizer.__init__(self)
        self.result = result
        self.config = self.result['config']
        self.record_header = None
        self.record_data = None
        self.sender = sender
        # if sender is available initialize record
        if self.sender:
            self.__record_init()
        self.__summarize()

    def __get_openstack_spec(self, prop):
        """Return one openstack_spec property, or '' when not present."""
        if 'openstack_spec' in self.result and prop in self.result['openstack_spec']:
            return self.result['openstack_spec'][prop]
        return ''

    def __summarize(self):
        """Top-level formatting of the whole result."""
        self._put()
        self._put('========== NFVBench Summary ==========')
        self._put('Date:', self.result['date'])
        self._put('NFVBench version', self.result['nfvbench_version'])
        self._put('Openstack Neutron:', {
            'vSwitch': self.__get_openstack_spec('vswitch'),
            'Encapsulation': self.__get_openstack_spec('encaps')
        })
        self.__record_header_put('version', self.result['nfvbench_version'])
        self.__record_header_put('vSwitch', self.__get_openstack_spec('vswitch'))
        self.__record_header_put('Encapsulation', self.__get_openstack_spec('encaps'))
        self._put('Benchmarks:')
        with self._create_block():
            self._put('Networks:')
            with self._create_block():
                network_benchmark = self.result['benchmarks']['network']

                self._put('Components:')
                with self._create_block():
                    self._put('TOR:')
                    with self._create_block(False):
                        self._put('Type:', self.config['tor']['type'])
                    self._put('Traffic Generator:')
                    with self._create_block(False):
                        self._put('Profile:', self.config['generator_config']['name'])
                        self._put('Tool:', self.config['generator_config']['tool'])
                    if network_benchmark['versions']:
                        self._put('Versions:')
                        with self._create_block():
                            # .items() keeps py2 behavior and also runs on py3
                            for component, version in network_benchmark['versions'].items():
                                self._put(component + ':', version)

                if self.config['ndr_run'] or self.config['pdr_run']:
                    self._put('Measurement Parameters:')
                    with self._create_block(False):
                        if self.config['ndr_run']:
                            self._put('NDR:', self.config['measurement']['NDR'])
                        if self.config['pdr_run']:
                            self._put('PDR:', self.config['measurement']['PDR'])
                self._put('Service chain:')
                for result in network_benchmark['service_chain'].items():
                    with self._create_block():
                        self.__chain_summarize(*result)

    def __chain_summarize(self, chain_name, chain_benchmark):
        """Format the result of one service chain."""
        self._put(chain_name + ':')
        if chain_name == ChainType.PVVP:
            # PVVP chains carry an extra mode qualifier in the record key.
            self._put('Mode:', chain_benchmark.get('mode'))
            chain_name += "-" + chain_benchmark.get('mode')
        self.__record_header_put('service_chain', chain_name)
        with self._create_block():
            self._put('Traffic:')
            with self._create_block(False):
                self.__traffic_summarize(chain_benchmark['result'])

    def __traffic_summarize(self, traffic_benchmark):
        """Format the traffic results of one chain and trigger record send."""
        self._put('Profile:', traffic_benchmark['profile'])
        self._put('Bidirectional:', traffic_benchmark['bidirectional'])
        self._put('Flow count:', traffic_benchmark['flow_count'])
        self._put('Service chains count:', traffic_benchmark['service_chain_count'])
        self._put('Compute nodes:', traffic_benchmark['compute_nodes'].keys())

        self.__record_header_put('profile', traffic_benchmark['profile'])
        self.__record_header_put('bidirectional', traffic_benchmark['bidirectional'])
        self.__record_header_put('flow_count', traffic_benchmark['flow_count'])
        self.__record_header_put('sc_count', traffic_benchmark['service_chain_count'])
        self.__record_header_put('compute_nodes', traffic_benchmark['compute_nodes'].keys())
        with self._create_block(False):
            self._put()
            if not self.config['no_traffic']:
                self._put('Run Summary:')
                self._put()
                with self._create_block(False):
                    self._put_table(self.__get_summary_table(traffic_benchmark['result']))
                    # the 'warning' key is optional in the result dict
                    try:
                        self._put()
                        self._put(traffic_benchmark['result']['warning'])
                    except KeyError:
                        pass

            for entry in traffic_benchmark['result'].items():
                # skip the optional top-level warning entry; all other keys
                # are frame sizes mapped to their analysis dict
                if 'warning' in entry:
                    continue
                self.__chain_analysis_summarize(*entry)

            self.__record_send()

    def __chain_analysis_summarize(self, frame_size, analysis):
        """Format the detailed analysis of one frame size."""
        self._put()
        self._put('L2 frame size:', frame_size)
        if 'actual_l2frame_size' in analysis:
            self._put('Actual l2 frame size:', analysis['actual_l2frame_size'])
        elif self.config['ndr_run'] and 'actual_l2frame_size' in analysis['ndr']:
            self._put('Actual l2 frame size:', analysis['ndr']['actual_l2frame_size'])
        elif self.config['pdr_run'] and 'actual_l2frame_size' in analysis['pdr']:
            self._put('Actual l2 frame size:', analysis['pdr']['actual_l2frame_size'])
        if 'analysis_duration_sec' in analysis:
            self._put('Chain analysis duration:',
                      Formatter.float(3)(analysis['analysis_duration_sec']), 'seconds')
            self.__record_data_put(frame_size, {'chain_analysis_duration': Formatter.float(3)(
                analysis['analysis_duration_sec'])})
        if self.config['ndr_run']:
            self._put('NDR search duration:', Formatter.float(0)(analysis['ndr']['time_taken_sec']),
                      'seconds')
            self.__record_data_put(frame_size, {'ndr_search_duration': Formatter.float(0)(
                analysis['ndr']['time_taken_sec'])})
        if self.config['pdr_run']:
            self._put('PDR search duration:', Formatter.float(0)(analysis['pdr']['time_taken_sec']),
                      'seconds')
            self.__record_data_put(frame_size, {'pdr_search_duration': Formatter.float(0)(
                analysis['pdr']['time_taken_sec'])})
        self._put()

        if not self.config['no_traffic'] and self.config['single_run']:
            self._put('Run Config:')
            self._put()
            with self._create_block(False):
                self._put_table(self.__get_config_table(analysis['run_config'], frame_size))
                if 'warning' in analysis['run_config'] and analysis['run_config']['warning']:
                    self._put()
                    self._put(analysis['run_config']['warning'])
                self._put()

        if 'packet_analysis' in analysis:
            self._put('Chain Analysis:')
            self._put()
            with self._create_block(False):
                self._put_table(self.__get_chain_analysis_table(analysis['packet_analysis']))
                self._put()

    def __get_summary_table(self, traffic_result):
        """Build the run summary table (single run, or NDR/PDR rows)."""
        if self.config['single_run']:
            summary_table = Table(self.single_run_header)
        else:
            summary_table = Table(self.ndr_pdr_header)

        if self.config['ndr_run']:
            for frame_size, analysis in traffic_result.items():
                if frame_size == 'warning':
                    continue
                summary_table.add_row([
                    'NDR',
                    frame_size,
                    analysis['ndr']['rate_bps'],
                    analysis['ndr']['rate_pps'],
                    analysis['ndr']['stats']['overall']['drop_percentage'],
                    analysis['ndr']['stats']['overall']['avg_delay_usec'],
                    analysis['ndr']['stats']['overall']['min_delay_usec'],
                    analysis['ndr']['stats']['overall']['max_delay_usec']
                ])
                self.__record_data_put(frame_size, {'ndr': {
                    'type': 'ndr',
                    'rate_bps': analysis['ndr']['rate_bps'],
                    'rate_pps': analysis['ndr']['rate_pps'],
                    'drop_percentage': analysis['ndr']['stats']['overall']['drop_percentage'],
                    'avg_delay_usec': analysis['ndr']['stats']['overall']['avg_delay_usec'],
                    'min_delay_usec': analysis['ndr']['stats']['overall']['min_delay_usec'],
                    'max_delay_usec': analysis['ndr']['stats']['overall']['max_delay_usec']
                }})
        if self.config['pdr_run']:
            for frame_size, analysis in traffic_result.items():
                if frame_size == 'warning':
                    continue
                summary_table.add_row([
                    'PDR',
                    frame_size,
                    analysis['pdr']['rate_bps'],
                    analysis['pdr']['rate_pps'],
                    analysis['pdr']['stats']['overall']['drop_percentage'],
                    analysis['pdr']['stats']['overall']['avg_delay_usec'],
                    analysis['pdr']['stats']['overall']['min_delay_usec'],
                    analysis['pdr']['stats']['overall']['max_delay_usec']
                ])
                self.__record_data_put(frame_size, {'pdr': {
                    'type': 'pdr',
                    'rate_bps': analysis['pdr']['rate_bps'],
                    'rate_pps': analysis['pdr']['rate_pps'],
                    'drop_percentage': analysis['pdr']['stats']['overall']['drop_percentage'],
                    'avg_delay_usec': analysis['pdr']['stats']['overall']['avg_delay_usec'],
                    'min_delay_usec': analysis['pdr']['stats']['overall']['min_delay_usec'],
                    'max_delay_usec': analysis['pdr']['stats']['overall']['max_delay_usec']
                }})
        if self.config['single_run']:
            for frame_size, analysis in traffic_result.items():
                summary_table.add_row([
                    frame_size,
                    analysis['stats']['overall']['drop_rate_percent'],
                    analysis['stats']['overall']['rx']['avg_delay_usec'],
                    analysis['stats']['overall']['rx']['min_delay_usec'],
                    analysis['stats']['overall']['rx']['max_delay_usec']
                ])
                self.__record_data_put(frame_size, {'single_run': {
                    'type': 'single_run',
                    'drop_rate_percent': analysis['stats']['overall']['drop_rate_percent'],
                    'avg_delay_usec': analysis['stats']['overall']['rx']['avg_delay_usec'],
                    'min_delay_usec': analysis['stats']['overall']['rx']['min_delay_usec'],
                    'max_delay_usec': analysis['stats']['overall']['rx']['max_delay_usec']
                }})
        return summary_table

    def __get_config_table(self, run_config, frame_size):
        """Build the per-direction requested/actual/RX rate table."""
        config_table = Table(self.config_header)
        for key, name in zip(self.direction_keys, self.direction_names):
            if key not in run_config:
                continue
            config_table.add_row([
                name,
                run_config[key]['orig']['rate_bps'],
                run_config[key]['tx']['rate_bps'],
                run_config[key]['rx']['rate_bps'],
                int(run_config[key]['orig']['rate_pps']),
                int(run_config[key]['tx']['rate_pps']),
                int(run_config[key]['rx']['rate_pps']),
            ])
            self.__record_data_put(frame_size, {
                name.lower() + "_orig_rate_bps": int(run_config[key]['orig']['rate_bps']),
                name.lower() + "_tx_rate_bps": int(run_config[key]['tx']['rate_bps']),
                name.lower() + "_rx_rate_bps": int(run_config[key]['rx']['rate_bps']),
                name.lower() + "_orig_rate_pps": int(run_config[key]['orig']['rate_pps']),
                name.lower() + "_tx_rate_pps": int(run_config[key]['tx']['rate_pps']),
                name.lower() + "_rx_rate_pps": int(run_config[key]['rx']['rate_pps']),
            })
        return config_table

    def __get_chain_analysis_table(self, packet_analysis):
        """Build the per-interface packet path table (fwd + rev combined)."""
        chain_analysis_table = Table(self.chain_analysis_header)
        forward_analysis = packet_analysis['direction-forward']
        reverse_analysis = packet_analysis['direction-reverse']
        # the reverse list is reversed in place so each zipped row pairs
        # the matching interface of both directions
        reverse_analysis.reverse()
        for fwd, rev in zip(forward_analysis, reverse_analysis):
            chain_analysis_table.add_row([
                fwd['interface'],
                fwd['device'],
                fwd['packet_count'],
                fwd.get('packet_drop_count', None),
                fwd.get('packet_drop_percentage', None),
                rev['packet_count'],
                rev.get('packet_drop_count', None),
                rev.get('packet_drop_percentage', None),
            ])
        return chain_analysis_table

    def __record_header_put(self, key, value):
        """Store a header field for the record sender (no-op without sender)."""
        if self.sender:
            self.record_header[key] = value

    def __record_data_put(self, key, data):
        """Merge per-frame-size data for the record sender (no-op without sender)."""
        if self.sender:
            if key not in self.record_data:
                self.record_data[key] = {}
            self.record_data[key].update(data)

    def __record_send(self):
        """Send one record per frame size and run type, then reset the record."""
        if self.sender:
            self.record_header["@timestamp"] = datetime.utcnow().replace(
                tzinfo=pytz.utc).strftime("%Y-%m-%dT%H:%M:%S.%f%z")
            for frame_size in self.record_data:
                data = self.record_header
                data['frame_size'] = frame_size
                data.update(self.record_data[frame_size])
                # split out run-type specific payloads so each run type is
                # sent as its own flat record
                run_specific_data = {}
                if 'single_run' in data:
                    run_specific_data['single_run'] = data['single_run']
                    del data['single_run']
                if 'ndr' in data:
                    run_specific_data['ndr'] = data['ndr']
                    run_specific_data['ndr']['drop_limit'] = self.config['measurement']['NDR']
                    del data['ndr']
                if 'pdr' in data:
                    run_specific_data['pdr'] = data['pdr']
                    run_specific_data['pdr']['drop_limit'] = self.config['measurement']['PDR']
                    del data['pdr']
                for key in run_specific_data:
                    data_to_send = data.copy()
                    data_to_send.update(run_specific_data[key])
                    self.sender.record_send(data_to_send)
            self.__record_init()

    def __record_init(self):
        # init is called after checking for sender
        self.record_header = {
            "runlogdate": self.sender.runlogdate,
            "user_label": self.config['user_label']
        }
        self.record_data = {}