2 # Copyright 2016 Cisco Systems, Inc. All rights reserved.
4 # Licensed under the Apache License, Version 2.0 (the "License"); you may
5 # not use this file except in compliance with the License. You may obtain
6 # a copy of the License at
8 # http://www.apache.org/licenses/LICENSE-2.0
10 # Unless required by applicable law or agreed to in writing, software
11 # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
12 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13 # License for the specific language governing permissions and limitations
17 from contextlib import contextmanager
18 from datetime import datetime
23 from tabulate import tabulate
25 from specs import ChainType
28 class Formatter(object):
29 """Collection of string formatter methods"""
# NOTE(review): this listing is elided (the embedded source line numbers jump);
# the @staticmethod decorators and the `def` headers for int/float/standard/
# bits/percentage fall in the missing lines -- confirm against the full source.
# Fragment of Formatter.int: thousands grouping, e.g. 1234 -> '1,234'.
37 return '{:,}'.format(data)
# Fragment of Formatter.float(decimal): returns a closure that renders a value
# with `decimal` fraction digits.
41 return lambda data: '%.{}f'.format(decimal) % (data)
# Fragment of Formatter.standard: dispatch on type -- ints get comma grouping,
# floats get 4 decimals, anything else falls through to Formatter.fixed
# (definition not visible in this listing).
45 if isinstance(data, int):
46 return Formatter.int(data)
47 elif isinstance(data, float):
48 return Formatter.float(4)(data)
49 return Formatter.fixed(data)
# Formatter.suffix: build a formatter that appends a fixed suffix string
# (e.g. ' pps', '%') to the standard-formatted value.
52 def suffix(suffix_str):
53 return lambda data: Formatter.standard(data) + suffix_str
# Fragment of Formatter.bits: humanize a bits-per-second value using bitmath
# SI prefixes, then emit "<value> <unit>ps".
57 # By default, `best_prefix` returns a value in byte format, this hack (multiply by 8.0)
58 # will convert it into bit format.
59 bit = 8.0 * bitmath.Bit(float(data))
60 bit = bit.best_prefix(bitmath.SI)
# Mapping from bitmath byte units to bit classes; the dict entries
# (lines 62-70) are elided from this listing.
61 byte_to_bit_classes = {
71 bps = byte_to_bit_classes.get(bit.unit, bitmath.Bit).from_other(bit) / 8.0
# Two return paths -- with an SI unit prefix, or plain 'bps'; the branch
# between them (line 72) is elided.
73 return bps.format("{value:.4f} {unit}ps")
74 return bps.format("{value:.4f} bps")
# Fragment of Formatter.percentage: NaN guard, then format as a 4-decimal
# float with a '%' suffix. The surrounding def/if lines are elided.
80 elif math.isnan(data):
82 return Formatter.suffix('%')(Formatter.float(4)(data))
86 """ASCII readable table class"""
# Build the table from a header spec: an iterable of (column_title, formatter)
# pairs. Titles become the first data row; formatters are kept per column
# for add_row().
88 def __init__(self, header):
89 header_row, self.formatters = zip(*header)
90 self.data = [header_row]
91 self.columns = len(header_row)
# Append a data row after passing each entry through its column formatter.
# NOTE(review): line 95 (presumably `formatted_row = []`) is elided from this
# listing -- confirm against the full source.
93 def add_row(self, row):
# Row width must match the header width.
94 assert self.columns == len(row)
96 for entry, formatter in zip(row, self.formatters):
97 formatted_row.append(formatter(entry))
98 self.data.append(formatted_row)
# Render the table as ASCII text via tabulate, shifting every rendered line
# right by `indent` spaces. The tabulate keyword arguments (lines 103-106)
# are elided from this listing.
100 def get_string(self, indent=0):
101 spaces = ' ' * indent
102 table = tabulate(self.data,
# Re-indent all lines after the first of the rendered table.
107 return table.replace('\n', '\n' + spaces)
110 class Summarizer(object):
111 """Generic summarizer class"""
# NOTE(review): __init__ (lines 113-116) is mostly elided; only the marker
# stack initialization is visible. `indent_size`, `indent_per_level` and
# `str` are presumably initialized there -- confirm against the full source.
117 self.marker_stack = [False]
# Push one indentation level; `marker` selects whether this level is
# rendered with a '> ' marker prefix.
120 def __indent(self, marker):
121 self.indent_size += self.indent_per_level
122 self.marker_stack.append(marker)
# Pop one indentation level (must not underflow below one level).
124 def __unindent(self):
125 assert self.indent_size >= self.indent_per_level
126 self.indent_size -= self.indent_per_level
127 self.marker_stack.pop()
# Current indent prefix; a marked level replaces its last two spaces with
# '> '. The `return current_str` line (133) is elided from this listing.
129 def __get_indent_string(self):
130 current_str = ' ' * self.indent_size
131 if self.marker_stack[-1]:
132 current_str = current_str[:-2] + '> '
# Append one indented line built by joining *args; a trailing dict argument
# is expanded recursively via _put_dict instead of being joined. The `else:`
# line (140) is elided from this listing.
135 def _put(self, *args):
136 self.str += self.__get_indent_string()
137 if args and isinstance(args[-1], dict):
138 self.str += ' '.join(map(str, args[:-1])) + '\n'
139 self._put_dict(args[-1])
141 self.str += ' '.join(map(str, args)) + '\n'
# Emit a dict as an indented key/value block; nested dicts recurse one level
# deeper. (Python 2 `iteritems`.) Lines 147 and 149 are elided from this
# listing.
143 def _put_dict(self, data):
144 with self._create_block(False):
145 for key, value in data.iteritems():
146 if isinstance(value, dict):
148 self._put_dict(value)
150 self._put(key + ':', value)
# Append a Table rendered at the current indentation.
152 def _put_table(self, table):
153 self.str += self.__get_indent_string()
154 self.str += table.get_string(self.indent_size) + '\n'
# Context manager producing an indented block; the @contextmanager decorator,
# yield and matching __unindent (lines up to 163) are elided from this
# listing -- confirm against the full source.
160 def _create_block(self, marker=True):
161 self.__indent(marker)
166 class NFVBenchSummarizer(Summarizer):
167 """Summarize nfvbench json result"""
# Class-level column specs: each entry is a (column_title, formatter) pair
# consumed by Table. NOTE(review): the opening lines of the ndr_pdr_header
# and config_header list literals (and some closing brackets) are elided
# from this listing.
# Columns for the NDR/PDR summary table.
170 ('-', Formatter.fixed),
171 ('L2 Frame Size', Formatter.standard),
172 ('Rate (fwd+rev)', Formatter.bits),
173 ('Rate (fwd+rev)', Formatter.suffix(' pps')),
174 ('Avg Drop Rate', Formatter.suffix('%')),
175 ('Avg Latency (usec)', Formatter.standard),
176 ('Min Latency (usec)', Formatter.standard),
177 ('Max Latency (usec)', Formatter.standard)
# Columns for the fixed-rate (single run) summary table.
180 single_run_header = [
181 ('L2 Frame Size', Formatter.standard),
182 ('Drop Rate', Formatter.suffix('%')),
183 ('Avg Latency (usec)', Formatter.standard),
184 ('Min Latency (usec)', Formatter.standard),
185 ('Max Latency (usec)', Formatter.standard)
# Columns for the per-direction run configuration table.
189 ('Direction', Formatter.standard),
190 ('Requested TX Rate (bps)', Formatter.bits),
191 ('Actual TX Rate (bps)', Formatter.bits),
192 ('RX Rate (bps)', Formatter.bits),
193 ('Requested TX Rate (pps)', Formatter.suffix(' pps')),
194 ('Actual TX Rate (pps)', Formatter.suffix(' pps')),
195 ('RX Rate (pps)', Formatter.suffix(' pps'))
# Columns for the per-interface chain analysis table.
198 chain_analysis_header = [
199 ('Interface', Formatter.standard),
200 ('Device', Formatter.standard),
201 ('Packets (fwd)', Formatter.standard),
202 ('Drops (fwd)', Formatter.standard),
203 ('Drop% (fwd)', Formatter.percentage),
204 ('Packets (rev)', Formatter.standard),
205 ('Drops (rev)', Formatter.standard),
206 ('Drop% (rev)', Formatter.percentage)
# Keys into the per-direction run_config dicts, and their display names
# (kept in lockstep; zipped together in __get_config_table).
209 direction_keys = ['direction-forward', 'direction-reverse', 'direction-total']
210 direction_names = ['Forward', 'Reverse', 'Total']
# Store the benchmark result and (optional) sender used for record export.
# NOTE(review): line 214 (presumably `self.result = result`; `self.config`
# reads from it on the next line) and the trailing sender-check lines are
# elided from this listing -- confirm against the full source.
212 def __init__(self, result, sender):
213 Summarizer.__init__(self)
215 self.config = self.result['config']
216 self.record_header = None
217 self.record_data = None
219 # if sender is available initialize record
# Look up one property of the result's 'openstack_spec' section.
# NOTE(review): the guard line (225) is elided; it presumably checks that
# 'openstack_spec' is present and returns a default otherwise -- confirm.
224 def __get_openstack_spec(self, property):
226 return self.result['openstack_spec'][property]
# Top-level summary: banner, metadata, environment, benchmark components,
# measurement parameters, then one sub-summary per service chain. Also
# seeds the export record header. NOTE(review): several interior lines
# (e.g. 231, 238, 247, 250, 262) are elided from this listing.
230 def __summarize(self):
232 self._put('========== NFVBench Summary ==========')
233 self._put('Date:', self.result['date'])
234 self._put('NFVBench version', self.result['nfvbench_version'])
# Trailing dict argument is expanded as an indented key/value block by _put.
235 self._put('Openstack Neutron:', {
236 'vSwitch': self.__get_openstack_spec('vswitch'),
237 'Encapsulation': self.__get_openstack_spec('encaps')
239 self.__record_header_put('version', self.result['nfvbench_version'])
240 self.__record_header_put('vSwitch', self.__get_openstack_spec('vswitch'))
241 self.__record_header_put('Encapsulation', self.__get_openstack_spec('encaps'))
242 self._put('Benchmarks:')
243 with self._create_block():
244 self._put('Networks:')
245 with self._create_block():
246 network_benchmark = self.result['benchmarks']['network']
248 self._put('Components:')
249 with self._create_block():
251 with self._create_block(False):
252 self._put('Type:', self.config['tor']['type'])
253 self._put('Traffic Generator:')
254 with self._create_block(False):
255 self._put('Profile:', self.config['generator_config']['name'])
256 self._put('Tool:', self.config['generator_config']['tool'])
# Component versions are optional in the result.
257 if network_benchmark['versions']:
258 self._put('Versions:')
259 with self._create_block():
260 for component, version in network_benchmark['versions'].iteritems():
261 self._put(component + ':', version)
# NDR/PDR drop-rate targets are only shown when those runs were requested.
263 if self.config['ndr_run'] or self.config['pdr_run']:
264 self._put('Measurement Parameters:')
265 with self._create_block(False):
266 if self.config['ndr_run']:
267 self._put('NDR:', self.config['measurement']['NDR'])
268 if self.config['pdr_run']:
269 self._put('PDR:', self.config['measurement']['PDR'])
270 self._put('Service chain:')
# Each item is a (chain_name, chain_benchmark) pair, unpacked below.
271 for result in network_benchmark['service_chain'].iteritems():
272 with self._create_block():
273 self.__chain_summarize(*result)
# Summarize one service chain. PVVP chains additionally report their mode,
# and the mode is appended to the chain name recorded for export.
275 def __chain_summarize(self, chain_name, chain_benchmark):
276 self._put(chain_name + ':')
277 if chain_name == ChainType.PVVP:
278 self._put('Mode:', chain_benchmark.get('mode'))
279 chain_name += "-" + chain_benchmark.get('mode')
280 self.__record_header_put('service_chain', chain_name)
281 with self._create_block():
282 self._put('Traffic:')
283 with self._create_block(False):
284 self.__traffic_summarize(chain_benchmark['result'])
# Summarize the traffic benchmark of one chain: profile metadata, the run
# summary table (unless traffic was disabled), then a per-frame-size chain
# analysis. Also records the metadata into the export header.
# NOTE(review): lines 292, 299, 302, 305-306, 308-310 and 313 are elided
# from this listing (e.g. the 'warning' guard around line 307).
286 def __traffic_summarize(self, traffic_benchmark):
287 self._put('Profile:', traffic_benchmark['profile'])
288 self._put('Bidirectional:', traffic_benchmark['bidirectional'])
289 self._put('Flow count:', traffic_benchmark['flow_count'])
290 self._put('Service chains count:', traffic_benchmark['service_chain_count'])
291 self._put('Compute nodes:', traffic_benchmark['compute_nodes'].keys())
293 self.__record_header_put('profile', traffic_benchmark['profile'])
294 self.__record_header_put('bidirectional', traffic_benchmark['bidirectional'])
295 self.__record_header_put('flow_count', traffic_benchmark['flow_count'])
296 self.__record_header_put('sc_count', traffic_benchmark['service_chain_count'])
297 self.__record_header_put('compute_nodes', traffic_benchmark['compute_nodes'].keys())
298 with self._create_block(False):
300 if not self.config['no_traffic']:
301 self._put('Run Summary:')
303 with self._create_block(False):
304 self._put_table(self.__get_summary_table(traffic_benchmark['result']))
307 self._put(traffic_benchmark['result']['warning'])
# Iterate (frame_size, analysis) pairs; the 'warning' entry is metadata,
# not a frame size, and is skipped (its `continue`, line 313, is elided).
311 for entry in traffic_benchmark['result'].iteritems():
312 if 'warning' in entry:
314 self.__chain_analysis_summarize(*entry)
# Detail view for one L2 frame size: analysis/search durations, the run
# config table (fixed-rate runs only) and the per-interface packet analysis
# table. Durations are also folded into the export record for this frame
# size. NOTE(review): a few interior lines (e.g. 318, 327, 332, 335-336,
# 339, 343, 345-346, 349) are elided from this listing.
317 def __chain_analysis_summarize(self, frame_size, analysis):
319 self._put('L2 frame size:', frame_size)
320 if 'analysis_duration_sec' in analysis:
321 self._put('Chain analysis duration:',
322 Formatter.float(3)(analysis['analysis_duration_sec']), 'seconds')
323 self.__record_data_put(frame_size, {'chain_analysis_duration': Formatter.float(3)(
324 analysis['analysis_duration_sec'])})
325 if self.config['ndr_run']:
326 self._put('NDR search duration:', Formatter.float(0)(analysis['ndr']['time_taken_sec']),
328 self.__record_data_put(frame_size, {'ndr_search_duration': Formatter.float(0)(
329 analysis['ndr']['time_taken_sec'])})
330 if self.config['pdr_run']:
331 self._put('PDR search duration:', Formatter.float(0)(analysis['pdr']['time_taken_sec']),
333 self.__record_data_put(frame_size, {'pdr_search_duration': Formatter.float(0)(
334 analysis['pdr']['time_taken_sec'])})
# Run config table only applies to fixed-rate (single_run) with traffic.
337 if not self.config['no_traffic'] and self.config['single_run']:
338 self._put('Run Config:')
340 with self._create_block(False):
341 self._put_table(self.__get_config_table(analysis['run_config'], frame_size))
342 if 'warning' in analysis['run_config'] and analysis['run_config']['warning']:
344 self._put(analysis['run_config']['warning'])
347 if 'packet_analysis' in analysis:
348 self._put('Chain Analysis:')
350 with self._create_block(False):
351 self._put_table(self.__get_chain_analysis_table(analysis['packet_analysis']))
# Build the run summary Table. Fixed-rate runs use single_run_header; NDR
# and/or PDR runs use ndr_pdr_header, with one row per frame size per run
# type. The same values are also folded into the export record.
# NOTE(review): interior lines are elided from this listing -- e.g. the
# `else:` before line 358, the `continue` for the 'warning' keys, the first
# cell(s) of each add_row call, closing brackets, and the final
# `return summary_table`.
354 def __get_summary_table(self, traffic_result):
355 if self.config['single_run']:
356 summary_table = Table(self.single_run_header)
358 summary_table = Table(self.ndr_pdr_header)
360 if self.config['ndr_run']:
361 for frame_size, analysis in traffic_result.iteritems():
# 'warning' is a metadata key, not a frame size -- skipped.
362 if frame_size == 'warning':
364 summary_table.add_row([
367 analysis['ndr']['rate_bps'],
368 analysis['ndr']['rate_pps'],
369 analysis['ndr']['stats']['overall']['drop_percentage'],
370 analysis['ndr']['stats']['overall']['avg_delay_usec'],
371 analysis['ndr']['stats']['overall']['min_delay_usec'],
372 analysis['ndr']['stats']['overall']['max_delay_usec']
374 self.__record_data_put(frame_size, {'ndr': {
376 'rate_bps': analysis['ndr']['rate_bps'],
377 'rate_pps': analysis['ndr']['rate_pps'],
378 'drop_percentage': analysis['ndr']['stats']['overall']['drop_percentage'],
379 'avg_delay_usec': analysis['ndr']['stats']['overall']['avg_delay_usec'],
380 'min_delay_usec': analysis['ndr']['stats']['overall']['min_delay_usec'],
381 'max_delay_usec': analysis['ndr']['stats']['overall']['max_delay_usec']
# PDR rows mirror the NDR rows above, reading from analysis['pdr'].
383 if self.config['pdr_run']:
384 for frame_size, analysis in traffic_result.iteritems():
385 if frame_size == 'warning':
387 summary_table.add_row([
390 analysis['pdr']['rate_bps'],
391 analysis['pdr']['rate_pps'],
392 analysis['pdr']['stats']['overall']['drop_percentage'],
393 analysis['pdr']['stats']['overall']['avg_delay_usec'],
394 analysis['pdr']['stats']['overall']['min_delay_usec'],
395 analysis['pdr']['stats']['overall']['max_delay_usec']
397 self.__record_data_put(frame_size, {'pdr': {
399 'rate_bps': analysis['pdr']['rate_bps'],
400 'rate_pps': analysis['pdr']['rate_pps'],
401 'drop_percentage': analysis['pdr']['stats']['overall']['drop_percentage'],
402 'avg_delay_usec': analysis['pdr']['stats']['overall']['avg_delay_usec'],
403 'min_delay_usec': analysis['pdr']['stats']['overall']['min_delay_usec'],
404 'max_delay_usec': analysis['pdr']['stats']['overall']['max_delay_usec']
# Fixed-rate rows use the overall rx delay stats and drop rate percent.
406 if self.config['single_run']:
407 for frame_size, analysis in traffic_result.iteritems():
408 summary_table.add_row([
410 analysis['stats']['overall']['drop_rate_percent'],
411 analysis['stats']['overall']['rx']['avg_delay_usec'],
412 analysis['stats']['overall']['rx']['min_delay_usec'],
413 analysis['stats']['overall']['rx']['max_delay_usec']
415 self.__record_data_put(frame_size, {'single_run': {
416 'type': 'single_run',
417 'drop_rate_percent': analysis['stats']['overall']['drop_rate_percent'],
418 'avg_delay_usec': analysis['stats']['overall']['rx']['avg_delay_usec'],
419 'min_delay_usec': analysis['stats']['overall']['rx']['min_delay_usec'],
420 'max_delay_usec': analysis['stats']['overall']['rx']['max_delay_usec']
# Build the per-direction run configuration Table (requested/actual TX and
# RX rates in bps and pps) and record the same values, keyed by direction
# name, into the export record for this frame size. NOTE(review): interior
# lines are elided -- e.g. the `continue` after line 427, the first cell of
# add_row (presumably the direction name), closing brackets, and the final
# `return config_table`.
424 def __get_config_table(self, run_config, frame_size):
425 config_table = Table(self.config_header)
# direction_keys/direction_names are class-level parallel lists.
426 for key, name in zip(self.direction_keys, self.direction_names):
# Skip directions absent from this run's config (e.g. unidirectional runs).
427 if key not in run_config:
429 config_table.add_row([
431 run_config[key]['orig']['rate_bps'],
432 run_config[key]['tx']['rate_bps'],
433 run_config[key]['rx']['rate_bps'],
# pps rates are truncated to whole packets.
434 int(run_config[key]['orig']['rate_pps']),
435 int(run_config[key]['tx']['rate_pps']),
436 int(run_config[key]['rx']['rate_pps']),
438 self.__record_data_put(frame_size, {
439 name.lower() + "_orig_rate_bps": int(run_config[key]['orig']['rate_bps']),
440 name.lower() + "_tx_rate_bps": int(run_config[key]['tx']['rate_bps']),
441 name.lower() + "_rx_rate_bps": int(run_config[key]['rx']['rate_bps']),
442 name.lower() + "_orig_rate_pps": int(run_config[key]['orig']['rate_pps']),
443 name.lower() + "_tx_rate_pps": int(run_config[key]['tx']['rate_pps']),
444 name.lower() + "_rx_rate_pps": int(run_config[key]['rx']['rate_pps']),
# Build the per-interface chain analysis Table. The reverse-direction list
# is reversed in place so that forward and reverse entries for the same
# interface line up in each zipped row. NOTE(review): the first cells of
# add_row (interface/device names, lines 456-458) and the fwd/rev packet
# count cells (461, 464) are elided from this listing.
449 def __get_chain_analysis_table(self, packet_analysis):
450 chain_analysis_table = Table(self.chain_analysis_header)
451 forward_analysis = packet_analysis['direction-forward']
452 reverse_analysis = packet_analysis['direction-reverse']
# In-place reversal mutates the input dict's list.
453 reverse_analysis.reverse()
454 for fwd, rev in zip(forward_analysis, reverse_analysis):
455 chain_analysis_table.add_row([
459 fwd.get('packet_drop_count', None),
460 fwd.get('packet_drop_percentage', None),
462 rev.get('packet_drop_count', None),
463 rev.get('packet_drop_percentage', None),
465 return chain_analysis_table
# Store one key/value into the export record header. NOTE(review): line 468
# is elided -- presumably an `if self.sender:` guard so the header is only
# populated when a sender exists; confirm against the full source.
467 def __record_header_put(self, key, value):
469 self.record_header[key] = value
# Merge a dict of values into the export record bucket for `key` (a frame
# size), creating the bucket on first use. NOTE(review): line 472 is elided
# -- presumably an `if self.sender:` guard; confirm against the full source.
471 def __record_data_put(self, key, data):
473 if key not in self.record_data:
474 self.record_data[key] = {}
475 self.record_data[key].update(data)
# Flush collected records to the sender: one message per frame size per run
# type (single_run/ndr/pdr), each built from the shared header plus that
# run type's specific data. NOTE(review): interior lines are elided -- e.g.
# the sender guard (478), the `if 'ndr' in data:` / `if 'pdr' in data:`
# guards (489, 493) and the matching `del` lines (492, 496).
477 def __record_send(self):
# Timestamp in UTC, ISO-8601-like with microseconds and offset.
479 self.record_header["@timestamp"] = datetime.utcnow().replace(
480 tzinfo=pytz.utc).strftime("%Y-%m-%dT%H:%M:%S.%f%z")
481 for frame_size in self.record_data:
# NOTE(review): `data` is an alias of record_header (no copy) -- the
# frame_size and per-frame updates below mutate the shared header dict
# across loop iterations; looks intentional but worth confirming.
482 data = self.record_header
483 data['frame_size'] = frame_size
484 data.update(self.record_data[frame_size])
# Split run-type-specific payloads out of the common data.
485 run_specific_data = {}
486 if 'single_run' in data:
487 run_specific_data['single_run'] = data['single_run']
488 del data['single_run']
490 run_specific_data['ndr'] = data['ndr']
# NDR/PDR payloads carry their configured drop-rate limit.
491 run_specific_data['ndr']['drop_limit'] = self.config['measurement']['NDR']
494 run_specific_data['pdr'] = data['pdr']
495 run_specific_data['pdr']['drop_limit'] = self.config['measurement']['PDR']
# Send one record per run type: common data plus that type's payload.
497 for key in run_specific_data:
498 data_to_send = data.copy()
499 data_to_send.update(run_specific_data[key])
500 self.sender.record_send(data_to_send)
# Initialize the export record structures from the sender and config.
# NOTE(review): line 508 (presumably the closing `}` of the dict literal)
# and any trailing lines are elided from this listing.
503 def __record_init(self):
504 # init is called after checking for sender
505 self.record_header = {
506 "runlogdate": self.sender.runlogdate,
507 "user_label": self.config['user_label']
# Per-frame-size record buckets, filled by __record_data_put.
509 self.record_data = {}