2 # Copyright 2016 Cisco Systems, Inc. All rights reserved.
4 # Licensed under the Apache License, Version 2.0 (the "License"); you may
5 # not use this file except in compliance with the License. You may obtain
6 # a copy of the License at
8 # http://www.apache.org/licenses/LICENSE-2.0
10 # Unless required by applicable law or agreed to in writing, software
11 # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
12 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13 # License for the specific language governing permissions and limitations
17 from contextlib import contextmanager
18 from datetime import datetime
23 from tabulate import tabulate
25 from specs import ChainType
28 class Formatter(object):
29 """Collection of string formatter methods"""
# NOTE(review): this extract is missing the `@staticmethod`/`def` lines for
# several methods of this class; the bodies below are annotated by what they
# visibly do — confirm boundaries against the upstream file.
# int(data): render an integer with thousands separators (1234 -> '1,234').
37 return '{:,}'.format(data)
# float(decimal): return a formatter callable printing a float with
# `decimal` fraction digits.
41 return lambda data: '%.{}f'.format(decimal) % (data)
# standard(data): type-dispatching formatter — ints get comma grouping,
# floats get 4 decimal places, anything else is passed through unchanged.
45 if isinstance(data, int):
46 return Formatter.int(data)
47 elif isinstance(data, float):
48 return Formatter.float(4)(data)
49 return Formatter.fixed(data)
# suffix(suffix_str): return a formatter that appends a fixed suffix
# (e.g. ' pps', '%') to the standard rendering.
52 def suffix(suffix_str):
53 return lambda data: Formatter.standard(data) + suffix_str
# bits(data): human-readable bit-rate string via bitmath.
57 # By default, `best_prefix` returns a value in byte format, this hack (multiply by 8.0)
58 # will convert it into bit format.
59 bit = 8.0 * bitmath.Bit(float(data))
60 bit = bit.best_prefix(bitmath.SI)
61 byte_to_bit_classes = {
# NOTE(review): the byte-unit -> bit-class mapping entries (original lines
# 62-70, presumably 'kB' -> bitmath.kb etc.) are missing from this extract.
71 bps = byte_to_bit_classes.get(bit.unit, bitmath.Bit).from_other(bit) / 8.0
# Two alternative return forms: '<value> <unit>ps' for prefixed units and a
# plain '<value> bps' fallback; the branch selecting between them (original
# line 72) is missing from this extract.
73 return bps.format("{value:.4f} {unit}ps")
74 return bps.format("{value:.4f} bps")
# percentage(data): '-' for NaN, otherwise a 4-decimal value with a '%'
# suffix; the preceding branch(es) (original lines ~78-81, presumably a
# None/empty guard) are missing from this extract.
80 elif math.isnan(data):
82 return Formatter.suffix('%')(Formatter.float(4)(data))
86 """ASCII readable table class"""
# NOTE(review): the `class Table(object):` header line (original line 85) is
# missing from this extract; the methods that follow belong to that class.
def __init__(self, header):
    """Initialize the table from a header specification.

    header: iterable of (column title, formatter callable) pairs; the
    titles become the first data row and the formatters are applied to
    every subsequently added row, column by column.
    """
    titles, column_formatters = zip(*header)
    self.formatters = column_formatters
    # First stored row is the header row itself.
    self.data = [titles]
    self.columns = len(titles)
93 def add_row(self, row):
# Append one data row; its length must match the header, and each cell is
# rendered through its column's formatter before being stored.
94 assert self.columns == len(row)
# NOTE(review): the `formatted_row = []` initialization (original line 95)
# is missing from this extract.
96 for entry, formatter in zip(row, self.formatters):
97 formatted_row.append(formatter(entry))
98 self.data.append(formatted_row)
100 def get_string(self, indent=0):
# Render all accumulated rows with tabulate, then shift every subsequent
# line right by `indent` spaces so the table aligns with the caller's
# indentation level.
101 spaces = ' ' * indent
102 table = tabulate(self.data,
# NOTE(review): the remaining tabulate() keyword arguments (original lines
# 103-106, e.g. headers/tablefmt) are missing from this extract.
107 return table.replace('\n', '\n' + spaces)
110 class Summarizer(object):
111 """Generic summarizer class"""
# NOTE(review): __init__ is only partially visible; the missing lines
# (original ~112-116, 118) presumably initialize self.indent_size,
# self.indent_per_level and self.str, which the methods below rely on —
# confirm against upstream. The marker stack records, per indentation
# level, whether the level is rendered with a '> ' marker.
117 self.marker_stack = [False]
def __indent(self, marker):
    """Push one indentation level; `marker` selects '> ' rendering for it."""
    self.marker_stack.append(marker)
    self.indent_size += self.indent_per_level
def __unindent(self):
    """Pop one indentation level; must not be called at zero depth."""
    assert self.indent_size >= self.indent_per_level
    self.marker_stack.pop()
    self.indent_size -= self.indent_per_level
129 def __get_indent_string(self):
# Build the leading whitespace for the current nesting depth; when the
# current level is marked, the last two spaces are replaced by '> '.
130 current_str = ' ' * self.indent_size
131 if self.marker_stack[-1]:
132 current_str = current_str[:-2] + '> '
# NOTE(review): the trailing `return current_str` (original line 133) is
# missing from this extract.
135 def _put(self, *args):
# Append one indented line to the output buffer. If the last positional
# argument is a dict, the preceding args form the line and the dict is
# expanded on the following lines via _put_dict.
136 self.str += self.__get_indent_string()
137 if args and isinstance(args[-1], dict):
138 self.str += ' '.join(map(str, args[:-1])) + '\n'
139 self._put_dict(args[-1])
# NOTE(review): the `else:` introducing the plain-arguments branch
# (original line 140) is missing from this extract.
141 self.str += ' '.join(map(str, args)) + '\n'
143 def _put_dict(self, data):
# Recursively print a dict one 'key: value' line per entry, with nested
# dicts indented one extra (unmarked) level. Python 2 `iteritems()`;
# iteration order follows the dict (unordered on Python 2).
144 with self._create_block(False):
145 for key, value in data.iteritems():
146 if isinstance(value, dict):
# NOTE(review): the line printing `key + ':'` before the recursion and the
# `else:` branch header (original lines 147 and 149) are missing from this
# extract.
148 self._put_dict(value)
150 self._put(key + ':', value)
def _put_table(self, table):
    """Append an indented ASCII table to the output buffer."""
    prefix = self.__get_indent_string()
    rendered = table.get_string(self.indent_size)
    self.str += prefix
    self.str += rendered + '\n'
# NOTE(review): the `@contextmanager` decorator and the `try/yield/finally`
# (or equivalent) body that calls __unindent (original lines ~159, 162-164)
# are missing from this extract; visible callers use this as
# `with self._create_block(...):`.
160 def _create_block(self, marker=True):
161 self.__indent(marker)
166 class NFVBenchSummarizer(Summarizer):
167 """Summarize nfvbench json result"""
# Column specification for NDR/PDR runs: (title, formatter) pairs consumed
# by Table.
# NOTE(review): the `ndr_pdr_header = [` opening line and the closing `]`
# (original lines ~169, 178) are missing from this extract.
170 ('-', Formatter.fixed),
171 ('L2 Frame Size', Formatter.standard),
172 ('Rate (fwd+rev)', Formatter.bits),
173 ('Rate (fwd+rev)', Formatter.suffix(' pps')),
174 ('Avg Drop Rate', Formatter.suffix('%')),
175 ('Avg Latency (usec)', Formatter.standard),
176 ('Min Latency (usec)', Formatter.standard),
177 ('Max Latency (usec)', Formatter.standard)
# Column specification for fixed-rate (single run) results.
# NOTE(review): the closing `]` of this list is missing from this extract.
180 single_run_header = [
181 ('L2 Frame Size', Formatter.standard),
182 ('Drop Rate', Formatter.suffix('%')),
183 ('Avg Latency (usec)', Formatter.standard),
184 ('Min Latency (usec)', Formatter.standard),
185 ('Max Latency (usec)', Formatter.standard)
# Column specification for the per-direction run-config table.
# NOTE(review): the `config_header = [` opening line and closing `]`
# (original lines ~188, 196) are missing from this extract.
189 ('Direction', Formatter.standard),
190 ('Requested TX Rate (bps)', Formatter.bits),
191 ('Actual TX Rate (bps)', Formatter.bits),
192 ('RX Rate (bps)', Formatter.bits),
193 ('Requested TX Rate (pps)', Formatter.suffix(' pps')),
194 ('Actual TX Rate (pps)', Formatter.suffix(' pps')),
195 ('RX Rate (pps)', Formatter.suffix(' pps'))
# Column specification for the per-interface chain analysis table.
# NOTE(review): the closing `]` of this list is missing from this extract.
198 chain_analysis_header = [
199 ('Interface', Formatter.standard),
200 ('Device', Formatter.standard),
201 ('Packets (fwd)', Formatter.standard),
202 ('Drops (fwd)', Formatter.standard),
203 ('Drop% (fwd)', Formatter.percentage),
204 ('Packets (rev)', Formatter.standard),
205 ('Drops (rev)', Formatter.standard),
206 ('Drop% (rev)', Formatter.percentage)
# Keys into the per-direction run_config dict and their display names;
# the two lists are kept in lockstep and zipped in __get_config_table.
209 direction_keys = ['direction-forward', 'direction-reverse', 'direction-total']
210 direction_names = ['Forward', 'Reverse', 'Total']
212 def __init__(self, result, sender):
# result: nfvbench JSON result dict; sender: optional record sender used
# to stream per-run records (see __record_init / __record_send).
213 Summarizer.__init__(self)
# NOTE(review): the `self.result = result` assignment (original line ~214)
# is missing from this extract; `self.config` below reads it.
215 self.config = self.result['config']
216 self.record_header = None
217 self.record_data = None
# NOTE(review): the sender assignment, the sender check and the calls to
# __record_init/__summarize (original lines ~218, 220-222) are missing
# from this extract.
219 # if sender is available initialize record
224 def __summarize(self):
# Emit the full text summary: global header (date/version/Neutron info),
# benchmark configuration, then one section per service chain. Also seeds
# the record header fields sent to the optional sender.
# NOTE(review): several lines are missing from this extract (e.g. the `})`
# closing the Neutron dict at original line 232, and lines ~225, 241, 244
# which presumably print blank lines / the 'TOR:' component) — confirm
# against upstream.
226 self._put('========== NFVBench Summary ==========')
227 self._put('Date:', self.result['date'])
228 self._put('NFVBench version', self.result['nfvbench_version'])
229 self._put('Openstack Neutron:', {
230 'vSwitch': self.result['openstack_spec']['vswitch'],
231 'Encapsulation': self.result['openstack_spec']['encaps']
233 self.__record_header_put('version', self.result['nfvbench_version'])
234 self.__record_header_put('vSwitch', self.result['openstack_spec']['vswitch'])
235 self.__record_header_put('Encapsulation', self.result['openstack_spec']['encaps'])
236 self._put('Benchmarks:')
237 with self._create_block():
238 self._put('Networks:')
239 with self._create_block():
240 network_benchmark = self.result['benchmarks']['network']
242 self._put('Components:')
243 with self._create_block():
245 with self._create_block(False):
246 self._put('Type:', self.config['tor']['type'])
247 self._put('Traffic Generator:')
248 with self._create_block(False):
249 self._put('Profile:', self.config['generator_config']['name'])
250 self._put('Tool:', self.config['generator_config']['tool'])
251 if network_benchmark['versions']:
252 self._put('Versions:')
253 with self._create_block():
# Python 2 iteritems(); one 'component: version' line per entry.
254 for component, version in network_benchmark['versions'].iteritems():
255 self._put(component + ':', version)
# Only print measurement parameters when an NDR or PDR search actually ran.
257 if self.config['ndr_run'] or self.config['pdr_run']:
258 self._put('Measurement Parameters:')
259 with self._create_block(False):
260 if self.config['ndr_run']:
261 self._put('NDR:', self.config['measurement']['NDR'])
262 if self.config['pdr_run']:
263 self._put('PDR:', self.config['measurement']['PDR'])
264 self._put('Service chain:')
# Each item is a (chain_name, chain_benchmark) pair, unpacked below.
265 for result in network_benchmark['service_chain'].iteritems():
266 with self._create_block():
267 self.__chain_summarize(*result)
def __chain_summarize(self, chain_name, chain_benchmark):
    """Emit the summary section for a single service chain result.

    PVVP chains additionally report a 'mode'; it is printed and folded
    into the chain name recorded for the sender.
    """
    self._put(chain_name + ':')
    if chain_name == ChainType.PVVP:
        mode = chain_benchmark.get('mode')
        self._put('Mode:', mode)
        chain_name = chain_name + "-" + mode
    self.__record_header_put('service_chain', chain_name)
    with self._create_block():
        self._put('Traffic:')
        with self._create_block(False):
            self.__traffic_summarize(chain_benchmark['result'])
280 def __traffic_summarize(self, traffic_benchmark):
# Print the traffic profile parameters, record them for the sender, then
# print the run summary table and per-frame-size chain analysis.
281 self._put('Profile:', traffic_benchmark['profile'])
282 self._put('Bidirectional:', traffic_benchmark['bidirectional'])
283 self._put('Flow count:', traffic_benchmark['flow_count'])
284 self._put('Service chains count:', traffic_benchmark['service_chain_count'])
285 self._put('Compute nodes:', traffic_benchmark['compute_nodes'].keys())
287 self.__record_header_put('profile', traffic_benchmark['profile'])
288 self.__record_header_put('bidirectional', traffic_benchmark['bidirectional'])
289 self.__record_header_put('flow_count', traffic_benchmark['flow_count'])
290 self.__record_header_put('sc_count', traffic_benchmark['service_chain_count'])
291 self.__record_header_put('compute_nodes', traffic_benchmark['compute_nodes'].keys())
292 with self._create_block(False):
# Skip the summary table entirely when the run carried no traffic.
294 if not self.config['no_traffic']:
295 self._put('Run Summary:')
297 with self._create_block(False):
298 self._put_table(self.__get_summary_table(traffic_benchmark['result']))
# NOTE(review): the surrounding lines (original 293, 296, 299-300, 302-304,
# 307, 309-310) are missing from this extract — presumably blank-line
# prints and the `if 'warning' in ...` guards around the two warning /
# analysis branches below. Confirm against upstream.
301 self._put(traffic_benchmark['result']['warning'])
# Iterate (frame_size, analysis) pairs; skip the 'warning' pseudo-entry.
305 for entry in traffic_benchmark['result'].iteritems():
306 if 'warning' in entry:
308 self.__chain_analysis_summarize(*entry)
311 def __chain_analysis_summarize(self, frame_size, analysis):
# Print (and record) per-frame-size analysis: durations of the chain
# analysis and NDR/PDR searches, the single-run config table, and the
# per-interface packet analysis table.
# NOTE(review): blank-line prints and some continuation lines (original
# 312, 321, 326, 329-330, 333, 337, 339-340, 343) are missing from this
# extract; the _put calls at 316/320/325 appear to be missing their
# trailing 'seconds' argument lines.
313 self._put('L2 frame size:', frame_size)
314 if 'analysis_duration_sec' in analysis:
315 self._put('Chain analysis duration:',
316 Formatter.float(3)(analysis['analysis_duration_sec']), 'seconds')
317 self.__record_data_put(frame_size, {'chain_analysis_duration': Formatter.float(3)(
318 analysis['analysis_duration_sec'])})
319 if self.config['ndr_run']:
320 self._put('NDR search duration:', Formatter.float(0)(analysis['ndr']['time_taken_sec']),
322 self.__record_data_put(frame_size, {'ndr_search_duration': Formatter.float(0)(
323 analysis['ndr']['time_taken_sec'])})
324 if self.config['pdr_run']:
325 self._put('PDR search duration:', Formatter.float(0)(analysis['pdr']['time_taken_sec']),
327 self.__record_data_put(frame_size, {'pdr_search_duration': Formatter.float(0)(
328 analysis['pdr']['time_taken_sec'])})
# Run config table only makes sense for a fixed-rate run with traffic.
331 if not self.config['no_traffic'] and self.config['single_run']:
332 self._put('Run Config:')
334 with self._create_block(False):
335 self._put_table(self.__get_config_table(analysis['run_config'], frame_size))
336 if 'warning' in analysis['run_config'] and analysis['run_config']['warning']:
338 self._put(analysis['run_config']['warning'])
341 if 'packet_analysis' in analysis:
342 self._put('Chain Analysis:')
344 with self._create_block(False):
345 self._put_table(self.__get_chain_analysis_table(analysis['packet_analysis']))
348 def __get_summary_table(self, traffic_result):
# Build the run summary Table: one row per frame size, with NDR and/or
# PDR rows for search runs or a single row per frame size for fixed-rate
# runs. Also records the same values for the sender.
# NOTE(review): several lines are missing from this extract — the `else:`
# at original 351, `continue` statements at 357/380, the leading row cells
# ('NDR'/'PDR'/frame_size, originals ~359-360, 382-383, 403), and the
# closing `])`/`}})` of each add_row/record (367, 369, 376, 390, 392,
# 399, 408, 415-417 incl. the final `return summary_table`). Confirm
# against upstream.
349 if self.config['single_run']:
350 summary_table = Table(self.single_run_header)
352 summary_table = Table(self.ndr_pdr_header)
354 if self.config['ndr_run']:
355 for frame_size, analysis in traffic_result.iteritems():
# 'warning' is a pseudo-key in the result dict, not a frame size.
356 if frame_size == 'warning':
358 summary_table.add_row([
361 analysis['ndr']['rate_bps'],
362 analysis['ndr']['rate_pps'],
363 analysis['ndr']['stats']['overall']['drop_percentage'],
364 analysis['ndr']['stats']['overall']['avg_delay_usec'],
365 analysis['ndr']['stats']['overall']['min_delay_usec'],
366 analysis['ndr']['stats']['overall']['max_delay_usec']
368 self.__record_data_put(frame_size, {'ndr': {
370 'rate_bps': analysis['ndr']['rate_bps'],
371 'rate_pps': analysis['ndr']['rate_pps'],
372 'drop_percentage': analysis['ndr']['stats']['overall']['drop_percentage'],
373 'avg_delay_usec': analysis['ndr']['stats']['overall']['avg_delay_usec'],
374 'min_delay_usec': analysis['ndr']['stats']['overall']['min_delay_usec'],
375 'max_delay_usec': analysis['ndr']['stats']['overall']['max_delay_usec']
# PDR block mirrors the NDR block above, reading analysis['pdr'].
377 if self.config['pdr_run']:
378 for frame_size, analysis in traffic_result.iteritems():
379 if frame_size == 'warning':
381 summary_table.add_row([
384 analysis['pdr']['rate_bps'],
385 analysis['pdr']['rate_pps'],
386 analysis['pdr']['stats']['overall']['drop_percentage'],
387 analysis['pdr']['stats']['overall']['avg_delay_usec'],
388 analysis['pdr']['stats']['overall']['min_delay_usec'],
389 analysis['pdr']['stats']['overall']['max_delay_usec']
391 self.__record_data_put(frame_size, {'pdr': {
393 'rate_bps': analysis['pdr']['rate_bps'],
394 'rate_pps': analysis['pdr']['rate_pps'],
395 'drop_percentage': analysis['pdr']['stats']['overall']['drop_percentage'],
396 'avg_delay_usec': analysis['pdr']['stats']['overall']['avg_delay_usec'],
397 'min_delay_usec': analysis['pdr']['stats']['overall']['min_delay_usec'],
398 'max_delay_usec': analysis['pdr']['stats']['overall']['max_delay_usec']
# Fixed-rate run: one row per frame size from the overall rx stats.
400 if self.config['single_run']:
401 for frame_size, analysis in traffic_result.iteritems():
402 summary_table.add_row([
404 analysis['stats']['overall']['drop_rate_percent'],
405 analysis['stats']['overall']['rx']['avg_delay_usec'],
406 analysis['stats']['overall']['rx']['min_delay_usec'],
407 analysis['stats']['overall']['rx']['max_delay_usec']
409 self.__record_data_put(frame_size, {'single_run': {
410 'type': 'single_run',
411 'drop_rate_percent': analysis['stats']['overall']['drop_rate_percent'],
412 'avg_delay_usec': analysis['stats']['overall']['rx']['avg_delay_usec'],
413 'min_delay_usec': analysis['stats']['overall']['rx']['min_delay_usec'],
414 'max_delay_usec': analysis['stats']['overall']['rx']['max_delay_usec']
418 def __get_config_table(self, run_config, frame_size):
# Build the per-direction run-config Table (requested/actual TX and RX
# rates in bps and pps) and record the same values, prefixed with the
# lowercased direction name, for the sender.
419 config_table = Table(self.config_header)
# direction_keys/direction_names are class attributes kept in lockstep.
420 for key, name in zip(self.direction_keys, self.direction_names):
421 if key not in run_config:
# NOTE(review): the `continue` (original line 422), the leading `name`
# cell of the row (424), and the closing `])`/`})` plus the final
# `return config_table` (originals ~431, 439-441) are missing from
# this extract.
423 config_table.add_row([
425 run_config[key]['orig']['rate_bps'],
426 run_config[key]['tx']['rate_bps'],
427 run_config[key]['rx']['rate_bps'],
# pps values are truncated to whole packets for display.
428 int(run_config[key]['orig']['rate_pps']),
429 int(run_config[key]['tx']['rate_pps']),
430 int(run_config[key]['rx']['rate_pps']),
432 self.__record_data_put(frame_size, {
433 name.lower() + "_orig_rate_bps": int(run_config[key]['orig']['rate_bps']),
434 name.lower() + "_tx_rate_bps": int(run_config[key]['tx']['rate_bps']),
435 name.lower() + "_rx_rate_bps": int(run_config[key]['rx']['rate_bps']),
436 name.lower() + "_orig_rate_pps": int(run_config[key]['orig']['rate_pps']),
437 name.lower() + "_tx_rate_pps": int(run_config[key]['tx']['rate_pps']),
438 name.lower() + "_rx_rate_pps": int(run_config[key]['rx']['rate_pps']),
443 def __get_chain_analysis_table(self, packet_analysis):
# Build the per-interface chain analysis Table, pairing each forward-path
# entry with the matching reverse-path entry.
444 chain_analysis_table = Table(self.chain_analysis_header)
445 forward_analysis = packet_analysis['direction-forward']
# NOTE(review): list.reverse() below mutates the caller's
# packet_analysis['direction-reverse'] list in place.
446 reverse_analysis = packet_analysis['direction-reverse']
447 reverse_analysis.reverse()
448 for fwd, rev in zip(forward_analysis, reverse_analysis):
449 chain_analysis_table.add_row([
# NOTE(review): the leading row cells (interface/device/packet counts,
# original lines ~450-452 and 455) and the closing `])` (458) are
# missing from this extract.
453 fwd.get('packet_drop_count', None),
454 fwd.get('packet_drop_percentage', None),
456 rev.get('packet_drop_count', None),
457 rev.get('packet_drop_percentage', None),
459 return chain_analysis_table
461 def __record_header_put(self, key, value):
# Store one header field for the record sender.
# NOTE(review): a guard line (original 462, presumably `if self.sender:`)
# is missing from this extract — confirm against upstream.
463 self.record_header[key] = value
465 def __record_data_put(self, key, data):
# Merge `data` into the per-frame-size record bucket for `key`,
# creating the bucket on first use.
# NOTE(review): a guard line (original 466, presumably `if self.sender:`)
# is missing from this extract — confirm against upstream.
467 if key not in self.record_data:
468 self.record_data[key] = {}
469 self.record_data[key].update(data)
471 def __record_send(self):
# Emit one record per frame size and run type to the sender: the shared
# header (stamped with a UTC timestamp) plus frame-size data, split so
# each run type (single_run/ndr/pdr) is sent as a separate record with
# its own drop_limit.
# NOTE(review): original lines 472, 483, 486-487, 490, 495-496 are
# missing from this extract — presumably a `if self.sender:` guard and
# the `if 'ndr' in data:` / `if 'pdr' in data:` branch headers with
# their `del` statements. Confirm against upstream.
473 self.record_header["@timestamp"] = datetime.utcnow().replace(
474 tzinfo=pytz.utc).strftime("%Y-%m-%dT%H:%M:%S.%f%z")
475 for frame_size in self.record_data:
# NOTE(review): `data` aliases (not copies) self.record_header, so the
# header dict accumulates frame_size keys across iterations; only
# data_to_send below is a copy.
476 data = self.record_header
477 data['frame_size'] = frame_size
478 data.update(self.record_data[frame_size])
479 run_specific_data = {}
480 if 'single_run' in data:
481 run_specific_data['single_run'] = data['single_run']
482 del data['single_run']
484 run_specific_data['ndr'] = data['ndr']
485 run_specific_data['ndr']['drop_limit'] = self.config['measurement']['NDR']
488 run_specific_data['pdr'] = data['pdr']
489 run_specific_data['pdr']['drop_limit'] = self.config['measurement']['PDR']
491 for key in run_specific_data:
492 data_to_send = data.copy()
493 data_to_send.update(run_specific_data[key])
494 self.sender.record_send(data_to_send)
497 def __record_init(self):
498 # init is called after checking for sender
# Seed the record header with run metadata and reset the per-frame-size
# data collected by __record_data_put.
499 self.record_header = {
500 "runlogdate": self.sender.runlogdate,
501 "user_label": self.config['user_label']
# NOTE(review): the closing `}` of the dict literal (original line 502)
# is missing from this extract.
503 self.record_data = {}