2 # Copyright 2016 Cisco Systems, Inc. All rights reserved.
4 # Licensed under the Apache License, Version 2.0 (the "License"); you may
5 # not use this file except in compliance with the License. You may obtain
6 # a copy of the License at
8 # http://www.apache.org/licenses/LICENSE-2.0
10 # Unless required by applicable law or agreed to in writing, software
11 # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
12 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13 # License for the specific language governing permissions and limitations
17 from contextlib import contextmanager
18 from datetime import datetime
23 from specs import ChainType
24 from tabulate import tabulate
class Formatter(object):
    """Collection of string formatter methods.

    Members are formatter callables (or factories returning formatter
    callables) that turn raw numeric values into display strings for the
    summary tables below.
    NOTE(review): this extract is missing several lines (decorators, some
    'def' headers and dict entries); elided spots are marked inline.
    """

    # [elided in this extract: the 'fixed' formatter and the 'def int(data):' header]
    # Thousands-separated integer rendering, e.g. 1234567 -> '1,234,567'.
        return '{:,}'.format(data)

    # [elided: 'def float(decimal):' header] -- factory returning a
    # fixed-decimal-places formatter, e.g. Formatter.float(4)(1.5) -> '1.5000'.
        return lambda data: '%.{}f'.format(decimal) % (data)

    # [elided: 'def standard(data):' header] -- dispatch on the value's type:
    # ints get thousands separators, floats get 4 decimals, anything else
    # falls through to the fixed formatter.
        if isinstance(data, int):
            return Formatter.int(data)
        elif isinstance(data, float):
            return Formatter.float(4)(data)
        return Formatter.fixed(data)

    def suffix(suffix_str):
        # Factory: append a fixed suffix (e.g. ' pps', '%') to the standard
        # rendering of the value.
        return lambda data: Formatter.standard(data) + suffix_str

    # [elided: 'def bits(data):' header] -- render a bits-per-second rate
    # with an SI prefix, 4 decimal places (relies on the bitmath package).
        # By default, `best_prefix` returns a value in byte format, this hack (multiply by 8.0)
        # will convert it into bit format.
        bit = 8.0 * bitmath.Bit(float(data))
        bit = bit.best_prefix(bitmath.SI)
        byte_to_bit_classes = {
        # [elided: the unit-name -> bitmath class entries and closing '}' of this dict]
        bps = byte_to_bit_classes.get(bit.unit, bitmath.Bit).from_other(bit) / 8.0
        # [elided: 'if' guard separating prefixed units from plain bits]
            return bps.format("{value:.4f} {unit}ps")
        return bps.format("{value:.4f} bps")

    # [elided: 'def percentage(data):' header and its None-handling branch]
        elif math.isnan(data):
        # [elided: the NaN branch body]
        return Formatter.suffix('%')(Formatter.float(4)(data))
85 """ASCII readable table class"""
87 def __init__(self, header):
88 header_row, self.formatters = zip(*header)
89 self.data = [header_row]
90 self.columns = len(header_row)
92 def add_row(self, row):
93 assert self.columns == len(row)
95 for entry, formatter in zip(row, self.formatters):
96 formatted_row.append(formatter(entry))
97 self.data.append(formatted_row)
99 def get_string(self, indent=0):
100 spaces = ' ' * indent
101 table = tabulate(self.data,
106 return table.replace('\n', '\n' + spaces)
class Summarizer(object):
    """Generic summarizer class.

    Accumulates output in self.str, tracking an indentation level and a
    stack of 'marker' flags (a marker level renders a trailing '> ').
    NOTE(review): several lines are elided in this extract (the __init__
    header, 'else:' branches, a return, and the @contextmanager plumbing);
    elided spots are marked inline.
    """

    # [elided: 'def __init__(self):' header and sibling attribute inits
    #  (indent size, output buffer, indent step)]
        self.marker_stack = [False]

    def __indent(self, marker):
        # Go one level deeper; 'marker' controls whether this level is
        # rendered with a '> ' prefix.
        self.indent_size += self.indent_per_level
        self.marker_stack.append(marker)

    def __unindent(self):
        # Pop one indentation level; must never go below zero.
        assert self.indent_size >= self.indent_per_level
        self.indent_size -= self.indent_per_level
        self.marker_stack.pop()

    def __get_indent_string(self):
        # Leading whitespace for the current level; the last two spaces are
        # replaced by '> ' when the current level is marked.
        current_str = ' ' * self.indent_size
        if self.marker_stack[-1]:
            current_str = current_str[:-2] + '> '
        # [elided: 'return current_str']

    def _put(self, *args):
        # Append one indented line; a trailing dict argument is expanded
        # recursively via _put_dict.
        self.str += self.__get_indent_string()
        if args and isinstance(args[-1], dict):
            self.str += ' '.join(map(str, args[:-1])) + '\n'
            self._put_dict(args[-1])
        # [elided: 'else:' -- the line below is the non-dict branch]
            self.str += ' '.join(map(str, args)) + '\n'

    def _put_dict(self, data):
        # Emit a dict one 'key: value' line per entry, nesting sub-dicts
        # one indentation level deeper.
        with self._create_block(False):
            for key, value in data.iteritems():
                if isinstance(value, dict):
                    # [elided: emitting the bare 'key:' heading line]
                    self._put_dict(value)
                # [elided: 'else:' -- the line below is the scalar branch]
                    self._put(key + ':', value)

    def _put_table(self, table):
        # Append a pre-built Table, indented to the current level.
        self.str += self.__get_indent_string()
        self.str += table.get_string(self.indent_size) + '\n'

    # [elided: '@contextmanager' decorator -- contextmanager is imported
    #  at the top of the file; presumably the yield/unindent cleanup lines
    #  follow, TODO confirm upstream]
    def _create_block(self, marker=True):
        self.__indent(marker)
165 class NFVBenchSummarizer(Summarizer):
166 """Summarize nfvbench json result"""
    # Columns of the NDR/PDR summary table: (title, formatter) pairs.
    # [elided in this extract: the 'ndr_pdr_header = [' opener and the
    #  list closers throughout this section]
        ('-', Formatter.fixed),
        ('L2 Frame Size', Formatter.standard),
        ('Rate (fwd+rev)', Formatter.bits),
        ('Rate (fwd+rev)', Formatter.suffix(' pps')),
        ('Avg Drop Rate', Formatter.suffix('%')),
        ('Avg Latency (usec)', Formatter.standard),
        ('Min Latency (usec)', Formatter.standard),
        ('Max Latency (usec)', Formatter.standard)

    # Columns for a fixed-rate single run.
    single_run_header = [
        ('L2 Frame Size', Formatter.standard),
        ('Drop Rate', Formatter.suffix('%')),
        ('Avg Latency (usec)', Formatter.standard),
        ('Min Latency (usec)', Formatter.standard),
        ('Max Latency (usec)', Formatter.standard)

    # Run-config table columns (requested/actual/RX rates per direction).
    # [elided: 'config_header = [' opener]
        ('Direction', Formatter.standard),
        ('Requested TX Rate (bps)', Formatter.bits),
        ('Actual TX Rate (bps)', Formatter.bits),
        ('RX Rate (bps)', Formatter.bits),
        ('Requested TX Rate (pps)', Formatter.suffix(' pps')),
        ('Actual TX Rate (pps)', Formatter.suffix(' pps')),
        ('RX Rate (pps)', Formatter.suffix(' pps'))

    # Per-interface packet-path analysis columns.
    chain_analysis_header = [
        ('Interface', Formatter.standard),
        ('Device', Formatter.standard),
        ('Packets (fwd)', Formatter.standard),
        ('Drops (fwd)', Formatter.standard),
        ('Drop% (fwd)', Formatter.percentage),
        ('Packets (rev)', Formatter.standard),
        ('Drops (rev)', Formatter.standard),
        ('Drop% (rev)', Formatter.percentage)

    # Dict keys of the per-direction results and their display names,
    # kept in lockstep for the zip() in __get_config_table.
    direction_keys = ['direction-forward', 'direction-reverse', 'direction-total']
    direction_names = ['Forward', 'Reverse', 'Total']
    def __init__(self, result, sender):
        """Build a summarizer over a full nfvbench result dict.

        result: nfvbench JSON result dict; sender: optional record sender
        used for pushing flattened records.
        NOTE(review): the lines storing 'result'/'sender' on self and the
        sender-conditional record initialization are elided in this extract
        (self.result is read on the next line, so an assignment clearly
        existed upstream).
        """
        Summarizer.__init__(self)
        self.config = self.result['config']
        self.record_header = None
        self.record_data = None
        # if sender is available initialize record
    def __summarize(self):
        """Emit the full text summary into self.str and record header fields."""
        # [elided line]
        self._put('========== NFVBench Summary ==========')
        self._put('Date:', self.result['date'])
        self._put('NFVBench version', self.result['nfvbench_version'])
        self._put('Openstack Neutron:', {
            'vSwitch': self.result['openstack_spec']['vswitch'],
            'Encapsulation': self.result['openstack_spec']['encaps']
        # [elided: '})' closing the dict argument]
        self.__record_header_put('version', self.result['nfvbench_version'])
        self.__record_header_put('vSwitch', self.result['openstack_spec']['vswitch'])
        self.__record_header_put('Encapsulation', self.result['openstack_spec']['encaps'])
        self._put('Benchmarks:')
        with self._create_block():
            self._put('Networks:')
            with self._create_block():
                network_benchmark = self.result['benchmarks']['network']

                self._put('Components:')
                with self._create_block():
                    # [elided: the 'TOR:' heading line preceding this block]
                    with self._create_block(False):
                        self._put('Type:', self.config['tor']['type'])
                    self._put('Traffic Generator:')
                    with self._create_block(False):
                        self._put('Profile:', self.config['generator_config']['name'])
                        self._put('Tool:', self.config['generator_config']['tool'])
                    if network_benchmark['versions']:
                        self._put('Versions:')
                        with self._create_block():
                            for component, version in network_benchmark['versions'].iteritems():
                                self._put(component + ':', version)

                if self.config['ndr_run'] or self.config['pdr_run']:
                    self._put('Measurement Parameters:')
                    with self._create_block(False):
                        if self.config['ndr_run']:
                            self._put('NDR:', self.config['measurement']['NDR'])
                        if self.config['pdr_run']:
                            self._put('PDR:', self.config['measurement']['PDR'])
                self._put('Service chain:')
                for result in network_benchmark['service_chain'].iteritems():
                    with self._create_block():
                        self.__chain_summarize(*result)
268 def __chain_summarize(self, chain_name, chain_benchmark):
269 self._put(chain_name + ':')
270 if chain_name == ChainType.PVVP:
271 self._put('Mode:', chain_benchmark.get('mode'))
272 chain_name += "-" + chain_benchmark.get('mode')
273 self.__record_header_put('service_chain', chain_name)
274 with self._create_block():
275 self._put('Traffic:')
276 with self._create_block(False):
277 self.__traffic_summarize(chain_benchmark['result'])
    def __traffic_summarize(self, traffic_benchmark):
        """Emit the traffic profile, run-summary table and per-size analysis."""
        self._put('Profile:', traffic_benchmark['profile'])
        self._put('Bidirectional:', traffic_benchmark['bidirectional'])
        self._put('Flow count:', traffic_benchmark['flow_count'])
        self._put('Service chains count:', traffic_benchmark['service_chain_count'])
        self._put('Compute nodes:', traffic_benchmark['compute_nodes'].keys())

        # Mirror the displayed fields into the record header for senders.
        self.__record_header_put('profile', traffic_benchmark['profile'])
        self.__record_header_put('bidirectional', traffic_benchmark['bidirectional'])
        self.__record_header_put('flow_count', traffic_benchmark['flow_count'])
        self.__record_header_put('sc_count', traffic_benchmark['service_chain_count'])
        self.__record_header_put('compute_nodes', traffic_benchmark['compute_nodes'].keys())
        with self._create_block(False):
            # [elided line]
            if not self.config['no_traffic']:
                self._put('Run Summary:')
                # [elided line]
                with self._create_block(False):
                    self._put_table(self.__get_summary_table(traffic_benchmark['result']))
                    # [elided line(s) -- the warning line below appears to be
                    #  guarded upstream; confirm against the full source]
                    self._put(traffic_benchmark['result']['warning'])
            # [elided line(s)]
            # Entries are (frame_size, analysis) pairs; the 'warning'
            # pseudo-entry carries no analysis.
            for entry in traffic_benchmark['result'].iteritems():
                if 'warning' in entry:
                    # [elided: 'continue' skipping the warning pseudo-entry]
                self.__chain_analysis_summarize(*entry)
            # [elided: trailing line(s)]
    def __chain_analysis_summarize(self, frame_size, analysis):
        """Emit per-frame-size details: durations, run config, chain analysis."""
        # [elided line]
        self._put('L2 frame size:', frame_size)
        if 'analysis_duration_sec' in analysis:
            self._put('Chain analysis duration:',
                      Formatter.float(3)(analysis['analysis_duration_sec']), 'seconds')
            self.__record_data_put(frame_size, {'chain_analysis_duration': Formatter.float(3)(
                analysis['analysis_duration_sec'])})
        if self.config['ndr_run']:
            self._put('NDR search duration:', Formatter.float(0)(analysis['ndr']['time_taken_sec']),
                      # [elided: trailing argument line closing this call]
            self.__record_data_put(frame_size, {'ndr_search_duration': Formatter.float(0)(
                analysis['ndr']['time_taken_sec'])})
        if self.config['pdr_run']:
            self._put('PDR search duration:', Formatter.float(0)(analysis['pdr']['time_taken_sec']),
                      # [elided: trailing argument line closing this call]
            self.__record_data_put(frame_size, {'pdr_search_duration': Formatter.float(0)(
                analysis['pdr']['time_taken_sec'])})
        # [elided line(s)]
        if not self.config['no_traffic'] and self.config['single_run']:
            self._put('Run Config:')
            # [elided line]
            with self._create_block(False):
                self._put_table(self.__get_config_table(analysis['run_config'], frame_size))
                if 'warning' in analysis['run_config'] and analysis['run_config']['warning']:
                    # [elided line]
                    self._put(analysis['run_config']['warning'])
        # [elided line(s)]
        if 'packet_analysis' in analysis:
            self._put('Chain Analysis:')
            # [elided line]
            with self._create_block(False):
                self._put_table(self.__get_chain_analysis_table(analysis['packet_analysis']))
    def __get_summary_table(self, traffic_result):
        """Build the run-summary Table.

        traffic_result: dict keyed by L2 frame size (plus an optional
        'warning' pseudo-key) mapping to per-run analysis dicts. Produces
        NDR and/or PDR rows, or single-run rows, per the run config; each
        row is also folded into record_data for senders.
        """
        if self.config['single_run']:
            summary_table = Table(self.single_run_header)
        # [elided: 'else:' -- the line below is the NDR/PDR branch]
            summary_table = Table(self.ndr_pdr_header)

        if self.config['ndr_run']:
            for frame_size, analysis in traffic_result.iteritems():
                if frame_size == 'warning':
                    # [elided: 'continue' skipping the warning pseudo-entry]
                summary_table.add_row([
                    # [elided: leading row entries (row label, frame size)]
                    analysis['ndr']['rate_bps'],
                    analysis['ndr']['rate_pps'],
                    analysis['ndr']['stats']['overall']['drop_percentage'],
                    analysis['ndr']['stats']['overall']['avg_delay_usec'],
                    analysis['ndr']['stats']['overall']['min_delay_usec'],
                    analysis['ndr']['stats']['overall']['max_delay_usec']
                # [elided: '])' closing the row]
                self.__record_data_put(frame_size, {'ndr': {
                    # [elided: one dict entry line]
                    'rate_bps': analysis['ndr']['rate_bps'],
                    'rate_pps': analysis['ndr']['rate_pps'],
                    'drop_percentage': analysis['ndr']['stats']['overall']['drop_percentage'],
                    'avg_delay_usec': analysis['ndr']['stats']['overall']['avg_delay_usec'],
                    'min_delay_usec': analysis['ndr']['stats']['overall']['min_delay_usec'],
                    'max_delay_usec': analysis['ndr']['stats']['overall']['max_delay_usec']
                # [elided: '}})' closing the record]
        if self.config['pdr_run']:
            for frame_size, analysis in traffic_result.iteritems():
                if frame_size == 'warning':
                    # [elided: 'continue']
                summary_table.add_row([
                    # [elided: leading row entries]
                    analysis['pdr']['rate_bps'],
                    analysis['pdr']['rate_pps'],
                    analysis['pdr']['stats']['overall']['drop_percentage'],
                    analysis['pdr']['stats']['overall']['avg_delay_usec'],
                    analysis['pdr']['stats']['overall']['min_delay_usec'],
                    analysis['pdr']['stats']['overall']['max_delay_usec']
                # [elided: '])']
                self.__record_data_put(frame_size, {'pdr': {
                    # [elided: one dict entry line]
                    'rate_bps': analysis['pdr']['rate_bps'],
                    'rate_pps': analysis['pdr']['rate_pps'],
                    'drop_percentage': analysis['pdr']['stats']['overall']['drop_percentage'],
                    'avg_delay_usec': analysis['pdr']['stats']['overall']['avg_delay_usec'],
                    'min_delay_usec': analysis['pdr']['stats']['overall']['min_delay_usec'],
                    'max_delay_usec': analysis['pdr']['stats']['overall']['max_delay_usec']
                # [elided: '}})']
        if self.config['single_run']:
            for frame_size, analysis in traffic_result.iteritems():
                summary_table.add_row([
                    # [elided: leading row entries]
                    analysis['stats']['overall']['drop_rate_percent'],
                    analysis['stats']['overall']['rx']['avg_delay_usec'],
                    analysis['stats']['overall']['rx']['min_delay_usec'],
                    analysis['stats']['overall']['rx']['max_delay_usec']
                # [elided: '])']
                self.__record_data_put(frame_size, {'single_run': {
                    'type': 'single_run',
                    'drop_rate_percent': analysis['stats']['overall']['drop_rate_percent'],
                    'avg_delay_usec': analysis['stats']['overall']['rx']['avg_delay_usec'],
                    'min_delay_usec': analysis['stats']['overall']['rx']['min_delay_usec'],
                    'max_delay_usec': analysis['stats']['overall']['rx']['max_delay_usec']
                # [elided: closing lines and 'return summary_table']
    def __get_config_table(self, run_config, frame_size):
        """Build the Run Config Table (one row per traffic direction).

        run_config: dict keyed by direction_keys; each entry holds
        'orig'/'tx'/'rx' sub-dicts with 'rate_bps' and 'rate_pps'.
        Also folds the per-direction rates into record_data under frame_size.
        """
        config_table = Table(self.config_header)
        for key, name in zip(self.direction_keys, self.direction_names):
            if key not in run_config:
                # [elided: 'continue' skipping absent directions]
            config_table.add_row([
                # [elided: the direction-name row entry]
                run_config[key]['orig']['rate_bps'],
                run_config[key]['tx']['rate_bps'],
                run_config[key]['rx']['rate_bps'],
                int(run_config[key]['orig']['rate_pps']),
                int(run_config[key]['tx']['rate_pps']),
                int(run_config[key]['rx']['rate_pps']),
            # [elided: '])' closing the row]
            self.__record_data_put(frame_size, {
                name.lower() + "_orig_rate_bps": int(run_config[key]['orig']['rate_bps']),
                name.lower() + "_tx_rate_bps": int(run_config[key]['tx']['rate_bps']),
                name.lower() + "_rx_rate_bps": int(run_config[key]['rx']['rate_bps']),
                name.lower() + "_orig_rate_pps": int(run_config[key]['orig']['rate_pps']),
                name.lower() + "_tx_rate_pps": int(run_config[key]['tx']['rate_pps']),
                name.lower() + "_rx_rate_pps": int(run_config[key]['rx']['rate_pps']),
            # [elided: '})' and 'return config_table']
    def __get_chain_analysis_table(self, packet_analysis):
        """Build the per-interface Chain Analysis Table.

        packet_analysis: dict with 'direction-forward'/'direction-reverse'
        lists of per-interface stats. The reverse list is reversed in place
        so the two directions line up interface-by-interface in zip().
        """
        chain_analysis_table = Table(self.chain_analysis_header)
        forward_analysis = packet_analysis['direction-forward']
        reverse_analysis = packet_analysis['direction-reverse']
        # NOTE: mutates the caller's list so both directions traverse the
        # chain in the same physical order.
        reverse_analysis.reverse()
        for fwd, rev in zip(forward_analysis, reverse_analysis):
            chain_analysis_table.add_row([
                # [elided: leading entries (interface, device, fwd packets)]
                fwd.get('packet_drop_count', None),
                fwd.get('packet_drop_percentage', None),
                # [elided: rev packet-count entry]
                rev.get('packet_drop_count', None),
                rev.get('packet_drop_percentage', None),
            # [elided: '])' closing the row]
        return chain_analysis_table
    def __record_header_put(self, key, value):
        """Store a header field to be attached to every sent record.

        NOTE(review): one guard line is elided between the def and the
        assignment in this extract (presumably 'if self.sender:', since
        record_header stays None when no sender is configured) -- confirm
        against the full source.
        """
        self.record_header[key] = value
    def __record_data_put(self, key, data):
        """Merge `data` into the per-frame-size record dict under `key`.

        NOTE(review): one guard line is elided after the def in this extract
        (presumably 'if self.sender:') -- confirm against the full source.
        """
        if key not in self.record_data:
            self.record_data[key] = {}
        self.record_data[key].update(data)
    def __record_send(self):
        """Flatten header + per-frame-size data and push each record via sender."""
        # [elided line -- presumably an 'if self.sender:' guard; confirm upstream]
        self.record_header["@timestamp"] = datetime.utcnow().replace(
            tzinfo=pytz.utc).strftime("%Y-%m-%dT%H:%M:%S.%f%z")
        for frame_size in self.record_data:
            # NOTE(review): 'data' aliases record_header (no copy) and is
            # mutated below; a copy is only taken per run-specific key.
            data = self.record_header
            data['frame_size'] = frame_size
            data.update(self.record_data[frame_size])
            # Pull run-type-specific sub-dicts out so each run type is sent
            # as its own flattened record.
            run_specific_data = {}
            if 'single_run' in data:
                run_specific_data['single_run'] = data['single_run']
                del data['single_run']
            # [elided: guard line, presumably "if 'ndr' in data:"]
                run_specific_data['ndr'] = data['ndr']
                run_specific_data['ndr']['drop_limit'] = self.config['measurement']['NDR']
            # [elided: 'del' line and presumably "if 'pdr' in data:" guard]
                run_specific_data['pdr'] = data['pdr']
                run_specific_data['pdr']['drop_limit'] = self.config['measurement']['PDR']
            # [elided: 'del' line]
            for key in run_specific_data:
                data_to_send = data.copy()
                data_to_send.update(run_specific_data[key])
                self.sender.record_send(data_to_send)
        # [elided: trailing line(s)]
496 def __record_init(self):
497 # init is called after checking for sender
498 self.record_header = {
499 "runlogdate": self.sender.runlogdate,
500 "user_label": self.config['user_label']
502 self.record_data = {}