2 # Copyright 2016 Cisco Systems, Inc. All rights reserved.
4 # Licensed under the Apache License, Version 2.0 (the "License"); you may
5 # not use this file except in compliance with the License. You may obtain
6 # a copy of the License at
8 # http://www.apache.org/licenses/LICENSE-2.0
10 # Unless required by applicable law or agreed to in writing, software
11 # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
12 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13 # License for the specific language governing permissions and limitations
18 from contextlib import contextmanager
19 from datetime import datetime
22 from specs import ChainType
23 from tabulate import tabulate
class Formatter(object):
    """Collection of string formatter methods.

    NOTE(review): this view of the file is elided -- several formatter
    `def` lines (and, presumably, their @staticmethod decorators) are
    missing, leaving orphaned bodies below.  Code tokens kept exactly
    as found; only comments were added.
    """

    # Body of an integer formatter (its `def` line is not visible here):
    # renders with thousands separators, e.g. 1234567 -> '1,234,567'.
        return '{:,}'.format(data)

    # Body of a float-formatter factory (its `def` line is not visible
    # here): returns a callable rendering `data` with `decimal` digits.
        return lambda data: '%.{}f'.format(decimal) % (data)

    # Fragment of a type-dispatching formatter (the opening
    # `if type(data) == int:` style line is not visible here).
            return Formatter.int(data)
        elif type(data) == float:
            return Formatter.float(4)(data)
        # presumably the fallback branch -- TODO confirm against full source
            return Formatter.fixed(data)

    def suffix(suffix_str):
        # Factory: append a fixed suffix (e.g. ' pps', '%') to the
        # standard rendering of a value.
        return lambda data: Formatter.standard(data) + suffix_str

    # Fragment of a bit-rate formatter (its `def` line is not visible):
    # converts a raw bps value to a human-readable SI-prefixed rate.
        # By default, `best_prefix` returns a value in byte format, this hack (multiply by 8.0)
        # will convert it into bit format.
        bit = 8.0 * bitmath.Bit(float(data))
        bit = bit.best_prefix(bitmath.SI)
        # Byte-unit -> bit-unit class mapping; the dictionary entries
        # themselves are elided from this view.
        byte_to_bit_classes = {
        bps = byte_to_bit_classes.get(bit.unit, bitmath.Bit).from_other(bit) / 8.0
        # Two `return`s survive the elision; in the full source they are
        # presumably on separate branches (prefixed unit vs plain bps) --
        # TODO confirm against the full source.
        return bps.format("{value:.4f} {unit}ps")
        return bps.format("{value:.4f} bps")

    # Fragment of a percentage formatter (def line and earlier branches
    # are elided from this view): NaN guard, then percent rendering.
        elif math.isnan(data):
        # (the elif branch body is elided from this view)
        return Formatter.suffix('%')(Formatter.float(4)(data))
87 """ASCII readable table class"""
89 def __init__(self, header):
90 header_row, self.formatters = zip(*header)
91 self.data = [header_row]
92 self.columns = len(header_row)
94 def add_row(self, row):
95 assert (self.columns == len(row))
97 for entry, formatter in zip(row, self.formatters):
98 formatted_row.append(formatter(entry))
99 self.data.append(formatted_row)
    def get_string(self, indent=0):
        """Render the table as ASCII text, indenting each line by ``indent`` spaces."""
        spaces = ' ' * indent
        # NOTE(review): the remaining keyword arguments of the
        # `tabulate(...)` call are elided from this view -- the call is
        # syntactically incomplete here.  Tokens kept as found.
        table = tabulate(self.data,
        # Re-indent every rendered line so the table sits at the caller's level.
        return table.replace('\n', '\n' + spaces)
class Summarizer(object):
    """Generic summarizer class.

    Accumulates an indented text report in ``self.str``.
    NOTE(review): several lines of this class are elided from this view
    (the `__init__` def line, a `return`, some `else:` branches, and the
    body of `_create_block`).  Code tokens kept exactly as found.
    """

    # Fragment of __init__ (its `def` line is not visible here).  The
    # marker stack records, per indent level, whether a '> ' marker is drawn.
        self.marker_stack = [False]

    def __indent(self, marker):
        # Push one indent level; `marker` selects the '> ' prefix style.
        self.indent_size += self.indent_per_level
        self.marker_stack.append(marker)

    def __unindent(self):
        # Pop one indent level; indentation must never go negative.
        assert (self.indent_size >= self.indent_per_level)
        self.indent_size -= self.indent_per_level
        self.marker_stack.pop()

    def __get_indent_string(self):
        # Leading whitespace for the current level; '> ' replaces the last
        # two spaces when the current level was opened with a marker.
        current_str = ' ' * self.indent_size
        if self.marker_stack[-1]:
            current_str = current_str[:-2] + '> '
        # NOTE(review): the `return current_str` line is elided from this view.

    def _put(self, *args):
        # Append one report line; a trailing dict argument is expanded
        # as a nested key/value block via _put_dict.
        self.str += self.__get_indent_string()
        if len(args) and type(args[-1]) == dict:
            self.str += ' '.join(map(str, args[:-1])) + '\n'
            self._put_dict(args[-1])
        # NOTE(review): the `else:` line for the plain-args case is elided.
            self.str += ' '.join(map(str, args)) + '\n'

    def _put_dict(self, data):
        # Recursively render a dict as an indented key/value block.
        # NOTE(review): `iteritems()` is Python 2 only.
        with self._create_block(False):
            for key, value in data.iteritems():
                if type(value) == dict:
                    self._put_dict(value)
                # NOTE(review): the `else:` line for scalar values is elided.
                    self._put(key + ':', value)

    def _put_table(self, table):
        # Append a pre-built Table, indented to the current level.
        self.str += self.__get_indent_string()
        self.str += table.get_string(self.indent_size) + '\n'

    def _create_block(self, marker=True):
        # Presumably a @contextmanager (``contextmanager`` is imported at
        # the top of this file and the method is used in `with` blocks
        # above); its `yield`/unindent lines are elided from this view --
        # TODO confirm against the full source.
        self.__indent(marker)
class NFVBenchSummarizer(Summarizer):
    """Summarize nfvbench json result.

    Walks the benchmark result dict, writes a human-readable report via
    the Summarizer primitives, and (when a sender is available) builds
    flat records for transmission.
    NOTE(review): this view of the file is heavily elided -- list-literal
    headers and closing brackets, `else:`/`continue` lines and some
    guards are missing.  Code tokens kept exactly as found; only
    comments were added.
    """

    # Entries of the NDR/PDR summary-table header (the `... = [` line and
    # closing bracket are not visible in this view).
        ('-', Formatter.fixed),
        ('L2 Frame Size', Formatter.standard),
        ('Rate (fwd+rev)', Formatter.bits),
        ('Rate (fwd+rev)', Formatter.suffix(' pps')),
        ('Avg Drop Rate', Formatter.suffix('%')),
        ('Avg Latency (usec)', Formatter.standard),
        ('Min Latency (usec)', Formatter.standard),
        ('Max Latency (usec)', Formatter.standard)

    # Column labels and formatters for the single-run summary table
    # (closing bracket elided from this view).
    single_run_header = [
        ('L2 Frame Size', Formatter.standard),
        ('Drop Rate', Formatter.suffix('%')),
        ('Avg Latency (usec)', Formatter.standard),
        ('Min Latency (usec)', Formatter.standard),
        ('Max Latency (usec)', Formatter.standard)

    # Entries of the run-config table header (its `... = [` line is
    # elided; referenced below as `self.config_header`).
        ('Direction', Formatter.standard),
        ('Requested TX Rate (bps)', Formatter.bits),
        ('Actual TX Rate (bps)', Formatter.bits),
        ('RX Rate (bps)', Formatter.bits),
        ('Requested TX Rate (pps)', Formatter.suffix(' pps')),
        ('Actual TX Rate (pps)', Formatter.suffix(' pps')),
        ('RX Rate (pps)', Formatter.suffix(' pps'))

    # Column labels and formatters for the per-interface chain analysis
    # (closing bracket elided from this view).
    chain_analysis_header = [
        ('Interface', Formatter.standard),
        ('Device', Formatter.standard),
        ('Packets (fwd)', Formatter.standard),
        ('Drops (fwd)', Formatter.standard),
        ('Drop% (fwd)', Formatter.percentage),
        ('Packets (rev)', Formatter.standard),
        ('Drops (rev)', Formatter.standard),
        ('Drop% (rev)', Formatter.percentage)

    # Parallel lists: dict keys in run_config vs. display names.
    direction_keys = ['direction-forward', 'direction-reverse', 'direction-total']
    direction_names = ['Forward', 'Reverse', 'Total']

    def __init__(self, result, sender):
        Summarizer.__init__(self)
        # NOTE(review): the `self.result = ...` / `self.sender = ...`
        # assignments appear to be elided from this view -- `self.result`
        # is read on the next line.  TODO confirm against the full source.
        self.config = self.result['config']
        self.record_header = None
        self.record_data = None
        # if sender is available initialize record
        # NOTE(review): the record-init / summarize calls are elided here.

    def __summarize(self):
        # Report preamble: run metadata and environment description.
        self._put('========== NFVBench Summary ==========')
        self._put('Date:', self.result['date'])
        self._put('NFVBench version', self.result['nfvbench_version'])
        self._put('Openstack Neutron:', {
            'vSwitch': self.result['openstack_spec']['vswitch'],
            'Encapsulation': self.result['openstack_spec']['encaps']
        # NOTE(review): the closing `})` of the dict literal is elided.
        self.__record_header_put('version', self.result['nfvbench_version'])
        self.__record_header_put('vSwitch', self.result['openstack_spec']['vswitch'])
        self.__record_header_put('Encapsulation', self.result['openstack_spec']['encaps'])
        self._put('Benchmarks:')
        with self._create_block():
            self._put('Networks:')
            with self._create_block():
                network_benchmark = self.result['benchmarks']['network']
                self._put('Components:')
                with self._create_block():
                    # NOTE(review): a section-title `_put` call appears to
                    # be elided before this block -- TODO confirm.
                    with self._create_block(False):
                        self._put('Type:', self.config['tor']['type'])
                    self._put('Traffic Generator:')
                    with self._create_block(False):
                        self._put('Profile:', self.config['generator_config']['name'])
                        self._put('Tool:', self.config['generator_config']['tool'])
                if network_benchmark['versions']:
                    self._put('Versions:')
                    with self._create_block():
                        # NOTE(review): `iteritems()` is Python 2 only.
                        for component, version in network_benchmark['versions'].iteritems():
                            self._put(component + ':', version)
                if self.config['ndr_run'] or self.config['pdr_run']:
                    self._put('Measurement Parameters:')
                    with self._create_block(False):
                        if self.config['ndr_run']:
                            self._put('NDR:', self.config['measurement']['NDR'])
                        if self.config['pdr_run']:
                            self._put('PDR:', self.config['measurement']['PDR'])
                self._put('Service chain:')
                # One section per (chain_name, chain_benchmark) pair.
                for result in network_benchmark['service_chain'].iteritems():
                    with self._create_block():
                        self.__chain_summarize(*result)

    def __chain_summarize(self, chain_name, chain_benchmark):
        # One report section per service chain; PVVP chains additionally
        # get their mode appended to the recorded chain name.
        self._put(chain_name + ':')
        if chain_name == ChainType.PVVP:
            self._put('Mode:', chain_benchmark.get('mode'))
            chain_name += "-" + chain_benchmark.get('mode')
        self.__record_header_put('service_chain', chain_name)
        with self._create_block():
            self._put('Traffic:')
            with self._create_block(False):
                self.__traffic_summarize(chain_benchmark['result'])

    def __traffic_summarize(self, traffic_benchmark):
        # Echo the traffic profile, then render per-frame-size results.
        self._put('Profile:', traffic_benchmark['profile'])
        self._put('Bidirectional:', traffic_benchmark['bidirectional'])
        self._put('Flow count:', traffic_benchmark['flow_count'])
        self._put('Service chains count:', traffic_benchmark['service_chain_count'])
        self._put('Compute nodes:', traffic_benchmark['compute_nodes'].keys())
        self.__record_header_put('profile', traffic_benchmark['profile'])
        self.__record_header_put('bidirectional', traffic_benchmark['bidirectional'])
        self.__record_header_put('flow_count', traffic_benchmark['flow_count'])
        self.__record_header_put('sc_count', traffic_benchmark['service_chain_count'])
        self.__record_header_put('compute_nodes', traffic_benchmark['compute_nodes'].keys())
        with self._create_block(False):
            if not self.config['no_traffic']:
                self._put('Run Summary:')
                with self._create_block(False):
                    self._put_table(self.__get_summary_table(traffic_benchmark['result']))
                    # NOTE(review): a warning-presence guard appears to be
                    # elided before this line -- TODO confirm.
                    self._put(traffic_benchmark['result']['warning'])
            for entry in traffic_benchmark['result'].iteritems():
                if 'warning' in entry:
                    # NOTE(review): this branch body (presumably
                    # `continue`) is elided from this view.
                self.__chain_analysis_summarize(*entry)

    def __chain_analysis_summarize(self, frame_size, analysis):
        # Detailed per-frame-size section: search durations, run config
        # table and per-interface chain analysis.
        self._put('L2 frame size:', frame_size)
        if 'analysis_duration_sec' in analysis:
            self._put('Chain analysis duration:',
                      Formatter.float(3)(analysis['analysis_duration_sec']), 'seconds')
            self.__record_data_put(frame_size, {'chain_analysis_duration': Formatter.float(3)(
                analysis['analysis_duration_sec'])})
        if self.config['ndr_run']:
            # NOTE(review): the trailing argument / closing paren of this
            # `_put` call is elided from this view.
            self._put('NDR search duration:', Formatter.float(0)(analysis['ndr']['time_taken_sec']),
            self.__record_data_put(frame_size, {'ndr_search_duration': Formatter.float(0)(
                analysis['ndr']['time_taken_sec'])})
        if self.config['pdr_run']:
            self._put('PDR search duration:', Formatter.float(0)(analysis['pdr']['time_taken_sec']),
            self.__record_data_put(frame_size, {'pdr_search_duration': Formatter.float(0)(
                analysis['pdr']['time_taken_sec'])})
        if not self.config['no_traffic'] and self.config['single_run']:
            self._put('Run Config:')
            with self._create_block(False):
                self._put_table(self.__get_config_table(analysis['run_config'], frame_size))
                if 'warning' in analysis['run_config'] and analysis['run_config']['warning']:
                    self._put(analysis['run_config']['warning'])
        if 'packet_analysis' in analysis:
            self._put('Chain Analysis:')
            with self._create_block(False):
                self._put_table(self.__get_chain_analysis_table(analysis['packet_analysis']))

    def __get_summary_table(self, traffic_result):
        # Build the Run Summary table: single-run layout, or NDR/PDR
        # layout, driven by the config flags.
        if self.config['single_run']:
            summary_table = Table(self.single_run_header)
        # NOTE(review): the `else:` line is elided from this view.
            summary_table = Table(self.ndr_pdr_header)
        if self.config['ndr_run']:
            for frame_size, analysis in traffic_result.iteritems():
                if frame_size == 'warning':
                    # NOTE(review): branch body (presumably `continue`) elided.
                # NOTE(review): the leading row entries (row label, frame
                # size) and the closing `])` are elided from this view.
                summary_table.add_row([
                    analysis['ndr']['rate_bps'],
                    analysis['ndr']['rate_pps'],
                    analysis['ndr']['stats']['overall']['drop_percentage'],
                    analysis['ndr']['stats']['overall']['avg_delay_usec'],
                    analysis['ndr']['stats']['overall']['min_delay_usec'],
                    analysis['ndr']['stats']['overall']['max_delay_usec']
                self.__record_data_put(frame_size, {'ndr': {
                    'rate_bps': analysis['ndr']['rate_bps'],
                    'rate_pps': analysis['ndr']['rate_pps'],
                    # NOTE(review): 'drop_percantage' is misspelled
                    # ('percentage') but is a runtime record key -- kept
                    # as-is since downstream consumers may depend on it.
                    'drop_percantage': analysis['ndr']['stats']['overall']['drop_percentage'],
                    'avg_delay_usec': analysis['ndr']['stats']['overall']['avg_delay_usec'],
                    'min_delay_usec': analysis['ndr']['stats']['overall']['min_delay_usec'],
                    'max_delay_usec': analysis['ndr']['stats']['overall']['max_delay_usec']
        if self.config['pdr_run']:
            for frame_size, analysis in traffic_result.iteritems():
                if frame_size == 'warning':
                    # NOTE(review): branch body (presumably `continue`) elided.
                summary_table.add_row([
                    analysis['pdr']['rate_bps'],
                    analysis['pdr']['rate_pps'],
                    analysis['pdr']['stats']['overall']['drop_percentage'],
                    analysis['pdr']['stats']['overall']['avg_delay_usec'],
                    analysis['pdr']['stats']['overall']['min_delay_usec'],
                    analysis['pdr']['stats']['overall']['max_delay_usec']
                self.__record_data_put(frame_size, {'pdr': {
                    'rate_bps': analysis['pdr']['rate_bps'],
                    'rate_pps': analysis['pdr']['rate_pps'],
                    # NOTE(review): same misspelled runtime key as above.
                    'drop_percantage': analysis['pdr']['stats']['overall']['drop_percentage'],
                    'avg_delay_usec': analysis['pdr']['stats']['overall']['avg_delay_usec'],
                    'min_delay_usec': analysis['pdr']['stats']['overall']['min_delay_usec'],
                    'max_delay_usec': analysis['pdr']['stats']['overall']['max_delay_usec']
        if self.config['single_run']:
            for frame_size, analysis in traffic_result.iteritems():
                summary_table.add_row([
                    analysis['stats']['overall']['drop_rate_percent'],
                    analysis['stats']['overall']['rx']['avg_delay_usec'],
                    analysis['stats']['overall']['rx']['min_delay_usec'],
                    analysis['stats']['overall']['rx']['max_delay_usec']
                self.__record_data_put(frame_size, {'single_run': {
                    'type': 'single_run',
                    'drop_rate_percent': analysis['stats']['overall']['drop_rate_percent'],
                    'avg_delay_usec': analysis['stats']['overall']['rx']['avg_delay_usec'],
                    'min_delay_usec': analysis['stats']['overall']['rx']['min_delay_usec'],
                    'max_delay_usec': analysis['stats']['overall']['rx']['max_delay_usec']

    def __get_config_table(self, run_config, frame_size):
        # Build the Run Config table (one row per traffic direction) and
        # record the per-direction requested/actual/received rates.
        config_table = Table(self.config_header)
        for key, name in zip(self.direction_keys, self.direction_names):
            if key not in run_config:
                # NOTE(review): branch body (presumably `continue`) elided.
            config_table.add_row([
                run_config[key]['orig']['rate_bps'],
                run_config[key]['tx']['rate_bps'],
                run_config[key]['rx']['rate_bps'],
                int(run_config[key]['orig']['rate_pps']),
                int(run_config[key]['tx']['rate_pps']),
                int(run_config[key]['rx']['rate_pps']),
            self.__record_data_put(frame_size, {
                name.lower() + "_orig_rate_bps": int(run_config[key]['orig']['rate_bps']),
                name.lower() + "_tx_rate_bps": int(run_config[key]['tx']['rate_bps']),
                name.lower() + "_rx_rate_bps": int(run_config[key]['rx']['rate_bps']),
                name.lower() + "_orig_rate_pps": int(run_config[key]['orig']['rate_pps']),
                name.lower() + "_tx_rate_pps": int(run_config[key]['tx']['rate_pps']),
                name.lower() + "_rx_rate_pps": int(run_config[key]['rx']['rate_pps']),

    def __get_chain_analysis_table(self, packet_analysis):
        # Build the interface-by-interface packet analysis table.
        chain_analysis_table = Table(self.chain_analysis_header)
        forward_analysis = packet_analysis['direction-forward']
        reverse_analysis = packet_analysis['direction-reverse']
        # In-place reversal before zipping -- presumably so each row pairs
        # the same hop in both directions; TODO confirm.  Note this
        # mutates the caller's list.
        reverse_analysis.reverse()
        for fwd, rev in zip(forward_analysis, reverse_analysis):
            # NOTE(review): the leading row entries (interface/device
            # names, packet counts) and the closing `])` are elided.
            chain_analysis_table.add_row([
                fwd.get('packet_drop_count', None),
                fwd.get('packet_drop_percentage', None),
                rev.get('packet_drop_count', None),
                rev.get('packet_drop_percentage', None),
        return chain_analysis_table

    def __record_header_put(self, key, value):
        # NOTE(review): presumably guarded by an elided `if self.sender:`
        # line -- TODO confirm against the full source.
        self.record_header[key] = value

    def __record_data_put(self, key, data):
        # Merge `data` into the per-frame-size record bucket.
        # NOTE(review): a sender guard line appears to be elided here.
        if key not in self.record_data:
            self.record_data[key] = {}
        self.record_data[key].update(data)

    def __record_send(self):
        # Stamp the shared header, then emit one record per frame size
        # and per run type (single_run / ndr / pdr).
        self.record_header["@timestamp"] = datetime.utcnow().replace(
            tzinfo=pytz.utc).strftime(
            "%Y-%m-%dT%H:%M:%S.%f%z")
        for frame_size in self.record_data:
            # NOTE(review): `data` aliases self.record_header (no copy),
            # so the updates below persist across frame-size iterations --
            # worth confirming this is intentional.
            data = self.record_header
            data['frame_size'] = frame_size
            data.update(self.record_data[frame_size])
            run_specific_data = {}
            if 'single_run' in data:
                run_specific_data['single_run'] = data['single_run']
                del data['single_run']
            # NOTE(review): the `if 'ndr' in data:` / `if 'pdr' in data:`
            # guard lines appear to be elided from this view.
                run_specific_data['ndr'] = data['ndr']
                run_specific_data['ndr']['drop_limit'] = self.config['measurement']['NDR']
                run_specific_data['pdr'] = data['pdr']
                run_specific_data['pdr']['drop_limit'] = self.config['measurement']['PDR']
            for key in run_specific_data:
                data_to_send = data.copy()
                data_to_send.update(run_specific_data[key])
                self.sender.record_send(data_to_send)

    def __record_init(self):
        # init is called after checking for sender
        # NOTE(review): the closing `}` of this dict literal is elided.
        self.record_header = {
            "runlogdate": self.sender.runlogdate,
            "user_label": self.config['user_label']
        self.record_data = {}