# NFVBENCH-163: Add gratuitous ARP in case of L3 router mode
# nfvbench/summarizer.py
1 #!/usr/bin/env python
2 # Copyright 2016 Cisco Systems, Inc.  All rights reserved.
3 #
4 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
5 #    not use this file except in compliance with the License. You may obtain
6 #    a copy of the License at
7 #
8 #         http://www.apache.org/licenses/LICENSE-2.0
9 #
10 #    Unless required by applicable law or agreed to in writing, software
11 #    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
12 #    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13 #    License for the specific language governing permissions and limitations
14 #    under the License.
15 #
16
17 from contextlib import contextmanager
18 from datetime import datetime
19 import math
20
21 import bitmath
22 import pytz
23 from tabulate import tabulate
24
25 def _annotate_chain_stats(chain_stats, nodrop_marker='=>'):
26     """Transform a plain chain stats into an annotated one.
27
28     Example:
29     {
30          0: {'packets': [2000054, 1999996, 1999996, 1999996],
31              'lat_min_usec': 10,
32              'lat_max_usec': 187,
33              'lat_avg_usec': 45},
34          1: {...},
35          'total': {...}
36     }
37     should become:
38     {
39          0: {'packets': [2000054, -58 (-0.034%), '=>', 1999996],
40              'lat_min_usec': 10,
41              'lat_max_usec': 187,
42              'lat_avg_usec': 45},
43          1: {...},
44          'total': {...}
45     }
46
47     In the case of shared net, some columns in packets array can have ''.
48     Some columns cab also be None which means the data is not available.
49     """
50     for stats in list(chain_stats.values()):
51         packets = stats['packets']
52         count = len(packets)
53         if count > 1:
54             # keep the first counter
55             annotated_packets = [packets[0]]
56             # modify all remaining counters
57             prev_count = packets[0]
58             for index in range(1, count):
59                 cur_count = packets[index]
60                 if cur_count == '':
61                     # an empty string indicates an unknown counter for a shared interface
62                     # do not annotate those
63                     annotated_value = ''
64                 elif cur_count is None:
65                     # Not available
66                     annotated_value = 'n/a'
67                 else:
68                     drop = cur_count - prev_count
69                     if drop:
70                         dr = (drop * 100.0) / prev_count if prev_count else 0
71                         annotated_value = '{:+,} ({:+.4f}%)'.format(drop, dr)
72                     else:
73                         # no drop
74                         # if last column we display the value
75                         annotated_value = cur_count if index == count - 1 else nodrop_marker
76                     prev_count = cur_count
77                 annotated_packets.append(annotated_value)
78
79             stats['packets'] = annotated_packets
80
class Formatter(object):
    """Collection of string formatter methods."""

    @staticmethod
    def fixed(data):
        """Return the value unchanged."""
        return data

    @staticmethod
    def int(data):
        """Render an integer with thousands separators."""
        return '{:,}'.format(data)

    @staticmethod
    def float(decimal):
        """Return a formatter rendering floats with `decimal` digits."""
        def _render(data):
            return '%.{}f'.format(decimal) % (data)
        return _render

    @staticmethod
    def standard(data):
        """Dispatch to int/float/fixed formatting based on the value type."""
        if isinstance(data, int):
            return Formatter.int(data)
        if isinstance(data, float):
            return Formatter.float(4)(data)
        return Formatter.fixed(data)

    @staticmethod
    def suffix(suffix_str):
        """Return a formatter appending suffix_str to the standard rendering."""
        def _render(data):
            return Formatter.standard(data) + suffix_str
        return _render

    @staticmethod
    def bits(data):
        """Render a bit count with the best SI prefix (e.g. '1.0000 Gbps')."""
        # By default, `best_prefix` returns a value in byte format, this hack
        # (multiply by 8.0) will convert it into bit format.
        scaled = 8.0 * bitmath.Bit(float(data))
        scaled = scaled.best_prefix(bitmath.SI)
        byte_to_bit_classes = {
            'kB': bitmath.kb,
            'MB': bitmath.Mb,
            'GB': bitmath.Gb,
            'TB': bitmath.Tb,
            'PB': bitmath.Pb,
            'EB': bitmath.Eb,
            'ZB': bitmath.Zb,
            'YB': bitmath.Yb,
        }
        bps = byte_to_bit_classes.get(scaled.unit, bitmath.Bit).from_other(scaled) / 8.0
        if bps.unit == 'Bit':
            return bps.format("{value:.4f} bps")
        return bps.format("{value:.4f} {unit}ps")

    @staticmethod
    def percentage(data):
        """Render a percentage; '' for None and '-' for NaN."""
        if data is None:
            return ''
        if math.isnan(data):
            return '-'
        rendered = Formatter.float(4)(data)
        return Formatter.suffix('%')(rendered)
136
137
class Table(object):
    """ASCII readable table class."""

    def __init__(self, header):
        """Build a table from a header: a list of (title, formatter) pairs."""
        titles, self.formatters = list(zip(*header))
        self.data = [titles]
        self.columns = len(titles)

    def add_row(self, row):
        """Format each cell with its column formatter and append the row."""
        assert self.columns == len(row)
        self.data.append([fmt(cell) for cell, fmt in zip(row, self.formatters)])

    def get_string(self, indent=0):
        """Render the table as an ASCII grid, indenting continuation lines."""
        rendered = tabulate(self.data,
                            headers='firstrow',
                            tablefmt='grid',
                            stralign='center',
                            floatfmt='.2f')
        return rendered.replace('\n', '\n' + ' ' * indent)
161
162
class Summarizer(object):
    """Generic summarizer class."""

    # number of spaces added per nesting level
    indent_per_level = 2

    def __init__(self):
        """Start with no indentation and an empty output buffer."""
        self.indent_size = 0
        self.marker_stack = [False]
        self.str = ''

    def __indent(self, marker):
        # push one indentation level; marker selects the '> ' bullet style
        self.indent_size += self.indent_per_level
        self.marker_stack.append(marker)

    def __unindent(self):
        # pop one indentation level (must be balanced with __indent)
        assert self.indent_size >= self.indent_per_level
        self.indent_size -= self.indent_per_level
        self.marker_stack.pop()

    def __get_indent_string(self):
        # a marked block replaces its last two spaces with a '> ' bullet
        pad = ' ' * self.indent_size
        return pad[:-2] + '> ' if self.marker_stack[-1] else pad

    def _put(self, *args):
        """Append one indented line; a trailing dict is expanded as a sub-block."""
        self.str += self.__get_indent_string()
        if args and isinstance(args[-1], dict):
            self.str += ' '.join(map(str, args[:-1])) + '\n'
            self._put_dict(args[-1])
        else:
            self.str += ' '.join(map(str, args)) + '\n'

    def _put_dict(self, data):
        """Append a dict as an indented 'key: value' block, recursing on dicts."""
        with self._create_block(False):
            for key, value in list(data.items()):
                if isinstance(value, dict):
                    self._put(key + ':')
                    self._put_dict(value)
                else:
                    self._put(key + ':', value)

    def _put_table(self, table):
        """Append a Table rendered at the current indentation."""
        self.str += self.__get_indent_string()
        self.str += table.get_string(self.indent_size) + '\n'

    def __str__(self):
        return self.str

    @contextmanager
    def _create_block(self, marker=True):
        """Context manager that indents everything emitted inside it."""
        self.__indent(marker)
        yield
        self.__unindent()
217
218
class NFVBenchSummarizer(Summarizer):
    """Summarize nfvbench json result."""

    # Class-level defaults. __init__ takes per-instance copies before any
    # mutation: inserting into these shared lists directly would leak the
    # extra gratuitous-ARP column into every other instance in the process.
    direction_keys = ['direction-forward', 'direction-reverse', 'direction-total']
    direction_names = ['Forward', 'Reverse', 'Total']

    def __init__(self, result, sender):
        """Create a summarizer instance.

        result: nfvbench json result dict (must contain a 'config' entry)
        sender: optional sender object used to record/publish the results;
                when None, nothing is recorded
        """
        Summarizer.__init__(self)
        self.result = result
        self.config = self.result['config']
        self.record_header = None
        self.record_data = None
        self.sender = sender

        # table headers: list of (column title, formatter) pairs
        self.ndr_pdr_header = [
            ('-', Formatter.fixed),
            ('L2 Frame Size', Formatter.standard),
            ('Rate (fwd+rev)', Formatter.bits),
            ('Rate (fwd+rev)', Formatter.suffix(' pps')),
            ('Avg Drop Rate', Formatter.suffix('%')),
            ('Avg Latency (usec)', Formatter.standard),
            ('Min Latency (usec)', Formatter.standard),
            ('Max Latency (usec)', Formatter.standard)
        ]

        self.single_run_header = [
            ('L2 Frame Size', Formatter.standard),
            ('Drop Rate', Formatter.suffix('%')),
            ('Avg Latency (usec)', Formatter.standard),
            ('Min Latency (usec)', Formatter.standard),
            ('Max Latency (usec)', Formatter.standard)
        ]

        self.config_header = [
            ('Direction', Formatter.standard),
            ('Requested TX Rate (bps)', Formatter.bits),
            ('Actual TX Rate (bps)', Formatter.bits),
            ('RX Rate (bps)', Formatter.bits),
            ('Requested TX Rate (pps)', Formatter.suffix(' pps')),
            ('Actual TX Rate (pps)', Formatter.suffix(' pps')),
            ('RX Rate (pps)', Formatter.suffix(' pps'))
        ]

        # add percentiles headers if hdrh enabled
        if not self.config.disable_hdrh:
            for percentile in self.config.lat_percentiles:
                # 'append' expects a single parameter => double parentheses
                self.ndr_pdr_header.append((str(percentile) + ' %ile lat.', Formatter.standard))
                self.single_run_header.append((str(percentile) + ' %ile lat.', Formatter.standard))

        # copy the class-level direction lists before mutating them so that
        # repeated instantiations do not accumulate duplicate entries in the
        # shared class attributes (bug fix)
        self.direction_keys = list(NFVBenchSummarizer.direction_keys)
        self.direction_names = list(NFVBenchSummarizer.direction_names)
        if self.config.periodic_gratuitous_arp:
            self.direction_keys.insert(2, 'garp-direction-total')
            self.direction_names.insert(2, 'Gratuitous ARP')

        # if sender is available initialize record
        if self.sender:
            self.__record_init()
        self.__summarize()

    def __get_openstack_spec(self, prop):
        """Return the named openstack_spec property or '' if missing."""
        try:
            return self.result['openstack_spec'][prop]
        except KeyError:
            return ''

    def __summarize(self):
        """Build the full text summary from the result dict."""
        self._put()
        self._put('========== NFVBench Summary ==========')
        self._put('Date:', self.result['date'])
        self._put('NFVBench version', self.result['nfvbench_version'])
        self._put('Openstack Neutron:', {
            'vSwitch': self.__get_openstack_spec('vswitch'),
            'Encapsulation': self.__get_openstack_spec('encaps')
        })
        self.__record_header_put('version', self.result['nfvbench_version'])
        self.__record_header_put('vSwitch', self.__get_openstack_spec('vswitch'))
        self.__record_header_put('Encapsulation', self.__get_openstack_spec('encaps'))
        self._put('Benchmarks:')
        with self._create_block():
            self._put('Networks:')
            with self._create_block():
                network_benchmark = self.result['benchmarks']['network']

                self._put('Components:')
                with self._create_block():
                    self._put('Traffic Generator:')
                    with self._create_block(False):
                        self._put('Profile:', self.config['tg-name'])
                        self._put('Tool:', self.config['tg-tool'])
                    if network_benchmark['versions']:
                        self._put('Versions:')
                        with self._create_block():
                            for component, version in list(network_benchmark['versions'].items()):
                                self._put(component + ':', version)

                if self.config['ndr_run'] or self.config['pdr_run']:
                    self._put('Measurement Parameters:')
                    with self._create_block(False):
                        if self.config['ndr_run']:
                            self._put('NDR:', self.config['measurement']['NDR'])
                        if self.config['pdr_run']:
                            self._put('PDR:', self.config['measurement']['PDR'])
                self._put('Service chain:')
                for result in list(network_benchmark['service_chain'].items()):
                    with self._create_block():
                        self.__chain_summarize(*result)

    def __chain_summarize(self, chain_name, chain_benchmark):
        """Summarize one service chain benchmark entry."""
        self._put(chain_name + ':')
        self.__record_header_put('service_chain', chain_name)
        with self._create_block():
            self._put('Traffic:')
            with self._create_block(False):
                self.__traffic_summarize(chain_benchmark['result'])

    def __traffic_summarize(self, traffic_benchmark):
        """Summarize the traffic section of a chain benchmark and send records."""
        self._put('Profile:', traffic_benchmark['profile'])
        self._put('Bidirectional:', traffic_benchmark['bidirectional'])
        self._put('Flow count:', traffic_benchmark['flow_count'])
        self._put('Service chains count:', traffic_benchmark['service_chain_count'])
        self._put('Compute nodes:', list(traffic_benchmark['compute_nodes'].keys()))

        self.__record_header_put('profile', traffic_benchmark['profile'])
        self.__record_header_put('bidirectional', traffic_benchmark['bidirectional'])
        self.__record_header_put('flow_count', traffic_benchmark['flow_count'])
        self.__record_header_put('sc_count', traffic_benchmark['service_chain_count'])
        self.__record_header_put('compute_nodes', list(traffic_benchmark['compute_nodes'].keys()))
        with self._create_block(False):
            self._put()
            if not self.config['no_traffic']:
                self._put('Run Summary:')
                self._put()
                with self._create_block(False):
                    self._put_table(self.__get_summary_table(traffic_benchmark['result']))
                    try:
                        self._put()
                        self._put(traffic_benchmark['result']['warning'])
                    except KeyError:
                        pass

            for entry in list(traffic_benchmark['result'].items()):
                # entry is a (key, value) tuple; skip the 'warning' key
                if 'warning' in entry:
                    continue
                self.__chain_analysis_summarize(*entry)
            self.__record_send()

    def __chain_analysis_summarize(self, frame_size, analysis):
        """Summarize the per-frame-size analysis (search times, config, stats)."""
        self._put()
        self._put('L2 frame size:', frame_size)
        if self.config['ndr_run']:
            self._put('NDR search duration:', Formatter.float(0)(analysis['ndr']['time_taken_sec']),
                      'seconds')
            self.__record_data_put(frame_size, {'ndr_search_duration': Formatter.float(0)(
                analysis['ndr']['time_taken_sec'])})
        if self.config['pdr_run']:
            self._put('PDR search duration:', Formatter.float(0)(analysis['pdr']['time_taken_sec']),
                      'seconds')
            self.__record_data_put(frame_size, {'pdr_search_duration': Formatter.float(0)(
                analysis['pdr']['time_taken_sec'])})
        self._put()

        if not self.config['no_traffic'] and self.config['single_run']:
            self._put('Run Config:')
            self._put()
            with self._create_block(False):
                self._put_table(self.__get_config_table(analysis['run_config'], frame_size))
                if 'warning' in analysis['run_config'] and analysis['run_config']['warning']:
                    self._put()
                    self._put(analysis['run_config']['warning'])
                self._put()

        if 'packet_path_stats' in analysis:
            for direction in ['Forward', 'Reverse']:
                self._put(direction + ' Chain Packet Counters and Latency:')
                self._put()
                with self._create_block(False):
                    self._put_table(self._get_chain_table(analysis['packet_path_stats'][direction]))
                    self._put()

    def __get_summary_table(self, traffic_result):
        """Build the run summary Table (NDR/PDR or single run) and record data."""
        if self.config['single_run']:
            summary_table = Table(self.single_run_header)
        else:
            summary_table = Table(self.ndr_pdr_header)

        if self.config['ndr_run']:
            for frame_size, analysis in list(traffic_result.items()):
                if frame_size == 'warning':
                    continue

                row_data = [
                    'NDR',
                    frame_size,
                    analysis['ndr']['rate_bps'],
                    analysis['ndr']['rate_pps'],
                    analysis['ndr']['stats']['overall']['drop_percentage'],
                    analysis['ndr']['stats']['overall']['avg_delay_usec'],
                    analysis['ndr']['stats']['overall']['min_delay_usec'],
                    analysis['ndr']['stats']['overall']['max_delay_usec']
                ]
                if not self.config.disable_hdrh:
                    self.extract_hdrh_percentiles(
                        analysis['ndr']['stats']['overall']['lat_percentile'], row_data)
                summary_table.add_row(row_data)

                ndr_data = {
                    'type': 'NDR',
                    'rate_bps': analysis['ndr']['rate_bps'],
                    'rate_pps': analysis['ndr']['rate_pps'],
                    'offered_tx_rate_bps': analysis['ndr']['stats']['offered_tx_rate_bps'],
                    'theoretical_tx_rate_pps': analysis['ndr']['stats']['theoretical_tx_rate_pps'],
                    'theoretical_tx_rate_bps': analysis['ndr']['stats']['theoretical_tx_rate_bps'],
                    'drop_percentage': analysis['ndr']['stats']['overall']['drop_percentage'],
                    'avg_delay_usec': analysis['ndr']['stats']['overall']['avg_delay_usec'],
                    'min_delay_usec': analysis['ndr']['stats']['overall']['min_delay_usec'],
                    'max_delay_usec': analysis['ndr']['stats']['overall']['max_delay_usec']
                }
                if not self.config.disable_hdrh:
                    self.extract_hdrh_percentiles(
                        analysis['ndr']['stats']['overall']['lat_percentile'], ndr_data, True)
                self.__record_data_put(frame_size, {'ndr': ndr_data})
        if self.config['pdr_run']:
            for frame_size, analysis in list(traffic_result.items()):
                if frame_size == 'warning':
                    continue

                row_data = [
                    'PDR',
                    frame_size,
                    analysis['pdr']['rate_bps'],
                    analysis['pdr']['rate_pps'],
                    analysis['pdr']['stats']['overall']['drop_percentage'],
                    analysis['pdr']['stats']['overall']['avg_delay_usec'],
                    analysis['pdr']['stats']['overall']['min_delay_usec'],
                    analysis['pdr']['stats']['overall']['max_delay_usec']
                ]
                if not self.config.disable_hdrh:
                    self.extract_hdrh_percentiles(
                        analysis['pdr']['stats']['overall']['lat_percentile'], row_data)
                summary_table.add_row(row_data)

                pdr_data = {
                    'type': 'PDR',
                    'rate_bps': analysis['pdr']['rate_bps'],
                    'rate_pps': analysis['pdr']['rate_pps'],
                    'offered_tx_rate_bps': analysis['pdr']['stats']['offered_tx_rate_bps'],
                    'theoretical_tx_rate_pps': analysis['pdr']['stats']['theoretical_tx_rate_pps'],
                    'theoretical_tx_rate_bps': analysis['pdr']['stats']['theoretical_tx_rate_bps'],
                    'drop_percentage': analysis['pdr']['stats']['overall']['drop_percentage'],
                    'avg_delay_usec': analysis['pdr']['stats']['overall']['avg_delay_usec'],
                    'min_delay_usec': analysis['pdr']['stats']['overall']['min_delay_usec'],
                    'max_delay_usec': analysis['pdr']['stats']['overall']['max_delay_usec']
                }
                if not self.config.disable_hdrh:
                    self.extract_hdrh_percentiles(
                        analysis['pdr']['stats']['overall']['lat_percentile'], pdr_data, True)
                self.__record_data_put(frame_size, {'pdr': pdr_data})
        if self.config['single_run']:
            for frame_size, analysis in list(traffic_result.items()):
                row_data = [
                    frame_size,
                    analysis['stats']['overall']['drop_rate_percent'],
                    analysis['stats']['overall']['rx']['avg_delay_usec'],
                    analysis['stats']['overall']['rx']['min_delay_usec'],
                    analysis['stats']['overall']['rx']['max_delay_usec']
                ]
                if not self.config.disable_hdrh:
                    self.extract_hdrh_percentiles(
                        analysis['stats']['overall']['rx']['lat_percentile'], row_data)
                summary_table.add_row(row_data)

                single_run_data = {
                    'type': 'single_run',
                    'offered_tx_rate_bps': analysis['stats']['offered_tx_rate_bps'],
                    'theoretical_tx_rate_pps': analysis['stats']['theoretical_tx_rate_pps'],
                    'theoretical_tx_rate_bps': analysis['stats']['theoretical_tx_rate_bps'],
                    'drop_rate_percent': analysis['stats']['overall']['drop_rate_percent'],
                    'avg_delay_usec': analysis['stats']['overall']['rx']['avg_delay_usec'],
                    'min_delay_usec': analysis['stats']['overall']['rx']['min_delay_usec'],
                    'max_delay_usec': analysis['stats']['overall']['rx']['max_delay_usec']
                }
                if not self.config.disable_hdrh:
                    self.extract_hdrh_percentiles(
                        analysis['stats']['overall']['rx']['lat_percentile'], single_run_data, True)
                self.__record_data_put(frame_size, {'single_run': single_run_data})
        return summary_table

    def extract_hdrh_percentiles(self, lat_percentile, data, add_key=False):
        """Append the configured latency percentiles to data.

        lat_percentile: mapping of percentile -> latency value (may be None)
        data: list (add_key=False, values appended) or dict (add_key=True,
              'lat_percentile_<p>' keys added)
        Missing/unsubscriptable percentile data is rendered as "n/a".
        """
        if add_key:
            data['lat_percentile'] = {}
        for percentile in self.config.lat_percentiles:
            if add_key:
                try:
                    data['lat_percentile_' + str(percentile)] = lat_percentile[percentile]
                except TypeError:
                    data['lat_percentile_' + str(percentile)] = "n/a"
            else:
                try:
                    data.append(lat_percentile[percentile])
                except TypeError:
                    data.append("n/a")

    def __get_config_table(self, run_config, frame_size):
        """Build the per-direction rate Table and record the rates."""
        config_table = Table(self.config_header)
        for key, name in zip(self.direction_keys, self.direction_names):
            if key not in run_config:
                continue
            config_table.add_row([
                name,
                run_config[key]['orig']['rate_bps'],
                run_config[key]['tx']['rate_bps'],
                run_config[key]['rx']['rate_bps'],
                int(run_config[key]['orig']['rate_pps']),
                int(run_config[key]['tx']['rate_pps']),
                int(run_config[key]['rx']['rate_pps']),
            ])
            self.__record_data_put(frame_size, {
                name.lower() + "_orig_rate_bps": int(run_config[key]['orig']['rate_bps']),
                name.lower() + "_tx_rate_bps": int(run_config[key]['tx']['rate_bps']),
                name.lower() + "_rx_rate_bps": int(run_config[key]['rx']['rate_bps']),
                name.lower() + "_orig_rate_pps": int(run_config[key]['orig']['rate_pps']),
                name.lower() + "_tx_rate_pps": int(run_config[key]['tx']['rate_pps']),
                name.lower() + "_rx_rate_pps": int(run_config[key]['rx']['rate_pps']),

            })
        return config_table

    def _get_chain_table(self, chain_stats):
        """Retrieve the table for a direction.

        chain_stats: {
             'interfaces': ['Port0', 'drop %', 'vhost0', 'Port1'],
             'chains': {
                 '0': {'packets': [2000054, '-0.023%', 1999996, 1999996],
                     'lat_min_usec': 10,
                     'lat_max_usec': 187,
                     'lat_avg_usec': 45},
                 '1': {...},
                 'total': {...}
             }
        }
        """
        chains = chain_stats['chains']
        _annotate_chain_stats(chains)
        header = [('Chain', Formatter.standard)] + \
                 [(ifname, Formatter.standard) for ifname in chain_stats['interfaces']]
        # add latency columns if available Avg, Min, Max and percentiles
        lat_keys = []
        lat_map = {'lat_avg_usec': 'Avg lat.',
                   'lat_min_usec': 'Min lat.',
                   'lat_max_usec': 'Max lat.'}
        if 'lat_avg_usec' in chains['0']:
            lat_keys = ['lat_avg_usec', 'lat_min_usec', 'lat_max_usec']

            if not self.config.disable_hdrh:
                lat_keys.append('lat_percentile')
                for percentile in self.config.lat_percentiles:
                    lat_map['lat_' + str(percentile) + '_percentile'] = \
                        str(percentile) + ' %ile lat.'

            for key in lat_map:
                # 'append' expects a single parameter => double parentheses
                header.append((lat_map[key], Formatter.standard))

        table = Table(header)
        for chain in sorted(list(chains.keys()), key=str):
            row = [chain] + chains[chain]['packets']
            for lat_key in lat_keys:
                if lat_key != 'lat_percentile':
                    # NOTE(review): a latency of 0 is falsy and rendered as
                    # 'n/a' here; confirm whether 0 should be displayed
                    if chains[chain].get(lat_key, None):
                        row.append(Formatter.standard(chains[chain][lat_key]))
                    else:
                        row.append('n/a')
                else:
                    if not self.config.disable_hdrh:
                        if chains[chain].get(lat_key, None):
                            for percentile in chains[chain][lat_key]:
                                row.append(Formatter.standard(
                                    chains[chain][lat_key][percentile]))
                        else:
                            for _ in self.config.lat_percentiles:
                                row.append('n/a')
            table.add_row(row)
        return table

    def __record_header_put(self, key, value):
        """Store a header field for the record sender (no-op without sender)."""
        if self.sender:
            self.record_header[key] = value

    def __record_data_put(self, key, data):
        """Merge data into the per-frame-size record (no-op without sender)."""
        if self.sender:
            if key not in self.record_data:
                self.record_data[key] = {}
            self.record_data[key].update(data)

    def __record_send(self):
        """Send one record per frame size and run type, then reset the record."""
        if self.sender:
            self.record_header["@timestamp"] = datetime.utcnow().replace(
                tzinfo=pytz.utc).strftime("%Y-%m-%dT%H:%M:%S.%f%z")
            for frame_size in self.record_data:
                # NOTE(review): 'data' aliases record_header, so fields merged
                # below accumulate in the header across iterations — preserved
                # as-is; confirm whether this sharing is intended
                data = self.record_header
                data['frame_size'] = frame_size
                data.update(self.record_data[frame_size])
                run_specific_data = {}
                if 'single_run' in data:
                    run_specific_data['single_run'] = data['single_run']
                    del data['single_run']
                if 'ndr' in data:
                    run_specific_data['ndr'] = data['ndr']
                    run_specific_data['ndr']['drop_limit'] = self.config['measurement']['NDR']
                    del data['ndr']
                if 'pdr' in data:
                    run_specific_data['pdr'] = data['pdr']
                    run_specific_data['pdr']['drop_limit'] = self.config['measurement']['PDR']
                    del data['pdr']
                for key in run_specific_data:
                    data_to_send = data.copy()
                    data_to_send.update(run_specific_data[key])
                    self.sender.record_send(data_to_send)
            self.__record_init()

    def __record_init(self):
        """Reset the record header and data; init is called after checking for sender."""
        self.record_header = {
            "runlogdate": self.sender.runlogdate,
            "user_label": self.config['user_label']
        }
        self.record_data = {}