# NFVBENCH-193: Correct a critical bug introduced in commit NFVBENCH-192
# [nfvbench.git] / nfvbench / summarizer.py
1 #!/usr/bin/env python
2 # Copyright 2016 Cisco Systems, Inc.  All rights reserved.
3 #
4 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
5 #    not use this file except in compliance with the License. You may obtain
6 #    a copy of the License at
7 #
8 #         http://www.apache.org/licenses/LICENSE-2.0
9 #
10 #    Unless required by applicable law or agreed to in writing, software
11 #    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
12 #    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13 #    License for the specific language governing permissions and limitations
14 #    under the License.
15 #
16
17 from contextlib import contextmanager
18 from datetime import datetime
19 import math
20
21 import bitmath
22 import pytz
23 from tabulate import tabulate
24
25 def _annotate_chain_stats(chain_stats, nodrop_marker='=>'):
26     """Transform a plain chain stats into an annotated one.
27
28     Example:
29     {
30          0: {'packets': [2000054, 1999996, 1999996, 1999996],
31              'lat_min_usec': 10,
32              'lat_max_usec': 187,
33              'lat_avg_usec': 45},
34          1: {...},
35          'total': {...}
36     }
37     should become:
38     {
39          0: {'packets': [2000054, -58 (-0.034%), '=>', 1999996],
40              'lat_min_usec': 10,
41              'lat_max_usec': 187,
42              'lat_avg_usec': 45},
43          1: {...},
44          'total': {...}
45     }
46
47     In the case of shared net, some columns in packets array can have ''.
48     Some columns cab also be None which means the data is not available.
49     """
50     for stats in list(chain_stats.values()):
51         packets = stats['packets']
52         count = len(packets)
53         if count > 1:
54             # keep the first counter
55             annotated_packets = [packets[0]]
56             # modify all remaining counters
57             prev_count = packets[0]
58             for index in range(1, count):
59                 cur_count = packets[index]
60                 if cur_count == '':
61                     # an empty string indicates an unknown counter for a shared interface
62                     # do not annotate those
63                     annotated_value = ''
64                 elif cur_count is None:
65                     # Not available
66                     annotated_value = 'n/a'
67                 else:
68                     drop = cur_count - prev_count
69                     if drop:
70                         dr = (drop * 100.0) / prev_count if prev_count else 0
71                         annotated_value = '{:+,} ({:+.4f}%)'.format(drop, dr)
72                     else:
73                         # no drop
74                         # if last column we display the value
75                         annotated_value = cur_count if index == count - 1 else nodrop_marker
76                     prev_count = cur_count
77                 annotated_packets.append(annotated_value)
78
79             stats['packets'] = annotated_packets
80
class Formatter(object):
    """Collection of string formatter methods."""

    @staticmethod
    def fixed(data):
        """Return the value unchanged."""
        return data

    @staticmethod
    def int(data):
        """Format an integer with thousands separators."""
        return format(data, ',')

    @staticmethod
    def float(decimal):
        """Return a formatter rendering floats with `decimal` decimals."""
        fmt = '%.{}f'.format(decimal)
        return lambda data: fmt % data

    @staticmethod
    def standard(data):
        """Dispatch to the int/float/fixed formatter based on the value type."""
        if isinstance(data, int):
            return Formatter.int(data)
        if isinstance(data, float):
            return Formatter.float(4)(data)
        return Formatter.fixed(data)

    @staticmethod
    def suffix(suffix_str):
        """Return a formatter that appends `suffix_str` to the standard format."""
        return lambda data: Formatter.standard(data) + suffix_str

    @staticmethod
    def bits(data):
        """Render a bit count as a human readable SI rate string."""
        # By default, `best_prefix` returns a value in byte format; this hack
        # (multiply by 8.0) will convert it into bit format.
        bit = 8.0 * bitmath.Bit(float(data))
        bit = bit.best_prefix(bitmath.SI)
        byte_to_bit_classes = {
            'kB': bitmath.kb,
            'MB': bitmath.Mb,
            'GB': bitmath.Gb,
            'TB': bitmath.Tb,
            'PB': bitmath.Pb,
            'EB': bitmath.Eb,
            'ZB': bitmath.Zb,
            'YB': bitmath.Yb,
        }
        bps = byte_to_bit_classes.get(bit.unit, bitmath.Bit).from_other(bit) / 8.0
        if bps.unit != 'Bit':
            return bps.format("{value:.4f} {unit}ps")
        return bps.format("{value:.4f} bps")

    @staticmethod
    def percentage(data):
        """Format a percentage value; '' for None and '-' for NaN."""
        if data is None:
            return ''
        if math.isnan(data):
            return '-'
        return Formatter.suffix('%')(Formatter.float(4)(data))
136
137
class Table(object):
    """ASCII readable table class."""

    def __init__(self, header):
        """Build a table from a header: a list of (column name, formatter) pairs."""
        names, self.formatters = zip(*header)
        # first row of data is always the header row
        self.data = [names]
        self.columns = len(names)

    def add_row(self, row):
        """Append a row after formatting each entry with its column formatter."""
        assert self.columns == len(row)
        self.data.append([fmt(entry) for entry, fmt in zip(row, self.formatters)])

    def get_string(self, indent=0):
        """Render the table as a grid string, indenting continuation lines."""
        pad = ' ' * indent
        rendered = tabulate(self.data,
                            headers='firstrow',
                            tablefmt='grid',
                            stralign='center',
                            floatfmt='.2f')
        return rendered.replace('\n', '\n' + pad)
161
162
class Summarizer(object):
    """Generic summarizer class.

    Accumulates text lines into an internal buffer with nested indentation
    levels; subclasses drive it through _put/_put_table/_create_block.
    """

    # number of spaces added per indentation level
    indent_per_level = 2

    def __init__(self):
        """Start with no indentation and an empty output buffer."""
        self.indent_size = 0
        self.marker_stack = [False]
        self.str = ''

    def __indent(self, marker):
        # go one level deeper, remembering whether this level shows a marker
        self.marker_stack.append(marker)
        self.indent_size += self.indent_per_level

    def __unindent(self):
        assert self.indent_size >= self.indent_per_level
        self.marker_stack.pop()
        self.indent_size -= self.indent_per_level

    def __get_indent_string(self):
        # when the current level is marked, the last two spaces become '> '
        if self.marker_stack[-1]:
            return ' ' * (self.indent_size - 2) + '> '
        return ' ' * self.indent_size

    def _put(self, *args):
        """Append one line made of args; a trailing dict is expanded as a block."""
        self.str += self.__get_indent_string()
        if args and isinstance(args[-1], dict):
            self.str += ' '.join(map(str, args[:-1])) + '\n'
            self._put_dict(args[-1])
        else:
            self.str += ' '.join(map(str, args)) + '\n'

    def _put_dict(self, data):
        """Append a dict as an indented key/value block, recursing into dicts."""
        with self._create_block(False):
            for key, value in list(data.items()):
                if isinstance(value, dict):
                    self._put(key + ':')
                    self._put_dict(value)
                else:
                    self._put(key + ':', value)

    def _put_table(self, table):
        """Append a pre-built Table at the current indentation."""
        self.str += self.__get_indent_string()
        self.str += table.get_string(self.indent_size) + '\n'

    def __str__(self):
        return self.str

    @contextmanager
    def _create_block(self, marker=True):
        """Open one indentation level for the duration of the with-block."""
        self.__indent(marker)
        yield
        self.__unindent()
217
218
class NFVBenchSummarizer(Summarizer):
    """Summarize nfvbench json result."""

    ndr_pdr_header = [
        ('-', Formatter.fixed),
        ('L2 Frame Size', Formatter.standard),
        ('Rate (fwd+rev)', Formatter.bits),
        ('Rate (fwd+rev)', Formatter.suffix(' pps')),
        ('Avg Drop Rate', Formatter.suffix('%')),
        ('Avg Latency (usec)', Formatter.standard),
        ('Min Latency (usec)', Formatter.standard),
        ('Max Latency (usec)', Formatter.standard)
    ]

    single_run_header = [
        ('L2 Frame Size', Formatter.standard),
        ('Drop Rate', Formatter.suffix('%')),
        ('Avg Latency (usec)', Formatter.standard),
        ('Min Latency (usec)', Formatter.standard),
        ('Max Latency (usec)', Formatter.standard)
    ]

    config_header = [
        ('Direction', Formatter.standard),
        ('Requested TX Rate (bps)', Formatter.bits),
        ('Actual TX Rate (bps)', Formatter.bits),
        ('RX Rate (bps)', Formatter.bits),
        ('Requested TX Rate (pps)', Formatter.suffix(' pps')),
        ('Actual TX Rate (pps)', Formatter.suffix(' pps')),
        ('RX Rate (pps)', Formatter.suffix(' pps'))
    ]

    direction_keys = ['direction-forward', 'direction-reverse', 'direction-total']
    direction_names = ['Forward', 'Reverse', 'Total']

    def __init__(self, result, sender):
        """Create a summarizer instance.

        result: nfvbench result dict to summarize (must contain 'config')
        sender: optional record sender; when set, summary records are
                accumulated and sent to it
        """
        Summarizer.__init__(self)
        self.result = result
        self.config = self.result['config']
        self.record_header = None
        self.record_data = None
        self.sender = sender

        # add percentiles headers if hdrh enabled
        if not self.config.disable_hdrh:
            # BUGFIX: rebind to per-instance copies before appending; appending
            # directly to the class-level lists would accumulate duplicate
            # percentile columns each time a new summarizer is created
            self.ndr_pdr_header = list(self.ndr_pdr_header)
            self.single_run_header = list(self.single_run_header)
            for percentile in self.config.lat_percentiles:
                # 'append' expects a single parameter => double parentheses
                self.ndr_pdr_header.append((str(percentile) + ' %ile lat.', Formatter.standard))
                self.single_run_header.append((str(percentile) + ' %ile lat.', Formatter.standard))
        # if sender is available initialize record
        if self.sender:
            self.__record_init()
        self.__summarize()

    def __get_openstack_spec(self, spec_property):
        """Return one property of the openstack spec, or '' when not available."""
        try:
            return self.result['openstack_spec'][spec_property]
        except KeyError:
            return ''

    def __summarize(self):
        """Build the full text summary from the result dict."""
        self._put()
        self._put('========== NFVBench Summary ==========')
        self._put('Date:', self.result['date'])
        self._put('NFVBench version', self.result['nfvbench_version'])
        self._put('Openstack Neutron:', {
            'vSwitch': self.__get_openstack_spec('vswitch'),
            'Encapsulation': self.__get_openstack_spec('encaps')
        })
        self.__record_header_put('version', self.result['nfvbench_version'])
        self.__record_header_put('vSwitch', self.__get_openstack_spec('vswitch'))
        self.__record_header_put('Encapsulation', self.__get_openstack_spec('encaps'))
        self._put('Benchmarks:')
        with self._create_block():
            self._put('Networks:')
            with self._create_block():
                network_benchmark = self.result['benchmarks']['network']

                self._put('Components:')
                with self._create_block():
                    self._put('Traffic Generator:')
                    with self._create_block(False):
                        self._put('Profile:', self.config['tg-name'])
                        self._put('Tool:', self.config['tg-tool'])
                    if network_benchmark['versions']:
                        self._put('Versions:')
                        with self._create_block():
                            for component, version in list(network_benchmark['versions'].items()):
                                self._put(component + ':', version)

                if self.config['ndr_run'] or self.config['pdr_run']:
                    self._put('Measurement Parameters:')
                    with self._create_block(False):
                        if self.config['ndr_run']:
                            self._put('NDR:', self.config['measurement']['NDR'])
                        if self.config['pdr_run']:
                            self._put('PDR:', self.config['measurement']['PDR'])
                self._put('Service chain:')
                for result in list(network_benchmark['service_chain'].items()):
                    with self._create_block():
                        self.__chain_summarize(*result)

    def __chain_summarize(self, chain_name, chain_benchmark):
        """Summarize one service chain benchmark."""
        self._put(chain_name + ':')
        self.__record_header_put('service_chain', chain_name)
        with self._create_block():
            self._put('Traffic:')
            with self._create_block(False):
                self.__traffic_summarize(chain_benchmark['result'])

    def __traffic_summarize(self, traffic_benchmark):
        """Summarize one traffic benchmark result and send accumulated records."""
        self._put('Profile:', traffic_benchmark['profile'])
        self._put('Bidirectional:', traffic_benchmark['bidirectional'])
        self._put('Flow count:', traffic_benchmark['flow_count'])
        self._put('Service chains count:', traffic_benchmark['service_chain_count'])
        self._put('Compute nodes:', list(traffic_benchmark['compute_nodes'].keys()))

        self.__record_header_put('profile', traffic_benchmark['profile'])
        self.__record_header_put('bidirectional', traffic_benchmark['bidirectional'])
        self.__record_header_put('flow_count', traffic_benchmark['flow_count'])
        self.__record_header_put('sc_count', traffic_benchmark['service_chain_count'])
        self.__record_header_put('compute_nodes', list(traffic_benchmark['compute_nodes'].keys()))
        with self._create_block(False):
            self._put()
            if not self.config['no_traffic']:
                self._put('Run Summary:')
                self._put()
                with self._create_block(False):
                    self._put_table(self.__get_summary_table(traffic_benchmark['result']))
                    try:
                        self._put()
                        self._put(traffic_benchmark['result']['warning'])
                    except KeyError:
                        pass

            # entries are (frame size, analysis) pairs; the membership test
            # skips the special 'warning' entry
            for entry in list(traffic_benchmark['result'].items()):
                if 'warning' in entry:
                    continue
                self.__chain_analysis_summarize(*entry)
            self.__record_send()

    def __chain_analysis_summarize(self, frame_size, analysis):
        """Summarize the analysis of one frame size."""
        self._put()
        self._put('L2 frame size:', frame_size)
        if self.config['ndr_run']:
            self._put('NDR search duration:', Formatter.float(0)(analysis['ndr']['time_taken_sec']),
                      'seconds')
            self.__record_data_put(frame_size, {'ndr_search_duration': Formatter.float(0)(
                analysis['ndr']['time_taken_sec'])})
        if self.config['pdr_run']:
            self._put('PDR search duration:', Formatter.float(0)(analysis['pdr']['time_taken_sec']),
                      'seconds')
            self.__record_data_put(frame_size, {'pdr_search_duration': Formatter.float(0)(
                analysis['pdr']['time_taken_sec'])})
        self._put()

        if not self.config['no_traffic'] and self.config['single_run']:
            self._put('Run Config:')
            self._put()
            with self._create_block(False):
                self._put_table(self.__get_config_table(analysis['run_config'], frame_size))
                if 'warning' in analysis['run_config'] and analysis['run_config']['warning']:
                    self._put()
                    self._put(analysis['run_config']['warning'])
                self._put()

        if 'packet_path_stats' in analysis:
            for direction in ['Forward', 'Reverse']:
                self._put(direction + ' Chain Packet Counters and Latency:')
                self._put()
                with self._create_block(False):
                    self._put_table(self._get_chain_table(analysis['packet_path_stats'][direction]))
                    self._put()

    def __get_summary_table(self, traffic_result):
        """Build the summary Table for a single run or an NDR/PDR run."""
        if self.config['single_run']:
            summary_table = Table(self.single_run_header)
        else:
            summary_table = Table(self.ndr_pdr_header)

        # row order is preserved from the original implementation:
        # all NDR rows, then all PDR rows, then all single run rows
        if self.config['ndr_run']:
            self.__add_rate_rows(summary_table, traffic_result, 'ndr')
        if self.config['pdr_run']:
            self.__add_rate_rows(summary_table, traffic_result, 'pdr')
        if self.config['single_run']:
            self.__add_single_run_rows(summary_table, traffic_result)
        return summary_table

    def __add_rate_rows(self, summary_table, traffic_result, rate_type):
        """Add one NDR or PDR row per frame size and record the data.

        rate_type: 'ndr' or 'pdr' (NDR and PDR rows share the same shape,
        differing only in the result key and the row/record labels)
        """
        for frame_size, analysis in list(traffic_result.items()):
            if frame_size == 'warning':
                continue
            stats = analysis[rate_type]['stats']
            overall = stats['overall']
            row_data = [
                rate_type.upper(),
                frame_size,
                analysis[rate_type]['rate_bps'],
                analysis[rate_type]['rate_pps'],
                overall['drop_percentage'],
                overall['avg_delay_usec'],
                overall['min_delay_usec'],
                overall['max_delay_usec']
            ]
            if not self.config.disable_hdrh:
                self.extract_hdrh_percentiles(overall['lat_percentile'], row_data)
            summary_table.add_row(row_data)

            record = {
                'type': rate_type.upper(),
                'rate_bps': analysis[rate_type]['rate_bps'],
                'rate_pps': analysis[rate_type]['rate_pps'],
                'offered_tx_rate_bps': stats['offered_tx_rate_bps'],
                'theoretical_tx_rate_pps': stats['theoretical_tx_rate_pps'],
                'theoretical_tx_rate_bps': stats['theoretical_tx_rate_bps'],
                'drop_percentage': overall['drop_percentage'],
                'avg_delay_usec': overall['avg_delay_usec'],
                'min_delay_usec': overall['min_delay_usec'],
                'max_delay_usec': overall['max_delay_usec']
            }
            if not self.config.disable_hdrh:
                self.extract_hdrh_percentiles(overall['lat_percentile'], record, True)
            self.__record_data_put(frame_size, {rate_type: record})

    def __add_single_run_rows(self, summary_table, traffic_result):
        """Add one row per frame size for a single fixed rate run."""
        # NOTE(review): unlike the NDR/PDR loops there is no 'warning' skip
        # here (kept as in the original implementation) — presumably single
        # run results never carry a 'warning' key; confirm against callers
        for frame_size, analysis in list(traffic_result.items()):
            overall = analysis['stats']['overall']
            row_data = [
                frame_size,
                overall['drop_rate_percent'],
                overall['rx']['avg_delay_usec'],
                overall['rx']['min_delay_usec'],
                overall['rx']['max_delay_usec']
            ]
            if not self.config.disable_hdrh:
                self.extract_hdrh_percentiles(overall['rx']['lat_percentile'], row_data)
            summary_table.add_row(row_data)

            single_run_data = {
                'type': 'single_run',
                'offered_tx_rate_bps': analysis['stats']['offered_tx_rate_bps'],
                'theoretical_tx_rate_pps': analysis['stats']['theoretical_tx_rate_pps'],
                'theoretical_tx_rate_bps': analysis['stats']['theoretical_tx_rate_bps'],
                'drop_rate_percent': overall['drop_rate_percent'],
                'avg_delay_usec': overall['rx']['avg_delay_usec'],
                'min_delay_usec': overall['rx']['min_delay_usec'],
                'max_delay_usec': overall['rx']['max_delay_usec']
            }
            if not self.config.disable_hdrh:
                self.extract_hdrh_percentiles(overall['rx']['lat_percentile'], single_run_data, True)
            self.__record_data_put(frame_size, {'single_run': single_run_data})

    def extract_hdrh_percentiles(self, lat_percentile, data, add_key=False):
        """Copy the configured latency percentiles from lat_percentile into data.

        When add_key is True, data is a dict and gets one
        'lat_percentile_<p>' key per percentile; otherwise data is a list
        and values are appended in configured percentile order.
        """
        if add_key:
            # NOTE(review): this empty 'lat_percentile' entry is never filled
            # (values go to 'lat_percentile_<p>' keys); kept for backward
            # compatibility of the record schema
            data['lat_percentile'] = {}
        for percentile in self.config.lat_percentiles:
            if add_key:
                data['lat_percentile_' + str(percentile)] = lat_percentile[percentile]
            else:
                data.append(lat_percentile[percentile])

    def __get_config_table(self, run_config, frame_size):
        """Build the run config Table (one row per direction) and record it."""
        config_table = Table(self.config_header)
        for key, name in zip(self.direction_keys, self.direction_names):
            if key not in run_config:
                continue
            config_table.add_row([
                name,
                run_config[key]['orig']['rate_bps'],
                run_config[key]['tx']['rate_bps'],
                run_config[key]['rx']['rate_bps'],
                int(run_config[key]['orig']['rate_pps']),
                int(run_config[key]['tx']['rate_pps']),
                int(run_config[key]['rx']['rate_pps']),
            ])
            self.__record_data_put(frame_size, {
                name.lower() + "_orig_rate_bps": int(run_config[key]['orig']['rate_bps']),
                name.lower() + "_tx_rate_bps": int(run_config[key]['tx']['rate_bps']),
                name.lower() + "_rx_rate_bps": int(run_config[key]['rx']['rate_bps']),
                name.lower() + "_orig_rate_pps": int(run_config[key]['orig']['rate_pps']),
                name.lower() + "_tx_rate_pps": int(run_config[key]['tx']['rate_pps']),
                name.lower() + "_rx_rate_pps": int(run_config[key]['rx']['rate_pps']),

            })
        return config_table

    def _get_chain_table(self, chain_stats):
        """Retrieve the table for a direction.

        chain_stats: {
             'interfaces': ['Port0', 'drop %'', 'vhost0', 'Port1'],
             'chains': {
                 '0': {'packets': [2000054, '-0.023%', 1999996, 1999996],
                     'lat_min_usec': 10,
                     'lat_max_usec': 187,
                     'lat_avg_usec': 45},
                 '1': {...},
                 'total': {...}
             }
        }
        """
        chains = chain_stats['chains']
        _annotate_chain_stats(chains)
        header = [('Chain', Formatter.standard)] + \
                 [(ifname, Formatter.standard) for ifname in chain_stats['interfaces']]
        # add latency columns if available Avg, Min, Max and percentiles
        lat_keys = []
        lat_map = {'lat_avg_usec': 'Avg lat.',
                   'lat_min_usec': 'Min lat.',
                   'lat_max_usec': 'Max lat.'}
        if 'lat_avg_usec' in chains['0']:
            lat_keys = ['lat_avg_usec', 'lat_min_usec', 'lat_max_usec']

            if not self.config.disable_hdrh:
                lat_keys.append('lat_percentile')
                for percentile in self.config.lat_percentiles:
                    lat_map['lat_' + str(percentile) + '_percentile'] = \
                        str(percentile) + ' %ile lat.'

            for key in lat_map:
                # 'append' expects a single parameter => double parentheses
                header.append((lat_map[key], Formatter.standard))

        table = Table(header)
        for chain in sorted(list(chains.keys()), key=str):
            row = [chain] + chains[chain]['packets']
            for lat_key in lat_keys:

                if lat_key != 'lat_percentile':
                    # NOTE(review): truthiness test means a latency of 0 is
                    # shown as 'n/a' (kept as in the original implementation)
                    if chains[chain].get(lat_key, None):
                        row.append(Formatter.standard(chains[chain][lat_key]))
                    else:
                        row.append('n/a')
                else:
                    if not self.config.disable_hdrh:
                        if chains[chain].get(lat_key, None):
                            for percentile in chains[chain][lat_key]:
                                row.append(Formatter.standard(
                                    chains[chain][lat_key][percentile]))
                        else:
                            for percentile in self.config.lat_percentiles:
                                row.append('n/a')
            table.add_row(row)
        return table

    def __record_header_put(self, key, value):
        """Store one header key/value, only when a sender is configured."""
        if self.sender:
            self.record_header[key] = value

    def __record_data_put(self, key, data):
        """Merge data into the per-frame-size record, only when a sender is set."""
        if self.sender:
            if key not in self.record_data:
                self.record_data[key] = {}
            self.record_data[key].update(data)

    def __record_send(self):
        """Send one record per frame size and run type, then reset the records."""
        if self.sender:
            self.record_header["@timestamp"] = datetime.utcnow().replace(
                tzinfo=pytz.utc).strftime("%Y-%m-%dT%H:%M:%S.%f%z")
            for frame_size in self.record_data:
                # NOTE(review): 'data' aliases record_header, so frame data
                # merged here persists across iterations (kept as in the
                # original implementation — confirm this carry-over is wanted)
                data = self.record_header
                data['frame_size'] = frame_size
                data.update(self.record_data[frame_size])
                run_specific_data = {}
                if 'single_run' in data:
                    run_specific_data['single_run'] = data['single_run']
                    del data['single_run']
                if 'ndr' in data:
                    run_specific_data['ndr'] = data['ndr']
                    run_specific_data['ndr']['drop_limit'] = self.config['measurement']['NDR']
                    del data['ndr']
                if 'pdr' in data:
                    run_specific_data['pdr'] = data['pdr']
                    run_specific_data['pdr']['drop_limit'] = self.config['measurement']['PDR']
                    del data['pdr']
                for key in run_specific_data:
                    data_to_send = data.copy()
                    data_to_send.update(run_specific_data[key])
                    self.sender.record_send(data_to_send)
            self.__record_init()

    def __record_init(self):
        """Reset the record header and data; only called when sender is set."""
        # init is called after checking for sender
        self.record_header = {
            "runlogdate": self.sender.runlogdate,
            "user_label": self.config['user_label']
        }
        self.record_data = {}