# NFVBENCH-192: Complete/fix hdrh related processings to consider all cases
# [nfvbench.git] / nfvbench / summarizer.py
1 #!/usr/bin/env python
2 # Copyright 2016 Cisco Systems, Inc.  All rights reserved.
3 #
4 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
5 #    not use this file except in compliance with the License. You may obtain
6 #    a copy of the License at
7 #
8 #         http://www.apache.org/licenses/LICENSE-2.0
9 #
10 #    Unless required by applicable law or agreed to in writing, software
11 #    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
12 #    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13 #    License for the specific language governing permissions and limitations
14 #    under the License.
15 #
16
17 from contextlib import contextmanager
18 from datetime import datetime
19 import math
20
21 import bitmath
22 import pytz
23 from tabulate import tabulate
24
25 def _annotate_chain_stats(chain_stats, nodrop_marker='=>'):
26     """Transform a plain chain stats into an annotated one.
27
28     Example:
29     {
30          0: {'packets': [2000054, 1999996, 1999996, 1999996],
31              'lat_min_usec': 10,
32              'lat_max_usec': 187,
33              'lat_avg_usec': 45},
34          1: {...},
35          'total': {...}
36     }
37     should become:
38     {
39          0: {'packets': [2000054, -58 (-0.034%), '=>', 1999996],
40              'lat_min_usec': 10,
41              'lat_max_usec': 187,
42              'lat_avg_usec': 45},
43          1: {...},
44          'total': {...}
45     }
46
47     In the case of shared net, some columns in packets array can have ''.
48     Some columns cab also be None which means the data is not available.
49     """
50     for stats in list(chain_stats.values()):
51         packets = stats['packets']
52         count = len(packets)
53         if count > 1:
54             # keep the first counter
55             annotated_packets = [packets[0]]
56             # modify all remaining counters
57             prev_count = packets[0]
58             for index in range(1, count):
59                 cur_count = packets[index]
60                 if cur_count == '':
61                     # an empty string indicates an unknown counter for a shared interface
62                     # do not annotate those
63                     annotated_value = ''
64                 elif cur_count is None:
65                     # Not available
66                     annotated_value = 'n/a'
67                 else:
68                     drop = cur_count - prev_count
69                     if drop:
70                         dr = (drop * 100.0) / prev_count if prev_count else 0
71                         annotated_value = '{:+,} ({:+.4f}%)'.format(drop, dr)
72                     else:
73                         # no drop
74                         # if last column we display the value
75                         annotated_value = cur_count if index == count - 1 else nodrop_marker
76                     prev_count = cur_count
77                 annotated_packets.append(annotated_value)
78
79             stats['packets'] = annotated_packets
80
class Formatter(object):
    """Collection of string formatter methods."""

    @staticmethod
    def fixed(data):
        """Return the value unchanged."""
        return data

    @staticmethod
    def int(data):
        """Render an integer with thousands separators."""
        return format(data, ',')

    @staticmethod
    def float(decimal):
        """Return a formatter rendering floats with *decimal* decimals."""
        template = '%.{}f'.format(decimal)
        return lambda data: template % (data)

    @staticmethod
    def standard(data):
        """Format ints with separators, floats with 4 decimals, else as-is."""
        if isinstance(data, float):
            return Formatter.float(4)(data)
        if isinstance(data, int):
            return Formatter.int(data)
        return Formatter.fixed(data)

    @staticmethod
    def suffix(suffix_str):
        """Return a formatter that appends *suffix_str* to the standard form."""
        def _with_suffix(data):
            return Formatter.standard(data) + suffix_str
        return _with_suffix

    @staticmethod
    def bits(data):
        """Render a bit count with the best SI prefix (kb, Mb, ...)."""
        # By default, `best_prefix` returns a value in byte format, this hack
        # (multiply by 8.0) will convert it into bit format.
        scaled = (8.0 * bitmath.Bit(float(data))).best_prefix(bitmath.SI)
        si_bit_units = {
            'kB': bitmath.kb,
            'MB': bitmath.Mb,
            'GB': bitmath.Gb,
            'TB': bitmath.Tb,
            'PB': bitmath.Pb,
            'EB': bitmath.Eb,
            'ZB': bitmath.Zb,
            'YB': bitmath.Yb,
        }
        bps = si_bit_units.get(scaled.unit, bitmath.Bit).from_other(scaled) / 8.0
        if bps.unit != 'Bit':
            return bps.format("{value:.4f} {unit}ps")
        return bps.format("{value:.4f} bps")

    @staticmethod
    def percentage(data):
        """Render a percentage; '' for None and '-' for NaN."""
        if data is None:
            return ''
        if math.isnan(data):
            return '-'
        return Formatter.suffix('%')(Formatter.float(4)(data))
136
137
class Table(object):
    """ASCII readable table class."""

    def __init__(self, header):
        """Create a table from a list of (column title, formatter) pairs."""
        titles, formatters = list(zip(*header))
        self.formatters = formatters
        # first row of data holds the column titles
        self.data = [titles]
        self.columns = len(titles)

    def add_row(self, row):
        """Format and append one row; must match the column count."""
        assert self.columns == len(row)
        self.data.append(
            [fmt(cell) for cell, fmt in zip(row, self.formatters)])

    def get_string(self, indent=0):
        """Render the table as a grid, indenting continuation lines."""
        rendered = tabulate(self.data,
                            headers='firstrow',
                            tablefmt='grid',
                            stralign='center',
                            floatfmt='.2f')
        return rendered.replace('\n', '\n' + ' ' * indent)
161
162
class Summarizer(object):
    """Generic summarizer class.

    Accumulates an indented text report in self.str; nested sections are
    opened with the _create_block() context manager, which indents by
    indent_per_level spaces and optionally prefixes the first column with
    a '> ' marker.
    """

    indent_per_level = 2

    def __init__(self):
        # current indentation in spaces
        self.indent_size = 0
        # one marker flag per open block; bottom entry is the implicit root
        self.marker_stack = [False]
        # the accumulated report text
        self.str = ''

    def __indent(self, marker):
        self.indent_size += self.indent_per_level
        self.marker_stack.append(marker)

    def __unindent(self):
        assert self.indent_size >= self.indent_per_level
        self.indent_size -= self.indent_per_level
        self.marker_stack.pop()

    def __get_indent_string(self):
        current_str = ' ' * self.indent_size
        if self.marker_stack[-1]:
            # replace the last 2 spaces with the block marker
            current_str = current_str[:-2] + '> '
        return current_str

    def _put(self, *args):
        """Append one indented line; a trailing dict arg is expanded below it."""
        self.str += self.__get_indent_string()
        if args and isinstance(args[-1], dict):
            self.str += ' '.join(map(str, args[:-1])) + '\n'
            self._put_dict(args[-1])
        else:
            self.str += ' '.join(map(str, args)) + '\n'

    def _put_dict(self, data):
        """Append a dict as an indented 'key: value' block (recursive)."""
        with self._create_block(False):
            for key, value in list(data.items()):
                if isinstance(value, dict):
                    self._put(key + ':')
                    self._put_dict(value)
                else:
                    self._put(key + ':', value)

    def _put_table(self, table):
        """Append a rendered Table at the current indentation."""
        self.str += self.__get_indent_string()
        self.str += table.get_string(self.indent_size) + '\n'

    def __str__(self):
        return self.str

    @contextmanager
    def _create_block(self, marker=True):
        """Open an indented block; unindent even if the body raises.

        The try/finally is required: without it an exception inside the
        block would skip __unindent() and leave the indentation state
        corrupted for the rest of the report.
        """
        self.__indent(marker)
        try:
            yield
        finally:
            self.__unindent()
217
218
class NFVBenchSummarizer(Summarizer):
    """Summarize nfvbench json result.

    Renders the json result as an indented text report (using Table and
    Formatter) and, when a sender is provided, flattens the per-frame-size
    results into records pushed through sender.record_send().
    """

    # each header entry is a (column title, formatter) tuple
    ndr_pdr_header = [
        ('-', Formatter.fixed),
        ('L2 Frame Size', Formatter.standard),
        ('Rate (fwd+rev)', Formatter.bits),
        ('Rate (fwd+rev)', Formatter.suffix(' pps')),
        ('Avg Drop Rate', Formatter.suffix('%')),
        ('Avg Latency (usec)', Formatter.standard),
        ('Min Latency (usec)', Formatter.standard),
        ('Max Latency (usec)', Formatter.standard)
    ]

    single_run_header = [
        ('L2 Frame Size', Formatter.standard),
        ('Drop Rate', Formatter.suffix('%')),
        ('Avg Latency (usec)', Formatter.standard),
        ('Min Latency (usec)', Formatter.standard),
        ('Max Latency (usec)', Formatter.standard)
    ]

    config_header = [
        ('Direction', Formatter.standard),
        ('Requested TX Rate (bps)', Formatter.bits),
        ('Actual TX Rate (bps)', Formatter.bits),
        ('RX Rate (bps)', Formatter.bits),
        ('Requested TX Rate (pps)', Formatter.suffix(' pps')),
        ('Actual TX Rate (pps)', Formatter.suffix(' pps')),
        ('RX Rate (pps)', Formatter.suffix(' pps'))
    ]

    direction_keys = ['direction-forward', 'direction-reverse', 'direction-total']
    direction_names = ['Forward', 'Reverse', 'Total']

    def __init__(self, result, sender):
        """Create a summarizer instance.

        result: the nfvbench json result dict (must contain 'config')
        sender: optional record sender, or None to disable record sending
        """
        Summarizer.__init__(self)
        self.result = result
        self.config = self.result['config']
        self.record_header = None
        self.record_data = None
        self.sender = sender

        # add percentiles headers if hdrh enabled
        if not self.config.disable_hdrh:
            # copy the class-level headers to instance attributes first:
            # appending directly to the class attributes would make the
            # headers grow at every instantiation
            self.ndr_pdr_header = list(self.ndr_pdr_header)
            self.single_run_header = list(self.single_run_header)
            for percentile in self.config.lat_percentiles:
                # bug fix: list.append() takes a single argument, so the
                # (title, formatter) pair must be appended as one tuple
                # (previously passed as 2 arguments -> TypeError)
                self.ndr_pdr_header.append(
                    (str(percentile) + ' %ile lat.', Formatter.standard))
                self.single_run_header.append(
                    (str(percentile) + ' %ile lat.', Formatter.standard))
        # if sender is available initialize record
        if self.sender:
            self.__record_init()
        self.__summarize()

    def __get_openstack_spec(self, spec_property):
        """Return the requested openstack spec field or '' if not available."""
        # renamed from 'property' to avoid shadowing the builtin
        try:
            return self.result['openstack_spec'][spec_property]
        except KeyError:
            return ''

    def __summarize(self):
        """Build the full text report from self.result."""
        self._put()
        self._put('========== NFVBench Summary ==========')
        self._put('Date:', self.result['date'])
        self._put('NFVBench version', self.result['nfvbench_version'])
        self._put('Openstack Neutron:', {
            'vSwitch': self.__get_openstack_spec('vswitch'),
            'Encapsulation': self.__get_openstack_spec('encaps')
        })
        self.__record_header_put('version', self.result['nfvbench_version'])
        self.__record_header_put('vSwitch', self.__get_openstack_spec('vswitch'))
        self.__record_header_put('Encapsulation', self.__get_openstack_spec('encaps'))
        self._put('Benchmarks:')
        with self._create_block():
            self._put('Networks:')
            with self._create_block():
                network_benchmark = self.result['benchmarks']['network']

                self._put('Components:')
                with self._create_block():
                    self._put('Traffic Generator:')
                    with self._create_block(False):
                        self._put('Profile:', self.config['tg-name'])
                        self._put('Tool:', self.config['tg-tool'])
                    if network_benchmark['versions']:
                        self._put('Versions:')
                        with self._create_block():
                            for component, version in list(network_benchmark['versions'].items()):
                                self._put(component + ':', version)

                if self.config['ndr_run'] or self.config['pdr_run']:
                    self._put('Measurement Parameters:')
                    with self._create_block(False):
                        if self.config['ndr_run']:
                            self._put('NDR:', self.config['measurement']['NDR'])
                        if self.config['pdr_run']:
                            self._put('PDR:', self.config['measurement']['PDR'])
                self._put('Service chain:')
                for result in list(network_benchmark['service_chain'].items()):
                    with self._create_block():
                        self.__chain_summarize(*result)

    def __chain_summarize(self, chain_name, chain_benchmark):
        """Summarize one service chain benchmark."""
        self._put(chain_name + ':')
        self.__record_header_put('service_chain', chain_name)
        with self._create_block():
            self._put('Traffic:')
            with self._create_block(False):
                self.__traffic_summarize(chain_benchmark['result'])

    def __traffic_summarize(self, traffic_benchmark):
        """Summarize the traffic section of a chain and send records."""
        self._put('Profile:', traffic_benchmark['profile'])
        self._put('Bidirectional:', traffic_benchmark['bidirectional'])
        self._put('Flow count:', traffic_benchmark['flow_count'])
        self._put('Service chains count:', traffic_benchmark['service_chain_count'])
        self._put('Compute nodes:', list(traffic_benchmark['compute_nodes'].keys()))

        self.__record_header_put('profile', traffic_benchmark['profile'])
        self.__record_header_put('bidirectional', traffic_benchmark['bidirectional'])
        self.__record_header_put('flow_count', traffic_benchmark['flow_count'])
        self.__record_header_put('sc_count', traffic_benchmark['service_chain_count'])
        self.__record_header_put('compute_nodes', list(traffic_benchmark['compute_nodes'].keys()))
        with self._create_block(False):
            self._put()
            if not self.config['no_traffic']:
                self._put('Run Summary:')
                self._put()
                with self._create_block(False):
                    self._put_table(self.__get_summary_table(traffic_benchmark['result']))
                    try:
                        self._put()
                        self._put(traffic_benchmark['result']['warning'])
                    except KeyError:
                        pass

            for entry in list(traffic_benchmark['result'].items()):
                # entry is a (frame_size, analysis) tuple; skip the warning key
                if 'warning' in entry:
                    continue
                self.__chain_analysis_summarize(*entry)
            self.__record_send()

    def __chain_analysis_summarize(self, frame_size, analysis):
        """Summarize the per-frame-size analysis (search times, config, paths)."""
        self._put()
        self._put('L2 frame size:', frame_size)
        if self.config['ndr_run']:
            self._put('NDR search duration:', Formatter.float(0)(analysis['ndr']['time_taken_sec']),
                      'seconds')
            self.__record_data_put(frame_size, {'ndr_search_duration': Formatter.float(0)(
                analysis['ndr']['time_taken_sec'])})
        if self.config['pdr_run']:
            self._put('PDR search duration:', Formatter.float(0)(analysis['pdr']['time_taken_sec']),
                      'seconds')
            self.__record_data_put(frame_size, {'pdr_search_duration': Formatter.float(0)(
                analysis['pdr']['time_taken_sec'])})
        self._put()

        if not self.config['no_traffic'] and self.config['single_run']:
            self._put('Run Config:')
            self._put()
            with self._create_block(False):
                self._put_table(self.__get_config_table(analysis['run_config'], frame_size))
                if 'warning' in analysis['run_config'] and analysis['run_config']['warning']:
                    self._put()
                    self._put(analysis['run_config']['warning'])
                self._put()

        if 'packet_path_stats' in analysis:
            # 'direction' instead of 'dir' to avoid shadowing the builtin
            for direction in ['Forward', 'Reverse']:
                self._put(direction + ' Chain Packet Counters and Latency:')
                self._put()
                with self._create_block(False):
                    self._put_table(
                        self._get_chain_table(analysis['packet_path_stats'][direction]))
                    self._put()

    def __get_summary_table(self, traffic_result):
        """Build the run summary table (NDR/PDR or single run) and fill records."""
        if self.config['single_run']:
            summary_table = Table(self.single_run_header)
        else:
            summary_table = Table(self.ndr_pdr_header)

        if self.config['ndr_run']:
            for frame_size, analysis in list(traffic_result.items()):
                if frame_size == 'warning':
                    continue

                row_data = [
                    'NDR',
                    frame_size,
                    analysis['ndr']['rate_bps'],
                    analysis['ndr']['rate_pps'],
                    analysis['ndr']['stats']['overall']['drop_percentage'],
                    analysis['ndr']['stats']['overall']['avg_delay_usec'],
                    analysis['ndr']['stats']['overall']['min_delay_usec'],
                    analysis['ndr']['stats']['overall']['max_delay_usec']
                ]
                if not self.config.disable_hdrh:
                    self.extract_hdrh_percentiles(
                        analysis['ndr']['stats']['overall']['lat_percentile'], row_data)
                summary_table.add_row(row_data)

                ndr_data = {
                    'type': 'NDR',
                    'rate_bps': analysis['ndr']['rate_bps'],
                    'rate_pps': analysis['ndr']['rate_pps'],
                    'offered_tx_rate_bps': analysis['ndr']['stats']['offered_tx_rate_bps'],
                    'theoretical_tx_rate_pps': analysis['ndr']['stats']['theoretical_tx_rate_pps'],
                    'theoretical_tx_rate_bps': analysis['ndr']['stats']['theoretical_tx_rate_bps'],
                    'drop_percentage': analysis['ndr']['stats']['overall']['drop_percentage'],
                    'avg_delay_usec': analysis['ndr']['stats']['overall']['avg_delay_usec'],
                    'min_delay_usec': analysis['ndr']['stats']['overall']['min_delay_usec'],
                    'max_delay_usec': analysis['ndr']['stats']['overall']['max_delay_usec']
                }
                if not self.config.disable_hdrh:
                    self.extract_hdrh_percentiles(
                        analysis['ndr']['stats']['overall']['lat_percentile'], ndr_data, True)
                self.__record_data_put(frame_size, {'ndr': ndr_data})
        if self.config['pdr_run']:
            for frame_size, analysis in list(traffic_result.items()):
                if frame_size == 'warning':
                    continue

                row_data = [
                    'PDR',
                    frame_size,
                    analysis['pdr']['rate_bps'],
                    analysis['pdr']['rate_pps'],
                    analysis['pdr']['stats']['overall']['drop_percentage'],
                    analysis['pdr']['stats']['overall']['avg_delay_usec'],
                    analysis['pdr']['stats']['overall']['min_delay_usec'],
                    analysis['pdr']['stats']['overall']['max_delay_usec']
                ]
                if not self.config.disable_hdrh:
                    self.extract_hdrh_percentiles(
                        analysis['pdr']['stats']['overall']['lat_percentile'], row_data)
                summary_table.add_row(row_data)

                pdr_data = {
                    'type': 'PDR',
                    'rate_bps': analysis['pdr']['rate_bps'],
                    'rate_pps': analysis['pdr']['rate_pps'],
                    'offered_tx_rate_bps': analysis['pdr']['stats']['offered_tx_rate_bps'],
                    'theoretical_tx_rate_pps': analysis['pdr']['stats']['theoretical_tx_rate_pps'],
                    'theoretical_tx_rate_bps': analysis['pdr']['stats']['theoretical_tx_rate_bps'],
                    'drop_percentage': analysis['pdr']['stats']['overall']['drop_percentage'],
                    'avg_delay_usec': analysis['pdr']['stats']['overall']['avg_delay_usec'],
                    'min_delay_usec': analysis['pdr']['stats']['overall']['min_delay_usec'],
                    'max_delay_usec': analysis['pdr']['stats']['overall']['max_delay_usec']
                }
                if not self.config.disable_hdrh:
                    self.extract_hdrh_percentiles(
                        analysis['pdr']['stats']['overall']['lat_percentile'], pdr_data, True)
                self.__record_data_put(frame_size, {'pdr': pdr_data})
        if self.config['single_run']:
            for frame_size, analysis in list(traffic_result.items()):
                row_data = [
                    frame_size,
                    analysis['stats']['overall']['drop_rate_percent'],
                    analysis['stats']['overall']['rx']['avg_delay_usec'],
                    analysis['stats']['overall']['rx']['min_delay_usec'],
                    analysis['stats']['overall']['rx']['max_delay_usec']
                ]
                if not self.config.disable_hdrh:
                    self.extract_hdrh_percentiles(
                        analysis['stats']['overall']['rx']['lat_percentile'], row_data)
                summary_table.add_row(row_data)

                single_run_data = {
                    'type': 'single_run',
                    'offered_tx_rate_bps': analysis['stats']['offered_tx_rate_bps'],
                    'theoretical_tx_rate_pps': analysis['stats']['theoretical_tx_rate_pps'],
                    'theoretical_tx_rate_bps': analysis['stats']['theoretical_tx_rate_bps'],
                    'drop_rate_percent': analysis['stats']['overall']['drop_rate_percent'],
                    'avg_delay_usec': analysis['stats']['overall']['rx']['avg_delay_usec'],
                    'min_delay_usec': analysis['stats']['overall']['rx']['min_delay_usec'],
                    'max_delay_usec': analysis['stats']['overall']['rx']['max_delay_usec']
                }
                if not self.config.disable_hdrh:
                    self.extract_hdrh_percentiles(
                        analysis['stats']['overall']['rx']['lat_percentile'], single_run_data, True)
                self.__record_data_put(frame_size, {'single_run': single_run_data})
        return summary_table

    def extract_hdrh_percentiles(self, lat_percentile, data, add_key=False):
        """Add the configured latency percentiles to a row or a record.

        lat_percentile: percentile -> latency value mapping from the stats
        data: a table row (list) or a record dict to extend
        add_key: when True, data is a dict and each value is stored under a
                 'lat_percentile_<percentile>' key; otherwise the values are
                 appended to the row in configuration order
        """
        if add_key:
            # NOTE(review): this nested dict is created but never populated,
            # so records carry an empty 'lat_percentile' key - confirm intent
            data['lat_percentile'] = {}
        for percentile in self.config.lat_percentiles:
            if add_key:
                data['lat_percentile_' + str(percentile)] = lat_percentile[percentile]
            else:
                data.append(lat_percentile[percentile])

    def __get_config_table(self, run_config, frame_size):
        """Build the per-direction run config table and fill records."""
        config_table = Table(self.config_header)
        for key, name in zip(self.direction_keys, self.direction_names):
            if key not in run_config:
                continue
            config_table.add_row([
                name,
                run_config[key]['orig']['rate_bps'],
                run_config[key]['tx']['rate_bps'],
                run_config[key]['rx']['rate_bps'],
                int(run_config[key]['orig']['rate_pps']),
                int(run_config[key]['tx']['rate_pps']),
                int(run_config[key]['rx']['rate_pps']),
            ])
            self.__record_data_put(frame_size, {
                name.lower() + "_orig_rate_bps": int(run_config[key]['orig']['rate_bps']),
                name.lower() + "_tx_rate_bps": int(run_config[key]['tx']['rate_bps']),
                name.lower() + "_rx_rate_bps": int(run_config[key]['rx']['rate_bps']),
                name.lower() + "_orig_rate_pps": int(run_config[key]['orig']['rate_pps']),
                name.lower() + "_tx_rate_pps": int(run_config[key]['tx']['rate_pps']),
                name.lower() + "_rx_rate_pps": int(run_config[key]['rx']['rate_pps']),

            })
        return config_table

    def _get_chain_table(self, chain_stats):
        """Retrieve the table for a direction.

        chain_stats: {
             'interfaces': ['Port0', 'drop %'', 'vhost0', 'Port1'],
             'chains': {
                 '0': {'packets': [2000054, '-0.023%', 1999996, 1999996],
                     'lat_min_usec': 10,
                     'lat_max_usec': 187,
                     'lat_avg_usec': 45},
                 '1': {...},
                 'total': {...}
             }
        }
        """
        chains = chain_stats['chains']
        _annotate_chain_stats(chains)
        header = [('Chain', Formatter.standard)] + \
                 [(ifname, Formatter.standard) for ifname in chain_stats['interfaces']]
        # add latency columns if available Avg, Min, Max and percentiles
        lat_keys = []
        lat_map = {'lat_avg_usec': 'Avg lat.',
                   'lat_min_usec': 'Min lat.',
                   'lat_max_usec': 'Max lat.'}
        if 'lat_avg_usec' in chains['0']:
            lat_keys = ['lat_avg_usec', 'lat_min_usec', 'lat_max_usec']

            if not self.config.disable_hdrh:
                lat_keys.append('lat_percentile')
                for percentile in self.config.lat_percentiles:
                    lat_map['lat_' + str(percentile) + '_percentile'] = \
                        str(percentile) + ' %ile lat.'

            for key in lat_map:
                # bug fix: append the (title, formatter) pair as one tuple
                # (previously passed as 2 arguments -> TypeError)
                header.append((lat_map[key], Formatter.standard))

        table = Table(header)
        for chain in sorted(list(chains.keys()), key=str):
            row = [chain] + chains[chain]['packets']
            for lat_key in lat_keys:

                if lat_key != 'lat_percentile':
                    # NOTE(review): truthiness check means a latency of 0 is
                    # shown as 'n/a' - confirm this is intended
                    if chains[chain].get(lat_key, None):
                        row.append(Formatter.standard(chains[chain][lat_key]))
                    else:
                        row.append('n/a')
                else:
                    if not self.config.disable_hdrh:
                        if chains[chain].get(lat_key, None):
                            for percentile in chains[chain][lat_key]:
                                row.append(Formatter.standard(
                                    chains[chain][lat_key][percentile]))
                        else:
                            # keep the row aligned with the percentile columns
                            for percentile in self.config.lat_percentiles:
                                row.append('n/a')
            table.add_row(row)
        return table

    def __record_header_put(self, key, value):
        """Store a header field for the record sender (no-op without sender)."""
        if self.sender:
            self.record_header[key] = value

    def __record_data_put(self, key, data):
        """Merge data into the per-frame-size record (no-op without sender)."""
        if self.sender:
            if key not in self.record_data:
                self.record_data[key] = {}
            self.record_data[key].update(data)

    def __record_send(self):
        """Send one record per run type and frame size, then reset state."""
        if self.sender:
            self.record_header["@timestamp"] = datetime.utcnow().replace(
                tzinfo=pytz.utc).strftime("%Y-%m-%dT%H:%M:%S.%f%z")
            for frame_size in self.record_data:
                # NOTE(review): 'data' aliases record_header, so keys merged
                # for one frame size persist into the next iteration - confirm
                data = self.record_header
                data['frame_size'] = frame_size
                data.update(self.record_data[frame_size])
                run_specific_data = {}
                if 'single_run' in data:
                    run_specific_data['single_run'] = data['single_run']
                    del data['single_run']
                if 'ndr' in data:
                    run_specific_data['ndr'] = data['ndr']
                    run_specific_data['ndr']['drop_limit'] = self.config['measurement']['NDR']
                    del data['ndr']
                if 'pdr' in data:
                    run_specific_data['pdr'] = data['pdr']
                    run_specific_data['pdr']['drop_limit'] = self.config['measurement']['PDR']
                    del data['pdr']
                for key in run_specific_data:
                    data_to_send = data.copy()
                    data_to_send.update(run_specific_data[key])
                    self.sender.record_send(data_to_send)
            self.__record_init()

    def __record_init(self):
        """Reset the record header/data; only called when a sender exists."""
        # init is called after checking for sender
        self.record_header = {
            "runlogdate": self.sender.runlogdate,
            "user_label": self.config['user_label']
        }
        self.record_data = {}