Merge "Fix KPI data reported by ProxBinSearchProfile"
[yardstick.git] / yardstick / network_services / traffic_profile / prox_binsearch.py
1 # Copyright (c) 2016-2017 Intel Corporation
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 #      http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14 """ Fixed traffic profile definitions """
15
16 from __future__ import absolute_import
17
18 import logging
19 import datetime
20 import time
21
22 from yardstick.network_services.traffic_profile.prox_profile import ProxProfile
23 from yardstick.network_services import constants
24 from yardstick.common import constants as overall_constants
25
26 LOG = logging.getLogger(__name__)
27
28
class ProxBinSearchProfile(ProxProfile):
    """Binary-search traffic profile for PROX throughput tests.

    Starting from the configured [lower_bound, upper_bound] rate interval,
    the profile first tries the upper bound and then binary-searches the
    interval, narrowing it until its width drops below ``self.precision``.
    Per-iteration and final KPI samples are pushed onto ``self.queue``.
    """

    def __init__(self, tp_config):
        super(ProxBinSearchProfile, self).__init__(tp_config)
        # Current search interval; re-initialized for every packet size
        # in bounds_iterator().
        self.current_lower = self.lower_bound
        self.current_upper = self.upper_bound

    @property
    def delta(self):
        # Width of the current search interval.
        return self.current_upper - self.current_lower

    @property
    def mid_point(self):
        # Midpoint of the current search interval: the next rate to test.
        return (self.current_lower + self.current_upper) / 2

    def bounds_iterator(self, logger=None):
        """Yield successive test values for the binary search.

        The first value yielded is the upper bound itself; afterwards the
        midpoint of the interval (updated externally by the caller via
        ``current_lower``/``current_upper``) is yielded until the interval
        is narrower than ``self.precision``.

        :param logger: optional logger for progress messages
        """
        self.current_lower = self.lower_bound
        self.current_upper = self.upper_bound

        test_value = self.current_upper
        while abs(self.delta) >= self.precision:
            if logger:
                # Fixed: the message advertises "precision" so log
                # self.precision (the original passed self.step_value);
                # %s because precision may be fractional.
                logger.debug("New interval [%s, %s), precision: %s",
                             self.current_lower, self.current_upper,
                             self.precision)
                logger.info("Testing with value %s", test_value)

            yield test_value
            test_value = self.mid_point

    def run_test_with_pkt_size(self, traffic_gen, pkt_size, duration):
        """Run the test for a single packet size.

        :param traffic_gen: traffic generator instance
        :type traffic_gen: TrafficGen
        :param pkt_size: The packet size to test with.
        :type pkt_size: int
        :param duration: The duration for each try.
        :type duration: int
        """

        LOG.info("Testing with packet size %d", pkt_size)

        # Binary search assumes the lower value of the interval is
        # successful and the upper value is a failure.
        # The first value that is tested, is the maximum value. If that
        # succeeds, no more searching is needed. If it fails, a regular
        # binary search is performed.
        #
        # The test_value used for the first iteration of binary search
        # is adjusted so that the delta between this test_value and the
        # upper bound is a power-of-2 multiple of precision. In the
        # optimistic situation where this first test_value results in a
        # success, the binary search will complete on an integer multiple
        # of the precision, rather than on a fraction of it.

        theor_max_thruput = actual_max_thruput = 0

        result_samples = {}

        # Store one time only value in influxdb
        single_samples = {
            "test_duration": traffic_gen.scenario_helper.scenario_cfg["runner"]["duration"],
            "test_precision": self.params["traffic_profile"]["test_precision"],
            "tolerated_loss": self.params["traffic_profile"]["tolerated_loss"],
            "duration": duration
        }
        self.queue.put(single_samples)
        self.prev_time = time.time()

        # throughput and packet loss from the most recent successful test
        successful_pkt_loss = 0.0
        line_speed = traffic_gen.scenario_helper.all_options.get(
            "interface_speed_gbps", constants.NIC_GBPS_DEFAULT) * constants.ONE_GIGABIT_IN_BITS

        # Number of confirmation retries per verdict (0 disables retries).
        ok_retry = traffic_gen.scenario_helper.scenario_cfg["runner"].get("confirmation", 0)
        for step_id, test_value in enumerate(self.bounds_iterator(LOG)):
            pos_retry = 0
            neg_retry = 0
            total_retry = 0

            # Fixed: log the *current* lower bound so MIN tracks the
            # search interval like MAX does (was static self.lower_bound).
            LOG.info("Checking MAX %s MIN %s TEST %s",
                self.current_upper, self.current_lower, test_value)
            while (pos_retry <= ok_retry) and (neg_retry <= ok_retry):

                total_retry = total_retry + 1
                result, port_samples = self._profile_helper.run_test(pkt_size, duration,
                                                                     test_value,
                                                                     self.tolerated_loss,
                                                                     line_speed)
                # NOTE: "is not 0" was replaced by "!= 0" here and below;
                # identity comparison against int literals is
                # implementation-dependent and a SyntaxWarning on
                # Python >= 3.8.
                if (total_retry > (ok_retry * 3)) and (ok_retry != 0):
                    LOG.info("Failure.!! .. RETRY EXCEEDED ... decrease lower bound")

                    successful_pkt_loss = result.pkt_loss
                    samples = result.get_samples(pkt_size, successful_pkt_loss, port_samples)

                    self.current_upper = test_value
                    neg_retry = total_retry
                elif result.success:
                    if (pos_retry < ok_retry) and (ok_retry != 0):
                        # Success, but confirmation retries still pending.
                        neg_retry = 0
                        LOG.info("Success! ... confirm retry")

                        successful_pkt_loss = result.pkt_loss
                        samples = result.get_samples(pkt_size, successful_pkt_loss, port_samples)

                    else:
                        LOG.info("Success! Increasing lower bound")
                        self.current_lower = test_value

                        successful_pkt_loss = result.pkt_loss
                        samples = result.get_samples(pkt_size, successful_pkt_loss, port_samples)

                        # store results with success tag in influxdb
                        success_samples = \
                            {'Success_' + key: value for key, value in samples.items()}

                        success_samples["Success_rx_total"] = int(result.rx_total)
                        success_samples["Success_tx_total"] = int(result.tx_total)
                        success_samples["Success_can_be_lost"] = int(result.can_be_lost)
                        success_samples["Success_drop_total"] = int(result.drop_total)
                        success_samples["Success_RxThroughput"] = samples["RxThroughput"]
                        # Mpps -> Gbps: account for 20 bytes of per-frame
                        # overhead (preamble + IFG) and 8 bits per byte.
                        success_samples["Success_RxThroughput_gbps"] = \
                            (samples["RxThroughput"] / 1000) * ((pkt_size + 20)* 8)
                        LOG.info(">>>##>>Collect SUCCESS TG KPIs %s %s",
                                 datetime.datetime.now(), success_samples)
                        self.queue.put(success_samples, True, overall_constants.QUEUE_PUT_TIMEOUT)

                        # Store Actual throughput for result samples
                        actual_max_thruput = success_samples["Success_RxThroughput"]

                    pos_retry = pos_retry + 1

                else:
                    if (neg_retry < ok_retry) and (ok_retry != 0):
                        # Failure, but confirmation retries still pending.
                        pos_retry = 0
                        LOG.info("failure! ... confirm retry")
                    else:
                        LOG.info("Failure... Decreasing upper bound")
                        self.current_upper = test_value

                    neg_retry = neg_retry + 1
                    samples = result.get_samples(pkt_size, successful_pkt_loss, port_samples)

                # Track the highest transmitted throughput seen so far.
                if theor_max_thruput < samples["TxThroughput"]:
                    theor_max_thruput = samples['TxThroughput']
                    self.queue.put({'theor_max_throughput': theor_max_thruput})

                LOG.info(">>>##>>Collect TG KPIs %s %s", datetime.datetime.now(), samples)
                samples["MAX_Rate"] = self.current_upper
                samples["MIN_Rate"] = self.current_lower
                samples["Test_Rate"] = test_value
                samples["Step_Id"] = step_id
                samples["Confirmation_Retry"] = total_retry
                self.queue.put(samples, True, overall_constants.QUEUE_PUT_TIMEOUT)

        LOG.info(">>>##>> Result Reached PktSize %s Theor_Max_Thruput %s Actual_throughput %s",
                 pkt_size, theor_max_thruput, actual_max_thruput)
        result_samples["Result_pktSize"] = pkt_size
        result_samples["Result_theor_max_throughput"] = theor_max_thruput
        result_samples["Result_Actual_throughput"] = actual_max_thruput
        self.queue.put(result_samples)