1 # Copyright (c) 2016-2017 Intel Corporation
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
7 # http://www.apache.org/licenses/LICENSE-2.0
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14 """ Fixed traffic profile definitions """
from __future__ import absolute_import

import datetime
import logging
import time

from yardstick.network_services import constants
from yardstick.network_services.traffic_profile.prox_profile import ProxProfile
# Module-level logger; also handed to bounds_iterator() to trace the search.
LOG = logging.getLogger(__name__)
class ProxBinSearchProfile(ProxProfile):
    """Binary-search throughput profile for PROX traffic generators.

    This profile adds a single stream at the beginning of the traffic session
    and binary-searches the traffic rate between ``lower_bound`` and
    ``upper_bound`` (inherited from :class:`ProxProfile`) until the search
    interval is narrower than ``precision``.  KPI samples for every
    iteration, plus one-time and final result samples, are pushed onto
    ``self.queue`` for collection (e.g. by the influxdb dispatcher).
    """

    def __init__(self, tp_config):
        super(ProxBinSearchProfile, self).__init__(tp_config)
        # Current search interval; starts at the configured bounds and is
        # narrowed by run_test_with_pkt_size() as tests succeed or fail.
        self.current_lower = self.lower_bound
        self.current_upper = self.upper_bound

    @property
    def delta(self):
        """Width of the current search interval."""
        return self.current_upper - self.current_lower

    @property
    def mid_point(self):
        """Midpoint of the current search interval (next value to test)."""
        return (self.current_lower + self.current_upper) / 2

    def bounds_iterator(self, logger=None):
        """Yield successive test values for the binary search.

        The first value yielded is the upper bound itself: if that test
        succeeds, no further searching is needed.  Iteration stops once the
        interval width drops below ``self.precision``.

        :param logger: optional logger used to trace each iteration
        """
        self.current_lower = self.lower_bound
        self.current_upper = self.upper_bound

        test_value = self.current_upper
        while abs(self.delta) >= self.precision:
            # logger defaults to None, so guard before using it.
            if logger:
                logger.debug("New interval [%s, %s), precision: %d",
                             self.current_lower, self.current_upper,
                             self.step_value)
                logger.info("Testing with value %s", test_value)

            yield test_value
            test_value = self.mid_point

    def run_test_with_pkt_size(self, traffic_gen, pkt_size, duration):
        """Run the binary-search test for a single packet size.

        :param traffic_gen: traffic generator instance
        :type traffic_gen: TrafficGen
        :param pkt_size: The packet size to test with.
        :type pkt_size: int
        :param duration: The duration for each try.
        :type duration: int
        """

        LOG.info("Testing with packet size %d", pkt_size)

        # Binary search assumes the lower value of the interval is
        # successful and the upper value is a failure.
        # The first value that is tested, is the maximum value. If that
        # succeeds, no more searching is needed. If it fails, a regular
        # binary search is performed.
        #
        # The test_value used for the first iteration of binary search
        # is adjusted so that the delta between this test_value and the
        # upper bound is a power-of-2 multiple of precision. In the
        # optimistic situation where this first test_value results in a
        # success, the binary search will complete on an integer multiple
        # of the precision, rather than on a fraction of it.

        # Highest TxThroughput observed across all iterations (pps).
        theor_max_thruput = 0
        # Final per-packet-size result record, filled in as we go.
        result_samples = {}

        # Store one time only value in influxdb
        single_samples = {
            "test_duration": traffic_gen.scenario_helper.scenario_cfg["runner"]["duration"],
            "test_precision": self.params["traffic_profile"]["test_precision"],
            "tolerated_loss": self.params["traffic_profile"]["tolerated_loss"],
            "duration": duration,
        }
        self.queue.put(single_samples)
        self.prev_time = time.time()

        # throughput and packet loss from the most recent successful test
        successful_pkt_loss = 0.0
        line_speed = traffic_gen.scenario_helper.all_options.get(
            "interface_speed_gbps", constants.NIC_GBPS_DEFAULT) * constants.ONE_GIGABIT_IN_BITS
        for test_value in self.bounds_iterator(LOG):
            result, port_samples = self._profile_helper.run_test(
                pkt_size, duration, test_value, self.tolerated_loss, line_speed)
            # Elapsed wall-clock time of this iteration, used to turn
            # cumulative counters into per-second rates.
            self.curr_time = time.time()
            diff_time = self.curr_time - self.prev_time
            self.prev_time = self.curr_time

            if result.success:
                LOG.debug("Success! Increasing lower bound")
                self.current_lower = test_value
                successful_pkt_loss = result.pkt_loss
                samples = result.get_samples(pkt_size, successful_pkt_loss, port_samples)
                # Convert throughput to pps for storage.
                samples["TxThroughput"] = samples["TxThroughput"] * 1000 * 1000

                # store results with success tag in influxdb
                success_samples = {'Success_' + key: value
                                   for key, value in samples.items()}

                # Cumulative counters -> per-second rates for this interval.
                success_samples["Success_rx_total"] = int(result.rx_total / diff_time)
                success_samples["Success_tx_total"] = int(result.tx_total / diff_time)
                success_samples["Success_can_be_lost"] = int(result.can_be_lost / diff_time)
                success_samples["Success_drop_total"] = int(result.drop_total / diff_time)
                self.queue.put(success_samples)

                # Store Actual throughput for result samples
                result_samples["Result_Actual_throughput"] = \
                    success_samples["Success_RxThroughput"]
            else:
                LOG.debug("Failure... Decreasing upper bound")
                self.current_upper = test_value
                # Report the loss of the last *successful* run with the
                # failed iteration's port samples.
                samples = result.get_samples(pkt_size, successful_pkt_loss, port_samples)

            # Normalise nested (per-port) counters to per-second rates too.
            for key in samples:
                nested = samples[key]
                if isinstance(nested, dict):
                    for inner_key in nested:
                        nested[inner_key] = int(nested[inner_key] / diff_time)

            # Track and publish the best transmit throughput seen so far.
            if theor_max_thruput < samples["TxThroughput"]:
                theor_max_thruput = samples['TxThroughput']
                self.queue.put({'theor_max_throughput': theor_max_thruput})

            LOG.debug("Collect TG KPIs %s %s", datetime.datetime.now(), samples)
            self.queue.put(samples)

        result_samples["Result_pktSize"] = pkt_size
        # Report the theoretical maximum back in Mpps.
        result_samples["Result_theor_max_throughput"] = theor_max_thruput / (1000 * 1000)
        self.queue.put(result_samples)