# NOTE(review): this file is a condensed diff view of rapid_test.py — lines
# beginning with '+'/'-' are patch additions/removals and interior context is
# elided. Module-level imports and constants below.
import yaml
import requests
import time
+import os
import copy
from past.utils import old_div
from rapid_log import RapidLog
# inf acts as the "no upper bound" sentinel (used as the default for maxz).
inf = float("inf")
from datetime import datetime as dt
# Hunk: _CURR_DIR = absolute directory containing this module; added so data
# files (format.yaml) can be resolved independently of the process CWD.
+_CURR_DIR = os.path.dirname(os.path.realpath(__file__))
+
# NOTE(review): fragments of the RapidTest class header, __init__ and
# parse_data_format_dict — interior lines are elided; only visible hunks are
# annotated.
class RapidTest(object):
"""
Class to manage the testing
self.test['maxr'] = 1
if 'maxz' not in self.test.keys():
self.test['maxz'] = inf
# Hunk: open format.yaml relative to this module's own directory (_CURR_DIR,
# defined at top of file) instead of the current working directory, so the
# template loads no matter where the process was started from.
- with open('format.yaml') as f:
+ with open(os.path.join(_CURR_DIR,'format.yaml')) as f:
self.data_format = yaml.load(f, Loader=yaml.FullLoader)
@staticmethod
# Fragment of parse_data_format_dict: substitutes placeholder values `v` in
# the template with entries from `variables` — def line elided from view;
# presumably walks the dict recursively (TODO confirm against full source).
if v in variables.keys():
data_format[k] = variables[v]
def post_data(self, variables):
    """Publish one result record for this test to the configured collector.

    The record key is derived from the concrete class name
    (type(self).__name__), so each RapidTest subclass posts under its own
    section of the data-format template — callers no longer pass a test-name
    string (interface per the '+' side of the hunk).

    Args:
        variables: dict of measurement values substituted into a deep copy
            of self.data_format by parse_data_format_dict().

    Returns:
        The substituted record for this test type, i.e. var[test_type].
        NOTE(review): raises KeyError when the template has no section named
        after the test class — same as the original.
    """
    test_type = type(self).__name__
    var = copy.deepcopy(self.data_format)
    self.parse_data_format_dict(var, variables)
    # Only POST when the template defines a target URL, a section for this
    # test type, and an output format.
    if var.keys() >= {'URL', test_type, 'Format'}:
        URL = ''.join(var['URL'].values())
        HEADERS = {'X-Requested-With': 'Python requests',
                'Content-type': 'application/rapid'}
        # FIX: response was unbound (NameError at the status check below)
        # whenever Format matched neither recognized value.
        response = None
        if var['Format'] == 'PushGateway':
            # PushGateway expects newline-terminated "<metric> <value>" lines.
            data = "\n".join("{} {}".format(k, v)
                for k, v in var[test_type].items()) + "\n"
            response = requests.post(url=URL, data=data, headers=HEADERS)
        elif var['Format'] == 'Xtesting':
            data = var[test_type]
            response = requests.post(url=URL, json=data)
        # FIX: only inspect the response when a POST was actually issued.
        if response is not None and response.status_code >= 300:
            RapidLog.info('Cannot send metrics to {}'.format(URL))
            RapidLog.info(data)
    return (var[test_type])
@staticmethod
def report_result(flow_number, size, data, prefix):
# Fragment: builds one fixed-width result-table row. The *_str variables and
# colour prefixes referenced below are assembled in elided lines above.
elapsed_time_str = ' NA |'
else:
elapsed_time_str = '{:>3.0f} |'.format(data['actual_duration'])
# Hunk: new mis_ordered column; rendered as 'NA' when no packet-reordering
# data was collected (data['mis_ordered'] is None).
+ if data['mis_ordered'] is None:
+ mis_ordered_str = ' NA '
+ else:
+ mis_ordered_str = '{:>9.0f} '.format(data['mis_ordered'])
# NOTE(review): in this condensed view a leading '+' may be EITHER a diff
# addition or a string-concatenation continuation of the return expression —
# resolve each against the full source before editing.
return(flow_number_str + '{:>5.1f}'.format(data['speed']) + '% ' + prefix['speed']
+ '{:>6.3f}'.format(RapidTest.get_pps(data['speed'],size)) + ' Mpps|' +
pps_req_tx_str + pps_tx_str + bcolors.ENDC + pps_sut_tx_str +
+ ' us | ' + '{:>9.0f}'.format(data['abs_tx']) + ' | {:>9.0f}'.format(data['abs_rx']) +
' | '+ prefix['abs_drop_rate']+ '{:>9.0f}'.format(data['abs_tx']-data['abs_rx']) +
tot_drop_str + prefix['drop_rate'] +
# Hunk: drop-rate cell now ends with ' |' and the mis_ordered cell (with its
# own colour prefix) is inserted before bcolors.ENDC.
- '{:>5.2f}'.format(100*old_div(float(data['abs_tx']-data['abs_rx']),data['abs_tx'])) + bcolors.ENDC +
+ '{:>5.2f}'.format(100*old_div(float(data['abs_tx']-data['abs_rx']),data['abs_tx'])) + ' |' +
+ prefix['mis_ordered'] + mis_ordered_str + bcolors.ENDC +
' |' + elapsed_time_str)
def run_iteration(self, requested_duration, flow_number, size, speed):
BUCKET_SIZE_EXP = self.gen_machine.bucket_size_exp
+ sleep_time = self.test['sleep_time']
LAT_PERCENTILE = self.test['lat_percentile']
iteration_data= {}
time_loop_data= {}
iteration_data['r'] = 0;
- sleep_time = 2
+
while (iteration_data['r'] < self.test['maxr']):
self.gen_machine.start_latency_cores()
time.sleep(sleep_time)
t1_dp_tx = t1_tx - t1_non_dp_tx
self.gen_machine.set_generator_speed(0)
self.gen_machine.start_gen_cores()
- if self.background_machines:
- self.set_background_speed(self.background_machines, 0)
- self.start_background_traffic(self.background_machines)
+ self.set_background_speed(self.background_machines, 0)
+ self.start_background_traffic(self.background_machines)
if 'ramp_step' in self.test.keys():
ramp_speed = self.test['ramp_step']
else:
ramp_speed = speed
while ramp_speed < speed:
self.gen_machine.set_generator_speed(ramp_speed)
- if self.background_machines:
- self.set_background_speed(self.background_machines, ramp_speed)
+ self.set_background_speed(self.background_machines, ramp_speed)
time.sleep(2)
ramp_speed = ramp_speed + self.test['ramp_step']
self.gen_machine.set_generator_speed(speed)
- if self.background_machines:
- self.set_background_speed(self.background_machines, speed)
- iteration_data['speed'] = time_loop_data['speed'] = speed
+ self.set_background_speed(self.background_machines, speed)
+ iteration_data['speed'] = speed
+ time_loop_data['speed'] = speed
time.sleep(2) ## Needs to be 2 seconds since this 1 sec is the time that PROX uses to refresh the stats. Note that this can be changed in PROX!! Don't do it.
start_bg_gen_stats = []
for bg_gen_machine in self.background_machines:
if self.sut_machine!=None:
t3_sut_rx, t3_sut_non_dp_rx, t3_sut_tx, t3_sut_non_dp_tx, t3_sut_drop, t3_sut_tx_fail, t3_sut_tsc, sut_tsc_hz = self.sut_machine.core_stats()
if t3_sut_tsc != t2_sut_tsc:
- single_sut_core_measurement_duration = (t3_sut_tsc - t2_sut_tsc) * 1.0 / tsc_hz # time difference between the 2 measurements, expressed in seconds.
+ single_sut_core_measurement_duration = (t3_sut_tsc - t2_sut_tsc) * 1.0 / sut_tsc_hz # time difference between the 2 measurements, expressed in seconds.
tot_sut_core_measurement_duration = tot_sut_core_measurement_duration + single_sut_core_measurement_duration
tot_sut_rx += t3_sut_rx - t2_sut_rx
tot_sut_non_dp_rx += t3_sut_non_dp_rx - t2_sut_non_dp_rx
time_loop_data['Flows'] = flow_number
time_loop_data['Size'] = size
time_loop_data['RequestedSpeed'] = RapidTest.get_pps(speed, size)
- _ = self.post_data('rapid_flowsizetest', time_loop_data)
+ _ = self.post_data(time_loop_data)
end_bg_gen_stats = []
for bg_gen_machine in self.background_machines:
bg_rx, bg_non_dp_rx, bg_tx, bg_non_dp_tx, _, _, bg_tsc, bg_hz = bg_gen_machine.core_stats()
"bg_hz" : bg_hz
}
end_bg_gen_stats.append(dict(bg_gen_stat))
- if self.background_machines:
- self.stop_background_traffic(self.background_machines)
+ self.stop_background_traffic(self.background_machines)
i = 0
bg_rates =[]
while i < len(end_bg_gen_stats):
iteration_data['avg_bg_rate'] = None
#Stop generating
self.gen_machine.stop_gen_cores()
+ time.sleep(3.5)
+ self.gen_machine.stop_latency_cores()
iteration_data['r'] += 1
iteration_data['lat_avg'] = old_div(iteration_data['lat_avg'], float(tot_lat_measurement_duration))
iteration_data['lat_used'] = old_div(iteration_data['lat_used'], float(tot_lat_measurement_duration))