Add life-cycle throughput testcase
testsuites/posca/testcase_script/posca_factor_system_bandwidth.py
index 488f36d..9d8b0ec 100644
@@ -14,9 +14,13 @@ This test is using yardstick as a tool to begin test.'''
 
 import os
 import time
+import sys
+import uuid
+import json
 import utils.logger as log
-import utils.infra_setup.runner.yardstick as Runner
 from utils.parser import Parser as conf_parser
+import utils.env_prepare.stack_prepare as stack_prepare
+import testsuites.posca.testcase_dashboard.system_bandwidth as DashBoard
+import utils.infra_setup.runner.docker_env as docker_env
 # --------------------------------------------------
 # logging configuration
 # --------------------------------------------------
@@ -31,68 +35,122 @@ test_dict = {
         "testcase": "netperf_bottlenecks"
     }
 }
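+# derive the testcase name (reported with each result) from this file's name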
+testfile = os.path.basename(__file__)
+testcase, file_format = os.path.splitext(testfile)
 
 
-def env_pre():
-    Runner.Create_Incluxdb()
+def env_pre(con_dic):
+    LOG.info("yardstick environment prepare!")
+    stack_prepare._prepare_env_daemon(True)
 
 
-def do_test(test_config, con_dic):
-    test_dict['args']['opts']['task-args'] = test_config
-    Task_id = Runner.Send_Data(test_dict, con_dic['runner_config'])
-    time.sleep(con_dic['test_config']['test_time'])
-    Data_Reply = Runner.Get_Reply(con_dic['runner_config'], Task_id)
-    try:
-        test_date =\
-            Data_Reply[con_dic['runner_config']['yardstick_testcase']][0]
-    except IndexError:
-        test_date = do_test(test_config, con_dic)
-    return test_date
+def config_to_result(test_config, test_result):
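+    # flatten yardstick's nested benchmark data and the input config into a
+    # single record, coercing numeric fields to float for later comparisons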
+    testdata = {}
+    parser_result = test_result["benchmark"]["data"]
+    test_result.update(test_config)
+    test_result.update(parser_result)
+    test_result["throughput"] = float(test_result["throughput"])
+    test_result["remote_cpu_util"] = float(test_result["remote_cpu_util"])
+    test_result["local_cpu_util"] = float(test_result["local_cpu_util"])
+    test_result["mean_latency"] = float(test_result["mean_latency"])
+    testdata["data_body"] = test_result
+    testdata["testcase"] = testcase
+    return testdata
 
 
-def run(con_dic):
+def testcase_parser(out_file="yardstick.out", **parameter_info):
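+    # assemble the yardstick CLI call; parameter_info is passed verbatim as
+    # --task-args so the sample yaml can template tx/rx sizes and test time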
+    cmd = ('yardstick task start /home/opnfv/repos/yardstick/'
+           'samples/netperf_bottlenecks.yaml --output-file ' + out_file)
+    cmd = cmd + " --task-args " + '"' + str(parameter_info) + '"'
+    LOG.info("yardstick test cmd is: %s" % cmd)
+    return cmd
+
+
+def do_test(test_config, Use_Dashboard, context_conf):
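+    # run one netperf case inside the yardstick container, writing results
+    # to a uniquely named output file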
+    yardstick_container = docker_env.yardstick_info['container']
+    out_file = ("/tmp/yardstick_" + str(uuid.uuid4()) + ".out")
+    cmd = testcase_parser(out_file=out_file, **test_config)
+    stdout = docker_env.docker_exec_cmd(yardstick_container, cmd)
+    LOG.info(stdout)
+    loop_value = 0
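+    # poll the output file for up to ~120 s (60 x 2 s); reading it here
+    # assumes /tmp is shared with the yardstick container via a volume mount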
+    while loop_value < 60:
+        time.sleep(2)
+        loop_value = loop_value + 1
+        with open(out_file) as f:
+            data = json.load(f)
+            if data["status"] == 1:
+                LOG.info("Success run yardstick netperf_bottlenecks test!")
+                break
+            elif data["status"] == 2:
+                LOG.error("Failed to run yardstick netperf_bottlenecks test!")
+                sys.exit(1)
+
+    save_data = config_to_result(test_config, data['result'][1])
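+    # optionally push the flattened result to the dashboard backend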
+    if Use_Dashboard:
+        DashBoard.dashboard_send_data(context_conf, save_data)
+
+    return save_data["data_body"]
+
+
+def run(test_config):
+    con_dic = test_config["load_manager"]
+    Use_Dashboard = False
+    env_pre(None)
+    if test_config["contexts"]["yardstick_ip"] is None:
+        test_config["contexts"]["yardstick_ip"] =\
+            conf_parser.ip_parser("yardstick_test_ip")
+
+    if "dashboard" in test_config["contexts"].keys():
+        if test_config["contexts"]["dashboard_ip"] is None:
+            test_config["contexts"]["dashboard_ip"] =\
+                conf_parser.ip_parser("dashboard")
+        LOG.info("Create Dashboard data")
+        Use_Dashboard = True
+        DashBoard.dashboard_system_bandwidth(test_config["contexts"])
+
     data = {}
-    rx_pkt_a = con_dic['test_config']['rx_pkt_sizes'].split(',')
-    tx_pkt_a = con_dic['test_config']['tx_pkt_sizes'].split(',')
+    rx_pkt_a = con_dic['scenarios']['rx_pkt_sizes'].split(',')
+    tx_pkt_a = con_dic['scenarios']['tx_pkt_sizes'].split(',')
     data["rx_pkt_sizes"] = rx_pkt_a
     data["tx_pkt_sizes"] = tx_pkt_a
     con_dic["result_file"] = os.path.dirname(
         os.path.abspath(__file__)) + "/test_case/result"
-    date_id = 0
     cur_role_result = 1
     pre_role_result = 1
     pre_reply = {}
     data_return = {}
     data_max = {}
     data_return["throughput"] = 1
-    if con_dic["runner_config"]["yardstick_test_ip"] is None:
-        con_dic["runner_config"]["yardstick_test_ip"] =\
-            conf_parser.ip_parser("yardstick_test_ip")
+
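+    # scan tx sizes (outer loop) and rx sizes (inner loop), breaking early
+    # once throughput saturates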
     for test_x in data["tx_pkt_sizes"]:
         data_max["throughput"] = 1
         bandwidth_tmp = 1
         for test_y in data["rx_pkt_sizes"]:
-            test_config = {
+            case_config = {
                 "tx_msg_size": float(test_x),
                 "rx_msg_size": float(test_y),
-                "test_time": con_dic['test_config']['test_time']
+                "test_time": con_dic['scenarios']['test_times'],
+                "pod_info": conf_parser.bottlenecks_config["pod_info"]
             }
-            date_id = date_id + 1
-            data_reply = do_test(test_config, con_dic)
-            bandwidth = float(data_reply["throughput"])
+            data_reply = do_test(case_config, Use_Dashboard,
+                                 test_config["contexts"])
+
+            conf_parser.result_to_file(data_reply, test_config["out_file"])
+            bandwidth = data_reply["throughput"]
             if (data_max["throughput"] < bandwidth):
                 data_max = data_reply
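+            # a gain below 2.5% means this rx-size scan has converged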
             if (abs(bandwidth_tmp - bandwidth) / bandwidth_tmp < 0.025):
-                print(pre_reply)
+                LOG.info("this group of data has reached top output")
                 break
             else:
                 pre_reply = data_reply
                 bandwidth_tmp = bandwidth
         cur_role_result = float(pre_reply["throughput"])
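+        # a <2.5% gain over the previous tx-size round signals saturation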
         if (abs(pre_role_result - cur_role_result) / pre_role_result < 0.025):
-            print("date_id is %d,package return at line 111\n" % date_id)
+            LOG.info("The performance increases slowly")
         if data_return["throughput"] < data_max["throughput"]:
             data_return = data_max
         pre_role_result = cur_role_result
-    print("date_id is %d,id return success\n" % date_id)
+    LOG.info("Find bottlenecks of this config")
+    LOG.info("The max data is %d", data_return["throughput"])
     return data_return