#!/usr/bin/env python
##############################################################################
# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
'''This file implements the testpmd scale-up testcase.
It contains two parts: the first runs the test script,
and the second is the algorithm that judges where the bottleneck lies.
The test uses Yardstick as the underlying test tool.'''

import os
import sys
import time
import uuid
import json
import utils.logger as log
from utils.parser import Parser as conf_parser
import utils.env_prepare.stack_prepare as stack_prepare
import utils.infra_setup.runner.docker_env as docker_env
import utils.infra_setup.runner.yardstick as yardstick_task

# --------------------------------------------------
# logging configuration
# --------------------------------------------------
LOG = log.Logger(__name__).getLogger()

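# --------------------------------------------------
# module configuration
# --------------------------------------------------
# "cidr" is the path, inside the Yardstick container, of the Yardstick
# testcase file that this script drives; despite its name it is a file
# path, not a network CIDR.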
testfile = os.path.basename(__file__)
testcase, file_format = os.path.splitext(testfile)
cidr = "/home/opnfv/repos/yardstick/samples/pvp_throughput_bottlenecks.yaml"
runner_DEBUG = True


def env_pre(con_dic):
    LOG.info("Preparing Yardstick environment!")
    stack_prepare._prepare_env_daemon(True)


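# A sketch of the Yardstick output consumed below; the field names come
# from the parsing in config_to_result(), the values are illustrative:
# {"status": 1,
#  "result": {"testcases": {"pvp_throughput_bottlenecks": {"tc_data": [
#      {"data": {"nrFlows": 2, "packet_size": 64,
#                "throughput_rx_mbps": 1000.0}}]}}}}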
def config_to_result(test_config, test_result):
    final_data = []
    LOG.debug(test_result)
    out_data = test_result["result"]["testcases"]
    test_data = out_data["pvp_throughput_bottlenecks"]["tc_data"]
    for result in test_data:
        testdata = {}
        testdata["vcpu"] = test_config["vcpu"]
        testdata["memory"] = test_config["memory"]
        testdata["nrFlows"] = result["data"]["nrFlows"]
        testdata["packet_size"] = result["data"]["packet_size"]
        testdata["throughput"] = result["data"]["throughput_rx_mbps"]
        final_data.append(testdata)
    return final_data


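# Build the Yardstick CLI command that runs the testcase file pointed to
# by "cidr" and writes the JSON status/result to out_file.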
def testcase_parser(out_file="yardstick.out", **parameter_info):
    cmd = yardstick_task.yardstick_command_parser(debug=runner_DEBUG,
                                                  cidr=cidr,
                                                  outfile=out_file,
                                                  parameter=parameter_info)
    return cmd


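# Run one scale-up iteration: execute the Yardstick testcase inside the
# Yardstick container, poll the shared out_file for completion (status 1
# means success, 2 means failure), then convert the raw result.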
def do_test(test_config, Use_Dashboard, context_conf):
    yardstick_container = docker_env.yardstick_info['container']
    out_file = ("/tmp/yardstick_" + str(uuid.uuid4()) + ".out")
    cmd = testcase_parser(out_file=out_file, **test_config)
    LOG.info(cmd)
    stdout = docker_env.docker_exec_cmd(yardstick_container, cmd)
    LOG.info(stdout)
    loop_value = 0
    while loop_value < 60:
        time.sleep(2)
        loop_value += 1
        with open(out_file) as f:
            data = json.load(f)
            if data["status"] == 1:
                LOG.info("yardstick run success")
                break
            elif data["status"] == 2:
                LOG.error("yardstick error exit")
                sys.exit(1)

    save_data = config_to_result(test_config, data)
    if Use_Dashboard:
        LOG.info("use dashboard")
        # DashBoard.dashboard_send_data(context_conf, save_data)

    return save_data


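# Entry point called by the Bottlenecks framework with the parsed
# testcase configuration ("load_manager" and "contexts" sections).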
def run(test_config):
    load_config = test_config["load_manager"]
    scenarios_conf = load_config["scenarios"]
    Use_Dashboard = False

    env_pre(None)
    if test_config["contexts"]["yardstick_ip"] is None:
        test_config["contexts"]["yardstick_ip"] = \
            conf_parser.ip_parser("yardstick_test_ip")

    if "dashboard" in test_config["contexts"]:
        if test_config["contexts"]["dashboard_ip"] is None:
            test_config["contexts"]["dashboard_ip"] = \
                conf_parser.ip_parser("dashboard")
        LOG.info("Create Dashboard data")
        Use_Dashboard = True
        # DashBoard.dashboard_system_bandwidth(test_config["contexts"])

    cpus = conf_parser.str_to_list(scenarios_conf["cpus"])
    mems = conf_parser.str_to_list(scenarios_conf["mems"])
    pkt_size = conf_parser.str_to_list(scenarios_conf["pkt_size"])
    multistream = conf_parser.str_to_list(scenarios_conf["multistream"])
    search_interval = scenarios_conf["search_interval"]

    load_config["result_file"] = os.path.dirname(
        os.path.abspath(__file__)) + "/test_case/result"

    if len(cpus) != len(mems):
        LOG.error("the cpus and mems config lists must have the same length!")
        sys.exit(1)

    result = []

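    # Each (vcpu, memory) pair defines one scale-up step; mems values
    # appear to be given in GB and converted to MB for Yardstick here
    # (an assumption based on the "* 1024" conversion below).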
    for i in range(len(cpus)):
        case_config = {"vcpu": cpus[i],
                       "memory": int(mems[i]) * 1024,
                       "multistreams": multistream,
                       "pktsize": pkt_size,
                       "search_interval": search_interval}

        data_reply = do_test(case_config, Use_Dashboard,
                             test_config["contexts"])
        result.append(data_reply)

    LOG.info("Finished bottlenecks testcase")
    LOG.info("The result data is %s", result)
    return result