1 ##############################################################################
2 # Copyright (c) 2016 Huawei Technologies Co.,Ltd.
4 # All rights reserved. This program and the accompanying materials
5 # are made available under the terms of the Apache License, Version 2.0
6 # which accompanies this distribution, and is available at
7 # http://www.apache.org/licenses/LICENSE-2.0
8 ##############################################################################
9 from __future__ import absolute_import
16 from oslo_serialization import jsonutils
18 from yardstick.benchmark.scenarios import base
# Module-level logger named after this module (standard logging convention).
LOG = logging.getLogger(__name__)
class StorPerf(base.Scenario):
    """Execute StorPerf benchmark.

    Once the StorPerf container has been started and the ReST API exposed,
    you can interact directly with it using the ReST API. StorPerf comes with a
    Swagger interface that is accessible through the exposed port at:
    http://StorPerf:5000/swagger/index.html

    Command line options:

    target = [device or path] (Optional):
    The path to either an attached storage device (/dev/vdb, etc) or a
    directory path (/opt/storperf) that will be used to execute the performance
    test. In the case of a device, the entire device will be used.
    If not specified, the current directory will be used.

    workload = [workload module] (Optional):
    If not specified, the default is to run all workloads.
    The workload types are:
       rs: 100% Read, sequential data
       ws: 100% Write, sequential data
       rr: 100% Read, random access
       wr: 100% Write, random access
       rw: 70% Read / 30% write, random access

    nossd (Optional):
    Do not perform SSD style preconditioning.

    nowarm (Optional):
    Do not perform a warmup prior to measurements.

    report = [job_id] (Optional):
    Query the status of the supplied job_id and report on metrics.
    If a workload is supplied, will report on only that subset.
    """
    __scenario_type__ = "StorPerf"

    def __init__(self, scenario_cfg, context_cfg):
        """Scenario construction."""
        super(StorPerf, self).__init__()
        self.scenario_cfg = scenario_cfg
        self.context_cfg = context_cfg
        self.options = self.scenario_cfg["options"]

        # IP of the node exposing the StorPerf ReST API (always on port 5000).
        self.target = self.options.get("StorPerf_ip", None)
        # Seconds to wait between polls of the stack/job status endpoints.
        self.query_interval = self.options.get("query_interval", 10)
        # Maximum allowed job time
        self.timeout = self.options.get('timeout', 3600)

        self.setup_done = False
        self.job_completed = False

    def _query_setup_state(self):
        """Query the stack status.

        Sets ``self.setup_done`` once the StorPerf configuration endpoint
        reports ``stack_created``.
        """
        LOG.info("Querying the stack state...")
        setup_query = requests.get('http://%s:5000/api/v1.0/configurations'
                                   % self.target)

        setup_query_content = jsonutils.loads(
            setup_query.content)
        if setup_query_content["stack_created"]:
            self.setup_done = True
            LOG.debug("stack_created: %s",
                      setup_query_content["stack_created"])

    def setup(self):
        """Set the configuration.

        Pushes the environment (stack) parameters to StorPerf and blocks
        until the stack has been created.

        Raises:
            RuntimeError: if StorPerf rejects the configuration request.
        """
        env_args = {}
        env_args_payload_list = ["agent_count", "agent_flavor",
                                 "public_network", "agent_image",
                                 "volume_size"]

        # Forward only the environment options the user actually supplied;
        # StorPerf applies its own defaults for any that are missing.
        for env_argument in env_args_payload_list:
            try:
                env_args[env_argument] = self.options[env_argument]
            except KeyError:
                pass

        LOG.info("Creating a stack on node %s with parameters %s",
                 self.target, env_args)
        setup_res = requests.post('http://%s:5000/api/v1.0/configurations'
                                  % self.target, json=env_args)

        setup_res_content = jsonutils.loads(
            setup_res.content)

        if setup_res.status_code != 200:
            # Single formatted message instead of a 2-tuple exception payload.
            raise RuntimeError("Failed to create a stack, error message: %s"
                               % setup_res_content["message"])

        LOG.info("stack_id: %s", setup_res_content["stack_id"])

        # Poll until the Heat stack reports "stack_created".
        while not self.setup_done:
            self._query_setup_state()
            time.sleep(self.query_interval)

    def _query_job_state(self, job_id):
        """Query the status of the supplied job_id and report on metrics.

        Sets ``self.job_completed`` when StorPerf reports the job status
        as "Completed".

        Raises:
            RuntimeError: if the status query fails.
        """
        LOG.info("Fetching report for %s...", job_id)
        report_res = requests.get('http://{}:5000/api/v1.0/jobs'.format
                                  (self.target),
                                  params={'id': job_id, 'type': 'status'})

        report_res_content = jsonutils.loads(
            report_res.content)

        if report_res.status_code != 200:
            raise RuntimeError("Failed to fetch report, error message: %s"
                               % report_res_content["message"])

        job_status = report_res_content["Status"]

        LOG.debug("Job is: %s...", job_status)
        self.job_completed = job_status == "Completed"

        # TODO: Support using StorPerf ReST API to read Job ETA.
        # if job_status == "completed":
        #     self.job_completed = True
        # elif job_status == "running":
        #     ETA = report_res_content['time']

    def run(self, result):
        """Execute StorPerf benchmark.

        Starts a job, polls it to completion, terminates the workload
        runner and merges the final metrics into ``result``.

        Raises:
            RuntimeError: if the job cannot be started or terminated.
        """
        if not self.setup_done:
            self.setup()

        metadata = {"build_tag": "latest",
                    "test_case": "opnfv_yardstick_tc074"}
        metadata_payload_dict = {"pod_name": "NODE_NAME",
                                 "scenario_name": "DEPLOY_SCENARIO",
                                 "version": "YARDSTICK_BRANCH"}

        # Enrich the job metadata from the CI environment when available.
        for key, value in metadata_payload_dict.items():
            try:
                metadata[key] = os.environ[value]
            except KeyError:
                pass

        job_args = {"metadata": metadata}
        job_args_payload_list = ["block_sizes", "queue_depths", "deadline",
                                 "target", "nossd", "nowarm", "workload"]

        # Forward only the job options the user actually supplied.
        for job_argument in job_args_payload_list:
            try:
                job_args[job_argument] = self.options[job_argument]
            except KeyError:
                pass

        LOG.info("Starting a job with parameters %s", job_args)
        job_res = requests.post('http://%s:5000/api/v1.0/jobs' % self.target,
                                json=job_args)

        job_res_content = jsonutils.loads(job_res.content)

        if job_res.status_code != 200:
            raise RuntimeError("Failed to start a job, error message: %s"
                               % job_res_content["message"])

        job_id = job_res_content["job_id"]
        LOG.info("Started job id: %s...", job_id)

        # Poll until StorPerf reports the job "Completed".
        while not self.job_completed:
            self._query_job_state(job_id)
            time.sleep(self.query_interval)

        terminate_res = requests.delete('http://%s:5000/api/v1.0/jobs' %
                                        self.target)

        if terminate_res.status_code != 200:
            terminate_res_content = jsonutils.loads(
                terminate_res.content)
            # Fixed copy-paste bug: this path reports a *terminate* failure,
            # not a start failure.
            raise RuntimeError("Failed to terminate a job, error message: %s"
                               % terminate_res_content["message"])

        # TODO: Support using ETA to poll for completion.
        # Read ETA, next poll in 1/2 ETA time slot.
        # If ETA is greater than the maximum allowed job time,
        # then terminate job immediately.

        # while not self.job_completed:
        #     esti_time = self._query_state(job_id)
        #     if esti_time > self.timeout:
        #         terminate_res = requests.delete('http://%s:5000/api/v1.0
        #                                         /jobs' % self.target)
        #     else:
        #         time.sleep(int(esti_time)/2)

        result_res = requests.get('http://%s:5000/api/v1.0/jobs?id=%s' %
                                  (self.target, job_id))
        result_res_content = jsonutils.loads(
            result_res.content)

        result.update(result_res_content)

    def teardown(self):
        """Deletes the agent configuration and the stack."""
        # Plain literal instead of a backslash continuation inside the
        # string; resulting URL is unchanged.
        teardown_res = requests.delete(
            'http://%s:5000/api/v1.0/configurations' % self.target)

        if teardown_res.status_code == 400:
            teardown_res_content = jsonutils.loads(
                teardown_res.content)
            raise RuntimeError("Failed to reset environment, error message: %s"
                               % teardown_res_content['message'])

        self.setup_done = False