1 ##############################################################################
2 # Copyright (c) 2016 Huawei Technologies Co.,Ltd.
4 # All rights reserved. This program and the accompanying materials
5 # are made available under the terms of the Apache License, Version 2.0
6 # which accompanies this distribution, and is available at
7 # http://www.apache.org/licenses/LICENSE-2.0
8 ##############################################################################
9 from __future__ import absolute_import
15 from oslo_serialization import jsonutils
18 from yardstick.benchmark.scenarios import base
21 LOG = logging.getLogger(__name__)
class StorPerf(base.Scenario):
    """Execute StorPerf benchmark.

    Once the StorPerf container has been started and the ReST API exposed,
    you can interact directly with it using the ReST API. StorPerf comes with a
    Swagger interface that is accessible through the exposed port at:
    http://StorPerf:5000/swagger/index.html

    target = [device or path] (Optional):
    The path to either an attached storage device (/dev/vdb, etc) or a
    directory path (/opt/storperf) that will be used to execute the performance
    test. In the case of a device, the entire device will be used.
    If not specified, the current directory will be used.

    workload = [workload module] (Optional):
    If not specified, the default is to run all workloads.
    The workload types are:
        rs: 100% Read, sequential data
        ws: 100% Write, sequential data
        rr: 100% Read, random access
        wr: 100% Write, random access
        rw: 70% Read / 30% write, random access

    report = [job_id] (Optional):
    Query the status of the supplied job_id and report on metrics.
    If a workload is supplied, will report on only that subset.
    """

    # Key that yardstick's scenario loader matches against the test-case
    # "type" field to dispatch to this class.
    __scenario_type__ = "StorPerf"
54 def __init__(self, scenario_cfg, context_cfg):
55 """Scenario construction."""
56 super(StorPerf, self).__init__()
57 self.scenario_cfg = scenario_cfg
58 self.context_cfg = context_cfg
59 self.options = self.scenario_cfg["options"]
61 self.target = self.options.get("StorPerf_ip", None)
62 self.query_interval = self.options.get("query_interval", 10)
63 # Maximum allowed job time
64 self.timeout = self.options.get('timeout', 3600)
66 self.setup_done = False
67 self.job_completed = False
69 def _query_setup_state(self):
70 """Query the stack status."""
71 LOG.info("Querying the stack state...")
72 setup_query = requests.get('http://%s:5000/api/v1.0/configurations'
75 setup_query_content = jsonutils.loads(
77 if ("stack_created" in setup_query_content and
78 setup_query_content["stack_created"]):
79 LOG.debug("stack_created: %s",
80 setup_query_content["stack_created"])
86 """Set the configuration."""
88 env_args_payload_list = ["agent_count", "agent_flavor",
89 "public_network", "agent_image",
90 "volume_size", "volume_type",
91 "volume_count", "availability_zone",
92 "stack_name", "subnet_CIDR"]
94 for env_argument in env_args_payload_list:
96 env_args[env_argument] = self.options[env_argument]
100 LOG.info("Creating a stack on node %s with parameters %s",
101 self.target, env_args)
102 setup_res = requests.post('http://%s:5000/api/v1.0/configurations'
103 % self.target, json=env_args)
106 if setup_res.status_code != 200:
107 LOG.error("Failed to create stack. %s: %s",
108 setup_res.status_code, setup_res.content)
109 raise RuntimeError("Failed to create stack. %s: %s" %
110 (setup_res.status_code, setup_res.content))
111 elif setup_res.status_code == 200:
112 setup_res_content = jsonutils.loads(setup_res.content)
113 LOG.info("stack_id: %s", setup_res_content["stack_id"])
115 while not self._query_setup_state():
116 time.sleep(self.query_interval)
118 # We do not want to load the results of the disk initialization,
119 # so it is not added to the results here.
120 self.initialize_disks()
121 self.setup_done = True
123 def _query_job_state(self, job_id):
124 """Query the status of the supplied job_id and report on metrics"""
125 LOG.info("Fetching report for %s...", job_id)
126 report_res = requests.get('http://%s:5000/api/v1.0/jobs' % self.target,
127 params={'id': job_id, 'type': 'status'})
129 report_res_content = jsonutils.loads(
132 if report_res.status_code != 200:
133 LOG.error("Failed to fetch report, error message: %s",
134 report_res_content["message"])
135 raise RuntimeError("Failed to fetch report, error message:",
136 report_res_content["message"])
138 job_status = report_res_content["Status"]
140 LOG.debug("Job is: %s...", job_status)
141 self.job_completed = job_status == "Completed"
143 # TODO: Support using StorPerf ReST API to read Job ETA.
145 # if job_status == "completed":
146 # self.job_completed = True
148 # elif job_status == "running":
149 # ETA = report_res_content['time']
153 def run(self, result):
154 """Execute StorPerf benchmark"""
155 if not self.setup_done:
158 metadata = {"build_tag": "latest",
159 "test_case": "opnfv_yardstick_tc074"}
160 metadata_payload_dict = {"pod_name": "NODE_NAME",
161 "scenario_name": "DEPLOY_SCENARIO",
162 "version": "YARDSTICK_BRANCH"}
164 for key, value in metadata_payload_dict.items():
166 metadata[key] = os.environ[value]
170 job_args = {"metadata": metadata}
171 job_args_payload_list = ["block_sizes", "queue_depths", "deadline",
172 "target", "workload", "workloads",
173 "agent_count", "steady_state_samples"]
174 job_args["deadline"] = self.options["timeout"]
176 for job_argument in job_args_payload_list:
178 job_args[job_argument] = self.options[job_argument]
184 if ("workloads" in job_args and
185 job_args["workloads"] is not None and
186 len(job_args["workloads"])) > 0:
189 LOG.info("Starting a job with parameters %s", job_args)
190 job_res = requests.post('http://%s:5000/api/%s/jobs' % (self.target,
191 api_version), json=job_args)
193 if job_res.status_code != 200:
194 LOG.error("Failed to start job. %s: %s",
195 job_res.status_code, job_res.content)
196 raise RuntimeError("Failed to start job. %s: %s" %
197 (job_res.status_code, job_res.content))
198 elif job_res.status_code == 200:
199 job_res_content = jsonutils.loads(job_res.content)
200 job_id = job_res_content["job_id"]
201 LOG.info("Started job id: %s...", job_id)
203 while not self.job_completed:
204 self._query_job_state(job_id)
205 time.sleep(self.query_interval)
207 # TODO: Support using ETA to polls for completion.
208 # Read ETA, next poll in 1/2 ETA time slot.
209 # If ETA is greater than the maximum allowed job time,
210 # then terminate job immediately.
212 # while not self.job_completed:
213 # esti_time = self._query_state(job_id)
214 # if esti_time > self.timeout:
215 # terminate_res = requests.delete('http://%s:5000/api/v1.0
216 # /jobs' % self.target)
218 # time.sleep(int(esti_time)/2)
220 result_res = requests.get('http://%s:5000/api/v1.0/jobs?type='
221 'metadata&id=%s' % (self.target, job_id))
222 result_res_content = jsonutils.loads(result_res.content)
223 if 'report' in result_res_content and \
224 'steady_state' in result_res_content['report']['details']:
225 res = result_res_content['report']['details']['steady_state']
226 steady_state = res.values()[0]
227 LOG.info("Job %s completed with steady state %s",
228 job_id, steady_state)
230 result_res = requests.get('http://%s:5000/api/v1.0/jobs?id=%s' %
231 (self.target, job_id))
232 result_res_content = jsonutils.loads(
234 result.update(result_res_content)
236 def initialize_disks(self):
237 """Fills the target with random data prior to executing workloads"""
240 job_args_payload_list = ["target"]
242 for job_argument in job_args_payload_list:
244 job_args[job_argument] = self.options[job_argument]
248 LOG.info("Starting initialization with parameters %s", job_args)
249 job_res = requests.post('http://%s:5000/api/v1.0/initializations' %
250 self.target, json=job_args)
253 if job_res.status_code != 200:
254 LOG.error("Failed to start initialization job, error message: %s: %s",
255 job_res.status_code, job_res.content)
256 raise RuntimeError("Failed to start initialization job, error message: %s: %s" %
257 (job_res.status_code, job_res.content))
258 elif job_res.status_code == 200:
259 job_res_content = jsonutils.loads(job_res.content)
260 job_id = job_res_content["job_id"]
261 LOG.info("Started initialization as job id: %s...", job_id)
263 while not self.job_completed:
264 self._query_job_state(job_id)
265 time.sleep(self.query_interval)
267 self.job_completed = False
270 """Deletes the agent configuration and the stack"""
271 teardown_res = requests.delete(
272 'http://%s:5000/api/v1.0/configurations' % self.target)
274 if teardown_res.status_code == 400:
275 teardown_res_content = jsonutils.loads(
276 teardown_res.json_data)
277 LOG.error("Failed to reset environment, error message: %s",
278 teardown_res_content['message'])
279 raise RuntimeError("Failed to reset environment, error message:",
280 teardown_res_content['message'])
282 self.setup_done = False