# Copyright 2014: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

# yardstick comment: this is a modified copy of
# rally/rally/benchmark/runners/constant.py
19 """A runner that runs a configurable number of times before it returns

from __future__ import absolute_import

import logging
import multiprocessing
import os
import time
import traceback

from yardstick.benchmark.runners import base

LOG = logging.getLogger(__name__)
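

# seconds to wait on queue.put() before giving up, rather than block the test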
QUEUE_PUT_TIMEOUT = 10


def _worker_process(queue, cls, method_name, scenario_cfg,
                    context_cfg, aborted, output_queue):
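    # Child-process entry point: run the scenario 'iterations' times,
    # pushing one record per iteration to 'queue' and any raw scenario
    # output to 'output_queue'.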

    sequence = 1

    runner_cfg = scenario_cfg['runner']

    interval = runner_cfg.get("interval", 1)
    iterations = runner_cfg.get("iterations", 1)
    run_step = runner_cfg.get("run_step", "setup,run,teardown")

    delta = runner_cfg.get("delta", 2)
    LOG.info("worker START, iterations %d times, class %s", iterations, cls)

    runner_cfg['runner_id'] = os.getpid()

    benchmark = cls(scenario_cfg, context_cfg)
    if "setup" in run_step:
        benchmark.setup()

    method = getattr(benchmark, method_name)
62 if "sla" in scenario_cfg:
63 sla_action = scenario_cfg["sla"].get("action", "assert")

    while True:

        LOG.debug("runner=%(runner)s seq=%(sequence)s START",
                  {"runner": runner_cfg["runner_id"],
                   "sequence": sequence})

        data = {}
        errors = ""

        benchmark.pre_run_wait_time(interval)

        try:
            result = method(data)
        except AssertionError as assertion:
            # SLA validation failed in scenario, determine what to do now
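            # 'assert' aborts the run, 'monitor' records the failure and
            # keeps going, 'rate-control' lowers the offered rate and retries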
            if sla_action == "assert":
                raise
            elif sla_action == "monitor":
                LOG.warning("SLA validation failed: %s", assertion.args)
                errors = assertion.args
            elif sla_action == "rate-control":
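                # back off: make sure a 'rate' option exists (default 100),
                # then cut it by 'delta' and restart the iteration sequence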
                try:
                    scenario_cfg['options']['rate']
                except KeyError:
                    scenario_cfg.setdefault('options', {})
                    scenario_cfg['options']['rate'] = 100

                scenario_cfg['options']['rate'] -= delta
                sequence = 1
                continue
        except Exception:  # pylint: disable=broad-except
            errors = traceback.format_exc()
            LOG.exception("")
        else:
            if result:
                # add timeout for put so we don't block test
                # if we do timeout we don't care about dropping individual KPIs
                output_queue.put(result, True, QUEUE_PUT_TIMEOUT)

        benchmark.post_run_wait_time(interval)
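
        # collect this iteration's data and errors into a single record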
        benchmark_output = {
            'timestamp': time.time(),
            'sequence': sequence,
            'data': data,
            'errors': errors
        }

        queue.put(benchmark_output, True, QUEUE_PUT_TIMEOUT)

        LOG.debug("runner=%(runner)s seq=%(sequence)s END",
                  {"runner": runner_cfg["runner_id"],
                   "sequence": sequence})

        sequence += 1
        if (errors and sla_action is None) or \
                (sequence > iterations or aborted.is_set()):
            LOG.info("worker END")
            break
125 if "teardown" in run_step:
129 # catch any exception in teardown and convert to simple exception
130 # never pass exceptions back to multiprocessing, because some exceptions can
132 # https://bugs.python.org/issue9400

    LOG.debug("queue.qsize() = %s", queue.qsize())
    LOG.debug("output_queue.qsize() = %s", output_queue.qsize())


class IterationRunner(base.Runner):
141 """Run a scenario for a configurable number of times
143 If the scenario ends before the time has elapsed, it will be started again.
146 iterations - amount of times the scenario will be run for
150 interval - time to wait between each scenario invocation
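
  Illustrative runner section from a task file (a sketch only; the exact
  schema comes from the scenario definition):

    runner:
      type: Iteration
      iterations: 10
      interval: 1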
    """
    __execution_type__ = 'Iteration'

    def _run_benchmark(self, cls, method, scenario_cfg, context_cfg):
        name = "{}-{}-{}".format(self.__execution_type__,
                                 scenario_cfg.get("type"), os.getpid())
        self.process = multiprocessing.Process(
            name=name,
            target=_worker_process,
            args=(self.result_queue, cls, method, scenario_cfg,
                  context_cfg, self.aborted, self.output_queue))
        self.process.start()