# Copyright 2014: Mirantis Inc.
# All Rights Reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#        http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

# yardstick comment: this is a modified copy of
# rally/rally/benchmark/runners/constant.py
19 """A runner that runs a configurable number of times before it returns
from __future__ import absolute_import

import logging
import multiprocessing
import os
import time
import traceback

from yardstick.benchmark.runners import base
from yardstick.common import exceptions as y_exc
LOG = logging.getLogger(__name__)

# Max seconds to wait on a queue.put() so a full/stuck result queue can
# never block the benchmark run indefinitely.
QUEUE_PUT_TIMEOUT = 10
def _worker_process(queue, cls, method_name, scenario_cfg,
                    context_cfg, aborted, output_queue):
    """Run one benchmark scenario for a configured number of iterations.

    Executed inside a child process (see IterationRunner._run_benchmark).
    Instantiates the scenario class, optionally runs its setup, then invokes
    ``method_name`` once per iteration, pushing scenario KPIs to
    ``output_queue`` and per-iteration bookkeeping records to ``queue``.

    :param queue: multiprocessing queue for per-iteration benchmark records
    :param cls: scenario class to instantiate
    :param method_name: name of the scenario method invoked each iteration
    :param scenario_cfg: scenario configuration dict; must contain 'runner'
    :param context_cfg: context configuration passed through to the scenario
    :param aborted: multiprocessing.Event signalling an external abort
    :param output_queue: multiprocessing queue for scenario KPI output
    """
    sequence = 1

    runner_cfg = scenario_cfg['runner']

    interval = runner_cfg.get("interval", 1)
    iterations = runner_cfg.get("iterations", 1)
    run_step = runner_cfg.get("run_step", "setup,run,teardown")
    # step used by the "rate-control" SLA action to back off the rate
    delta = runner_cfg.get("delta", 2)

    LOG.info("worker START, iterations %d times, class %s", iterations, cls)

    runner_cfg['runner_id'] = os.getpid()

    benchmark = cls(scenario_cfg, context_cfg)
    if "setup" in run_step:
        benchmark.setup()

    method = getattr(benchmark, method_name)

    sla_action = None
    if "sla" in scenario_cfg:
        sla_action = scenario_cfg["sla"].get("action", "assert")

    while True:
        LOG.debug("runner=%(runner)s seq=%(sequence)s START",
                  {"runner": runner_cfg["runner_id"],
                   "sequence": sequence})

        data = {}
        errors = ""

        benchmark.pre_run_wait_time(interval)

        try:
            result = method(data)
        except y_exc.SLAValidationError as error:
            # SLA validation failed in scenario, determine what to do now
            if sla_action == "assert":
                raise
            elif sla_action == "monitor":
                LOG.warning("SLA validation failed: %s", error.args)
                errors = error.args
            elif sla_action == "rate-control":
                try:
                    scenario_cfg['options']['rate']
                except KeyError:
                    scenario_cfg.setdefault('options', {})
                    scenario_cfg['options']['rate'] = 100

                # lower the rate and restart the iteration count from scratch
                scenario_cfg['options']['rate'] -= delta
                sequence = 1
                continue
        except Exception:  # pylint: disable=broad-except
            errors = traceback.format_exc()
            LOG.exception("")
        else:
            if result:
                # add timeout for put so we don't block test
                # if we do timeout we don't care about dropping individual KPIs
                output_queue.put(result, True, QUEUE_PUT_TIMEOUT)

        benchmark.post_run_wait_time(interval)

        benchmark_output = {
            'timestamp': time.time(),
            'sequence': sequence,
            'data': data,
            'errors': errors
        }

        queue.put(benchmark_output, True, QUEUE_PUT_TIMEOUT)

        LOG.debug("runner=%(runner)s seq=%(sequence)s END",
                  {"runner": runner_cfg["runner_id"],
                   "sequence": sequence})

        sequence += 1

        # stop on an unhandled error (no SLA action configured), when the
        # requested iteration count is reached, or on an external abort
        if (errors and sla_action is None) or \
                (sequence > iterations or aborted.is_set()):
            LOG.info("worker END")
            break

    if "teardown" in run_step:
        try:
            benchmark.teardown()
        except Exception:
            # catch any exception in teardown and convert to simple exception
            # never pass exceptions back to multiprocessing, because some exceptions can
            # be unpicklable
            # https://bugs.python.org/issue9400
            LOG.exception("")
            raise SystemExit(1)

    LOG.debug("queue.qsize() = %s", queue.qsize())
    LOG.debug("output_queue.qsize() = %s", output_queue.qsize())
class IterationRunner(base.Runner):
    """Run a scenario for a configurable number of times

If the scenario ends before the time has elapsed, it will be started again.

  Parameters
    iterations - amount of times the scenario will be run for
        type:    int
        unit:    na
        default: 1
    interval - time to wait between each scenario invocation
        type:    int
        unit:    seconds
        default: 1 sec
    """
    __execution_type__ = 'Iteration'

    def _run_benchmark(self, cls, method, scenario_cfg, context_cfg):
        # Name the worker so it is identifiable in process listings; the
        # computed name must actually be passed to Process (it was unused
        # before this fix).
        name = "{}-{}-{}".format(self.__execution_type__,
                                 scenario_cfg.get("type"), os.getpid())
        self.process = multiprocessing.Process(
            name=name,
            target=_worker_process,
            args=(self.result_queue, cls, method, scenario_cfg,
                  context_cfg, self.aborted, self.output_queue))
        self.process.start()