X-Git-Url: https://gerrit.opnfv.org/gerrit/gitweb?a=blobdiff_plain;f=yardstick%2Fbenchmark%2Fcontexts%2Fnode.py;h=baa1cf5d6398a48e594f3785d43607ecf6f0a77c;hb=9b19d7542e494c9c39da7aead2ef630a866b8455;hp=e02a716699a58ddcde53d3f5590358a0e49fa39d;hpb=fa3afbcac13e1aa3ae9cc2977dcb4cd882112f6f;p=yardstick.git

diff --git a/yardstick/benchmark/contexts/node.py b/yardstick/benchmark/contexts/node.py
index e02a71669..baa1cf5d6 100644
--- a/yardstick/benchmark/contexts/node.py
+++ b/yardstick/benchmark/contexts/node.py
@@ -8,13 +8,18 @@
 ##############################################################################
 
 from __future__ import absolute_import
-import sys
+import errno
+import subprocess
 import os
-import yaml
+import collections
 import logging
 
+import yaml
+import pkg_resources
+
+from yardstick import ssh
 from yardstick.benchmark.contexts.base import Context
-from yardstick.definitions import YARDSTICK_ROOT_PATH
+from yardstick.common import constants as consts
 
 LOG = logging.getLogger(__name__)
 
@@ -31,22 +36,31 @@ class NodeContext(Context):
         self.controllers = []
         self.computes = []
         self.baremetals = []
-        super(self.__class__, self).__init__()
+        self.env = {}
+        super(NodeContext, self).__init__()
+
+    def read_config_file(self):
+        """Read from config file"""
+
+        with open(self.file_path) as stream:
+            LOG.info("Parsing pod file: %s", self.file_path)
+            cfg = yaml.load(stream)
+        return cfg
 
     def init(self, attrs):
         """initializes itself from the supplied arguments"""
         self.name = attrs["name"]
         self.file_path = attrs.get("file", "pod.yaml")
-        if not os.path.exists(self.file_path):
-            self.file_path = os.path.join(YARDSTICK_ROOT_PATH, self.file_path)
-
-        LOG.info("Parsing pod file: %s", self.file_path)
 
         try:
-            with open(self.file_path) as stream:
-                cfg = yaml.load(stream)
+            cfg = self.read_config_file()
         except IOError as ioerror:
-            sys.exit(ioerror)
+            if ioerror.errno == errno.ENOENT:
+                self.file_path = \
+                    os.path.join(consts.YARDSTICK_ROOT_PATH, self.file_path)
+                cfg = self.read_config_file()
+            else:
+                raise
 
         self.nodes.extend(cfg["nodes"])
         self.controllers.extend([node for node in cfg["nodes"]
@@ -60,35 +74,113 @@
         LOG.debug("Computes: %r", self.computes)
         LOG.debug("BareMetals: %r", self.baremetals)
 
+        self.env = attrs.get('env', {})
+        LOG.debug("Env: %r", self.env)
+
     def deploy(self):
-        """don't need to deploy"""
-        pass
+        config_type = self.env.get('type', '')
+        if config_type == 'ansible':
+            self._dispatch_ansible('setup')
+        elif config_type == 'script':
+            self._dispatch_script('setup')
 
     def undeploy(self):
-        """don't need to undeploy"""
-        pass
+        config_type = self.env.get('type', '')
+        if config_type == 'ansible':
+            self._dispatch_ansible('teardown')
+        elif config_type == 'script':
+            self._dispatch_script('teardown')
+        super(NodeContext, self).undeploy()
+
+    def _dispatch_script(self, key):
+        steps = self.env.get(key, [])
+        for step in steps:
+            for host, info in step.items():
+                self._execute_script(host, info)
+
+    def _dispatch_ansible(self, key):
+        try:
+            step = self.env[key]
+        except KeyError:
+            pass
+        else:
+            self._do_ansible_job(step)
+
+    def _do_ansible_job(self, path):
+        cmd = 'ansible-playbook -i inventory.ini %s' % path
+        p = subprocess.Popen(cmd, shell=True, cwd=consts.ANSIBLE_DIR)
+        p.communicate()
 
     def _get_server(self, attr_name):
         """lookup server info by name from context
         attr_name: a name for a server listed in nodes config file
         """
-        if type(attr_name) is dict:
+        if isinstance(attr_name, collections.Mapping):
             return None
 
         if self.name != attr_name.split(".")[1]:
             return None
         node_name = attr_name.split(".")[0]
-        nodes = [n for n in self.nodes
-                 if n["name"] == node_name]
-        if len(nodes) == 0:
+        matching_nodes = (n for n in self.nodes if n["name"] == node_name)
+
+        try:
+            # A clone is created in order to avoid affecting the
+            # original one.
+            node = dict(next(matching_nodes))
+        except StopIteration:
             return None
-        elif len(nodes) > 1:
-            LOG.error("Duplicate nodes!!!")
-            LOG.error("Nodes: %r", nodes)
-            sys.exit(-1)
-
-        # A clone is created in order to avoid affecting the
-        # original one.
-        node = dict(nodes[0])
+
+        try:
+            duplicate = next(matching_nodes)
+        except StopIteration:
+            pass
+        else:
+            raise ValueError("Duplicate nodes!!! Nodes: %s %s",
+                             (matching_nodes, duplicate))
+
         node["name"] = attr_name
         return node
+
+    def _execute_script(self, node_name, info):
+        if node_name == 'local':
+            self._execute_local_script(info)
+        else:
+            self._execute_remote_script(node_name, info)
+
+    def _execute_remote_script(self, node_name, info):
+        prefix = self.env.get('prefix', '')
+        script, options = self._get_script(info)
+
+        script_file = pkg_resources.resource_filename(prefix, script)
+
+        self._get_client(node_name)
+        self.client._put_file_shell(script_file, '~/{}'.format(script))
+
+        cmd = 'sudo bash {} {}'.format(script, options)
+        status, stdout, stderr = self.client.execute(cmd)
+        if status:
+            raise RuntimeError(stderr)
+
+    def _execute_local_script(self, info):
+        script, options = self._get_script(info)
+        script = os.path.join(consts.YARDSTICK_ROOT_PATH, script)
+        cmd = ['bash', script, options]
+
+        p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
+        LOG.debug('\n%s', p.communicate()[0])
+
+    def _get_script(self, info):
+        return info.get('script'), info.get('options', '')
+
+    def _get_client(self, node_name):
+        node = self._get_node_info(node_name.strip())
+
+        if node is None:
+            raise SystemExit('No such node')
+
+        self.client = ssh.SSH.from_node(node, defaults={'user': 'ubuntu'})
+
+        self.client.wait(timeout=600)
+
+    def _get_node_info(self, name):
+        return next((n for n in self.nodes if n['name'].strip() == name))
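Note (not part of the patch itself): the new deploy()/undeploy() paths are driven by an optional 'env' section in the attrs dict that init() receives. The sketch below shows what such an attrs dict could look like, assuming the key names 'type', 'setup', 'teardown', 'prefix', 'script' and 'options' exactly as the dispatch code above reads them; the context name, host names and script paths are purely hypothetical.

# Hypothetical attrs for a Node context using the new 'env' hooks.
# type 'script' selects _dispatch_script(); type 'ansible' would instead
# point 'setup'/'teardown' at a playbook path for _do_ansible_job().
node_context_attrs = {
    "name": "node-context",            # context name, checked in _get_server()
    "file": "pod.yaml",                # pod file; retried under YARDSTICK_ROOT_PATH on ENOENT
    "env": {
        "type": "script",              # 'script' or 'ansible'
        "prefix": "yardstick",         # package prefix for pkg_resources.resource_filename()
        "setup": [                     # each step maps a host name (or 'local') to script info
            {"node1": {"script": "tests/setup_node.bash", "options": "arg1 arg2"}},
            {"local": {"script": "tests/setup_local.bash"}},
        ],
        "teardown": [
            {"node1": {"script": "tests/teardown_node.bash"}},
        ],
    },
}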
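Also not from the patch: a minimal standalone sketch of the lookup pattern _get_server() now uses, namely a generator consumed with next(), where the first StopIteration means "no such node" and a second successful next() means duplicate names. The sample node list below is made up.

# Standalone illustration of the generator-based lookup in _get_server().
nodes = [{"name": "node1"}, {"name": "node2"}]
node_name = "node1"

matching_nodes = (n for n in nodes if n["name"] == node_name)

try:
    node = dict(next(matching_nodes))   # clone so the original dict is not mutated
except StopIteration:
    node = None                         # no node with that name
else:
    try:
        next(matching_nodes)            # a second hit means duplicate names
    except StopIteration:
        pass                            # exactly one match: the normal case
    else:
        raise ValueError("Duplicate nodes with name %s" % node_name)

print(node)                             # {'name': 'node1'}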