Merge "HA testcase containerized Compass support"
yardstick/benchmark/contexts/node.py
##############################################################################
# Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################

from __future__ import absolute_import
import errno
import subprocess
import os
import collections
import logging

import yaml
import pkg_resources

from yardstick import ssh
from yardstick.benchmark.contexts.base import Context
from yardstick.common import constants as consts

LOG = logging.getLogger(__name__)


class NodeContext(Context):
    """Class that handles node info"""

    __context_type__ = "Node"

    def __init__(self):
        self.name = None
        self.file_path = None
        self.nodes = []
        self.networks = {}
        self.controllers = []
        self.computes = []
        self.baremetals = []
        self.env = {}
        super(NodeContext, self).__init__()

    def read_config_file(self):
        """Read from config file"""

        with open(self.file_path) as stream:
            LOG.info("Parsing pod file: %s", self.file_path)
            # safe_load avoids constructing arbitrary Python objects from YAML
            cfg = yaml.safe_load(stream)
        return cfg

    def init(self, attrs):
        """initializes itself from the supplied arguments"""
        self.name = attrs["name"]
        self.file_path = attrs.get("file", "pod.yaml")

        try:
            cfg = self.read_config_file()
        except IOError as ioerror:
            if ioerror.errno == errno.ENOENT:
                self.file_path = \
                    os.path.join(consts.YARDSTICK_ROOT_PATH, self.file_path)
                cfg = self.read_config_file()
            else:
                raise

        self.nodes.extend(cfg["nodes"])
        self.controllers.extend([node for node in cfg["nodes"]
                                 if node["role"] == "Controller"])
        self.computes.extend([node for node in cfg["nodes"]
                              if node["role"] == "Compute"])
        self.baremetals.extend([node for node in cfg["nodes"]
                                if node["role"] == "Baremetal"])
        LOG.debug("Nodes: %r", self.nodes)
        LOG.debug("Controllers: %r", self.controllers)
        LOG.debug("Computes: %r", self.computes)
        LOG.debug("BareMetals: %r", self.baremetals)

        self.env = attrs.get('env', {})
        LOG.debug("Env: %r", self.env)

        # add optional static network definition
        self.networks.update(cfg.get("networks", {}))

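    # Illustrative only: a minimal pod file that init() above can parse.
    # Node entries need "name" and "role"; the other keys shown here
    # (ip, user, password, key_filename) are examples of what
    # ssh.SSH.from_node() typically consumes, and the optional "networks"
    # mapping mirrors the keys read by _get_network() below.
    #
    #   nodes:
    #   - name: node1
    #     role: Controller
    #     ip: 10.1.0.50
    #     user: root
    #     password: root
    #   - name: node2
    #     role: Compute
    #     ip: 10.1.0.51
    #     user: root
    #     key_filename: /root/.ssh/id_rsa
    #   networks:
    #     mgmt:
    #       name: mgmt
    #       vld_id: mgmt_net
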
    def deploy(self):
        config_type = self.env.get('type', '')
        if config_type == 'ansible':
            self._dispatch_ansible('setup')
        elif config_type == 'script':
            self._dispatch_script('setup')

    def undeploy(self):
        config_type = self.env.get('type', '')
        if config_type == 'ansible':
            self._dispatch_ansible('teardown')
        elif config_type == 'script':
            self._dispatch_script('teardown')
        super(NodeContext, self).undeploy()

    def _dispatch_script(self, key):
        steps = self.env.get(key, [])
        for step in steps:
            for host, info in step.items():
                self._execute_script(host, info)

    def _dispatch_ansible(self, key):
        try:
            step = self.env[key]
        except KeyError:
            pass
        else:
            self._do_ansible_job(step)

    def _do_ansible_job(self, path):
        cmd = 'ansible-playbook -i inventory.ini %s' % path
        p = subprocess.Popen(cmd, shell=True, cwd=consts.ANSIBLE_DIR)
        p.communicate()

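    # Illustrative only: two shapes of the "env" attribute that
    # deploy()/undeploy() above dispatch on.  The playbook, package and
    # script names are made up; only the structure (type, prefix,
    # setup/teardown) matches what the dispatch helpers read.
    #
    #   env:
    #     type: ansible
    #     setup: deploy.yml        # run via ansible-playbook in ANSIBLE_DIR
    #     teardown: undeploy.yml
    #
    #   env:
    #     type: script
    #     prefix: some.python.package    # resolved with pkg_resources
    #     setup:
    #     - node1:                       # "local" runs on the local host
    #         script: start_service.bash
    #         options: nova-api
    #     teardown:
    #     - node1:
    #         script: stop_service.bash
    #         options: nova-api
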
    def _get_server(self, attr_name):
        """lookup server info by name from context
        attr_name: a name for a server listed in nodes config file
        """
        if isinstance(attr_name, collections.Mapping):
            return None

        # attr_name is expected as "<node_name>.<context_name>"
        if self.name != attr_name.split(".")[1]:
            return None
        node_name = attr_name.split(".")[0]
        matching_nodes = (n for n in self.nodes if n["name"] == node_name)

        try:
            # A clone is created in order to avoid affecting the
            # original one.
            node = dict(next(matching_nodes))
        except StopIteration:
            return None

        try:
            duplicate = next(matching_nodes)
        except StopIteration:
            pass
        else:
            raise ValueError("Duplicate nodes!!! Nodes: %s %s"
                             % (node, duplicate))

        node["name"] = attr_name
        return node

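    # Illustrative only (names are made up): assuming self.name == "ctx1"
    # and a pod entry {"name": "node1", "role": "Controller", ...},
    #
    #   self._get_server("node1.ctx1")
    #   -> {"name": "node1.ctx1", "role": "Controller", ...}
    #
    # while "node1.other_ctx" or an unknown node name returns None, and two
    # pod entries sharing the name "node1" raise ValueError.
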
    def _get_network(self, attr_name):
        if not isinstance(attr_name, collections.Mapping):
            network = self.networks.get(attr_name)

        else:
            # Don't generalize too much. Just support vld_id.
            vld_id = attr_name.get('vld_id')
            if vld_id is None:
                return None

            network = next((n for n in self.networks.values() if
                            n.get("vld_id") == vld_id), None)

        if network is None:
            return None

        result = {
            # name is required
            "name": network["name"],
            "vld_id": network.get("vld_id"),
            "segmentation_id": network.get("segmentation_id"),
            "network_type": network.get("network_type"),
            "physical_network": network.get("physical_network"),
        }
        return result

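    # Illustrative only: with networks loaded from the pod file such as
    #   {"mgmt": {"name": "mgmt", "vld_id": "mgmt_net"}}
    # both _get_network("mgmt") and _get_network({"vld_id": "mgmt_net"})
    # resolve to the same entry and return the flattened dict built above,
    # with missing keys (segmentation_id, network_type, ...) set to None.
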
    def _execute_script(self, node_name, info):
        if node_name == 'local':
            self._execute_local_script(info)
        else:
            self._execute_remote_script(node_name, info)

    def _execute_remote_script(self, node_name, info):
        prefix = self.env.get('prefix', '')
        script, options = self._get_script(info)

        script_file = pkg_resources.resource_filename(prefix, script)

        self._get_client(node_name)
        self.client._put_file_shell(script_file, '~/{}'.format(script))

        cmd = 'sudo bash {} {}'.format(script, options)
        status, stdout, stderr = self.client.execute(cmd)
        if status:
            raise RuntimeError(stderr)

    def _execute_local_script(self, info):
        script, options = self._get_script(info)
        script = os.path.join(consts.YARDSTICK_ROOT_PATH, script)
        cmd = ['bash', script, options]

        p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
        LOG.debug('\n%s', p.communicate()[0])

    def _get_script(self, info):
        return info.get('script'), info.get('options', '')

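    # Illustrative only: a per-host step entry such as
    #   {"script": "check_service.bash", "options": "nova-api"}
    # is split by _get_script() into ("check_service.bash", "nova-api");
    # the remote variant resolves the script inside the configured "prefix"
    # package, copies it to the node and runs it with sudo, while the local
    # variant resolves it under consts.YARDSTICK_ROOT_PATH.
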
    def _get_client(self, node_name):
        node = self._get_node_info(node_name.strip())

        if node is None:
            raise SystemExit('No such node')

        self.client = ssh.SSH.from_node(node, defaults={'user': 'ubuntu'})

        self.client.wait(timeout=600)

    def _get_node_info(self, name):
        # return None (instead of raising StopIteration) when the node is
        # not in the pod file, so _get_client() can report it cleanly
        return next((n for n in self.nodes if n['name'].strip() == name), None)
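
# Illustrative only: a minimal sketch of driving this context by hand
# (attribute values are made up; "file" defaults to "pod.yaml" when omitted).
#
#   context = NodeContext()
#   context.init({"name": "ctx1", "file": "my_pod.yaml"})
#   context.deploy()
#   server = context._get_server("node1.ctx1")
#   ...
#   context.undeploy()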