1 # Copyright (c) 2016-2017 Intel Corporation
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
7 # http://www.apache.org/licenses/LICENSE-2.0
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
15 from __future__ import absolute_import
19 import contextlib as cl
23 from collections import Mapping, MutableMapping, Iterable, Callable, deque
24 from functools import partial
25 from itertools import chain
26 from subprocess import CalledProcessError, Popen, PIPE
27 from tempfile import NamedTemporaryFile
30 import six.moves.configparser as ConfigParser
32 from six import StringIO
33 from chainmap import ChainMap
35 from yardstick.common.utils import Timer
36 from yardstick.common import constants as consts
# Install the text-format cgitb hook so uncaught exceptions produce detailed
# tracebacks (with per-frame locals) instead of the terse default.
cgitb.enable(format="text")

# Private sentinel distinguishing "argument not supplied" from an explicit
# None, since None is a meaningful value for several parameters below.
_LOCAL_DEFAULT = object()

LOG = logging.getLogger(__name__)  # module-level logger
def overwrite_dict_to_cfg(cfg, cfg_dict):
    """Overwrite sections of *cfg* (a ConfigParser) from *cfg_dict*.

    Each top-level key of *cfg_dict* becomes a freshly re-created section.
    A section's payload may be:
      * a string        -> stored as a single value-less option
                           (requires a parser built with allow_no_value=True)
      * a mapping       -> one option per key/value pair
      * other iterables -> one value-less option per element

    :param cfg: ConfigParser instance, mutated in place
    :param cfg_dict: mapping of section name -> section payload
    """
    for section in cfg_dict:
        # remove first so stale options from any previous write don't survive
        cfg.remove_section(section)
        cfg.add_section(section)
    for section, val in cfg_dict.items():
        if isinstance(val, six.string_types):
            # bare string: the string itself is the (value-less) option name
            cfg.set(section, val)
        elif isinstance(val, Mapping):
            # mapping: one option per key/value pair
            for option, value in val.items():
                cfg.set(section, option, value)
        else:
            # any other iterable: each element becomes a value-less option
            for option in val:
                cfg.set(section, option)
class TempfileContext(object):
    """Context manager resolving ``data`` into an on-disk file name.

    Strings pass through as filenames; file-like objects contribute their
    ``.name``; raw payloads matching ``data_types`` are serialized into a
    (temporary) file via ``write_func``.
    """

    def _try_get_filename_from_file(param):
        # NOTE(review): extraction gap — the surrounding ``try:``, the
        # ``return param.name`` on success, the handler body, and the final
        # ``return param`` are not visible in this view.
        if isinstance(param.read, Callable):
        except AttributeError:
        # return what was given

    def __init__(self, data, write_func, descriptor, data_types, directory,
                 prefix, suffix, creator):
        super(TempfileContext, self).__init__()
        # NOTE(review): assignments for ``self.data``, ``self.prefix`` and
        # ``self.suffix`` are referenced below but not visible here — likely
        # dropped by extraction; confirm against the full file.
        self.write_func = write_func  # callable(file) that serializes data
        self.descriptor = descriptor  # human-readable label used in logs
        self.data_types = data_types  # payload types accepted for writing
        self.directory = directory    # target directory for generated files
        self.creator = creator        # factory producing a temp file object

    # NOTE(review): the ``def __enter__(self):`` header is not visible in
    # this view; the block below is clearly the __enter__ body.
        self.data = self._try_get_filename_from_file(self.data)
        if isinstance(self.data, six.string_types):
            # string -> playbook filename directly
            data_filename = self.data
        elif isinstance(self.data, self.data_types):
            # list of playbooks -> put into a temporary playbook file
            self.prefix = self.prefix.rstrip('_')
            data_filename = ''.join([self.prefix, self.suffix])
            data_filename = os.path.join(self.directory, data_filename)
            if not os.path.exists(data_filename):
                # fixed name not taken yet: create it directly
                self.data_file = open(data_filename, 'w+')
            # NOTE(review): an ``else:`` appears to be missing before the
            # fallback to the creator factory below
            self.data_file = self.creator()
            self.write_func(self.data_file)
            self.data_file.flush()
            self.data_file.seek(0)  # rewind so consumers read from the start
            # data not passed properly -> error
            # NOTE(review): the enclosing ``else:`` for this error path is
            # not visible in this view
            LOG.error("%s type not recognized: %s", self.descriptor, self.data)
            raise ValueError("{} type not recognized".format(self.descriptor))
        LOG.debug("%s file : %s", self.descriptor, data_filename)

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Close the backing file; it is not deleted here (temp files are
        # produced with delete=False elsewhere in this module).
        self.data_file.close()
class CustomTemporaryFile(object):
    """Factory for TempfileContext objects with per-subclass defaults.

    Subclasses override DEFAULT_SUFFIX / DEFAULT_DATA_TYPES to control the
    file extension and the payload types accepted for serialization.
    """

    # overridden by subclasses, e.g. '.yml', '.ini', '.json'
    DEFAULT_SUFFIX = None
    # overridden by subclasses, e.g. dict or (list, tuple)
    DEFAULT_DATA_TYPES = None

    def __init__(self, directory, prefix, suffix=_LOCAL_DEFAULT,
                 data_types=_LOCAL_DEFAULT):
        super(CustomTemporaryFile, self).__init__()
        self.directory = directory
        # NOTE(review): ``self.prefix = prefix`` is used by make_context()
        # below but is not visible here — likely lost in extraction.
        if suffix is not _LOCAL_DEFAULT:
            # NOTE(review): expected ``self.suffix = suffix`` plus an
            # ``else:`` before the fallback; as shown the branch is
            # incomplete — confirm against the full file.
            self.suffix = self.DEFAULT_SUFFIX
        if data_types is not _LOCAL_DEFAULT:
            self.data_types = data_types
            # NOTE(review): an ``else:`` appears to be missing before the
            # fallback assignment below
            self.data_types = self.DEFAULT_DATA_TYPES
        # must open "w+" so unicode is encoded correctly
        self.creator = partial(NamedTemporaryFile, mode="w+", delete=False,
                               # NOTE(review): continuation arguments
                               # (dir/prefix/suffix) are not visible here
    def make_context(self, data, write_func, descriptor='data'):
        # Build a TempfileContext bound to this factory's configuration.
        return TempfileContext(data, write_func, descriptor, self.data_types,
                               self.directory, self.prefix, self.suffix,
                               # NOTE(review): trailing ``self.creator)``
                               # argument is not visible in this view
class ListTemporaryFile(CustomTemporaryFile):
    """Temporary-file factory whose payload is a list or tuple."""
    DEFAULT_DATA_TYPES = (list, tuple)
class MapTemporaryFile(CustomTemporaryFile):
    """Temporary-file factory whose payload is a dict."""
    DEFAULT_DATA_TYPES = dict
class YmlTemporaryFile(ListTemporaryFile):
    """List/tuple payloads serialized to a '.yml' file."""
    DEFAULT_SUFFIX = '.yml'
class IniListTemporaryFile(ListTemporaryFile):
    """List/tuple payloads serialized to an '.ini' file."""
    DEFAULT_SUFFIX = '.ini'
class IniMapTemporaryFile(MapTemporaryFile):
    """Dict payloads serialized to an '.ini' file."""
    DEFAULT_SUFFIX = '.ini'
class JsonTemporaryFile(MapTemporaryFile):
    """Dict payloads serialized to a '.json' file."""
    DEFAULT_SUFFIX = '.json'
class FileNameGenerator(object):
    """Builds collision-free file names from an ordered list of name parts."""

    # NOTE(review): this and _handle_existing_file take no ``self`` — they
    # look like staticmethods whose decorators are not visible in this view.
    def get_generator_from_filename(filename, directory, prefix, middle):
        """Derive a generator seeded from an existing filename's basename."""
        basename = os.path.splitext(os.path.basename(filename))[0]
        if not basename.startswith(prefix):
            # basename lacks the run prefix: prepend prefix and middle
            part_list = [prefix, middle, basename]
        elif not middle or middle in basename:
            # middle part absent or already embedded: basename alone
            part_list = [basename]
            # NOTE(review): an ``else:`` appears to be missing before the
            # assignment below
            part_list = [middle, basename]
        return FileNameGenerator(directory=directory, part_list=part_list)

    def _handle_existing_file(filename):
        """Return *filename*, or a uniquified variant if it already exists."""
        if not os.path.exists(filename):
            # NOTE(review): ``return filename`` expected here, not visible
        prefix, suffix = os.path.splitext(os.path.basename(filename))
        directory = os.path.dirname(filename)
        if not prefix.endswith('_'):
            # NOTE(review): branch body (e.g. ``prefix += '_'``) not visible
        # Let NamedTemporaryFile reserve a unique name, then close it at
        # once and hand back only the name.
        temp_file = NamedTemporaryFile(delete=False, dir=directory,
                                       prefix=prefix, suffix=suffix)
        with cl.closing(temp_file):
            return temp_file.name

    def __init__(self, directory, part_list):
        super(FileNameGenerator, self).__init__()
        self.directory = directory  # directory the generated names live in
        self.part_list = part_list  # name components, joined with '_'

    def make(self, extra):
        """Join part_list plus *extra* into a unique name in directory."""
        if not isinstance(extra, Iterable) or isinstance(extra,
            # NOTE(review): continuation of this condition (string-type
            # check) and closing paren are not visible in this view
            extra = (extra,)  # wrap the singleton in an iterable
        return self._handle_existing_file(
            # NOTE(review): an os.path.join(...) wrapper around the join
            # below appears to be missing from this view
            '_'.join(chain(self.part_list, extra))
class AnsibleNodeDict(Mapping):
    """Read-only mapping of node name -> wrapped node instance."""

    def __init__(self, node_class, nodes):
        super(AnsibleNodeDict, self).__init__()
        # create a dict of name, Node instance
        self.node_dict = {k: v for k, v in
                          (node_class(node).get_tuple() for node in
                           # NOTE(review): comprehension continuation
                           # (``nodes)}``) is not visible in this view
        # collect all the node roles
        self.node_roles = set(
            n['role'] for n in six.itervalues(self.node_dict))

    # NOTE(review): the ``def __repr__/__len__/__iter__`` header lines are
    # not visible in this view; their orphaned bodies appear below.
        return repr(self.node_dict)

        return len(self.node_dict)

    def __getitem__(self, item):
        return self.node_dict[item]

        return iter(self.node_dict)

    def iter_all_of_type(self, node_type, default=_LOCAL_DEFAULT):
        # Lazily yield every node whose role matches node_type.
        return (node for node in six.itervalues(self) if
                node.is_role(node_type, default))

    def gen_inventory_lines_for_all_of_type(self, node_type,
                                            default=_LOCAL_DEFAULT):
        # Inventory lines restricted to nodes of one role.
        return [node.gen_inventory_line() for node in
                self.iter_all_of_type(node_type, default)]

    def gen_all_inventory_lines(self):
        # Inventory lines for every node regardless of role.
        return [node.gen_inventory_line() for node in
                six.itervalues(self.node_dict)]

    def gen_inventory_groups(self):
        # lowercase group names
        return {role.lower(): [node['name'] for
                               node in self.iter_all_of_type(role)]
                for role in self.node_roles}
class AnsibleNode(MutableMapping):
    """Dict-like wrapper around one node record, with inventory helpers."""

    # inventory variable name -> node-record key
    ANSIBLE_NODE_KEY_MAP = {
        u'ansible_host': 'ip',
        u'ansible_user': 'user',
        u'ansible_port': 'ssh_port',
        u'ansible_ssh_pass': 'password',
        u'ansible_ssh_private_key_file': 'key_filename',
    # NOTE(review): closing ``}`` of this map is not visible in this view

    def __init__(self, data=None, **kwargs):
        super(AnsibleNode, self).__init__()
        if isinstance(data, MutableMapping):
            # NOTE(review): this branch's body and its ``else:`` (which
            # must set ``self.data``) are not visible in this view

    # NOTE(review): headers for __repr__, __len__, __iter__ and a probable
    # @property decorator are not visible; orphaned bodies appear below.
        return 'AnsibleNode<{}>'.format(self.data)

        return len(self.data)

        return iter(self.data)

    def node_key_map(self):
        # Returns the class-level inventory-key translation map.
        return self.ANSIBLE_NODE_KEY_MAP

    def get_inventory_params(self):
        """Translate present node keys into ansible inventory variables."""
        node_key_map = self.node_key_map
        # password or key_filename may not be present
        return {inventory_key: self[self_key] for inventory_key, self_key in
                node_key_map.items() if self_key in self}

    def is_role(self, node_type, default=_LOCAL_DEFAULT):
        """Role membership test; with *default*, set the role if absent."""
        if default is not _LOCAL_DEFAULT:
            return self.setdefault('role', default) in node_type
        return node_type in self.get('role', set())

    def gen_inventory_line(self):
        """Render ``name key=value ...`` for an ini-style inventory."""
        inventory_params = self.get_inventory_params()
        # use format to convert ints
        # sort to ensure consistent key value ordering
        formatted_args = (u"{}={}".format(*entry) for entry in
                          sorted(inventory_params.items()))
        line = u" ".join(chain([self['name']], formatted_args))
        # NOTE(review): ``return line`` and the following ``def
        # get_tuple(self):`` header are not visible in this view
        return self['name'], self

    def __contains__(self, key):
        return self.data.__contains__(key)

    def __getitem__(self, item):
        return self.data[item]

    def __setitem__(self, key, value):
        self.data[key] = value

    def __delitem__(self, key):
        # NOTE(review): body (``del self.data[key]``) not visible here

    def __getattr__(self, item):
        # Delegate unknown attribute access to the wrapped mapping.
        return getattr(self.data, item)
class AnsibleCommon(object):
    """Base helper that drives ansible-playbook runs against a node set."""

    # class used to wrap each raw node record
    NODE_CLASS = AnsibleNode
    # per-run output file prefix, e.g. 'ansible_00'
    OUTFILE_PREFIX_TEMPLATE = 'ansible_{:02}'

    # plain-value attribute defaults applied by reset()
    __DEFAULT_VALUES_MAP = {
        'default_timeout': 1200,
        # default 10 min ansible timeout for non-main calls
        'ansible_timeout': 600,
        'scripts_dest': None,
        '_deploy_dir': _LOCAL_DEFAULT,
    # NOTE(review): closing ``}`` (and possibly more entries) not visible

    # factory-produced attribute defaults applied by reset()
    __DEFAULT_CALLABLES_MAP = {
        'inventory_dict': dict,
        '_node_info_dict': dict,
    # NOTE(review): closing ``}`` (and possibly more entries) not visible
    # NOTE(review): an @classmethod decorator is implied by the ``cls``
    # parameter but is not visible in this view.
    def _get_defaults(cls):
        # subclasses will override to change defaults using the ChainMap
        # (first map in each deque wins, so subclass maps shadow base maps)
        values_map_deque, defaults_map_deque = cls._get_defaults_map_deques()
        return ChainMap(*values_map_deque), ChainMap(*defaults_map_deque)
    # NOTE(review): an @classmethod decorator is implied by the ``cls``
    # parameter but is not visible in this view.
    def _get_defaults_map_deques(cls):
        # deque so we can insert or append easily
        return (deque([cls.__DEFAULT_VALUES_MAP]),
                deque([cls.__DEFAULT_CALLABLES_MAP]))
    def __init__(self, nodes, **kwargs):
        # TODO: add default Heat vars
        super(AnsibleCommon, self).__init__()
        # NOTE(review): assignments referenced elsewhere in this class
        # (e.g. ``self.nodes = nodes``, ``self.counter``) are not visible
        # in this view — likely lost in extraction.
        # default 10 min ansible timeout for non-main calls
        self.ansible_timeout = 600
        self.inventory_dict = None        # built by gen_inventory_ini_dict()
        self.scripts_dest = None
        self._deploy_dir = _LOCAL_DEFAULT  # sentinel: not configured yet
        self._node_dict = None            # lazy AnsibleNodeDict cache
        self._node_info_dict = None
        self.callable_task = None
        self.test_vars = None
        self.default_timeout = None
    def reset(self, **kwargs):
        """
        reset all attributes based on various layers of default dicts
        including new default added in subclasses
        """
        default_values_map, default_callables_map = self._get_defaults()
        for name, default_value in list(default_values_map.items()):
            # plain values: kwargs override the layered defaults
            setattr(self, name, kwargs.pop(name, default_value))
        for name, func in list(default_callables_map.items()):
            # NOTE(review): a try/except around this pop (falling back to
            # calling ``func()``) is implied by ``func`` being bound above
            # but is not visible in this view.
            value = kwargs.pop(name)
            setattr(self, name, value)
    def do_install(self, playbook, directory):
        """Render the inventory, then run *playbook* from *directory*."""
        # TODO: how to get openstack nodes from Heat
        self.gen_inventory_ini_dict()
        self.execute_ansible(playbook, directory)
    # NOTE(review): @property / @deploy_dir.setter decorators are implied
    # by this getter/setter pair but are not visible in this view.
    def deploy_dir(self):
        # fail fast if the caller never configured the deploy dir
        if self._deploy_dir is _LOCAL_DEFAULT:
            raise ValueError('Deploy dir must be set before using it')
        return self._deploy_dir

    def deploy_dir(self, value):
        self._deploy_dir = value
        # NOTE(review): the ``@property`` / ``def node_dict(self):`` header
        # lines are not visible in this view; this is a cached-property body.
        if not self._node_dict:
            # build once from the raw node records, then reuse
            self._node_dict = AnsibleNodeDict(self.NODE_CLASS, self.nodes)
            LOG.debug("node_dict = \n%s", self._node_dict)
        return self._node_dict
    def gen_inventory_ini_dict(self):
        """Build self.inventory_dict from the node dict (idempotent)."""
        if self.inventory_dict and isinstance(self.inventory_dict,
            # NOTE(review): condition continuation and the early ``return``
            # for the already-built case are not visible in this view
        node_dict = self.node_dict
        # add all nodes to 'node' group and specify full parameter there
        self.inventory_dict = {
            "nodes": node_dict.gen_all_inventory_lines()
        # NOTE(review): closing ``}`` is not visible in this view
        # place nodes into ansible groups according to their role
        # using just node name
        self.inventory_dict.update(node_dict.gen_inventory_groups())
    # NOTE(review): an @staticmethod decorator is implied (no ``self``)
    # but is not visible in this view.
    def ansible_env(directory, log_file):
        # have to overload here in the env because we can't modify local.conf
        # (copy os.environ, then overlay ansible-specific variables)
        ansible_dict = dict(os.environ, **{
            "ANSIBLE_LOG_PATH": os.path.join(directory, log_file),
            "ANSIBLE_LOG_BASE": directory,
            "ANSIBLE_ROLES_PATH": consts.ANSIBLE_ROLES_PATH,
            # # required for SSH to work
            # "ANSIBLE_SSH_ARGS": "-o UserKnownHostsFile=/dev/null "
            # "-o GSSAPIAuthentication=no "
            # "-o PreferredAuthentications=password "
            # "-o ControlMaster=auto "
            # "-o ControlPersist=60s",
            # "ANSIBLE_HOST_KEY_CHECKING": "False",
            # "ANSIBLE_SSH_PIPELINING": "True",
        # NOTE(review): the closing ``})`` and ``return ansible_dict`` are
        # not visible in this view
    def _gen_ansible_playbook_file(self, playbooks, directory, prefix='tmp'):
        """Build a context manager that yields a playbook filename."""
        # check what is passed in playbooks
        if isinstance(playbooks, (list, tuple)):
            if len(playbooks) == 1:
                # list or tuple with one member -> take it
                playbooks = playbooks[0]
            # NOTE(review): an ``else:`` is expected before the include-list
            # rewrite below but is not visible in this view
                playbooks = [{'include': playbook} for playbook in playbooks]
        prefix = '_'.join([self.prefix, prefix, 'playbook'])
        yml_temp_file = YmlTemporaryFile(directory=directory, prefix=prefix)
        write_func = partial(yaml.safe_dump, playbooks,
                             default_flow_style=False,
                             # NOTE(review): continuation/closing of this
                             # call is not visible in this view
        return yml_temp_file.make_context(playbooks, write_func,
                                          descriptor='playbooks')
466 def _gen_ansible_inventory_file(self, directory, prefix='tmp'):
467 def write_func(data_file):
468 overwrite_dict_to_cfg(inventory_config, self.inventory_dict)
469 debug_inventory = StringIO()
470 inventory_config.write(debug_inventory)
471 LOG.debug("inventory = \n%s", debug_inventory.getvalue())
472 inventory_config.write(data_file)
474 prefix = '_'.join([self.prefix, prefix, 'inventory'])
475 ini_temp_file = IniMapTemporaryFile(directory=directory, prefix=prefix)
476 inventory_config = ConfigParser.ConfigParser(allow_no_value=True)
477 # disable default lowercasing
478 inventory_config.optionxform = str
479 return ini_temp_file.make_context(self.inventory_dict, write_func,
480 descriptor='inventory')
482 def _gen_ansible_extra_vars(self, extra_vars, directory, prefix='tmp'):
483 if not isinstance(extra_vars, MutableMapping):
484 extra_vars = self.test_vars
485 prefix = '_'.join([self.prefix, prefix, 'extra_vars'])
486 # use JSON because Python YAML serializes unicode wierdly
487 json_temp_file = JsonTemporaryFile(directory=directory, prefix=prefix)
488 write_func = partial(json.dump, extra_vars, indent=4)
489 return json_temp_file.make_context(extra_vars, write_func,
490 descriptor='extra_vars')
    def _gen_log_names(self, directory, prefix, playbook_filename):
        """Derive the (execute, check) log file names for one playbook run."""
        generator = FileNameGenerator.get_generator_from_filename(
            playbook_filename, directory, self.prefix, prefix)
        return generator.make('execute.log'), generator.make(
            # NOTE(review): the second argument (check-log name) and the
            # closing paren are not visible in this view
    # NOTE(review): an @staticmethod decorator is implied (no ``self``)
    # but is not visible in this view.
    def get_timeout(*timeouts):
        """Pick the first usable timeout value from *timeouts*."""
        for timeout in timeouts:
            # NOTE(review): the ``try:`` line and the positive-value
            # ``return`` between these lines are not visible in this view
            timeout = float(timeout)
            except (TypeError, ValueError):
            # NOTE(review): handler body and the final fallback return are
            # not visible in this view
    def execute_ansible(self, playbooks, directory, timeout=None,
                        extra_vars=None, ansible_check=False, prefix='tmp',
        # NOTE(review): remainder of the signature (closing paren, possibly
        # more parameters) is not visible in this view
        # there can be three types of dirs:
        # log dir: can be anywhere
        # inventory dir: can be anywhere
        # playbook dir: use include to point to files in consts.ANSIBLE_DIR
        if not os.path.isdir(directory):
            raise OSError("Not a directory, %s" % directory)
        timeout = self.get_timeout(timeout, self.default_timeout)
        # NOTE(review): the ``self.counter`` increment is not visible here
        self.prefix = self.OUTFILE_PREFIX_TEMPLATE.format(self.counter)
        # NOTE(review): the trailing prefix-continuation arguments of the
        # three generator calls below are not visible in this view
        playbook_ctx = self._gen_ansible_playbook_file(playbooks, directory,
        inventory_ctx = self._gen_ansible_inventory_file(directory,
        extra_vars_ctx = self._gen_ansible_extra_vars(extra_vars, directory,
        with playbook_ctx as playbook_filename, \
                inventory_ctx as inventory_filename, \
                extra_vars_ctx as extra_vars_filename:
            # NOTE(review): construction of the ``cmd`` argument list
            # (ansible-playbook, --syntax-check, inventory flags) is not
            # visible in this view; orphaned fragments follow.
            if extra_vars_filename is not None:
                # pass the JSON extra-vars file by reference
                "@{}".format(extra_vars_filename),
            cmd.append(playbook_filename)

            log_file_main, log_file_checks = self._gen_log_names(
                directory, prefix, playbook_filename)

            # first pass: syntax check, logged to the checks file
            LOG.debug('log file checks: %s', log_file_checks)
            'env': self.ansible_env(directory, log_file_checks),
            # TODO: add timeout support of use subprocess32 backport
            # 'timeout': timeout / 2,
            with Timer() as timer:
                proc = Popen(cmd, stdout=PIPE, **exec_args)
                output, _ = proc.communicate()
                retcode = proc.wait()
            LOG.debug("exit status = %s", retcode)
            # NOTE(review): an ``if retcode != 0:`` guard is implied before
            # this raise but is not visible in this view
                raise CalledProcessError(retcode, cmd, output)
            # charge the syntax-check time against the remaining budget
            timeout -= timer.total_seconds()

            # second pass: the real run, without --syntax-check
            cmd.remove("--syntax-check")
            LOG.debug('log file main: %s', log_file_main)
            'env': self.ansible_env(directory, log_file_main),
            # TODO: add timeout support of use subprocess32 backport
            # 'timeout': timeout,
            proc = Popen(cmd, stdout=PIPE, **exec_args)
            output, _ = proc.communicate()
            retcode = proc.wait()
            LOG.debug("exit status = %s", retcode)
            # NOTE(review): an ``if retcode != 0:`` guard is implied before
            # this raise; the method may continue past this view
                raise CalledProcessError(retcode, cmd, output)