# Copyright (c) 2016-2017 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
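
# Helpers for driving Ansible from yardstick: temporary playbook/inventory/
# extra-vars files, inventory generation from node dicts, and subprocess
# execution of ansible-playbook with per-run log files.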

from __future__ import absolute_import

import cgitb
import collections
import contextlib as cl
import json
import logging
import os
from collections import Mapping, MutableMapping, Iterable, Callable, deque
from functools import partial
from itertools import chain
from subprocess import CalledProcessError, Popen, PIPE
from tempfile import NamedTemporaryFile

import six
import six.moves.configparser as ConfigParser
import yaml
from six import StringIO
from chainmap import ChainMap
from oslo_serialization import jsonutils

from yardstick.common.utils import Timer
from yardstick.common import constants as consts

cgitb.enable(format="text")

_LOCAL_DEFAULT = object()

LOG = logging.getLogger(__name__)


def overwrite_dict_to_cfg(cfg, cfg_dict):
    for section in cfg_dict:
        # delete then re-add each section so stale options are dropped
        cfg.remove_section(section)
        cfg.add_section(section)
    for section, val in cfg_dict.items():
        if isinstance(val, six.string_types):
            # single string -> option with no value
            cfg.set(section, val)
        elif isinstance(val, collections.Mapping):
            for k, v in val.items():
                cfg.set(section, k, v)
        else:
            # iterable of strings (e.g. inventory lines) -> options with no value
            for v in val:
                cfg.set(section, v)
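
# Context-manager helpers: the CustomTemporaryFile subclasses below wrap an
# existing filename, an open file object, or in-memory data (list/dict) and
# yield a concrete file name that ansible-playbook can consume.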

class TempfileContext(object):
    @staticmethod
    def _try_get_filename_from_file(param):
        try:
            if isinstance(param.read, Callable):
                # file-like object -> use its name
                return param.name
        except AttributeError:
            pass
        # return what was given
        return param

    def __init__(self, data, write_func, descriptor, data_types, directory,
                 prefix, suffix, creator):
        super(TempfileContext, self).__init__()
        self.data = data
        self.write_func = write_func
        self.descriptor = descriptor
        self.data_types = data_types
        self.directory = directory
        self.prefix = prefix
        self.suffix = suffix
        self.creator = creator
        self.data_file = None

    def __enter__(self):
        self.data = self._try_get_filename_from_file(self.data)
        if isinstance(self.data, six.string_types):
            # string -> playbook filename directly
            data_filename = self.data
        elif isinstance(self.data, self.data_types):
            # list of playbooks -> put into a temporary playbook file
            if self.prefix:
                self.prefix = self.prefix.rstrip('_')
            data_filename = ''.join([self.prefix, self.suffix])
            if self.directory:
                data_filename = os.path.join(self.directory, data_filename)
            if not os.path.exists(data_filename):
                self.data_file = open(data_filename, 'w+')
            else:
                self.data_file = self.creator()
            self.write_func(self.data_file)
            self.data_file.flush()
            self.data_file.seek(0)
        else:
            # data not passed properly -> error
            LOG.error("%s type not recognized: %s", self.descriptor, self.data)
            raise ValueError("{} type not recognized".format(self.descriptor))

        LOG.debug("%s file : %s", self.descriptor, data_filename)

        return data_filename

    def __exit__(self, exc_type, exc_val, exc_tb):
        if self.data_file:
            self.data_file.close()


class CustomTemporaryFile(object):
    DEFAULT_SUFFIX = None
    DEFAULT_DATA_TYPES = None

    def __init__(self, directory, prefix, suffix=_LOCAL_DEFAULT,
                 data_types=_LOCAL_DEFAULT):
        super(CustomTemporaryFile, self).__init__()
        self.directory = directory
        self.prefix = prefix
        if suffix is not _LOCAL_DEFAULT:
            self.suffix = suffix
        else:
            self.suffix = self.DEFAULT_SUFFIX
        if data_types is not _LOCAL_DEFAULT:
            self.data_types = data_types
        else:
            self.data_types = self.DEFAULT_DATA_TYPES
        # must open "w+" so unicode is encoded correctly
        self.creator = partial(NamedTemporaryFile, mode="w+", delete=False,
                               dir=directory, prefix=prefix,
                               suffix=self.suffix)

    def make_context(self, data, write_func, descriptor='data'):
        return TempfileContext(data, write_func, descriptor, self.data_types,
                               self.directory, self.prefix, self.suffix,
                               self.creator)


class ListTemporaryFile(CustomTemporaryFile):
    DEFAULT_DATA_TYPES = (list, tuple)


class MapTemporaryFile(CustomTemporaryFile):
    DEFAULT_DATA_TYPES = dict


class YmlTemporaryFile(ListTemporaryFile):
    DEFAULT_SUFFIX = '.yml'


class IniListTemporaryFile(ListTemporaryFile):
    DEFAULT_SUFFIX = '.ini'


class IniMapTemporaryFile(MapTemporaryFile):
    DEFAULT_SUFFIX = '.ini'


class JsonTemporaryFile(MapTemporaryFile):
    DEFAULT_SUFFIX = '.json'
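
# FileNameGenerator assembles log/output file names from a list of name
# parts and falls back to a NamedTemporaryFile when the target already exists.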

class FileNameGenerator(object):
    @staticmethod
    def get_generator_from_filename(filename, directory, prefix, middle):
        basename = os.path.splitext(os.path.basename(filename))[0]
        if not basename.startswith(prefix):
            part_list = [prefix, middle, basename]
        elif not middle or middle in basename:
            part_list = [basename]
        else:
            part_list = [middle, basename]
        return FileNameGenerator(directory=directory, part_list=part_list)

    @staticmethod
    def _handle_existing_file(filename):
        if not os.path.exists(filename):
            return filename

        # file already exists -> generate a unique name alongside it
        prefix, suffix = os.path.splitext(os.path.basename(filename))
        directory = os.path.dirname(filename)
        if not prefix.endswith('_'):
            prefix += '_'

        temp_file = NamedTemporaryFile(delete=False, dir=directory,
                                       prefix=prefix, suffix=suffix)
        with cl.closing(temp_file):
            return temp_file.name

    def __init__(self, directory, part_list):
        super(FileNameGenerator, self).__init__()
        self.directory = directory
        self.part_list = part_list

    def make(self, extra):
        if not isinstance(extra, Iterable) or isinstance(extra,
                                                         six.string_types):
            extra = (extra,)  # wrap the singleton in an iterable
        return self._handle_existing_file(
            os.path.join(
                self.directory,
                '_'.join(chain(self.part_list, extra))
            )
        )


class AnsibleNodeDict(Mapping):
    def __init__(self, node_class, nodes):
        super(AnsibleNodeDict, self).__init__()
        # create a dict of name, Node instance
        self.node_dict = {k: v for k, v in
                          (node_class(node).get_tuple() for node in nodes)}
        # collect all the node roles
        self.node_roles = set(
            n['role'] for n in six.itervalues(self.node_dict))

    def __repr__(self):
        return repr(self.node_dict)

    def __len__(self):
        return len(self.node_dict)

    def __getitem__(self, item):
        return self.node_dict[item]

    def __iter__(self):
        return iter(self.node_dict)

    def iter_all_of_type(self, node_type, default=_LOCAL_DEFAULT):
        return (node for node in six.itervalues(self) if
                node.is_role(node_type, default))

    def gen_inventory_lines_for_all_of_type(self, node_type,
                                            default=_LOCAL_DEFAULT):
        return [node.gen_inventory_line() for node in
                self.iter_all_of_type(node_type, default)]

    def gen_all_inventory_lines(self):
        return [node.gen_inventory_line() for node in
                six.itervalues(self.node_dict)]

    def gen_inventory_groups(self):
        # lowercase group names
        return {role.lower(): [node['name'] for
                               node in self.iter_all_of_type(role)]
                for role in self.node_roles}
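
# AnsibleNode wraps a yardstick node dict and maps its keys (ip, user,
# ssh_port, password, key_filename) onto the corresponding Ansible
# inventory variables.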

class AnsibleNode(MutableMapping):
    ANSIBLE_NODE_KEY_MAP = {
        u'ansible_host': 'ip',
        u'ansible_user': 'user',
        u'ansible_port': 'ssh_port',
        u'ansible_ssh_pass': 'password',
        u'ansible_ssh_private_key_file': 'key_filename',
    }

    def __init__(self, data=None, **kwargs):
        super(AnsibleNode, self).__init__()
        if isinstance(data, MutableMapping):
            self.data = data
        else:
            self.data = kwargs

    def __repr__(self):
        return 'AnsibleNode<{}>'.format(self.data)

    def __len__(self):
        return len(self.data)

    def __iter__(self):
        return iter(self.data)

    @property
    def node_key_map(self):
        return self.ANSIBLE_NODE_KEY_MAP

    def get_inventory_params(self):
        node_key_map = self.node_key_map
        # password or key_filename may not be present
        return {inventory_key: self[self_key] for inventory_key, self_key in
                node_key_map.items() if self_key in self}

    def is_role(self, node_type, default=_LOCAL_DEFAULT):
        if default is not _LOCAL_DEFAULT:
            return self.setdefault('role', default) in node_type
        return node_type in self.get('role', set())

    def gen_inventory_line(self):
        inventory_params = self.get_inventory_params()
        # use format to convert ints
        # sort to ensure consistent key value ordering
        formatted_args = (u"{}={}".format(*entry) for entry in
                          sorted(inventory_params.items()))
        line = u" ".join(chain([self['name']], formatted_args))
        return line

    def get_tuple(self):
        return self['name'], self

    def __contains__(self, key):
        return self.data.__contains__(key)

    def __getitem__(self, item):
        return self.data[item]

    def __setitem__(self, key, value):
        self.data[key] = value

    def __delitem__(self, key):
        del self.data[key]

    def __getattr__(self, item):
        return getattr(self.data, item)
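
# AnsibleCommon ties the pieces together: it builds an inventory from the
# node dict, writes temporary playbook/inventory/extra-vars files and runs
# ansible-playbook as a subprocess, logging to per-run files.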

class AnsibleCommon(object):
    NODE_CLASS = AnsibleNode
    OUTFILE_PREFIX_TEMPLATE = 'ansible_{:02}'

    __DEFAULT_VALUES_MAP = {
        'default_timeout': 1200,
        # default 10 min ansible timeout for non-main calls
        'ansible_timeout': 600,
        'scripts_dest': None,
        '_deploy_dir': _LOCAL_DEFAULT,
    }

    __DEFAULT_CALLABLES_MAP = {
        'inventory_dict': dict,
        '_node_info_dict': dict,
    }

    @classmethod
    def _get_defaults(cls):
        # subclasses will override to change defaults using the ChainMap
        values_map_deque, defaults_map_deque = cls._get_defaults_map_deques()
        return ChainMap(*values_map_deque), ChainMap(*defaults_map_deque)

    @classmethod
    def _get_defaults_map_deques(cls):
        # deque so we can insert or append easily
        return (deque([cls.__DEFAULT_VALUES_MAP]),
                deque([cls.__DEFAULT_CALLABLES_MAP]))

    def __init__(self, nodes, **kwargs):
        # TODO: add default Heat vars
        super(AnsibleCommon, self).__init__()
        self.nodes = nodes
        self.counter = 0
        self.prefix = ''
        # default 10 min ansible timeout for non-main calls
        self.ansible_timeout = 600
        self.inventory_dict = None
        self.scripts_dest = None
        self._deploy_dir = _LOCAL_DEFAULT
        self._node_dict = None
        self._node_info_dict = None
        self.callable_task = None
        self.test_vars = None
        self.default_timeout = None
        self.reset(**kwargs)

    def reset(self, **kwargs):
        """
        reset all attributes based on various layers of default dicts
        including new defaults added in subclasses
        """
        default_values_map, default_callables_map = self._get_defaults()
        for name, default_value in list(default_values_map.items()):
            setattr(self, name, kwargs.pop(name, default_value))

        for name, func in list(default_callables_map.items()):
            try:
                value = kwargs.pop(name)
            except KeyError:
                # not passed in -> use the default factory (usually dict)
                value = func()
            setattr(self, name, value)

    def do_install(self, playbook, directory):
        # TODO: how to get openstack nodes from Heat
        self.gen_inventory_ini_dict()
        self.execute_ansible(playbook, directory)

    @property
    def deploy_dir(self):
        if self._deploy_dir is _LOCAL_DEFAULT:
            raise ValueError('Deploy dir must be set before using it')
        return self._deploy_dir

    @deploy_dir.setter
    def deploy_dir(self, value):
        self._deploy_dir = value

    @property
    def node_dict(self):
        if not self._node_dict:
            self._node_dict = AnsibleNodeDict(self.NODE_CLASS, self.nodes)
            LOG.debug("node_dict = \n%s", self._node_dict)
        return self._node_dict

    def gen_inventory_ini_dict(self):
        if self.inventory_dict and isinstance(self.inventory_dict,
                                              MutableMapping):
            # already generated
            return

        node_dict = self.node_dict
        # add all nodes to the 'nodes' group and specify full parameters there
        self.inventory_dict = {
            "nodes": node_dict.gen_all_inventory_lines()
        }
        # place nodes into ansible groups according to their role
        # using just the node name
        self.inventory_dict.update(node_dict.gen_inventory_groups())

    @staticmethod
    def ansible_env(directory, log_file):
        # have to overload here in the env because we can't modify local.conf
        ansible_dict = dict(os.environ, **{
            "ANSIBLE_LOG_PATH": os.path.join(directory, log_file),
            "ANSIBLE_LOG_BASE": directory,
            "ANSIBLE_ROLES_PATH": consts.ANSIBLE_ROLES_PATH,
            # # required for SSH to work
            # "ANSIBLE_SSH_ARGS": "-o UserKnownHostsFile=/dev/null "
            #                     "-o GSSAPIAuthentication=no "
            #                     "-o PreferredAuthentications=password "
            #                     "-o ControlMaster=auto "
            #                     "-o ControlPersist=60s",
            # "ANSIBLE_HOST_KEY_CHECKING": "False",
            # "ANSIBLE_SSH_PIPELINING": "True",
        })
        return ansible_dict

    def _gen_ansible_playbook_file(self, playbooks, directory, prefix='tmp'):
        # check what is passed in playbooks
        if isinstance(playbooks, (list, tuple)):
            if len(playbooks) == 1:
                # list or tuple with one member -> take it
                playbooks = playbooks[0]
            else:
                playbooks = [{'include': playbook} for playbook in playbooks]
        prefix = '_'.join([self.prefix, prefix, 'playbook'])
        yml_temp_file = YmlTemporaryFile(directory=directory, prefix=prefix)
        write_func = partial(yaml.safe_dump, playbooks,
                             default_flow_style=False,
                             explicit_start=True)
        return yml_temp_file.make_context(playbooks, write_func,
                                          descriptor='playbooks')

    def _gen_ansible_inventory_file(self, directory, prefix='tmp'):
        def write_func(data_file):
            overwrite_dict_to_cfg(inventory_config, self.inventory_dict)
            debug_inventory = StringIO()
            inventory_config.write(debug_inventory)
            LOG.debug("inventory = \n%s", debug_inventory.getvalue())
            inventory_config.write(data_file)

        prefix = '_'.join([self.prefix, prefix, 'inventory'])
        ini_temp_file = IniMapTemporaryFile(directory=directory, prefix=prefix)
        inventory_config = ConfigParser.ConfigParser(allow_no_value=True)
        # disable default lowercasing
        inventory_config.optionxform = str
        return ini_temp_file.make_context(self.inventory_dict, write_func,
                                          descriptor='inventory')

    def _gen_ansible_extra_vars(self, extra_vars, directory, prefix='tmp'):
        if not isinstance(extra_vars, MutableMapping):
            extra_vars = self.test_vars
        prefix = '_'.join([self.prefix, prefix, 'extra_vars'])
        # use JSON because Python YAML serializes unicode weirdly
        json_temp_file = JsonTemporaryFile(directory=directory, prefix=prefix)
        write_func = partial(json.dump, extra_vars, indent=4)
        return json_temp_file.make_context(extra_vars, write_func,
                                           descriptor='extra_vars')

    def _gen_log_names(self, directory, prefix, playbook_filename):
        generator = FileNameGenerator.get_generator_from_filename(
            playbook_filename, directory, self.prefix, prefix)
        return generator.make('execute.log'), generator.make(
            'syntax_check.log')

    @staticmethod
    def get_timeout(*timeouts):
        for timeout in timeouts:
            try:
                timeout = float(timeout)
                if timeout > 0:
                    break
            except (TypeError, ValueError):
                pass
        else:
            # no usable timeout given -> fall back to the module default
            timeout = 1200.0
        return timeout

    def _generate_ansible_cfg(self, directory):
        parser = ConfigParser.ConfigParser()
        parser.add_section('defaults')
        parser.set('defaults', 'host_key_checking', 'False')

        cfg_path = os.path.join(directory, 'ansible.cfg')
        with open(cfg_path, 'w') as f:
            parser.write(f)

    def get_sut_info(self, directory, sut_dir='sut'):
        if not os.path.isdir(directory):
            raise OSError('No such directory: %s' % directory)

        self._generate_ansible_cfg(directory)

        prefix = 'tmp'
        self.gen_inventory_ini_dict()
        ini_file = self._gen_ansible_inventory_file(directory, prefix=prefix)
        with ini_file as f:
            inventory_path = str(f)
            self._exec_get_sut_info_cmd(directory, inventory_path, sut_dir)

        sut_dir = os.path.join(directory, sut_dir)
        sut_info = self._gen_sut_info_dict(sut_dir)

        return sut_info

    def _exec_get_sut_info_cmd(self, directory, inventory_path, sut_dir):
        cmd = ['ansible', 'all', '-m', 'setup', '-i',
               inventory_path, '--tree', sut_dir]

        proc = Popen(cmd, stdout=PIPE, cwd=directory)
        output, _ = proc.communicate()
        retcode = proc.wait()
        LOG.debug("exit status = %s", retcode)
        if retcode != 0:
            raise CalledProcessError(retcode, cmd, output)

    def _gen_sut_info_dict(self, sut_dir):
        sut_info = {}

        if os.path.isdir(sut_dir):
            root, _, files = next(os.walk(sut_dir))
            for filename in files:
                abs_path = os.path.join(root, filename)
                with open(abs_path) as f:
                    data = jsonutils.load(f)
                sut_info[filename] = data

        return sut_info

    def execute_ansible(self, playbooks, directory, timeout=None,
                        extra_vars=None, ansible_check=False, prefix='tmp',
                        verbose=False):
        # there can be three types of dirs:
        #   log dir: can be anywhere
        #   inventory dir: can be anywhere
        #   playbook dir: use include to point to files in consts.ANSIBLE_DIR

        if not os.path.isdir(directory):
            raise OSError("Not a directory, %s" % directory)
        timeout = self.get_timeout(timeout, self.default_timeout)

        self.counter += 1
        self.prefix = self.OUTFILE_PREFIX_TEMPLATE.format(self.counter)

        playbook_ctx = self._gen_ansible_playbook_file(playbooks, directory,
                                                       prefix)
        inventory_ctx = self._gen_ansible_inventory_file(directory,
                                                         prefix=prefix)
        extra_vars_ctx = self._gen_ansible_extra_vars(extra_vars, directory,
                                                      prefix=prefix)

        with playbook_ctx as playbook_filename, \
                inventory_ctx as inventory_filename, \
                extra_vars_ctx as extra_vars_filename:
            cmd = [
                "ansible-playbook",
                "--syntax-check",
                "-i",
                inventory_filename,
            ]
            if verbose:
                cmd.append('-vvv')
            if extra_vars_filename is not None:
                cmd.extend([
                    "-e",
                    "@{}".format(extra_vars_filename),
                ])
            cmd.append(playbook_filename)

            log_file_main, log_file_checks = self._gen_log_names(
                directory, prefix, playbook_filename)

            exec_args = {
                'cwd': directory,
                'shell': False,
            }

            if ansible_check:
                LOG.debug('log file checks: %s', log_file_checks)
                exec_args.update({
                    'env': self.ansible_env(directory, log_file_checks),
                    # TODO: add timeout support or use subprocess32 backport
                    # 'timeout': timeout / 2,
                })
                with Timer() as timer:
                    proc = Popen(cmd, stdout=PIPE, **exec_args)
                    output, _ = proc.communicate()
                    retcode = proc.wait()
                LOG.debug("exit status = %s", retcode)
                if retcode != 0:
                    raise CalledProcessError(retcode, cmd, output)
                timeout -= timer.total_seconds()

            cmd.remove("--syntax-check")
            LOG.debug('log file main: %s', log_file_main)
            exec_args.update({
                'env': self.ansible_env(directory, log_file_main),
                # TODO: add timeout support or use subprocess32 backport
                # 'timeout': timeout,
            })
            proc = Popen(cmd, stdout=PIPE, **exec_args)
            output, _ = proc.communicate()
            retcode = proc.wait()
            LOG.debug("exit status = %s", retcode)
            if retcode != 0:
                raise CalledProcessError(retcode, cmd, output)
            return output
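
# Typical usage (a minimal sketch; the node contents and playbook path are
# illustrative only):
#
#     common = AnsibleCommon(nodes)
#     common.gen_inventory_ini_dict()
#     common.execute_ansible('deploy.yml', '/tmp/ansible_run', timeout=600)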