1 # Copyright (c) 2016-2017 Intel Corporation
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
7 # http://www.apache.org/licenses/LICENSE-2.0
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
17 import contextlib as cl
21 from collections import Mapping, MutableMapping, Iterable, Callable, deque
22 from functools import partial
23 from itertools import chain
28 from six.moves import configparser
30 from six import StringIO
31 from chainmap import ChainMap
32 from oslo_serialization import jsonutils
34 from yardstick.common.utils import Timer
35 from yardstick.common import constants as consts
# enable plain-text (non-HTML) tracebacks for easier crash debugging
cgitb.enable(format="text")

# sentinel object used to distinguish "argument not passed" from None
_LOCAL_DEFAULT = object()

LOG = logging.getLogger(__name__)
def overwrite_dict_to_cfg(cfg, cfg_dict):
    """Overwrite the sections of *cfg* with the contents of *cfg_dict*.

    :param cfg: ConfigParser-like object (supports remove_section,
        add_section and set)
    :param cfg_dict: mapping of section name -> value; a value may be a
        plain string, a mapping of option -> value, or an iterable of bare
        option names (written with empty values, inventory-style)
    """
    for section in cfg_dict:
        # delete then add so every section starts out empty
        cfg.remove_section(section)
        cfg.add_section(section)
    for section, val in cfg_dict.items():
        if isinstance(val, six.string_types):
            # single string value -> store under a generic "name" option
            # NOTE(review): reconstructed from a gap in the listing — confirm
            # the option key against upstream
            cfg.set(section, "name", val)
        elif isinstance(val, collections.Mapping):
            # mapping -> one option per key/value pair
            for k, v in val.items():
                cfg.set(section, k, v)
        else:
            # iterable -> bare option names with empty values
            for v in val:
                cfg.set(section, v, '')
class TempfileContext(object):
    """Context manager that yields a filename containing *data*.

    If *data* is already a filename (or an open file object), that name is
    used directly; if it is an instance of *data_types* it is serialized by
    *write_func* into a file created on the fly.
    """

    @staticmethod
    def _try_get_filename_from_file(param):
        # file-like objects expose a callable .read and a .name attribute
        try:
            if isinstance(param.read, Callable):
                return param.name
        except AttributeError:
            pass
        # return what was given
        return param

    def __init__(self, data, write_func, descriptor, data_types, directory,
                 prefix, suffix, creator):
        super(TempfileContext, self).__init__()
        self.data = data
        self.write_func = write_func
        self.descriptor = descriptor
        self.data_types = data_types
        self.directory = directory
        self.prefix = prefix
        self.suffix = suffix
        self.creator = creator
        # set when we open/create a file so __exit__ knows to close it
        self.data_file = None

    def __enter__(self):
        self.data = self._try_get_filename_from_file(self.data)
        if isinstance(self.data, six.string_types):
            # string -> playbook filename directly
            data_filename = self.data
        elif isinstance(self.data, self.data_types):
            # list of playbooks -> put into a temporary playbook file
            if self.prefix:
                self.prefix = self.prefix.rstrip('_')
            data_filename = ''.join([self.prefix, self.suffix])
            if self.directory:
                data_filename = os.path.join(self.directory, data_filename)
            if not os.path.exists(data_filename):
                self.data_file = open(data_filename, 'w+')
            else:
                # name already taken -> fall back to a unique temporary file
                self.data_file = self.creator()
                # fix: report the file actually written, not the stale name
                data_filename = self.data_file.name
            self.write_func(self.data_file)
            self.data_file.flush()
            self.data_file.seek(0)
        else:
            # data not passed properly -> error
            LOG.error("%s type not recognized: %s", self.descriptor, self.data)
            raise ValueError("{} type not recognized".format(self.descriptor))

        LOG.debug("%s file : %s", self.descriptor, data_filename)
        return data_filename

    def __exit__(self, exc_type, exc_val, exc_tb):
        # only close a file we actually opened/created
        if self.data_file:
            self.data_file.close()
class CustomTemporaryFile(object):
    """Factory for TempfileContext objects with per-subclass defaults.

    Subclasses pin DEFAULT_SUFFIX (file extension) and DEFAULT_DATA_TYPES
    (which Python types are serialized into a temp file).
    """

    DEFAULT_SUFFIX = None
    DEFAULT_DATA_TYPES = None

    def __init__(self, directory, prefix, suffix=_LOCAL_DEFAULT,
                 data_types=_LOCAL_DEFAULT):
        super(CustomTemporaryFile, self).__init__()
        self.directory = directory
        self.prefix = prefix
        # _LOCAL_DEFAULT sentinel lets callers pass None explicitly
        if suffix is not _LOCAL_DEFAULT:
            self.suffix = suffix
        else:
            self.suffix = self.DEFAULT_SUFFIX
        if data_types is not _LOCAL_DEFAULT:
            self.data_types = data_types
        else:
            self.data_types = self.DEFAULT_DATA_TYPES
        # must open "w+" so unicode is encoded correctly
        self.creator = partial(
            tempfile.NamedTemporaryFile, mode="w+", delete=False,
            dir=directory, prefix=prefix, suffix=self.suffix)

    def make_context(self, data, write_func, descriptor='data'):
        """Build a TempfileContext bound to this factory's settings."""
        return TempfileContext(data, write_func, descriptor, self.data_types,
                               self.directory, self.prefix, self.suffix,
                               self.creator)
class ListTemporaryFile(CustomTemporaryFile):
    """Temporary file whose payload is a list or tuple."""
    DEFAULT_DATA_TYPES = (list, tuple)
class MapTemporaryFile(CustomTemporaryFile):
    """Temporary file whose payload is a dict."""
    DEFAULT_DATA_TYPES = dict
class YmlTemporaryFile(ListTemporaryFile):
    """List-payload temporary file written with a .yml extension."""
    DEFAULT_SUFFIX = '.yml'
class IniListTemporaryFile(ListTemporaryFile):
    """List-payload temporary file written with a .ini extension."""
    DEFAULT_SUFFIX = '.ini'
class IniMapTemporaryFile(MapTemporaryFile):
    """Dict-payload temporary file written with a .ini extension."""
    DEFAULT_SUFFIX = '.ini'
class JsonTemporaryFile(MapTemporaryFile):
    """Dict-payload temporary file written with a .json extension."""
    DEFAULT_SUFFIX = '.json'
class FileNameGenerator(object):
    """Generate collision-free file names from a directory and name parts."""

    @staticmethod
    def get_generator_from_filename(filename, directory, prefix, middle):
        """Seed a generator from an existing filename.

        The resulting part_list avoids repeating *prefix* or *middle* if the
        basename already contains them.
        """
        basename = os.path.splitext(os.path.basename(filename))[0]
        if not basename.startswith(prefix):
            # prefix missing entirely -> use all three parts
            part_list = [prefix, middle, basename]
        elif not middle or middle in basename:
            # middle already embedded (or not wanted) -> basename suffices
            part_list = [basename]
        else:
            part_list = [middle, basename]
        return FileNameGenerator(directory=directory, part_list=part_list)

    @staticmethod
    def _handle_existing_file(filename):
        """Return *filename*, or a unique sibling name if it already exists."""
        if not os.path.exists(filename):
            return filename

        prefix, suffix = os.path.splitext(os.path.basename(filename))
        directory = os.path.dirname(filename)
        if not prefix.endswith('_'):
            prefix += '_'
        # NamedTemporaryFile guarantees uniqueness; close it right away and
        # just keep the name
        temp_file = tempfile.NamedTemporaryFile(delete=False, dir=directory,
                                                prefix=prefix, suffix=suffix)
        with cl.closing(temp_file):
            return temp_file.name

    def __init__(self, directory, part_list):
        super(FileNameGenerator, self).__init__()
        self.directory = directory
        self.part_list = part_list

    def make(self, extra):
        """Join the stored parts plus *extra* into a unique path."""
        if not isinstance(extra, Iterable) or isinstance(extra,
                                                         six.string_types):
            extra = (extra,)  # wrap the singleton in an iterable
        return self._handle_existing_file(
            os.path.join(
                self.directory,
                '_'.join(chain(self.part_list, extra))
            ))
class AnsibleNodeDict(Mapping):
    """Read-only mapping of node name -> node_class instance.

    Also collects the set of roles seen across all nodes for grouping.
    """

    def __init__(self, node_class, nodes):
        super(AnsibleNodeDict, self).__init__()
        # create a dict of name, Node instance
        self.node_dict = {k: v for k, v in
                          (node_class(node).get_tuple() for node in
                           nodes)}
        # collect all the node roles
        self.node_roles = set(
            n['role'] for n in six.itervalues(self.node_dict))

    def __repr__(self):
        return repr(self.node_dict)

    def __len__(self):
        return len(self.node_dict)

    def __getitem__(self, item):
        return self.node_dict[item]

    def __iter__(self):
        return iter(self.node_dict)

    def iter_all_of_type(self, node_type, default=_LOCAL_DEFAULT):
        """Yield the nodes whose role matches *node_type*."""
        return (node for node in six.itervalues(self) if
                node.is_role(node_type, default))

    def gen_inventory_lines_for_all_of_type(self, node_type,
                                            default=_LOCAL_DEFAULT):
        """Inventory lines for just the nodes matching *node_type*."""
        return [node.gen_inventory_line() for node in
                self.iter_all_of_type(node_type, default)]

    def gen_all_inventory_lines(self):
        """Inventory lines for every node."""
        return [node.gen_inventory_line() for node in
                six.itervalues(self.node_dict)]

    def gen_inventory_groups(self):
        """Map each role to the list of node names holding it."""
        # lowercase group names
        return {role.lower(): [node['name'] for
                               node in self.iter_all_of_type(role)]
                for role in self.node_roles}
class AnsibleNode(MutableMapping):
    """Mutable-mapping wrapper around a node dict with Ansible helpers."""

    # Ansible inventory parameter name -> node-dict key
    ANSIBLE_NODE_KEY_MAP = {
        u'ansible_host': 'ip',
        u'ansible_user': 'user',
        u'ansible_port': 'ssh_port',
        u'ansible_ssh_pass': 'password',
        u'ansible_ssh_private_key_file': 'key_filename',
    }

    def __init__(self, data=None, **kwargs):
        super(AnsibleNode, self).__init__()
        # wrap the given mapping directly, otherwise build one from kwargs
        if isinstance(data, MutableMapping):
            self.data = data
        else:
            self.data = kwargs

    def __repr__(self):
        return 'AnsibleNode<{}>'.format(self.data)

    def __len__(self):
        return len(self.data)

    def __iter__(self):
        return iter(self.data)

    @property
    def node_key_map(self):
        return self.ANSIBLE_NODE_KEY_MAP

    def get_inventory_params(self):
        """Translate node values into Ansible inventory parameters."""
        node_key_map = self.node_key_map
        # password or key_filename may not be present
        return {inventory_key: self[self_key] for inventory_key, self_key in
                node_key_map.items() if self_key in self}

    def is_role(self, node_type, default=_LOCAL_DEFAULT):
        """Check whether this node carries the given role.

        When *default* is supplied it is stored as the role for nodes that
        have none, then membership is tested.
        """
        if default is not _LOCAL_DEFAULT:
            return self.setdefault('role', default) in node_type
        return node_type in self.get('role', set())

    def gen_inventory_line(self):
        """Render 'name key=value ...' for an Ansible INI inventory."""
        inventory_params = self.get_inventory_params()
        # use format to convert ints
        # sort to ensure consistent key value ordering
        formatted_args = (u"{}={}".format(*entry) for entry in
                          sorted(inventory_params.items()))
        line = u" ".join(chain([self['name']], formatted_args))
        return line

    def get_tuple(self):
        """Return (name, self) for building name-keyed dicts."""
        return self['name'], self

    def __contains__(self, key):
        return self.data.__contains__(key)

    def __getitem__(self, item):
        return self.data[item]

    def __setitem__(self, key, value):
        self.data[key] = value

    def __delitem__(self, key):
        del self.data[key]

    def __getattr__(self, item):
        # delegate unknown attributes to the wrapped dict (e.g. .get)
        return getattr(self.data, item)
class AnsibleCommon(object):
    """Common helpers for generating inventories and running Ansible."""

    NODE_CLASS = AnsibleNode
    OUTFILE_PREFIX_TEMPLATE = 'ansible_{:02}'

    # plain-value attribute defaults; overridable via kwargs in reset()
    # NOTE(review): 'counter'/'prefix' entries reconstructed from a gap in
    # the listing — confirm against upstream
    __DEFAULT_VALUES_MAP = {
        'default_timeout': 1200,
        'counter': 0,
        'prefix': '',
        # default 10 min ansible timeout for non-main calls
        'ansible_timeout': 600,
        'scripts_dest': None,
        '_deploy_dir': _LOCAL_DEFAULT,
    }

    # attributes whose defaults are produced by calling a factory
    __DEFAULT_CALLABLES_MAP = {
        'test_vars': dict,
        'inventory_dict': dict,
        '_node_dict': dict,
        '_node_info_dict': dict,
    }
346 def _get_defaults(cls):
347 # subclasses will override to change defaults using the ChainMap
349 values_map_deque, defaults_map_deque = cls._get_defaults_map_deques()
350 return ChainMap(*values_map_deque), ChainMap(*defaults_map_deque)
353 def _get_defaults_map_deques(cls):
354 # deque so we can insert or append easily
355 return (deque([cls.__DEFAULT_VALUES_MAP]),
356 deque([cls.__DEFAULT_CALLABLES_MAP]))
358 def __init__(self, nodes, **kwargs):
359 # TODO: add default Heat vars
360 super(AnsibleCommon, self).__init__()
364 # default 10 min ansible timeout for non-main calls
365 self.ansible_timeout = 600
366 self.inventory_dict = None
367 self.scripts_dest = None
368 self._deploy_dir = _LOCAL_DEFAULT
369 self._node_dict = None
370 self._node_info_dict = None
371 self.callable_task = None
372 self.test_vars = None
373 self.default_timeout = None
376 def reset(self, **kwargs):
378 reset all attributes based on various layers of default dicts
379 including new default added in subclasses
382 default_values_map, default_callables_map = self._get_defaults()
383 for name, default_value in list(default_values_map.items()):
384 setattr(self, name, kwargs.pop(name, default_value))
386 for name, func in list(default_callables_map.items()):
388 value = kwargs.pop(name)
392 setattr(self, name, value)
394 def do_install(self, playbook, directory):
395 # TODO: how to get openstack nodes from Heat
396 self.gen_inventory_ini_dict()
397 self.execute_ansible(playbook, directory)
400 def deploy_dir(self):
401 if self._deploy_dir is _LOCAL_DEFAULT:
402 raise ValueError('Deploy dir must be set before using it')
403 return self._deploy_dir
406 def deploy_dir(self, value):
407 self._deploy_dir = value
411 if not self._node_dict:
412 self._node_dict = AnsibleNodeDict(self.NODE_CLASS, self.nodes)
413 LOG.debug("node_dict = \n%s", self._node_dict)
414 return self._node_dict
416 def gen_inventory_ini_dict(self):
417 if self.inventory_dict and isinstance(self.inventory_dict,
421 node_dict = self.node_dict
422 # add all nodes to 'node' group and specify full parameter there
423 self.inventory_dict = {
424 "nodes": node_dict.gen_all_inventory_lines()
426 # place nodes into ansible groups according to their role
427 # using just node name
428 self.inventory_dict.update(node_dict.gen_inventory_groups())
431 def ansible_env(directory, log_file):
432 # have to overload here in the env because we can't modify local.conf
433 ansible_dict = dict(os.environ, **{
434 "ANSIBLE_LOG_PATH": os.path.join(directory, log_file),
435 "ANSIBLE_LOG_BASE": directory,
436 "ANSIBLE_ROLES_PATH": consts.ANSIBLE_ROLES_PATH,
437 # # required for SSH to work
438 # "ANSIBLE_SSH_ARGS": "-o UserKnownHostsFile=/dev/null "
439 # "-o GSSAPIAuthentication=no "
440 # "-o PreferredAuthentications=password "
441 # "-o ControlMaster=auto "
442 # "-o ControlPersist=60s",
443 # "ANSIBLE_HOST_KEY_CHECKING": "False",
444 # "ANSIBLE_SSH_PIPELINING": "True",
448 def _gen_ansible_playbook_file(self, playbooks, directory, prefix='tmp'):
449 # check what is passed in playbooks
450 if isinstance(playbooks, (list, tuple)):
451 if len(playbooks) == 1:
452 # list or tuple with one member -> take it
453 playbooks = playbooks[0]
455 playbooks = [{'include': playbook} for playbook in playbooks]
456 prefix = '_'.join([self.prefix, prefix, 'playbook'])
457 yml_temp_file = YmlTemporaryFile(directory=directory, prefix=prefix)
458 write_func = partial(yaml.safe_dump, playbooks,
459 default_flow_style=False,
461 return yml_temp_file.make_context(playbooks, write_func,
462 descriptor='playbooks')
464 def _gen_ansible_inventory_file(self, directory, prefix='tmp'):
465 def write_func(data_file):
466 overwrite_dict_to_cfg(inventory_config, self.inventory_dict)
467 debug_inventory = StringIO()
468 inventory_config.write(debug_inventory)
469 LOG.debug("inventory = \n%s", debug_inventory.getvalue())
470 inventory_config.write(data_file)
472 prefix = '_'.join([self.prefix, prefix, 'inventory'])
473 ini_temp_file = IniMapTemporaryFile(directory=directory, prefix=prefix)
474 inventory_config = configparser.ConfigParser(allow_no_value=True)
475 # disable default lowercasing
476 inventory_config.optionxform = str
477 return ini_temp_file.make_context(self.inventory_dict, write_func,
478 descriptor='inventory')
480 def _gen_ansible_extra_vars(self, extra_vars, directory, prefix='tmp'):
481 if not isinstance(extra_vars, MutableMapping):
482 extra_vars = self.test_vars
483 prefix = '_'.join([self.prefix, prefix, 'extra_vars'])
484 # use JSON because Python YAML serializes unicode wierdly
485 json_temp_file = JsonTemporaryFile(directory=directory, prefix=prefix)
486 write_func = partial(json.dump, extra_vars, indent=4)
487 return json_temp_file.make_context(extra_vars, write_func,
488 descriptor='extra_vars')
490 def _gen_log_names(self, directory, prefix, playbook_filename):
491 generator = FileNameGenerator.get_generator_from_filename(
492 playbook_filename, directory, self.prefix, prefix)
493 return generator.make('execute.log'), generator.make(
497 def get_timeout(*timeouts):
498 for timeout in timeouts:
500 timeout = float(timeout)
503 except (TypeError, ValueError):
509 def _generate_ansible_cfg(self, directory):
510 parser = configparser.ConfigParser()
511 parser.add_section('defaults')
512 parser.set('defaults', 'host_key_checking', 'False')
514 cfg_path = os.path.join(directory, 'ansible.cfg')
515 with open(cfg_path, 'w') as f:
518 def get_sut_info(self, directory, sut_dir='sut'):
519 if not os.path.isdir(directory):
520 raise OSError('No such directory: %s' % directory)
522 self._generate_ansible_cfg(directory)
525 self.gen_inventory_ini_dict()
526 ini_file = self._gen_ansible_inventory_file(directory, prefix=prefix)
528 inventory_path = str(f)
530 self._exec_get_sut_info_cmd(directory, inventory_path, sut_dir)
532 sut_dir = os.path.join(directory, sut_dir)
533 sut_info = self._gen_sut_info_dict(sut_dir)
537 def _exec_get_sut_info_cmd(self, directory, inventory_path, sut_dir):
538 cmd = ['ansible', 'all', '-m', 'setup', '-i',
539 inventory_path, '--tree', sut_dir]
541 proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, cwd=directory)
542 output, _ = proc.communicate()
543 retcode = proc.wait()
544 LOG.debug("exit status = %s", retcode)
546 raise subprocess.CalledProcessError(retcode, cmd, output)
548 def _gen_sut_info_dict(self, sut_dir):
551 if os.path.isdir(sut_dir):
552 root, _, files = next(os.walk(sut_dir))
553 for filename in files:
554 abs_path = os.path.join(root, filename)
555 with open(abs_path) as f:
556 data = jsonutils.load(f)
557 sut_info[filename] = data
561 def execute_ansible(self, playbooks, directory, timeout=None,
562 extra_vars=None, ansible_check=False, prefix='tmp',
564 # there can be three types of dirs:
565 # log dir: can be anywhere
566 # inventory dir: can be anywhere
567 # playbook dir: use include to point to files in consts.ANSIBLE_DIR
569 if not os.path.isdir(directory):
570 raise OSError("Not a directory, %s" % directory)
571 timeout = self.get_timeout(timeout, self.default_timeout)
574 self.prefix = self.OUTFILE_PREFIX_TEMPLATE.format(self.counter)
576 playbook_ctx = self._gen_ansible_playbook_file(playbooks, directory,
578 inventory_ctx = self._gen_ansible_inventory_file(directory,
580 extra_vars_ctx = self._gen_ansible_extra_vars(extra_vars, directory,
583 with playbook_ctx as playbook_filename, \
584 inventory_ctx as inventory_filename, \
585 extra_vars_ctx as extra_vars_filename:
594 if extra_vars_filename is not None:
597 "@{}".format(extra_vars_filename),
599 cmd.append(playbook_filename)
601 log_file_main, log_file_checks = self._gen_log_names(
602 directory, prefix, playbook_filename)
610 LOG.debug('log file checks: %s', log_file_checks)
612 'env': self.ansible_env(directory, log_file_checks),
613 # TODO: add timeout support of use subprocess32 backport
614 # 'timeout': timeout / 2,
616 with Timer() as timer:
617 proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
619 output, _ = proc.communicate()
620 retcode = proc.wait()
621 LOG.debug("exit status = %s", retcode)
623 raise subprocess.CalledProcessError(retcode, cmd, output)
624 timeout -= timer.total_seconds()
626 cmd.remove("--syntax-check")
627 LOG.debug('log file main: %s', log_file_main)
629 'env': self.ansible_env(directory, log_file_main),
630 # TODO: add timeout support of use subprocess32 backport
631 # 'timeout': timeout,
633 proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, **exec_args)
634 output, _ = proc.communicate()
635 retcode = proc.wait()
636 LOG.debug("exit status = %s", retcode)
638 raise subprocess.CalledProcessError(retcode, cmd, output)