1 # Copyright 2013: Mirantis Inc.
4 # Licensed under the Apache License, Version 2.0 (the "License"); you may
5 # not use this file except in compliance with the License. You may obtain
6 # a copy of the License at
8 # http://www.apache.org/licenses/LICENSE-2.0
10 # Unless required by applicable law or agreed to in writing, software
11 # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
12 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13 # License for the specific language governing permissions and limitations
17 from contextlib import closing
36 from flask import jsonify
37 from six.moves import configparser
38 from oslo_serialization import jsonutils
39 from oslo_utils import encodeutils
42 from yardstick.common import exceptions
# Module-level logger; DEBUG is forced so callers get verbose output from
# these helpers regardless of the root logger's configured level.
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
# Decorator for cli-args
def cliargs(*args, **kwargs):
    # Prepend the (args, kwargs) pair onto the decorated function's
    # 'arguments' attribute so a CLI layer can build argparse options later.
    # NOTE(review): the inner wrapper ('def deco(func):') and return
    # statements are not visible in this view -- confirm against upstream.
    func.__dict__.setdefault('arguments', []).insert(0, (args, kwargs))
def itersubclasses(cls, _seen=None):
    """Generator over all subclasses of a given class in depth first order."""
    # Only new-style classes expose the __subclasses__ machinery.
    if not isinstance(cls, type):
        raise TypeError("itersubclasses must be called with "
                        "new-style classes, not %.100r" % cls)
    # _seen guards against yielding the same subclass twice.
    _seen = _seen or set()
    # NOTE(review): a 'try:' line appears to be elided before this call.
    subs = cls.__subclasses__()
    except TypeError:  # fails only when cls is type
        subs = cls.__subclasses__(cls)
    # NOTE(review): the outer loop over 'subs' and the 'yield' statements are
    # elided in this view; only the recursive descent line is visible.
    for sub in itersubclasses(sub, _seen):
def import_modules_from_package(package, raise_exception=False):
    """Import modules given a package name

    :param: package - Full package name. For example: rally.deploy.engines
    """
    # NOTE(review): several scaffolding lines (the 'module_names = set('
    # opener, 'try:', and the raise-on-error branch) are elided in this view.
    # Resolve the repository root from the installed yardstick package.
    yardstick_root = os.path.dirname(os.path.dirname(yardstick.__file__))
    path = os.path.join(yardstick_root, *package.split('.'))
    for root, _, files in os.walk(path):
        # Candidate modules: .py files that are not dunder files.
        matches = (filename for filename in files if filename.endswith('.py')
                   and not filename.startswith('__'))
        new_package = os.path.relpath(root, yardstick_root).replace(os.sep,
            '{}.{}'.format(new_package, filename.rsplit('.py', 1)[0])
            for filename in matches)
        # Find modules which haven't already been imported
        missing_modules = module_names.difference(sys.modules)
        logger.debug('Importing modules: %s', missing_modules)
        for module_name in missing_modules:
            importlib.import_module(module_name)
            except (ImportError, SyntaxError) as exc:
                logger.exception('Unable to import module %s', module_name)
# Sentinel distinguishing "no default supplied" from an explicit default of
# None, so callers may legitimately pass default=None.
NON_NONE_DEFAULT = object()
def get_key_with_default(data, key, default=NON_NONE_DEFAULT):
    # Look *key* up in *data*, falling back to *default* when provided.
    value = data.get(key, default)
    # NOTE(review): this branch's body (presumably raising KeyError) and the
    # final 'return value' are elided in this view -- confirm upstream.
    if value is NON_NONE_DEFAULT:
def make_dict_from_map(data, key_map):
    """Build a dict by pulling mapped keys (with defaults) out of *data*.

    :param data: source mapping
    :param key_map: {dest_key: (src_key, default)} description of the result
    :return: {dest_key: value} for each entry of key_map
    """
    result = {}
    for dest_key, (src_key, default) in key_map.items():
        result[dest_key] = get_key_with_default(data, src_key, default)
    return result
        # NOTE(review): this line belongs to a directory-creation helper
        # whose 'def' line is not visible here; EEXIST is tolerated
        # (mkdir -p semantics) while other errors presumably re-raise.
        if e.errno != errno.EEXIST:
def remove_file(path):
    # Remove *path*, tolerating a missing file (ENOENT).
    # NOTE(review): the os.remove() call, try/except scaffolding and the
    # 'raise' for other errors are elided in this view.
        if e.errno != errno.ENOENT:
def execute_command(cmd, **kwargs):
    """Run *cmd* and return its decoded output split into lines.

    :param cmd: (str) command line; split with str.split(), so arguments
        containing embedded whitespace are not supported
    :param kwargs: passed through to subprocess.check_output()
    :return: list of output lines (str), split on os.linesep
    :raises subprocess.CalledProcessError: if the command exits non-zero
    """
    # Fix: use lazy %-style logging args instead of eagerly formatting the
    # message into a temporary before the logger level is even checked.
    logger.debug("Executing command: '%s'", cmd)
    output = subprocess.check_output(cmd.split(), **kwargs)
    return encodeutils.safe_decode(output, incoming='utf-8').split(os.linesep)
def source_env(env_file):
    """Source a shell env file and merge the resulting variables into os.environ."""
    # NOTE(review): the Popen call's closing arguments (presumably
    # shell=True) are elided in this view.
    p = subprocess.Popen(". %s; env" % env_file, stdout=subprocess.PIPE,
    output = p.communicate()[0]
    # communicate() may return bytes, which lacks the text-splitting
    # semantics needed below, so decode to text first.
    if isinstance(output, six.binary_type):
        output = encodeutils.safe_decode(output)
    # Keep only NAME=VALUE lines; split on the first '=' only.
    env = dict(line.split('=', 1) for line in output.splitlines() if '=' in line)
    os.environ.update(env)
def read_json_from_file(path):
    """Read the file at *path* and parse its contents as JSON."""
    with open(path, 'r') as f:
        # NOTE(review): the line reading the file into 'j' is elided here.
        # don't use jsonutils.load() it conflicts with already decoded input
        return jsonutils.loads(j)
def write_json_to_file(path, data, mode='w'):
    """Serialize *data* as JSON into the file at *path* (opened with *mode*)."""
    with open(path, mode) as json_file:
        jsonutils.dump(data, json_file)
def write_file(path, data, mode='w'):
    """Write *data* to the file at *path* (opened with *mode*)."""
    with open(path, mode) as f:
        # NOTE(review): the body (presumably f.write(data)) is elided here.
def parse_ini_file(path):
    """Parse an INI file into {'DEFAULT': {...}, section: {...}, ...}."""
    parser = configparser.ConfigParser()
    # NOTE(review): 'try:' scaffolding, the file-not-found check and parts of
    # the DEFAULT/no-section handling are elided in this view.
        files = parser.read(path)
    except configparser.MissingSectionHeaderError:
        logger.exception('invalid file type')
        # Raised when parser.read() found nothing at *path*.
        raise RuntimeError('file not exist')
        default = {k: v for k, v in parser.items('DEFAULT')}
    except configparser.NoSectionError:
    # Merge the DEFAULT section with every named section's key/value pairs.
    config = dict(DEFAULT=default,
                  **{s: {k: v for k, v in parser.items(
                      s)} for s in parser.sections()})
def get_port_mac(sshclient, port):
    """Return the MAC address ifconfig reports for *port* on the remote host."""
    command = "ifconfig |grep HWaddr |grep %s |awk '{print $5}' " % port
    _, output, _ = sshclient.execute(command, raise_on_error=True)
    return output.rstrip()
def get_port_ip(sshclient, port):
    """Return the IPv4 address ifconfig reports for *port* on the remote host."""
    command = ("ifconfig %s |grep 'inet addr' |awk '{print $2}' "
               "|cut -d ':' -f2 " % port)
    _, output, _ = sshclient.execute(command, raise_on_error=True)
    return output.rstrip()
def flatten_dict_key(data):
    """Flatten one nesting level per pass, joining keys as 'parent.child'."""
    # NOTE(review): the 'next_data = {}' initialisation, the base-case
    # return and the plain-value else-branch are elided in this view.
    # use list, because iterable is too generic
    if not any(isinstance(v, (collections.Mapping, list))
               for v in data.values()):
    for k, v in data.items():
        if isinstance(v, collections.Mapping):
            # Nested mapping: prefix child keys with the parent key.
            for n_k, n_v in v.items():
                next_data["%s.%s" % (k, n_k)] = n_v
        # use list because iterable is too generic
        elif isinstance(v, collections.Iterable) and not isinstance(v, six.string_types):
            # Non-string iterable: index-suffix each element.
            for index, item in enumerate(v):
                next_data["%s%d" % (k, index)] = item
    # Recurse until no nested mappings or lists remain.
    return flatten_dict_key(next_data)
def translate_to_str(obj):
    """Recursively convert mapping keys/values and list items to str."""
    if isinstance(obj, collections.Mapping):
        return {str(k): translate_to_str(v) for k, v in obj.items()}
    elif isinstance(obj, list):
        return [translate_to_str(ele) for ele in obj]
    # NOTE(review): this branch's body and the final fallback return are
    # elided in this view -- confirm upstream.
    elif isinstance(obj, six.text_type):
def result_handler(status, data):
    # Build a Flask JSON response from *status* and *data*.
    # NOTE(review): the construction of 'result' (presumably a dict holding
    # status and data) is elided in this view -- confirm upstream.
    return jsonify(result)
def change_obj_to_dict(obj):
    # Copy the instance attributes of *obj* into a plain dict.
    # NOTE(review): the dict initialisation, loop body and return are elided
    # in this view -- confirm upstream.
    for k, v in vars(obj).items():
def set_dict_value(dic, keys, value):
    """Set a nested value in *dic* addressed by a dotted *keys* path."""
    # NOTE(review): the initial cursor assignment (presumably
    # 'return_dic = dic'), the 'else:' branch and the final return are
    # elided in this view.
    for key in keys.split('.'):
        # Ensure each intermediate level exists before descending.
        return_dic.setdefault(key, {})
        # Last path component: assign the value; otherwise keep descending.
        if key == keys.split('.')[-1]:
            return_dic[key] = value
        return_dic = return_dic[key]
def get_free_port(ip):
    """Probe random ports (5000-10000) on *ip* until one is not in use."""
    with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s:
        port = random.randint(5000, 10000)
        # connect_ex() == 0 means something is already listening; retry.
        while s.connect_ex((ip, port)) == 0:
            port = random.randint(5000, 10000)
        # NOTE(review): the 'return port' statement is elided in this view.
def mac_address_to_hex_list(mac):
    # Convert 'aa:bb:cc:dd:ee:ff' into ['0xaa', ..., '0xff'].
    # NOTE(review): the try/except ValueError wrapper and the final
    # 'return octets' are elided in this view.
    octets = ["0x{:02x}".format(int(elem, 16)) for elem in mac.split(':')]
    raise exceptions.InvalidMacAddress(mac_address=mac)
    # NOTE(review): 'all(len(octet) != 4 ...)' only rejects the MAC when
    # EVERY octet is malformed; 'any(...)' looks like the intended check
    # (each valid formatted octet is exactly 4 chars, e.g. '0xaa') -- verify.
    if len(octets) != 6 or all(len(octet) != 4 for octet in octets):
        raise exceptions.InvalidMacAddress(mac_address=mac)
def make_ipv4_address(ip_addr):
    """Build an ``ipaddress.IPv4Address`` from *ip_addr* (str or compatible)."""
    text = six.text_type(ip_addr)
    return ipaddress.IPv4Address(text)
def safe_ip_address(ip_addr):
    """Build an ip_address object (v4 or v6); log an error for bad input."""
    # NOTE(review): the try/except ValueError wrapper around this return is
    # elided in this view; the error log below is presumably the except body.
    return ipaddress.ip_address(six.text_type(ip_addr))
    logging.error("%s is not valid", ip_addr)
def get_ip_version(ip_addr):
    """ get ip address version v6 or v4 """
    # NOTE(review): the try/except ValueError scaffolding is elided in this
    # view; the error log below is presumably the except body.
    address = ipaddress.ip_address(six.text_type(ip_addr))
    logging.error("%s is not valid", ip_addr)
    # 4 for IPv4 addresses, 6 for IPv6.
    return address.version
def make_ip_addr(ip, mask):
    # NOTE(review): the docstring delimiters and try scaffolding are elided
    # in this view; the lines below are docstring text.
    :param ip[str]: ip address
    :param mask[str]: /24 prefix of 255.255.255.0 netmask
    :return: IPv4Interface object
    return ipaddress.ip_interface(six.text_type('/'.join([ip, mask])))
    except (TypeError, ValueError):
        # None so we can skip later
def ip_to_hex(ip_addr, separator=''):
    """Convert an IPv4 address to hex, optionally separator-joined per octet."""
    # NOTE(review): try/except scaffolding, the invalid-input fallback and
    # the 'if separator == ...' guard lines are elided in this view.
    address = ipaddress.ip_address(six.text_type(ip_addr))
    logging.error("%s is not valid", ip_addr)
    if address.version != 4:
        # Whole address as one zero-padded 32-bit hex value.
        return '{:08x}'.format(int(address))
    # Per-octet hex values joined with *separator*.
    return separator.join('{:02x}'.format(octet) for octet in address.packed)
def get_mask_from_ip_range(ip_low, ip_high):
    """Return the prefix length of the smallest network covering the range.

    :param ip_low: lowest address of the range (str or int)
    :param ip_high: highest address of the range (str or int)
    :return: (int) prefix length, e.g. 24 for 192.168.1.0-192.168.1.255
    """
    low = ipaddress.ip_address(ip_low)
    high = ipaddress.ip_address(ip_high)
    # Bits that differ between the endpoints cannot be part of the prefix.
    differing_bits = (int(high) ^ int(low)).bit_length()
    return high.max_prefixlen - differing_bits
def try_int(s, *args):
    """Convert to integer if possible."""
    # NOTE(review): the 'try: return int(s)' lines are elided in this view.
    except (TypeError, ValueError):
        # On failure: return the first extra positional arg if supplied,
        # otherwise echo the input back unchanged.
        return args[0] if args else s
class SocketTopology(dict):
    # dict subclass mapping socket id -> core id -> processor id, built from
    # /proc/cpuinfo text.
    # NOTE(review): several scaffolding lines (a decorator above
    # parse_cpuinfo, loop setup, and the 'def' headers of the methods that
    # own the two bare 'return sorted(...)' lines) are elided in this view.

    def parse_cpuinfo(cls, cpuinfo):
        # Parse the raw cpuinfo text into a nested socket/core/processor map.
        lines = cpuinfo.splitlines()
        name, value = line.split(":", 1)
        core_lines[name.strip()] = try_int(value.strip())
        core_details.append(core_lines)
        # Index every logical processor under its physical socket and core.
        for core in core_details:
            socket_map.setdefault(core["physical id"], {}).setdefault(
                core["core id"], {})[core["processor"]] = (
                core["processor"], core["core id"], core["physical id"])
        return cls(socket_map)

        # Presumably the body of a sockets() accessor -- confirm.
        return sorted(self.keys())
        # Presumably the body of a cores() accessor -- confirm.
        return sorted(core for cores in self.values() for core in cores)

    def processors(self):
        # NOTE(review): the 'return sorted(' opener and the tail of this
        # generator expression are elided in this view.
            proc for cores in self.values() for procs in cores.values() for
def config_to_dict(config):
    # Convert a ConfigParser object into {section: {key: value}}.
    # NOTE(review): the tail of this comprehension (presumably
    # 'config.sections()}') is elided in this view.
    return {section: dict(config.items(section)) for section in
def validate_non_string_sequence(value, default=None, raise_exc=None):
    # NOTE(ralonsoh): refactor this function to check if raise_exc is an
    # Exception. Remove duplicate code, this function is duplicated in this
    # NOTE(review): the success return ('return value'), the 'if raise_exc:'
    # guard and the default fallback are elided in this view.
    if isinstance(value, collections.Sequence) and not isinstance(value, six.string_types):
        raise raise_exc  # pylint: disable=raising-bad-type
def join_non_strings(separator, *non_strings):
    """Join arbitrary values with *separator*, coercing each to str."""
    # NOTE(review): the 'try:' opener and the except body (presumably
    # 'pass') are elided in this view.
    # Single sequence argument: unpack it so its elements are joined.
    non_strings = validate_non_string_sequence(non_strings[0], raise_exc=RuntimeError)
    except (IndexError, RuntimeError):
    return str(separator).join(str(non_string) for non_string in non_strings)
def safe_decode_utf8(s):
    """Safely decode *s* from UTF-8 when it is a Python 3 bytes object.

    :param s: value to decode; bytes are decoded, anything else is returned
        unchanged
    :return: decoded str for py3 bytes input, otherwise *s* itself
    """
    if six.PY3 and isinstance(s, bytes):
        # 'surrogateescape' round-trips undecodable bytes instead of raising.
        return s.decode('utf-8', 'surrogateescape')
    # Fix: return non-bytes input unchanged instead of falling through and
    # implicitly returning None.
    return s
class ErrorClass(object):
    # NOTE(review): looks like an error-placeholder object; the raise for a
    # missing 'test' kwarg and the __getattr__ body are elided in this view
    # -- confirm against upstream.

    def __init__(self, *args, **kwargs):
        if 'test' not in kwargs:

    def __getattr__(self, item):
440 def __init__(self, timeout=None, raise_exception=True):
441 super(Timer, self).__init__()
442 self.start = self.delta = None
443 self._timeout = int(timeout) if timeout else None
444 self._timeout_flag = False
445 self._raise_exception = raise_exception
447 def _timeout_handler(self, *args):
448 self._timeout_flag = True
449 if self._raise_exception:
450 raise exceptions.TimerTimeout(timeout=self._timeout)
        # NOTE(review): these lines belong to Timer.__enter__ (its 'def'
        # line, the timeout guard and 'return self' are not visible here):
        # record the start time and arm SIGALRM with the configured timeout.
        self.start = datetime.datetime.now()
        signal.signal(signal.SIGALRM, self._timeout_handler)
        signal.alarm(self._timeout)
    def __exit__(self, *_):
        # Record the elapsed time on exit.
        # NOTE(review): a signal.alarm(0) cancellation appears to be elided
        # before this line -- confirm against upstream.
        self.delta = datetime.datetime.now() - self.start
465 def __getattr__(self, item):
466 return getattr(self.delta, item)
        # NOTE(review): these lines appear to belong to Timer.__iter__ (its
        # 'def' line is not visible): disable the exception so iteration
        # ends quietly, then re-enter the context to start timing.
        self._raise_exception = False
        return self.__enter__()
472 def next(self): # pragma: no cover
473 # NOTE(ralonsoh): Python 2 support.
474 if not self._timeout_flag:
475 return datetime.datetime.now()
476 raise StopIteration()
    def __next__(self):  # pragma: no cover
        # NOTE(ralonsoh): Python 3 support.
        # NOTE(review): the body (presumably delegating to self.next()) is
        # elided in this view.

    def __del__(self):  # pragma: no cover
        # NOTE(review): the body (presumably cancelling the pending alarm)
        # is elided in this view -- confirm against upstream.
485 def delta_time_sec(self):
486 return (datetime.datetime.now() - self.start).total_seconds()
def read_meminfo(ssh_client):
    """Read "/proc/meminfo" file and parse all keys and values"""
    # NOTE(review): the initialisation of 'output' and the final return are
    # elided in this view.
    cpuinfo = six.BytesIO()
    ssh_client.get_file_obj('/proc/meminfo', cpuinfo)
    lines = cpuinfo.getvalue().decode('utf-8')
    # Lines look like 'MemTotal:  16384 kB'; the optional unit suffix
    # (third group) is captured but not stored.
    matches = re.findall(r"([\w\(\)]+):\s+(\d+)( kB)*", lines)
    for match in matches:
        output[match[0]] = match[1]
def setup_hugepages(ssh_client, size_kb):
    """Setup needed number of hugepages for the size specified"""
    NR_HUGEPAGES_PATH = '/proc/sys/vm/nr_hugepages'
    meminfo = read_meminfo(ssh_client)
    hp_size_kb = int(meminfo['Hugepagesize'])
    # Round up so the claimed pages cover at least size_kb.
    hp_number = int(math.ceil(size_kb / float(hp_size_kb)))
    # NOTE(review): the ssh_client.execute(...) opener for the command below
    # and the BytesIO buffer ('hp') initialisation are elided in this view.
        'echo %s | sudo tee %s' % (hp_number, NR_HUGEPAGES_PATH))
    ssh_client.get_file_obj(NR_HUGEPAGES_PATH, hp)
    # Read back what the kernel actually granted.
    hp_number_set = int(hp.getvalue().decode('utf-8').splitlines()[0])
    logger.info('Hugepages size (kB): %s, number claimed: %s, number set: %s',
                hp_size_kb, hp_number, hp_number_set)
    return hp_size_kb, hp_number, hp_number_set
def find_relative_file(path, task_path):
    # NOTE(review): the docstring delimiters and the try/open/return lines
    # inside the loop are elided in this view; the next two lines are
    # docstring text.
    Find file in one of places: in abs of path or relative to a directory path,
    :return str: full path to file
    # fixme: create schema to validate all fields have been provided
    # Try the absolute path first, then relative to the task directory.
    for lookup in [os.path.abspath(path), os.path.join(task_path, path)]:
    raise IOError(errno.ENOENT, 'Unable to find {} file'.format(path))
def open_relative_file(path, task_path):
    # Open *path*; when it does not exist (ENOENT), fall back to the same
    # name relative to *task_path*.
    # NOTE(review): the initial 'try: return open(path)' and the
    # 'except IOError as e:' lines are elided in this view.
        if e.errno == errno.ENOENT:
            return open(os.path.join(task_path, path))
def wait_until_true(predicate, timeout=60, sleep=1, exception=None):
    """Wait until callable predicate is evaluated as True

    When in a thread different from the main one, Timer(timeout) will fail
    because signal is not handled. In this case

    :param predicate: (func) callable deciding whether waiting should continue
    :param timeout: (int) timeout in seconds how long should function wait
    :param sleep: (int) polling interval for results in seconds
    :param exception: exception instance to raise on timeout. If None is passed
        (default) then WaitTimeout exception is raised.
    """
    # NOTE(review): several body lines ('try:', the time.sleep(sleep) calls
    # and the success returns) are elided in this view.
    if isinstance(threading.current_thread(), threading._MainThread):
        # Main thread: the SIGALRM-based Timer can enforce the timeout.
        with Timer(timeout=timeout):
            while not predicate():
        except exceptions.TimerTimeout:
            if exception and issubclass(exception, Exception):
                raise exception  # pylint: disable=raising-bad-type
            raise exceptions.WaitTimeout
    # Non-main thread: signals are unavailable, so poll the wall clock.
    with Timer() as timer:
        while timer.delta_time_sec() < timeout:
        if exception and issubclass(exception, Exception):
            raise exception  # pylint: disable=raising-bad-type
        raise exceptions.WaitTimeout
def send_socket_command(host, port, command):
    """Send a string command to a specific port in a host

    :param host: (str) ip or hostname of the host
    :param port: (int) port number
    :param command: (str) command to send
    :return: 0 if success, error number if error
    """
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # NOTE(review): several lines are elided here (try/finally with
    # sock.close(), the connect-error check and the final return).
    err_number = sock.connect_ex((host, int(port)))
    sock.sendall(six.b(command))
    except Exception:  # pylint: disable=broad-except
def safe_cast(value, type_to_convert, default_value):
    """Convert value to type, in case of error return default_value

    :param value: value to convert
    :param type_to_convert: type to convert, could be "type" or "string"
    :param default_value: default value to return
    :return: converted value or default_value
    """
    # Accept either a real type object or a dotted type name resolvable by
    # pydoc.locate().
    if isinstance(type_to_convert, type):
        _type = type_to_convert
    # NOTE(review): the 'else:' / 'if not _type:' guards and the conversion
    # (returning default_value on failure) are elided here and the function
    # continues past the end of this chunk.
        _type = pydoc.locate(type_to_convert)
            raise exceptions.InvalidType(type_to_convert=type_to_convert)