1 # Copyright 2010 United States Government as represented by the
2 # Administrator of the National Aeronautics and Space Administration.
3 # Copyright 2014 SoftLayer Technologies, Inc.
4 # Copyright 2015 Mirantis, Inc
7 # Licensed under the Apache License, Version 2.0 (the "License"); you may
8 # not use this file except in compliance with the License. You may obtain
9 # a copy of the License at
11 # http://www.apache.org/licenses/LICENSE-2.0
13 # Unless required by applicable law or agreed to in writing, software
14 # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
15 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
16 # License for the specific language governing permissions and limitations
20 System-level utilities and helper functions.
import copy
import errno
import functools
import os
import platform
import re
import subprocess
import sys
import uuid
from functools import reduce

from eventlet import sleep
# NOTE(review): this time.sleep import shadows the eventlet sleep above;
# it is deliberately kept *after* it to preserve current runtime behavior.
from time import sleep
from eventlet.green import socket

import six
from OpenSSL import crypto
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import encodeutils
from oslo_utils import excutils
from oslo_utils import netutils
from oslo_utils import strutils
from webob import exc

from escalator.common import exception
from escalator import i18n
# from providerclient.v1 import client as provider_client
# Module-level logger for this utilities module.
LOG = logging.getLogger(__name__)

# Environment variable naming an inherited listening-socket fd; consumed
# (and removed from the environment) by get_test_suite_socket() below.
ESCALATOR_TEST_SOCKET_FD_STR = 'ESCALATOR_TEST_SOCKET_FD'
def chunkreadable(iter, chunk_size=65536):
    """
    Wrap a readable iterator with a reader yielding chunks of
    a preferred size, otherwise leave iterator unchanged.

    :param iter: an iter which may also be readable
    :param chunk_size: maximum size of chunk
    :returns: a chunkiter over ``iter`` if it supports read(), else ``iter``
    """
    # NOTE: the parameter name shadows the builtin iter(); kept for
    # backward compatibility with existing callers using keywords.
    return chunkiter(iter, chunk_size) if hasattr(iter, 'read') else iter
def chunkiter(fp, chunk_size=65536):
    """
    Return an iterator to a file-like obj which yields fixed size chunks

    :param fp: a file-like object
    :param chunk_size: maximum size of chunk
    """
    while True:
        chunk = fp.read(chunk_size)
        if chunk:
            yield chunk
        else:
            # empty read signals EOF
            break
def cooperative_iter(iter):
    """
    Return an iterator which schedules after each
    iteration. This can prevent eventlet thread starvation.

    :param iter: an iterator to wrap
    """
    try:
        for chunk in iter:
            # yield control to other green threads between chunks
            sleep(0)
            yield chunk
    except Exception as err:
        with excutils.save_and_reraise_exception():
            msg = _LE("Error: cooperative_iter exception %s") % err
            LOG.error(msg)
def cooperative_read(fd):
    """
    Wrap a file descriptor's read with a partial function which schedules
    after each read. This can prevent eventlet thread starvation.

    :param fd: a file descriptor to wrap
    :returns: a callable with the same signature as ``fd.read``
    """
    def readfn(*args):
        result = fd.read(*args)
        # give other green threads a chance to run after each read
        sleep(0)
        return result
    return readfn
# Hard cap on CooperativeReader's internal read buffer, to bound memory use
# when a caller asks for an unreasonably large read in one call.
MAX_COOP_READER_BUFFER_SIZE = 134217728  # 128M seems like a sane buffer limit
class CooperativeReader(object):
    """
    An eventlet thread friendly class for reading in image data.

    When accessing data either through the iterator or the read method
    we perform a sleep to allow a co-operative yield. When there is more than
    one image being uploaded/downloaded this prevents eventlet thread
    starvation, ie allows all threads to be scheduled periodically rather than
    having the same thread be continuously active.
    """

    def __init__(self, fd):
        """
        :param fd: Underlying image file object
        """
        self.fd = fd
        self.iterator = None
        # NOTE(markwash): if the underlying supports read(), overwrite the
        # default iterator-based implementation with cooperative_read which
        # is more straightforward
        if hasattr(fd, 'read'):
            self.read = cooperative_read(fd)
        else:
            self.iterator = None
            self.buffer = ''
            self.position = 0

    def read(self, length=None):
        """Return the requested amount of bytes, fetching the next chunk of
        the underlying iterator when needed.

        This is replaced with cooperative_read in __init__ if the underlying
        fd already supports read().
        """
        if length is None:
            if len(self.buffer) - self.position > 0:
                # if no length specified but some data exists in buffer,
                # return that data and clear the buffer
                result = self.buffer[self.position:]
                self.buffer = ''
                self.position = 0
                return str(result)
            else:
                # otherwise read the next chunk from the underlying iterator
                # and return it as a whole. Reset the buffer, as subsequent
                # calls may specify the length
                try:
                    if self.iterator is None:
                        self.iterator = self.__iter__()
                    # use the next() builtin rather than the Py2-only
                    # .next() method so this also runs on Python 3
                    return next(self.iterator)
                except StopIteration:
                    return ''
                finally:
                    self.buffer = ''
                    self.position = 0
        else:
            result = bytearray()
            while len(result) < length:
                if self.position < len(self.buffer):
                    to_read = length - len(result)
                    chunk = self.buffer[self.position:self.position + to_read]
                    result.extend(chunk)

                    # This check is here to prevent potential OOM issues if
                    # this code is called with unreasonably high values of read
                    # size. Currently it is only called from the HTTP clients
                    # of Glance backend stores, which use httplib for data
                    # streaming, which has readsize hardcoded to 8K, so this
                    # check should never fire. Regardless it still worths to
                    # make the check, as the code may be reused somewhere else.
                    if len(result) >= MAX_COOP_READER_BUFFER_SIZE:
                        raise exception.LimitExceeded()
                    self.position += len(chunk)
                else:
                    try:
                        if self.iterator is None:
                            self.iterator = self.__iter__()
                        self.buffer = next(self.iterator)
                        self.position = 0
                    except StopIteration:
                        # underlying iterator exhausted; return what we have
                        self.buffer = ''
                        self.position = 0
                        break
            return str(result)

    def __iter__(self):
        # wrap the underlying iterator so iteration also yields cooperatively
        return cooperative_iter(self.fd.__iter__())
class LimitingReader(object):
    """
    Reader designed to fail when reading image data past the configured
    allowable amount.
    """

    def __init__(self, data, limit):
        """
        :param data: Underlying image data object
        :param limit: maximum number of bytes the reader should allow
        """
        self.data = data
        self.limit = limit
        # running total of bytes handed out so far
        self.bytes_read = 0

    def __iter__(self):
        for chunk in self.data:
            self.bytes_read += len(chunk)
            if self.bytes_read > self.limit:
                raise exception.ImageSizeLimitExceeded()
            else:
                yield chunk

    def read(self, i):
        result = self.data.read(i)
        self.bytes_read += len(result)
        if self.bytes_read > self.limit:
            raise exception.ImageSizeLimitExceeded()
        return result
def get_dict_meta(response):
    """Copy the key/value pairs of a response's JSON body into a plain dict.

    :param response: object exposing a ``json`` mapping attribute
    :returns: dict with the same key/value pairs
    """
    meta = {}
    for key, value in response.json.items():
        meta[key] = value
    return meta
def create_mashup_dict(image_meta):
    """
    Returns a dictionary-like mashup of the image core properties
    and the image custom properties from given image metadata.

    Nested dicts are flattened one key at a time; a nested key is only
    promoted when it does not collide with a top-level key.

    :param image_meta: metadata of image with core and custom properties
    :returns: flat dict of merged properties
    """

    def get_items():
        # dict.items() works identically on Python 2 and 3 here,
        # removing the need for six.iteritems
        for key, value in image_meta.items():
            if isinstance(value, dict):
                for subkey, subvalue in create_mashup_dict(value).items():
                    if subkey not in image_meta:
                        yield subkey, subvalue
            else:
                yield key, value

    return dict(get_items())
def safe_mkdirs(path):
    """Create ``path`` (and parents) if needed; ignore it already existing.

    :param path: directory path to create
    :raises OSError: for any failure other than the path already existing
    """
    try:
        os.makedirs(path)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise
def safe_remove(path):
    """Remove the file at ``path``; ignore it not existing.

    :param path: file path to remove
    :raises OSError: for any failure other than the file being absent
    """
    try:
        os.remove(path)
    except OSError as e:
        if e.errno != errno.ENOENT:
            raise
class PrettyTable(object):
    """Creates an ASCII art table for use in bin/escalator

    Columns are fixed-width; cell data is clipped to the column width and
    left- or right-justified per column.
    """

    def __init__(self):
        self.columns = []

    def add_column(self, width, label="", just='l'):
        """Add a column to the table

        :param width: number of characters wide the column should be
        :param label: column heading
        :param just: justification for the column, 'l' for left,
                     'r' for right
        """
        self.columns.append((width, label, just))

    def make_header(self):
        """Return the two header lines: labels and a dashed separator."""
        label_parts = []
        break_parts = []
        for width, label, _ in self.columns:
            # NOTE(sirp): headers are always left justified
            label_part = self._clip_and_justify(label, width, 'l')
            label_parts.append(label_part)

            break_part = '-' * width
            break_parts.append(break_part)

        label_line = ' '.join(label_parts)
        break_line = ' '.join(break_parts)
        return '\n'.join([label_line, break_line])

    def make_row(self, *args):
        """Format one data row; args pair positionally with the columns."""
        row = args
        row_parts = []
        for data, (width, _, just) in zip(row, self.columns):
            row_part = self._clip_and_justify(data, width, just)
            row_parts.append(row_part)

        row_line = ' '.join(row_parts)
        return row_line

    @staticmethod
    def _clip_and_justify(data, width, just):
        # clip field to column width
        clipped_data = str(data)[:width]

        if just == 'r':
            # right justify
            justified = clipped_data.rjust(width)
        else:
            # left justify
            justified = clipped_data.ljust(width)

        return justified
def get_terminal_size():
    """Return the controlling terminal's (height, width).

    Tries an OS-appropriate probe (ioctl/stty on POSIX, the console API on
    Windows) and validates the result.

    :returns: (height, width) tuple of positive ints
    :raises exception.Invalid: if no sane size could be determined
    :raises NotImplementedError: on an unrecognized OS
    """

    def _get_terminal_size_posix():
        import fcntl
        import struct
        import termios

        height_width = None

        try:
            height_width = struct.unpack(
                'hh',
                fcntl.ioctl(sys.stderr.fileno(),
                            termios.TIOCGWINSZ,
                            struct.pack('HH', 0, 0)))
        except Exception:
            pass

        if not height_width:
            try:
                # fall back to asking stty; discard its stderr noise
                with open(os.devnull, 'w') as devnull:
                    p = subprocess.Popen(['stty', 'size'],
                                         shell=False,
                                         stdout=subprocess.PIPE,
                                         stderr=devnull)
                    result = p.communicate()
                if p.returncode == 0:
                    return tuple(int(x) for x in result[0].split())
            except Exception:
                pass

        return height_width

    def _get_terminal_size_win32():
        try:
            from ctypes import create_string_buffer
            from ctypes import windll
            handle = windll.kernel32.GetStdHandle(-12)
            csbi = create_string_buffer(22)
            res = windll.kernel32.GetConsoleScreenBufferInfo(handle, csbi)
        except Exception:
            return None
        if res:
            import struct
            unpack_tmp = struct.unpack("hhhhHhhhhhh", csbi.raw)
            (bufx, bufy, curx, cury, wattr,
             left, top, right, bottom, maxx, maxy) = unpack_tmp
            height = bottom - top + 1
            width = right - left + 1
            return (height, width)
        else:
            return None

    def _get_terminal_size_unknownOS():
        raise NotImplementedError

    func = {'posix': _get_terminal_size_posix,
            'win32': _get_terminal_size_win32}

    height_width = func.get(platform.os.name, _get_terminal_size_unknownOS)()

    if height_width is None:
        raise exception.Invalid()

    for i in height_width:
        if not isinstance(i, int) or i <= 0:
            raise exception.Invalid()

    return height_width[0], height_width[1]
def mutating(func):
    """Decorator to enforce read-only logic"""
    @functools.wraps(func)
    def wrapped(self, req, *args, **kwargs):
        # reject mutating calls made with a read-only request context
        if req.context.read_only:
            msg = "Read-only access"
            LOG.debug(msg)
            raise exc.HTTPForbidden(msg, request=req,
                                    content_type="text/plain")
        return func(self, req, *args, **kwargs)
    return wrapped
def setup_remote_pydev_debug(host, port):
    """Attach this process to a remote pydev debug server.

    :param host: hostname/IP where the pydev server listens
    :param port: port of the pydev server
    :returns: True on successful attach
    :raises: re-raises any settrace/import failure after logging it
    """
    error_msg = _LE('Error setting up the debug environment. Verify that the'
                    ' option pydev_worker_debug_host is pointing to a valid '
                    'hostname or IP on which a pydev server is listening on'
                    ' the port indicated by pydev_worker_debug_port.')

    try:
        try:
            # newer pydev layouts expose pydevd inside the pydev package
            from pydev import pydevd
        except ImportError:
            import pydevd

        pydevd.settrace(host,
                        port=port,
                        stdoutToServer=True,
                        stderrToServer=True)
        return True
    except Exception:
        with excutils.save_and_reraise_exception():
            LOG.exception(error_msg)
def validate_key_cert(key_file, cert_file):
    """Check that a PEM key and certificate load and belong together.

    Loads both files, then signs and verifies a random payload with the
    configured digest algorithm.

    :param key_file: path to the PEM private key
    :param cert_file: path to the PEM certificate
    :raises RuntimeError: if either file is unreadable/invalid or the pair
                          does not match
    """
    try:
        error_key_name = "private key"
        error_filename = key_file
        with open(key_file, 'r') as keyfile:
            key_str = keyfile.read()
        key = crypto.load_privatekey(crypto.FILETYPE_PEM, key_str)

        error_key_name = "certificate"
        error_filename = cert_file
        with open(cert_file, 'r') as certfile:
            cert_str = certfile.read()
        cert = crypto.load_certificate(crypto.FILETYPE_PEM, cert_str)
    except IOError as ioe:
        raise RuntimeError(_("There is a problem with your %(error_key_name)s "
                             "%(error_filename)s. Please verify it."
                             " Error: %(ioe)s") %
                           {'error_key_name': error_key_name,
                            'error_filename': error_filename,
                            'ioe': ioe})
    except crypto.Error as ce:
        raise RuntimeError(_("There is a problem with your %(error_key_name)s "
                             "%(error_filename)s. Please verify it. OpenSSL"
                             " error: %(ce)s") %
                           {'error_key_name': error_key_name,
                            'error_filename': error_filename,
                            'ce': ce})

    try:
        data = str(uuid.uuid4())
        digest = CONF.digest_algorithm
        if digest == 'sha1':
            LOG.warn('The FIPS (FEDERAL INFORMATION PROCESSING STANDARDS)'
                     ' state that the SHA-1 is not suitable for'
                     ' general-purpose digital signature applications (as'
                     ' specified in FIPS 186-3) that require 112 bits of'
                     ' security. The default value is sha1 in Kilo for a'
                     ' smooth upgrade process, and it will be updated'
                     ' with sha256 in next release(L).')
        # round-trip sign/verify proves the key matches the certificate
        out = crypto.sign(key, data, digest)
        crypto.verify(cert, out, data, digest)
    except crypto.Error as ce:
        raise RuntimeError(_("There is a problem with your key pair. "
                             "Please verify that cert %(cert_file)s and "
                             "key %(key_file)s belong together. OpenSSL "
                             "error %(ce)s") % {'cert_file': cert_file,
                                                'key_file': key_file,
                                                'ce': ce})
def get_test_suite_socket():
    """Adopt a listening socket inherited from the test suite, if any.

    Looks for a file descriptor number in the ESCALATOR_TEST_SOCKET_FD
    environment variable, wraps it as a listening socket and removes the
    variable so the socket is adopted at most once.

    :returns: the listening socket, or None when the variable is unset
    """
    global ESCALATOR_TEST_SOCKET_FD_STR
    if ESCALATOR_TEST_SOCKET_FD_STR in os.environ:
        fd = int(os.environ[ESCALATOR_TEST_SOCKET_FD_STR])
        sock = socket.fromfd(fd, socket.AF_INET, socket.SOCK_STREAM)
        # NOTE(review): SocketType(_sock=...) is a Python 2 eventlet idiom;
        # confirm before porting to Python 3.
        sock = socket.SocketType(_sock=sock)
        sock.listen(CONF.backlog)
        del os.environ[ESCALATOR_TEST_SOCKET_FD_STR]
        return sock
    return None
def is_uuid_like(val):
    """Returns validation of a value as a UUID.

    For our purposes, a UUID is a canonical form string:
    aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa

    :param val: value to check
    :returns: True only if ``val`` is exactly a canonical UUID string
    """
    try:
        return str(uuid.UUID(val)) == val
    except (TypeError, ValueError, AttributeError):
        return False
def exception_to_str(exc):
    """Best-effort conversion of an exception to a loggable encoded string.

    Falls back through text/str conversion and finally to the exception's
    class name if the message itself cannot be decoded.

    :param exc: exception instance
    :returns: safely-encoded representation of the exception
    """
    try:
        error = six.text_type(exc)
    except UnicodeError:
        try:
            error = str(exc)
        except UnicodeError:
            error = ("Caught '%(exception)s' exception." %
                     {"exception": exc.__class__.__name__})
    return encodeutils.safe_encode(error, errors='ignore')
# Pattern matching 4-byte (non-BMP) unicode characters; used by
# no_4byte_params() to reject such input.
REGEX_4BYTE_UNICODE = re.compile(u'[\U00010000-\U0010ffff]')
# NOTE(review): two back-to-back assignments — upstream picks one depending
# on wide vs narrow unicode builds (narrow builds see non-BMP characters as
# UTF-16 surrogate pairs); confirm the intended build-check around these.
REGEX_4BYTE_UNICODE = re.compile(u'[\uD800-\uDBFF][\uDC00-\uDFFF]')
def no_4byte_params(f):
    """
    Checks that no 4 byte unicode characters are allowed
    in dicts' keys/values and string's parameters
    """
    def wrapper(*args, **kwargs):

        def _is_match(some_str):
            # NOTE(review): `unicode` is the Python 2 text type — this
            # module is Py2-era code; confirm before porting to Python 3.
            return (isinstance(some_str, unicode) and
                    REGEX_4BYTE_UNICODE.findall(some_str) != [])

        def _check_dict(data_dict):
            # a dict of dicts has to be checked recursively
            for key, value in data_dict.iteritems():
                if isinstance(value, dict):
                    _check_dict(value)
                else:
                    if _is_match(key):
                        msg = _("Property names can't contain 4 byte unicode.")
                        raise exception.Invalid(msg)
                    if _is_match(value):
                        msg = (_("%s can't contain 4 byte unicode characters.")
                               % key.title())
                        raise exception.Invalid(msg)

        for data_dict in [arg for arg in args if isinstance(arg, dict)]:
            _check_dict(data_dict)
        # now check args for str values
        for arg in args:
            if _is_match(arg):
                msg = _("Param values can't contain 4 byte unicode.")
                raise exception.Invalid(msg)
        # check kwargs as well, as params are passed as kwargs via
        # registry calls
        _check_dict(kwargs)
        return f(*args, **kwargs)
    return wrapper
def stash_conf_values():
    """
    Make a copy of some of the current global CONF's settings.
    Allows determining if any of these values have changed
    when the config is reloaded.

    :returns: dict snapshot of the watched CONF options
    """
    conf = {}
    conf['bind_host'] = CONF.bind_host
    conf['bind_port'] = CONF.bind_port
    # BUG FIX: this previously stashed CONF.cert_file under 'tcp_keepidle',
    # so changes to tcp_keepidle were never detected on config reload.
    conf['tcp_keepidle'] = CONF.tcp_keepidle
    conf['backlog'] = CONF.backlog
    conf['key_file'] = CONF.key_file
    conf['cert_file'] = CONF.cert_file

    return conf
def validate_ip_format(ip_str):
    """
    valid ip_str format = '10.43.178.9'
    invalid ip_str format : '123. 233.42.12', spaces existed in field
                            '3234.23.453.353', out of range
                            '-2.23.24.234', negative number in field
                            '1.2.3.4d', letter in field
                            '10.43.1789', invalid format

    :param ip_str: dotted-quad string to validate
    :raises exc.HTTPBadRequest: when ip_str is empty or malformed
    """
    if not ip_str:
        msg = (_("No ip given when check ip"))
        LOG.error(msg)
        raise exc.HTTPBadRequest(msg, content_type="text/plain")

    # each of the four dot-separated fields must be a pure digit string
    # (isdigit rejects signs, spaces and letters) with value 0..255
    if ip_str.count('.') == 3 and all(num.isdigit() and 0 <= int(
            num) < 256 for num in ip_str.rstrip().split('.')):
        return
    msg = (_("%s invalid ip format!") % ip_str)
    LOG.error(msg)
    raise exc.HTTPBadRequest(msg, content_type="text/plain")
def valid_cidr(cidr):
    """Validate a CIDR string such as '192.168.0.0/24'.

    :param cidr: string to validate
    :raises exc.HTTPBadRequest: on empty input, bad format, bad netmask or
                                bad network address
    """
    if not cidr:
        msg = (_("No CIDR given."))
        LOG.error(msg)
        raise exc.HTTPBadRequest(explanation=msg)

    cidr_division = cidr.split('/')
    if (len(cidr_division) != 2 or
            not cidr_division[0] or
            not cidr_division[1]):
        msg = (_("CIDR format error."))
        LOG.error(msg)
        raise exc.HTTPBadRequest(explanation=msg)

    netmask_err_msg = (_("CIDR netmask error, "
                         "it should be a integer between 0-32."))
    try:
        netmask_cidr = int(cidr_division[1])
    except ValueError:
        LOG.warn(netmask_err_msg)
        raise exc.HTTPBadRequest(explanation=netmask_err_msg)

    # BUG FIX: the bounds were combined with 'and' (always false);
    # a netmask is invalid when it is below 0 OR above 32
    if (netmask_cidr < 0 or
            netmask_cidr > 32):
        LOG.warn(netmask_err_msg)
        raise exc.HTTPBadRequest(explanation=netmask_err_msg)

    validate_ip_format(cidr_division[0])
def ip_into_int(ip):
    """
    Switch ip string to decimalism integer..

    :param ip: dotted-quad ip string, e.g. '192.168.0.1'
    :return: decimalism integer
    """
    # fold each octet into the accumulator, 8 bits at a time
    return reduce(lambda x, y: (x << 8) + y, map(int, ip.split('.')))
def int_into_ip(num):
    """Convert a decimal integer into a dotted-quad ip string.

    :param num: integer in [0, 2**32)
    :returns: dotted-quad string, e.g. '192.168.0.1'
    """
    # use floor division (//) so the arithmetic is correct on Python 3 too;
    # on Python 2 int // int is identical to int / int
    inter_ip = lambda x: '.'.join(
        [str(x // (256 ** i) % 256) for i in range(3, -1, -1)])
    return inter_ip(num)
def is_ip_in_cidr(ip, cidr):
    """Check whether an ip falls inside a CIDR network.

    :param ip: Ip will be checked, like:192.168.1.2.
    :param cidr: Ip range,like:192.168.0.0/24.
    :return: If ip in cidr, return True, else return False.
    :raises exc.HTTPBadRequest: when ip or cidr is empty
    """
    if not ip:
        msg = "Error, ip is empty"
        raise exc.HTTPBadRequest(explanation=msg)
    if not cidr:
        msg = "Error, CIDR is empty"
        raise exc.HTTPBadRequest(explanation=msg)
    network = cidr.split('/')
    # build the netmask from the prefix length, then compare network parts
    mask = ~(2 ** (32 - int(network[1])) - 1)
    return (ip_into_int(ip) & mask) == (ip_into_int(network[0]) & mask)
def is_ip_in_ranges(ip, ip_ranges):
    """Check whether an ip falls inside any of the given ranges.

    : ip: Ip will be checked, like:192.168.1.2.
    : ip_ranges : Ip ranges, like:
        [{'start':'192.168.0.10', 'end':'192.168.0.20'}
         {'start':'192.168.0.50', 'end':'192.168.0.60'}]
    :return: If ip in ip_ranges, return True, else return False.
    :raises exc.HTTPBadRequest: when ip is empty
    """
    if not ip:
        msg = "Error, ip is empty"
        raise exc.HTTPBadRequest(explanation=msg)

    # an empty ranges list means "no restriction"
    if not ip_ranges:
        return True

    for ip_range in ip_ranges:
        start_ip_int = ip_into_int(ip_range['start'])
        end_ip_int = ip_into_int(ip_range['end'])
        ip_int = ip_into_int(ip)
        if ip_int >= start_ip_int and ip_int <= end_ip_int:
            return True

    return False
def merge_ip_ranges(ip_ranges):
    """Sort ranges by start ip and merge adjacent (contiguous) ones.

    :param ip_ranges: list of {'start': ..., 'end': ...} dicts
    :returns: new list with contiguous ranges collapsed
    """
    if not ip_ranges:
        return []

    sort_ranges_by_start_ip = {}
    for ip_range in ip_ranges:
        start_ip_int = ip_into_int(ip_range['start'])
        sort_ranges_by_start_ip.update({str(start_ip_int): ip_range})
    # BUG FIX: the keys are stringified integers; sorting them as plain
    # strings orders '100' before '20'. Sort numerically via key=int.
    sort_ranges = [sort_ranges_by_start_ip[key] for key in
                   sorted(sort_ranges_by_start_ip.keys(), key=int)]
    last_range_end_ip = None

    merged_ip_ranges = []
    for ip_range in sort_ranges:
        if last_range_end_ip is None:
            last_range_end_ip = ip_range['end']
            merged_ip_ranges.append(ip_range)
            continue
        last_range_end_ip_int = ip_into_int(last_range_end_ip)
        ip_range_start_ip_int = ip_into_int(ip_range['start'])
        if (last_range_end_ip_int + 1) == ip_range_start_ip_int:
            # contiguous with the previous range: extend it in place
            merged_ip_ranges[-1]['end'] = ip_range['end']
        else:
            merged_ip_ranges.append(ip_range)
        last_range_end_ip = ip_range['end']
    return merged_ip_ranges
745 def _split_ip_ranges(ip_ranges):
746 ip_ranges_start = set()
747 ip_ranges_end = set()
749 return (ip_ranges_start, ip_ranges_end)
751 for ip_range in ip_ranges:
752 ip_ranges_start.add(ip_range['start'])
753 ip_ranges_end.add(ip_range['end'])
755 return (ip_ranges_start, ip_ranges_end)
# [{'start':'192.168.0.10', 'end':'192.168.0.20'},
#  {'start':'192.168.0.21', 'end':'192.168.0.22'}] and
# [{'start':'192.168.0.10', 'end':'192.168.0.22'}] is equal here
def is_ip_ranges_equal(ip_ranges1, ip_ranges2):
    """Compare two ip-range lists after merging contiguous ranges.

    :returns: True when both describe the same set of addresses
    """
    if not ip_ranges1 and not ip_ranges2:
        return True
    if ((ip_ranges1 and not ip_ranges2) or
            (ip_ranges2 and not ip_ranges1)):
        return False
    # deep-copy so merge_ip_ranges' in-place 'end' updates can't leak
    # back into the caller's data
    ip_ranges_1 = copy.deepcopy(ip_ranges1)
    ip_ranges_2 = copy.deepcopy(ip_ranges2)
    merged_ip_ranges1 = merge_ip_ranges(ip_ranges_1)
    merged_ip_ranges2 = merge_ip_ranges(ip_ranges_2)
    ip_ranges1_start, ip_ranges1_end = _split_ip_ranges(merged_ip_ranges1)
    ip_ranges2_start, ip_ranges2_end = _split_ip_ranges(merged_ip_ranges2)
    if (ip_ranges1_start == ip_ranges2_start and
            ip_ranges1_end == ip_ranges2_end):
        return True
    else:
        return False
def get_dvs_interfaces(host_interfaces):
    """Return the interfaces whose vswitch_type is 'dvs'.

    :param host_interfaces: list of interface dicts, or a string
                            representation of such a list
    :returns: list of matching interface dicts
    """
    dvs_interfaces = []
    if not isinstance(host_interfaces, list):
        # SECURITY NOTE(review): eval() on a string is dangerous if the
        # input can be untrusted; consider ast.literal_eval instead.
        host_interfaces = eval(host_interfaces)
    for interface in host_interfaces:
        if not isinstance(interface, dict):
            interface = eval(interface)
        if ('vswitch_type' in interface and
                interface['vswitch_type'] == 'dvs'):
            dvs_interfaces.append(interface)

    return dvs_interfaces
def get_clc_pci_info(pci_info):
    """Extract the PCI addresses of Coleto Creek (CLC) devices.

    :param pci_info: iterable of lspci-style lines, address first
    :returns: list of PCI addresses (first whitespace-separated token)
    """
    clc_pci = []
    flag1 = 'Intel Corporation Coleto Creek PCIe Endpoint'
    # NOTE(review): flag2 reconstructed as the CLC vendor:device id;
    # confirm against the original source.
    flag2 = '8086:0435'
    for pci in pci_info:
        if flag1 in pci or flag2 in pci:
            clc_pci.append(pci.split()[0])
    return clc_pci
def cpu_str_to_list(spec):
    """Parse a CPU set specification.

    :param spec: cpu set string eg "1-4,^3,6"

    Each element in the list is either a single
    CPU number, a range of CPU numbers, or a
    caret followed by a CPU number to be excluded
    from a previous range.

    :returns: a sorted list of included CPU indexes
    :raises exception.Invalid: on a malformed element
    """
    cpusets = []
    if not spec:
        return cpusets

    cpuset_ids = set()
    cpuset_reject_ids = set()
    for rule in spec.split(','):
        rule = rule.strip()
        # skip empty elements produced by stray commas
        if len(rule) < 1:
            continue
        # Note the count limit in the .split() call
        range_parts = rule.split('-', 1)
        if len(range_parts) > 1:
            # So, this was a range; start by converting the parts to ints
            try:
                start, end = [int(p.strip()) for p in range_parts]
            except ValueError:
                raise exception.Invalid(_("Invalid range expression %r")
                                        % rule)
            # Make sure it's a valid range
            if start > end:
                raise exception.Invalid(_("Invalid range expression %r")
                                        % rule)
            # Add available CPU ids to set
            cpuset_ids |= set(range(start, end + 1))
        elif rule[0] == '^':
            # Not a range, the rule is an exclusion rule; convert to int
            try:
                cpuset_reject_ids.add(int(rule[1:].strip()))
            except ValueError:
                raise exception.Invalid(_("Invalid exclusion "
                                          "expression %r") % rule)
        else:
            # OK, a single CPU to include; convert to int
            try:
                cpuset_ids.add(int(rule))
            except ValueError:
                raise exception.Invalid(_("Invalid inclusion "
                                          "expression %r") % rule)

    # Use sets to handle the exclusion rules for us
    cpuset_ids -= cpuset_reject_ids
    cpusets = list(cpuset_ids)
    cpusets.sort()
    return cpusets
def cpu_list_to_str(cpu_list):
    """Parse a CPU list to string.

    :param cpu_list: eg "[1,2,3,4,6,7]"

    :returns: a string of CPU ranges, eg 1-4,6,7

    NOTE: sorts ``cpu_list`` in place.
    """
    spec = ''
    if not cpu_list:
        return spec

    cpu_list.sort()
    count = 0
    group_cpus = []
    tmp_cpus = []
    for cpu in cpu_list:
        if count == 0:
            init = cpu
            tmp_cpus.append(cpu)
        else:
            if cpu == (init + count):
                # still consecutive with the group started at `init`
                tmp_cpus.append(cpu)
            else:
                # gap found: close the current group and start a new one
                group_cpus.append(tmp_cpus)
                tmp_cpus = [cpu]
                init = cpu
                count = 0
        count += 1

    if tmp_cpus:
        group_cpus.append(tmp_cpus)

    for group in group_cpus:
        if len(group) > 2:
            # long consecutive runs render as "first-last"
            group_spec = ("%s-%s" % (group[0], group[0] + len(group) - 1))
        else:
            group_str = [str(num) for num in group]
            group_spec = ','.join(group_str)
        if spec:
            spec += ',' + group_spec
        else:
            spec = group_spec

    return spec
def simple_subprocess_call(cmd):
    """Run a shell command, discarding its output.

    :param cmd: shell command string (executed with shell=True — callers
                must not pass untrusted input)
    :returns: the command's exit code

    NOTE(review): stdout/stderr are PIPEd but never read; commands with
    very large output could block on a full pipe buffer.
    """
    return_code = subprocess.call(cmd,
                                  shell=True,
                                  stdout=subprocess.PIPE,
                                  stderr=subprocess.PIPE)
    return return_code
def translate_quotation_marks_for_shell(orig_str):
    """Escape double quotes so the string can sit inside a shell "..." pair.

    :param orig_str: string possibly containing double quotes
    :returns: string with every '"' replaced by '\\"'
    """
    quotation_marks = '"'
    quotation_marks_count = orig_str.count(quotation_marks)
    if quotation_marks_count > 0:
        replace_marks = '\\"'
        translated_str = orig_str.replace(quotation_marks, replace_marks)
    else:
        translated_str = orig_str
    return translated_str
def translate_marks_4_sed_command(ori_str):
    """Backslash-escape characters that are special inside a sed expression.

    :param ori_str: raw string to embed in a sed command
    :returns: string with '/', '.' and '"' escaped

    NOTE(review): the escape table was reconstructed (the original mapping
    lines are missing); confirm the exact set of escaped characters.
    """
    translated_str = ori_str
    translated_marks = {
        '/': '\\/',
        '.': '\\.',
        '"': '\\"'}
    for translated_mark in translated_marks:
        if translated_str.count(translated_mark):
            translated_str = translated_str.\
                replace(translated_mark, translated_marks[translated_mark])
    return translated_str