1 # Copyright 2010 United States Government as represented by the
2 # Administrator of the National Aeronautics and Space Administration.
3 # Copyright 2014 SoftLayer Technologies, Inc.
4 # Copyright 2015 Mirantis, Inc
7 # Licensed under the Apache License, Version 2.0 (the "License"); you may
8 # not use this file except in compliance with the License. You may obtain
9 # a copy of the License at
11 # http://www.apache.org/licenses/LICENSE-2.0
13 # Unless required by applicable law or agreed to in writing, software
14 # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
15 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
16 # License for the specific language governing permissions and limitations
20 System-level utilities and helper functions.
24 from functools import reduce
27 from eventlet import sleep
29 from time import sleep
30 from eventlet.green import socket
41 from OpenSSL import crypto
42 from oslo_config import cfg
43 from oslo_log import log as logging
44 from oslo_utils import encodeutils
45 from oslo_utils import excutils
48 from escalator.common import exception
49 from escalator import i18n
# Module-level logger for this utils module.
LOG = logging.getLogger(__name__)

# Name of the environment variable used to hand a pre-opened listening
# socket file descriptor to the test suite (see get_test_suite_socket()).
ESCALATOR_TEST_SOCKET_FD_STR = 'ESCALATOR_TEST_SOCKET_FD'
def chunkreadable(iter, chunk_size=65536):
    """
    Wrap a readable iterator with a reader yielding chunks of
    a preferred size, otherwise leave iterator unchanged.

    :param iter: an iter which may also be readable
    :param chunk_size: maximum size of chunk
    """
    # Only objects exposing read() need the chunking wrapper; plain
    # iterators are already chunked by their own iteration protocol.
    return chunkiter(iter, chunk_size) if hasattr(iter, 'read') else iter
def chunkiter(fp, chunk_size=65536):
    """
    Return an iterator to a file-like obj which yields fixed size chunks

    :param fp: a file-like object
    :param chunk_size: maximum size of chunk
    """
    while True:
        chunk = fp.read(chunk_size)
        if chunk:
            yield chunk
        else:
            # An empty read signals EOF on a file-like object.
            break
def cooperative_iter(iter):
    """
    Return an iterator which schedules after each
    iteration. This can prevent eventlet thread starvation.

    :param iter: an iterator to wrap
    """
    try:
        for chunk in iter:
            # sleep(0) yields control to the eventlet hub between chunks.
            sleep(0)
            yield chunk
    except Exception as err:
        with excutils.save_and_reraise_exception():
            msg = _LE("Error: cooperative_iter exception %s") % err
            LOG.error(msg)
def cooperative_read(fd):
    """
    Wrap a file descriptor's read with a partial function which schedules
    after each read. This can prevent eventlet thread starvation.

    :param fd: a file descriptor to wrap
    :returns: a read-like callable that yields to the hub after each read
    """
    def readfn(*args):
        result = fd.read(*args)
        # Yield to other green threads after every read.
        sleep(0)
        return result
    return readfn
118 MAX_COOP_READER_BUFFER_SIZE = 134217728 # 128M seems like a sane buffer limit
class CooperativeReader(object):
    """
    An eventlet thread friendly class for reading in image data.

    When accessing data either through the iterator or the read method
    we perform a sleep to allow a co-operative yield. When there is more than
    one image being uploaded/downloaded this prevents eventlet thread
    starvation, ie allows all threads to be scheduled periodically rather than
    having the same thread be continuously active.
    """

    def __init__(self, fd):
        """
        :param fd: Underlying image file object
        """
        self.fd = fd
        self.iterator = None
        # Buffering state used by read() when the underlying fd is
        # iterator-only (i.e. has no read() of its own).
        self.buffer = b''
        self.position = 0
        # NOTE(markwash): if the underlying supports read(), overwrite the
        # default iterator-based implementation with cooperative_read which
        # is more straightforward
        if hasattr(fd, 'read'):
            self.read = cooperative_read(fd)

    def read(self, length=None):
        """Return the requested amount of bytes, fetching the next chunk of
        the underlying iterator when needed.

        This is replaced with cooperative_read in __init__ if the underlying
        fd already supports read().
        """
        if length is None:
            if len(self.buffer) - self.position > 0:
                # if no length specified but some data exists in buffer,
                # return that data and clear the buffer
                result = self.buffer[self.position:]
                self.buffer = b''
                self.position = 0
                return bytes(result)
            else:
                # otherwise read the next chunk from the underlying iterator
                # and return it as a whole. Reset the buffer, as subsequent
                # calls may specify the length
                try:
                    if self.iterator is None:
                        self.iterator = self.__iter__()
                    # NOTE: use the builtin next() (works on py2.6+ and py3)
                    # instead of the py2-only .next() method.
                    return next(self.iterator)
                except StopIteration:
                    return b''
        else:
            result = bytearray()
            while len(result) < length:
                if self.position < len(self.buffer):
                    to_read = length - len(result)
                    chunk = self.buffer[self.position:self.position + to_read]
                    result.extend(chunk)

                    # This check is here to prevent potential OOM issues if
                    # this code is called with unreasonably high values of read
                    # size. Currently it is only called from the HTTP clients
                    # of Glance backend stores, which use httplib for data
                    # streaming, which has readsize hardcoded to 8K, so this
                    # check should never fire. Regardless it still worths to
                    # make the check, as the code may be reused somewhere else.
                    if len(result) >= MAX_COOP_READER_BUFFER_SIZE:
                        raise exception.LimitExceeded()
                    self.position += len(chunk)
                else:
                    try:
                        if self.iterator is None:
                            self.iterator = self.__iter__()
                        self.buffer = next(self.iterator)
                        self.position = 0
                    except StopIteration:
                        self.buffer = b''
                        self.position = 0
                        # Iterator exhausted: return whatever was gathered.
                        return bytes(result)
            return bytes(result)

    def __iter__(self):
        # Wrap the underlying iterator so each chunk yields cooperatively.
        return cooperative_iter(self.fd.__iter__())
class LimitingReader(object):
    """
    Reader designed to fail when reading image data past the configured
    limit.
    """

    def __init__(self, data, limit):
        """
        :param data: Underlying image data object
        :param limit: maximum number of bytes the reader should allow
        """
        self.data = data
        self.limit = limit
        self.bytes_read = 0

    def __iter__(self):
        for chunk in self.data:
            self.bytes_read += len(chunk)
            if self.bytes_read > self.limit:
                raise exception.ImageSizeLimitExceeded()
            else:
                yield chunk

    def read(self, i):
        result = self.data.read(i)
        self.bytes_read += len(result)
        if self.bytes_read > self.limit:
            raise exception.ImageSizeLimitExceeded()
        return result
def get_dict_meta(response):
    """Return the response's JSON body as a plain dict.

    NOTE(review): reconstructed from truncated source — assumes
    ``response.json`` is a mapping; confirm against callers.
    """
    meta = {}
    for key, value in response.json.items():
        meta[key] = value
    return meta
def create_mashup_dict(image_meta):
    """
    Returns a dictionary-like mashup of the image core properties
    and the image custom properties from given image metadata.

    :param image_meta: metadata of image with core and custom properties
    """
    def get_items():
        # NOTE: .items() instead of six.iteritems() keeps this working on
        # both py2 and py3 with identical iteration semantics.
        for key, value in image_meta.items():
            if isinstance(value, dict):
                # Flatten nested property dicts; top-level keys win on
                # collision, and the dict-valued key itself is dropped.
                for subkey, subvalue in create_mashup_dict(value).items():
                    if subkey not in image_meta:
                        yield subkey, subvalue
            else:
                yield key, value

    return dict(get_items())
def safe_mkdirs(path):
    """Create a directory tree, ignoring the error if it already exists."""
    try:
        os.makedirs(path)
    except OSError as e:
        # Only swallow "already exists"; re-raise real failures
        # (permissions, read-only fs, ...).
        if e.errno != errno.EEXIST:
            raise
def safe_remove(path):
    """Remove a file, ignoring the error if it does not exist."""
    try:
        os.remove(path)
    except OSError as e:
        # Only swallow "no such file"; re-raise real failures.
        if e.errno != errno.ENOENT:
            raise
class PrettyTable(object):
    """Creates an ASCII art table for use in bin/escalator"""

    def __init__(self):
        # List of (width, label, justification) tuples, one per column.
        self.columns = []

    def add_column(self, width, label="", just='l'):
        """Add a column to the table

        :param width: number of characters wide the column should be
        :param label: column heading
        :param just: justification for the column, 'l' for left,
                     'r' for right
        """
        self.columns.append((width, label, just))

    def make_header(self):
        """Return the heading line plus a dashed separator line."""
        label_parts = []
        break_parts = []
        for width, label, _ in self.columns:
            # NOTE(sirp): headers are always left justified
            label_part = self._clip_and_justify(label, width, 'l')
            label_parts.append(label_part)

            break_part = '-' * width
            break_parts.append(break_part)

        label_line = ' '.join(label_parts)
        break_line = ' '.join(break_parts)
        return '\n'.join([label_line, break_line])

    def make_row(self, *args):
        """Return one formatted row for the given cell values."""
        row = args
        row_parts = []
        for data, (width, _, just) in zip(row, self.columns):
            row_part = self._clip_and_justify(data, width, just)
            row_parts.append(row_part)

        row_line = ' '.join(row_parts)
        return row_line

    @staticmethod
    def _clip_and_justify(data, width, just):
        # clip field to column width
        clipped_data = str(data)[:width]

        if just == 'r':
            # right justify
            justified = clipped_data.rjust(width)
        else:
            # left justify
            justified = clipped_data.ljust(width)

        return justified
def get_terminal_size():
    """Return the terminal size as (height, width).

    Dispatches on os.name; raises exception.Invalid when the size
    cannot be determined or is not a pair of positive ints.
    """

    def _get_terminal_size_posix():
        import fcntl
        import struct
        import termios

        height_width = None

        try:
            # Primary path: TIOCGWINSZ ioctl on stderr.
            height_width = struct.unpack(
                'hh', fcntl.ioctl(sys.stderr.fileno(),
                                  termios.TIOCGWINSZ,
                                  struct.pack('HH', 0, 0)))
        except Exception:
            pass

        if not height_width:
            try:
                # Fallback: ask stty; list argv, shell not involved.
                p = subprocess.Popen(['stty', 'size'],
                                     shell=False,
                                     stdout=subprocess.PIPE,
                                     stderr=open(os.devnull, 'w'))
                result = p.communicate()
                if p.returncode == 0:
                    return tuple(int(x) for x in result[0].split())
            except Exception:
                pass

        return height_width

    def _get_terminal_size_win32():
        try:
            from ctypes import create_string_buffer
            from ctypes import windll
            handle = windll.kernel32.GetStdHandle(-12)
            csbi = create_string_buffer(22)
            res = windll.kernel32.GetConsoleScreenBufferInfo(handle, csbi)
        except Exception:
            return None
        if res:
            import struct
            unpack_tmp = struct.unpack("hhhhHhhhhhh", csbi.raw)
            (bufx, bufy, curx, cury, wattr,
             left, top, right, bottom, maxx, maxy) = unpack_tmp
            height = bottom - top + 1
            width = right - left + 1
            return (height, width)
        else:
            return None

    def _get_terminal_size_unknownOS():
        raise NotImplementedError

    func = {'posix': _get_terminal_size_posix,
            'win32': _get_terminal_size_win32}

    height_width = func.get(platform.os.name, _get_terminal_size_unknownOS)()

    if height_width is None:
        raise exception.Invalid()

    for i in height_width:
        if not isinstance(i, int) or i <= 0:
            raise exception.Invalid()

    return height_width[0], height_width[1]
416 """Decorator to enforce read-only logic"""
417 @functools.wraps(func)
418 def wrapped(self, req, *args, **kwargs):
419 if req.context.read_only:
420 msg = "Read-only access"
422 raise exc.HTTPForbidden(msg, request=req,
423 content_type="text/plain")
424 return func(self, req, *args, **kwargs)
def setup_remote_pydev_debug(host, port):
    """Attach this process to a remote pydev debug server.

    :param host: hostname/IP where the pydev server listens
    :param port: pydev server port
    :returns: True on successful attach; re-raises (after logging) on failure
    """
    error_msg = _LE('Error setting up the debug environment. Verify that the'
                    ' option pydev_worker_debug_host is pointing to a valid '
                    'hostname or IP on which a pydev server is listening on'
                    ' the port indicated by pydev_worker_debug_port.')

    try:
        try:
            # Newer pydev installs expose pydevd inside the pydev package.
            from pydev import pydevd
        except ImportError:
            import pydevd

        pydevd.settrace(host,
                        port=port,
                        stdoutToServer=True,
                        stderrToServer=True)
        return True
    except Exception:
        with excutils.save_and_reraise_exception():
            LOG.exception(error_msg)
def validate_key_cert(key_file, cert_file):
    """Verify that an SSL private key and certificate load and belong together.

    :param key_file: path to a PEM private key
    :param cert_file: path to a PEM certificate
    :raises RuntimeError: when either file is unreadable/invalid, or the
        pair fails a sign/verify round trip
    """
    try:
        error_key_name = "private key"
        error_filename = key_file
        with open(key_file, 'r') as keyfile:
            key_str = keyfile.read()
        key = crypto.load_privatekey(crypto.FILETYPE_PEM, key_str)

        error_key_name = "certificate"
        error_filename = cert_file
        with open(cert_file, 'r') as certfile:
            cert_str = certfile.read()
        cert = crypto.load_certificate(crypto.FILETYPE_PEM, cert_str)
    except IOError as ioe:
        raise RuntimeError(_("There is a problem with your %(error_key_name)s "
                             "%(error_filename)s. Please verify it."
                             " Error: %(ioe)s") %
                           {'error_key_name': error_key_name,
                            'error_filename': error_filename,
                            'ioe': ioe})
    except crypto.Error as ce:
        raise RuntimeError(_("There is a problem with your %(error_key_name)s "
                             "%(error_filename)s. Please verify it. OpenSSL"
                             " error: %(ce)s") %
                           {'error_key_name': error_key_name,
                            'error_filename': error_filename,
                            'ce': ce})

    try:
        # Prove the key and cert belong together with a sign/verify
        # round trip over a random payload.
        data = str(uuid.uuid4())
        digest = CONF.digest_algorithm
        if digest == 'sha1':
            LOG.warn('The FIPS (FEDERAL INFORMATION PROCESSING STANDARDS)'
                     ' state that the SHA-1 is not suitable for'
                     ' general-purpose digital signature applications (as'
                     ' specified in FIPS 186-3) that require 112 bits of'
                     ' security. The default value is sha1 in Kilo for a'
                     ' smooth upgrade process, and it will be updated'
                     ' with sha256 in next release(L).')
        out = crypto.sign(key, data, digest)
        crypto.verify(cert, out, data, digest)
    except crypto.Error as ce:
        raise RuntimeError(_("There is a problem with your key pair. "
                             "Please verify that cert %(cert_file)s and "
                             "key %(key_file)s belong together. OpenSSL "
                             "error %(ce)s") % {'cert_file': cert_file,
                                                'key_file': key_file,
                                                'ce': ce})
def get_test_suite_socket():
    """Return a listening socket inherited from the test suite, if any.

    The test runner passes an already-open fd via the environment; adopt
    it, start listening, and clear the variable so it is used only once.

    :returns: a listening socket, or None when not running under the tests
    """
    global ESCALATOR_TEST_SOCKET_FD_STR
    if ESCALATOR_TEST_SOCKET_FD_STR in os.environ:
        fd = int(os.environ[ESCALATOR_TEST_SOCKET_FD_STR])
        sock = socket.fromfd(fd, socket.AF_INET, socket.SOCK_STREAM)
        # Re-wrap so eventlet's green socket type owns the fd.
        sock = socket.SocketType(_sock=sock)
        sock.listen(CONF.backlog)
        del os.environ[ESCALATOR_TEST_SOCKET_FD_STR]
        return sock
    return None
def is_uuid_like(val):
    """Returns validation of a value as a UUID.

    For our purposes, a UUID is a canonical form string:
    aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa
    """
    try:
        return str(uuid.UUID(val)) == val
    except (TypeError, ValueError, AttributeError):
        # Non-string input or malformed UUID text.
        return False
def exception_to_str(exc):
    """Best-effort conversion of an exception into a safely-encoded string."""
    try:
        error = six.text_type(exc)
    except UnicodeError:
        try:
            error = str(exc)
        except UnicodeError:
            # Last resort: name the exception class instead of failing.
            error = ("Caught '%(exception)s' exception." %
                     {"exception": exc.__class__.__name__})
    return encodeutils.safe_encode(error, errors='ignore')
try:
    # Wide-unicode (UCS-4) interpreter: astral-plane chars match directly.
    REGEX_4BYTE_UNICODE = re.compile(u'[\U00010000-\U0010ffff]')
except re.error:
    # Narrow (UCS-2) py2 build: astral chars appear as surrogate pairs.
    REGEX_4BYTE_UNICODE = re.compile(u'[\uD800-\uDBFF][\uDC00-\uDFFF]')
def no_4byte_params(f):
    """
    Checks that no 4 byte unicode characters are allowed
    in dicts' keys/values and string's parameters
    """
    def wrapper(*args, **kwargs):

        def _is_match(some_str):
            # NOTE(review): `unicode` is the py2 builtin — this module is
            # py2-era; porting to py3 would use `str` here.
            return (isinstance(some_str, unicode) and
                    REGEX_4BYTE_UNICODE.findall(some_str) != [])

        def _check_dict(data_dict):
            # a dict of dicts has to be checked recursively
            for key, value in data_dict.iteritems():
                if isinstance(value, dict):
                    _check_dict(value)
                else:
                    if _is_match(key):
                        msg = _("Property names can't contain 4 byte unicode.")
                        raise exception.Invalid(msg)
                    if _is_match(value):
                        msg = (_("%s can't contain 4 byte unicode characters.")
                               % key.title())
                        raise exception.Invalid(msg)

        for data_dict in [arg for arg in args if isinstance(arg, dict)]:
            _check_dict(data_dict)
        # now check args for str values
        for arg in args:
            if _is_match(arg):
                msg = _("Param values can't contain 4 byte unicode.")
                raise exception.Invalid(msg)
        # check kwargs as well, as params are passed as kwargs via
        # registry calls
        _check_dict(kwargs)
        return f(*args, **kwargs)
    return wrapper
def stash_conf_values():
    """
    Make a copy of some of the current global CONF's settings.
    Allows determining if any of these values have changed
    when the config is reloaded.
    """
    conf = {}
    conf['bind_host'] = CONF.bind_host
    conf['bind_port'] = CONF.bind_port
    # BUG FIX: previously stashed CONF.cert_file under 'tcp_keepidle',
    # so tcp_keepidle changes were never detected on reload.
    conf['tcp_keepidle'] = CONF.tcp_keepidle
    conf['backlog'] = CONF.backlog
    conf['key_file'] = CONF.key_file
    conf['cert_file'] = CONF.cert_file

    return conf
def validate_ip_format(ip_str):
    """
    valid ip_str format = '10.43.178.9'
    invalid ip_str format : '123. 233.42.12', spaces existed in field
                            '3234.23.453.353', out of range
                            '-2.23.24.234', negative number in field
                            '1.2.3.4d', letter in field
                            '10.43.1789', invalid format
    """
    if not ip_str:
        msg = (_("No ip given when check ip"))
        LOG.warn(msg)
        raise exc.HTTPBadRequest(msg, content_type="text/plain")

    # Four dot-separated fields, all digits, each in [0, 256).
    if ip_str.count('.') == 3 and all(num.isdigit() and 0 <= int(
            num) < 256 for num in ip_str.rstrip().split('.')):
        return True
    else:
        msg = (_("%s invalid ip format!") % ip_str)
        LOG.warn(msg)
        raise exc.HTTPBadRequest(msg, content_type="text/plain")
def valid_cidr(cidr):
    """Validate a CIDR string like '192.168.0.0/24'.

    :raises exc.HTTPBadRequest: on empty input, bad format, or a netmask
        outside 0-32; the address part is checked by validate_ip_format.
    """
    if not cidr:
        msg = (_("No CIDR given."))
        LOG.warn(msg)
        raise exc.HTTPBadRequest(explanation=msg)

    cidr_division = cidr.split('/')
    if (len(cidr_division) != 2 or
            not cidr_division[0] or
            not cidr_division[1]):
        msg = (_("CIDR format error."))
        LOG.warn(msg)
        raise exc.HTTPBadRequest(explanation=msg)

    netmask_err_msg = (_("CIDR netmask error, "
                         "it should be a integer between 0-32."))
    try:
        netmask_cidr = int(cidr_division[1])
    except ValueError:
        LOG.warn(netmask_err_msg)
        raise exc.HTTPBadRequest(explanation=netmask_err_msg)

    # BUG FIX: the original condition used 'and'
    # (netmask_cidr < 0 and netmask_cidr > 32), which can never be true,
    # so out-of-range netmasks were silently accepted.
    if netmask_cidr < 0 or netmask_cidr > 32:
        LOG.warn(netmask_err_msg)
        raise exc.HTTPBadRequest(explanation=netmask_err_msg)

    validate_ip_format(cidr_division[0])
def ip_into_int(ip):
    """
    Switch ip string to decimalism integer..

    :param ip: dotted-quad string, e.g. '192.168.0.1'
    :return: decimalism integer
    """
    # Fold each octet into the accumulator: (((a<<8)+b)<<8)+c ...
    return reduce(lambda x, y: (x << 8) + y, map(int, ip.split('.')))
def int_into_ip(num):
    """Convert a 32-bit integer back into a dotted-quad ip string."""
    s = []
    for _i in range(4):
        s.append(str(num % 256))
        num = num // 256
    # Octets were collected least-significant first; reverse for display.
    return '.'.join(s[::-1])
def is_ip_in_cidr(ip, cidr):
    """Check whether an ip address falls inside a CIDR block.

    :param ip: Ip will be checked, like:192.168.1.2.
    :param cidr: Ip range,like:192.168.0.0/24.
    :return: If ip in cidr, return True, else return False.
    """
    if not ip:
        msg = "Error, ip is empty"
        raise exc.HTTPBadRequest(explanation=msg)
    if not cidr:
        msg = "Error, CIDR is empty"
        raise exc.HTTPBadRequest(explanation=msg)
    network = cidr.split('/')
    # Build the netmask: high (32 - prefix) bits... i.e. all bits except
    # the host part.
    mask = ~(2 ** (32 - int(network[1])) - 1)
    return (ip_into_int(ip) & mask) == (ip_into_int(network[0]) & mask)
def is_ip_in_ranges(ip, ip_ranges):
    """Check whether an ip falls in any of the given start/end ranges.

    :param ip: Ip will be checked, like:192.168.1.2.
    :param ip_ranges: Ip ranges, like:
        [{'start':'192.168.0.10', 'end':'192.168.0.20'}
         {'start':'192.168.0.50', 'end':'192.168.0.60'}]
    :return: If ip in ip_ranges, return True, else return False.
    """
    if not ip:
        msg = "Error, ip is empty"
        raise exc.HTTPBadRequest(explanation=msg)

    # NOTE(review): an empty range list is treated as "no restriction" —
    # reconstructed from truncated source, confirm against callers.
    if not ip_ranges:
        return True

    for ip_range in ip_ranges:
        start_ip_int = ip_into_int(ip_range['start'])
        end_ip_int = ip_into_int(ip_range['end'])
        ip_int = ip_into_int(ip)
        if ip_int >= start_ip_int and ip_int <= end_ip_int:
            return True

    return False
def merge_ip_ranges(ip_ranges):
    """Merge adjacent ip ranges into one.

    Ranges whose end and start are numerically consecutive are coalesced,
    e.g. [{'start':'1.1.1.1','end':'1.1.1.5'},
          {'start':'1.1.1.6','end':'1.1.1.9'}]
    becomes [{'start':'1.1.1.1','end':'1.1.1.9'}].

    NOTE: mutates the 'end' of range dicts when merging, matching the
    original in-place behavior.
    """
    if not ip_ranges:
        return []

    # BUG FIX: previously ranges were keyed by str(start_int) and sorted
    # lexicographically, so e.g. '9' sorted after '10' and duplicate
    # start addresses were silently dropped. Sort numerically instead.
    sort_ranges = sorted(ip_ranges, key=lambda r: ip_into_int(r['start']))

    last_range_end_ip = None
    merged_ip_ranges = []
    for ip_range in sort_ranges:
        if last_range_end_ip is None:
            last_range_end_ip = ip_range['end']
            merged_ip_ranges.append(ip_range)
            continue
        last_range_end_ip_int = ip_into_int(last_range_end_ip)
        ip_range_start_ip_int = ip_into_int(ip_range['start'])
        if (last_range_end_ip_int + 1) == ip_range_start_ip_int:
            # Consecutive: extend the previous merged range in place.
            merged_ip_ranges[-1]['end'] = ip_range['end']
        else:
            merged_ip_ranges.append(ip_range)
        last_range_end_ip = merged_ip_ranges[-1]['end']
    return merged_ip_ranges
742 def _split_ip_ranges(ip_ranges):
743 ip_ranges_start = set()
744 ip_ranges_end = set()
746 return (ip_ranges_start, ip_ranges_end)
748 for ip_range in ip_ranges:
749 ip_ranges_start.add(ip_range['start'])
750 ip_ranges_end.add(ip_range['end'])
752 return (ip_ranges_start, ip_ranges_end)
755 # [{'start':'192.168.0.10', 'end':'192.168.0.20'},
756 # {'start':'192.168.0.21', 'end':'192.168.0.22'}] and
757 # [{'start':'192.168.0.10', 'end':'192.168.0.22'}] is equal here
def is_ip_ranges_equal(ip_ranges1, ip_ranges2):
    """Compare two ip-range lists after merging adjacent ranges.

    Deep-copies inputs first because merge_ip_ranges mutates range dicts.
    """
    if not ip_ranges1 and not ip_ranges2:
        return True
    if ((ip_ranges1 and not ip_ranges2) or
            (ip_ranges2 and not ip_ranges1)):
        return False

    ip_ranges_1 = copy.deepcopy(ip_ranges1)
    ip_ranges_2 = copy.deepcopy(ip_ranges2)
    merged_ip_ranges1 = merge_ip_ranges(ip_ranges_1)
    merged_ip_ranges2 = merge_ip_ranges(ip_ranges_2)
    ip_ranges1_start, ip_ranges1_end = _split_ip_ranges(merged_ip_ranges1)
    ip_ranges2_start, ip_ranges2_end = _split_ip_ranges(merged_ip_ranges2)
    if (ip_ranges1_start == ip_ranges2_start and
            ip_ranges1_end == ip_ranges2_end):
        return True
    else:
        return False
def get_dvs_interfaces(host_interfaces):
    """Return the interfaces whose vswitch_type is 'dvs'.

    :param host_interfaces: a list of interface dicts, or the textual
        (repr) form of such a list / of individual dicts
    """
    import ast
    dvs_interfaces = []
    if not isinstance(host_interfaces, list):
        # SECURITY: was eval(); literal_eval only accepts Python literals,
        # preventing code execution on crafted input.
        host_interfaces = ast.literal_eval(host_interfaces)
    for interface in host_interfaces:
        if not isinstance(interface, dict):
            interface = ast.literal_eval(interface)
        if ('vswitch_type' in interface and
                interface['vswitch_type'] == 'dvs'):
            dvs_interfaces.append(interface)

    return dvs_interfaces
def get_clc_pci_info(pci_info):
    """Return bus addresses of Coleto Creek (QAT) PCI devices.

    :param pci_info: iterable of lspci-style lines, e.g.
        '03:00.0 Intel Corporation Coleto Creek PCIe Endpoint'
    :returns: list of matching bus addresses (first whitespace token)
    """
    clc_pci = []
    flag1 = 'Intel Corporation Coleto Creek PCIe Endpoint'
    # NOTE(review): flag2 reconstructed from truncated source — believed to
    # be the Coleto Creek vendor:device id; confirm against upstream.
    flag2 = '8086:0435'
    for pci in pci_info:
        if flag1 in pci or flag2 in pci:
            clc_pci.append(pci.split()[0])
    return clc_pci
def cpu_str_to_list(spec):
    """Parse a CPU set specification.

    :param spec: cpu set string eg "1-4,^3,6"

    Each element in the list is either a single
    CPU number, a range of CPU numbers, or a
    caret followed by a CPU number to be excluded
    from a previous range.

    :returns: a sorted list of included CPU indexes
    """
    cpuset_ids = set()
    cpuset_reject_ids = set()
    for rule in spec.split(','):
        rule = rule.strip()
        # Skip empty fields produced by stray commas.
        if len(rule) < 1:
            continue
        # Note the count limit in the .split() call
        range_parts = rule.split('-', 1)
        if len(range_parts) > 1:
            # So, this was a range; start by converting the parts to ints
            try:
                start, end = [int(p.strip()) for p in range_parts]
            except ValueError:
                raise exception.Invalid(_("Invalid range expression %r")
                                        % rule)
            # Make sure it's a valid range
            if start > end:
                raise exception.Invalid(_("Invalid range expression %r")
                                        % rule)
            # Add available CPU ids to set
            cpuset_ids |= set(range(start, end + 1))
        elif rule[0] == '^':
            # Not a range, the rule is an exclusion rule; convert to int
            try:
                cpuset_reject_ids.add(int(rule[1:].strip()))
            except ValueError:
                raise exception.Invalid(_("Invalid exclusion "
                                          "expression %r") % rule)
        else:
            # OK, a single CPU to include; convert to int
            try:
                cpuset_ids.add(int(rule))
            except ValueError:
                raise exception.Invalid(_("Invalid inclusion "
                                          "expression %r") % rule)

    # Use sets to handle the exclusion rules for us
    cpuset_ids -= cpuset_reject_ids
    cpusets = list(cpuset_ids)
    cpusets.sort()
    return cpusets
def cpu_list_to_str(cpu_list):
    """Parse a CPU list to string.

    :param cpu_list: eg "[1,2,3,4,6,7]"

    :returns: a string of CPU ranges, eg 1-4,6,7
    """
    if not cpu_list:
        return ''

    # Work on a sorted, de-duplicated copy; do not mutate the caller's list.
    cpus = sorted(set(cpu_list))

    # Collect runs of consecutive CPU ids as (low, high) pairs.
    groups = []
    start = prev = cpus[0]
    for cpu in cpus[1:]:
        if cpu == prev + 1:
            prev = cpu
            continue
        groups.append((start, prev))
        start = prev = cpu
    groups.append((start, prev))

    # Runs longer than two are written 'lo-hi'; shorter runs are written
    # out individually (so [6,7] renders as '6,7', matching the example).
    parts = []
    for lo, hi in groups:
        if hi - lo + 1 > 2:
            parts.append("%s-%s" % (lo, hi))
        else:
            parts.extend(str(num) for num in range(lo, hi + 1))
    return ','.join(parts)
def simple_subprocess_call(cmd):
    """Run a shell command, discarding output, and return its exit code.

    SECURITY NOTE(review): shell=True executes `cmd` through the shell —
    callers must never pass untrusted input here.
    """
    return_code = subprocess.call(cmd,
                                  shell=True,
                                  stdout=subprocess.PIPE,
                                  stderr=subprocess.PIPE)
    return return_code
def translate_quotation_marks_for_shell(orig_str):
    """Escape double quotes so orig_str can be embedded in a double-quoted
    shell argument.

    :param orig_str: the raw string
    :returns: orig_str with every '"' replaced by '\\"'
    """
    # str.replace is a no-op when no quote is present, so the original
    # count-then-replace dance is unnecessary.
    return orig_str.replace('"', '\\"')
def translate_marks_4_sed_command(ori_str):
    """Escape characters that are special inside a sed expression.

    NOTE(review): the escape table was missing from the truncated source;
    reconstructed as the usual sed delimiters/specials — confirm against
    upstream before relying on exact coverage.
    """
    translated_str = ori_str
    translated_marks = {
        '/': '\\/',
        '.': '\\.',
        '"': '\\"'}
    for translated_mark in translated_marks:
        if translated_str.count(translated_mark):
            translated_str = translated_str.replace(
                translated_mark, translated_marks[translated_mark])
    return translated_str