1 # Copyright 2012 OpenStack Foundation
4 # Licensed under the Apache License, Version 2.0 (the "License"); you may
5 # not use this file except in compliance with the License. You may obtain
6 # a copy of the License at
8 # http://www.apache.org/licenses/LICENSE-2.0
10 # Unless required by applicable law or agreed to in writing, software
11 # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
12 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13 # License for the specific language governing permissions and limitations
import copy

from oslo_utils import encodeutils
from oslo_utils import strutils
import six
import six.moves.urllib.parse as urlparse

from escalatorclient.common import utils
from escalatorclient.openstack.common.apiclient import base
# Number of results fetched per paginated request when the caller does not
# supply an explicit ``page_size``.
DEFAULT_PAGE_SIZE = 20

# Accepted values for the ``sort_dir`` / ``sort_key`` query parameters;
# anything else makes ``_build_params`` raise ValueError.
SORT_DIR_VALUES = ('asc', 'desc')
SORT_KEY_VALUES = ('name', 'auto_scale', 'id', 'created_at', 'updated_at')

# Response header that carries the OpenStack request ID.
OS_REQ_ID_HDR = 'x-openstack-request-id'
class Cluster(base.Resource):
    """A single cluster resource returned by the escalator v1 API."""

    def __repr__(self):
        return "<Cluster %s>" % self._info

    def data(self, **kwargs):
        """Delegate to the manager to fetch this resource's raw data."""
        return self.manager.data(self, **kwargs)
class ClusterManager(base.ManagerWithFind):
    """Manager for :class:`Cluster` resources in the escalator v1 API."""

    # Each row of an API listing is wrapped in this resource class.
    resource_class = Cluster
46 def _list(self, url, response_key, obj_class=None, body=None):
47 resp, body = self.client.get(url)
50 obj_class = self.resource_class
52 data = body[response_key]
53 return ([obj_class(self, res, loaded=True) for res in data if res],
56 def _cluster_meta_from_headers(self, headers):
57 meta = {'properties': {}}
58 safe_decode = encodeutils.safe_decode
59 for key, value in six.iteritems(headers):
60 value = safe_decode(value, incoming='utf-8')
61 if key.startswith('x-image-meta-property-'):
62 _key = safe_decode(key[22:], incoming='utf-8')
63 meta['properties'][_key] = value
64 elif key.startswith('x-image-meta-'):
65 _key = safe_decode(key[13:], incoming='utf-8')
68 for key in ['is_public', 'protected', 'deleted']:
70 meta[key] = strutils.bool_from_string(meta[key])
72 return self._format_cluster_meta_for_user(meta)
74 def _cluster_meta_to_headers(self, fields):
76 fields_copy = copy.deepcopy(fields)
78 # NOTE(flaper87): Convert to str, headers
79 # that are not instance of basestring. All
80 # headers will be encoded later, before the
83 for key, value in six.iteritems(fields_copy):
84 headers['%s' % key] = utils.to_str(value)
88 def _format_cluster_meta_for_user(meta):
89 for key in ['size', 'min_ram', 'min_disk']:
92 meta[key] = int(meta[key]) if meta[key] else 0
97 def get(self, cluster, **kwargs):
98 """Get the metadata for a specific cluster.
100 :param cluster: host object or id to look up
101 :rtype: :class:`Cluster`
103 cluster_id = base.getid(cluster)
104 resp, body = self.client.get('/v1/clusters/%s'
105 % urlparse.quote(str(cluster_id)))
106 # meta = self._cluster_meta_from_headers(resp.headers)
107 return_request_id = kwargs.get('return_req_id', None)
108 if return_request_id is not None:
109 return_request_id.append(resp.headers.get(OS_REQ_ID_HDR, None))
110 # return Host(self, meta)
111 return Cluster(self, self._format_cluster_meta_for_user(
114 def data(self, image, do_checksum=True, **kwargs):
115 """Get the raw data for a specific image.
117 :param image: image object or id to look up
118 :param do_checksum: Enable/disable checksum validation
119 :rtype: iterable containing image data
121 image_id = base.getid(image)
122 resp, body = self.client.get('/v1/images/%s'
123 % urlparse.quote(str(image_id)))
124 content_length = int(resp.headers.get('content-length', 0))
125 checksum = resp.headers.get('x-image-meta-checksum', None)
126 if do_checksum and checksum is not None:
127 body = utils.integrity_iter(body, checksum)
128 return_request_id = kwargs.get('return_req_id', None)
129 if return_request_id is not None:
130 return_request_id.append(resp.headers.get(OS_REQ_ID_HDR, None))
132 return utils.IterableWithLength(body, content_length)
134 def _build_params(self, parameters):
135 params = {'limit': parameters.get('page_size', DEFAULT_PAGE_SIZE)}
137 if 'marker' in parameters:
138 params['marker'] = parameters['marker']
140 sort_key = parameters.get('sort_key')
141 if sort_key is not None:
142 if sort_key in SORT_KEY_VALUES:
143 params['sort_key'] = sort_key
145 raise ValueError('sort_key must be one of the following: %s.'
146 % ', '.join(SORT_KEY_VALUES))
148 sort_dir = parameters.get('sort_dir')
149 if sort_dir is not None:
150 if sort_dir in SORT_DIR_VALUES:
151 params['sort_dir'] = sort_dir
153 raise ValueError('sort_dir must be one of the following: %s.'
154 % ', '.join(SORT_DIR_VALUES))
156 filters = parameters.get('filters', {})
157 params.update(filters)
161 def list(self, **kwargs):
162 """Get a list of clusters.
164 :param page_size: number of items to request in each paginated request
165 :param limit: maximum number of clusters to return
166 :param marker: begin returning clusters that
167 appear later in the cluster
168 list than that represented by this cluster id
169 :param filters: dict of direct comparison filters that mimics the
170 structure of an cluster object
171 :param return_request_id: If an empty list is provided, populate this
172 list with the request ID value from the header
173 x-openstack-request-id
174 :rtype: list of :class:`Cluster`
176 absolute_limit = kwargs.get('limit')
177 page_size = kwargs.get('page_size', DEFAULT_PAGE_SIZE)
179 def paginate(qp, return_request_id=None):
180 for param, value in six.iteritems(qp):
181 if isinstance(value, six.string_types):
182 # Note(flaper87) Url encoding should
183 # be moved inside http utils, at least
186 # Making sure all params are str before
187 # trying to encode them
188 qp[param] = encodeutils.safe_decode(value)
190 url = '/v1/clusters?%s' % urlparse.urlencode(qp)
191 clusters, resp = self._list(url, "clusters")
193 if return_request_id is not None:
194 return_request_id.append(resp.headers.get(OS_REQ_ID_HDR, None))
196 for cluster in clusters:
199 return_request_id = kwargs.get('return_req_id', None)
201 params = self._build_params(kwargs)
207 for cluster in paginate(params, return_request_id):
208 last_cluster = cluster.id
210 if (absolute_limit is not None and
211 seen + seen_last_page >= absolute_limit):
212 # Note(kragniz): we've seen enough images
218 seen += seen_last_page
220 if seen_last_page + filtered == 0:
221 # Note(kragniz): we didn't get any clusters in the last page
224 if absolute_limit is not None and seen >= absolute_limit:
225 # Note(kragniz): reached the limit of clusters to return
228 if page_size and seen_last_page + filtered < page_size:
229 # Note(kragniz): we've reached the last page of the clusters
232 # Note(kragniz): there are more clusters to come
233 params['marker'] = last_cluster