1 ##############################################################################
2 # Copyright (c) 2016 Huawei Technologies Co.,Ltd and others.
4 # All rights reserved. This program and the accompanying materials
5 # are made available under the terms of the Apache License, Version 2.0
6 # which accompanies this distribution, and is available at
7 # http://www.apache.org/licenses/LICENSE-2.0
8 ##############################################################################
9 from __future__ import absolute_import
22 from six.moves import configparser
23 from oslo_serialization import jsonutils
24 from docker import Client
26 from api.database.v1.handlers import AsyncTaskHandler
27 from api.utils import influx
28 from api import ApiResource
29 from yardstick.common import constants as consts
30 from yardstick.common import utils
31 from yardstick.common.utils import result_handler
32 from yardstick.common import openstack_utils
33 from yardstick.common.httpClient import HttpClient
# Module-level logger; DEBUG level is forced so every container/env setup
# step below is traceable in the API log.
LOG = logging.getLogger(__name__)
LOG.setLevel(logging.DEBUG)

# Shared handler for the async-task records; the methods below use it to
# track long-running jobs (status 0 = pending, 1 = done, 2 = error).
async_handler = AsyncTaskHandler()
class V1Env(ApiResource):
    """v1 environment API resource.

    Exposes async setup of grafana/influxdb containers plus openrc, pod
    file and hosts management.
    """

    # NOTE(review): the method header enclosing this statement is not
    # visible in this chunk; presumably a `def post(self):` that routes
    # POST requests via the ApiResource dispatcher — confirm upstream.
    return self._dispatch_post()
47 def create_grafana(self, args):
48 task_id = str(uuid.uuid4())
50 thread = threading.Thread(target=self._create_grafana, args=(task_id,))
53 return result_handler(consts.API_SUCCESS, {'task_id': task_id})
55 def _create_grafana(self, task_id):
56 self._create_task(task_id)
58 client = Client(base_url=consts.DOCKER_URL)
61 LOG.info('Checking if grafana image exist')
62 image = '{}:{}'.format(consts.GRAFANA_IMAGE, consts.GRAFANA_TAG)
63 if not self._check_image_exist(client, image):
64 LOG.info('Grafana image not exist, start pulling')
65 client.pull(consts.GRAFANA_IMAGE, consts.GRAFANA_TAG)
67 LOG.info('Createing grafana container')
68 container = self._create_grafana_container(client)
69 LOG.info('Grafana container is created')
73 container = client.inspect_container(container['Id'])
74 ip = container['NetworkSettings']['Networks']['bridge']['IPAddress']
75 LOG.debug('container ip is: %s', ip)
77 LOG.info('Creating data source for grafana')
78 self._create_data_source(ip)
80 LOG.info('Creating dashboard for grafana')
81 self._create_dashboard(ip)
83 self._update_task_status(task_id)
85 except Exception as e:
86 self._update_task_error(task_id, str(e))
87 LOG.exception('Create grafana failed')
89 def _create_dashboard(self, ip):
90 url = 'http://admin:admin@{}:{}/api/dashboards/db'.format(ip, consts.GRAFANA_PORT)
91 path = os.path.join(consts.REPOS_DIR, 'dashboard', '*dashboard.json')
93 for i in sorted(glob.iglob(path)):
95 data = jsonutils.load(f)
97 HttpClient().post(url, data)
99 LOG.exception('Create dashboard %s failed', i)
    def _create_data_source(self, ip):
        # Register the influxdb instance as a grafana datasource through the
        # grafana HTTP API (default admin:admin credentials).
        url = 'http://admin:admin@{}:{}/api/datasources'.format(ip, consts.GRAFANA_PORT)
        influx_conf = utils.parse_ini_file(consts.CONF_FILE)

        # NOTE(review): the error log below suggests this lookup is meant to
        # be wrapped in try/except KeyError — the guard is not visible in
        # this chunk; confirm against the full file.
        influx_url = influx_conf['dispatcher_influxdb']['target']
        LOG.exception('influxdb url not set in yardstick.conf')

        # NOTE(review): the datasource payload dict appears truncated here;
        # only these entries survive in this chunk.
        "database": "yardstick",
        "basicAuthUser": "admin",
        "basicAuthPassword": "admin",

        # POST the datasource definition to grafana.
        HttpClient().post(url, data)
        LOG.exception('Create datasources failed')
131 def _create_grafana_container(self, client):
132 ports = [consts.GRAFANA_PORT]
133 port_bindings = {consts.GRAFANA_PORT: consts.GRAFANA_MAPPING_PORT}
134 restart_policy = {"MaximumRetryCount": 0, "Name": "always"}
135 host_config = client.create_host_config(port_bindings=port_bindings,
136 restart_policy=restart_policy)
138 LOG.info('Creating container')
139 container = client.create_container(image='%s:%s' %
140 (consts.GRAFANA_IMAGE,
145 host_config=host_config)
146 LOG.info('Starting container')
147 client.start(container)
150 def _check_image_exist(self, client, t):
151 return any(t in a['RepoTags'][0]
152 for a in client.images() if a['RepoTags'])
154 def create_influxdb(self, args):
155 task_id = str(uuid.uuid4())
157 thread = threading.Thread(target=self._create_influxdb, args=(task_id,))
160 return result_handler(consts.API_SUCCESS, {'task_id': task_id})
162 def _create_influxdb(self, task_id):
163 self._create_task(task_id)
165 client = Client(base_url=consts.DOCKER_URL)
168 LOG.info('Checking if influxdb image exist')
169 if not self._check_image_exist(client, '%s:%s' %
170 (consts.INFLUXDB_IMAGE,
171 consts.INFLUXDB_TAG)):
172 LOG.info('Influxdb image not exist, start pulling')
173 client.pull(consts.INFLUXDB_IMAGE, tag=consts.INFLUXDB_TAG)
175 LOG.info('Createing influxdb container')
176 container = self._create_influxdb_container(client)
177 LOG.info('Influxdb container is created')
181 container = client.inspect_container(container['Id'])
182 ip = container['NetworkSettings']['Networks']['bridge']['IPAddress']
183 LOG.debug('container ip is: %s', ip)
185 LOG.info('Changing output to influxdb')
186 self._change_output_to_influxdb(ip)
188 LOG.info('Config influxdb')
189 self._config_influxdb()
191 self._update_task_status(task_id)
194 except Exception as e:
195 self._update_task_error(task_id, str(e))
196 LOG.exception('Creating influxdb failed')
198 def _create_influxdb_container(self, client):
200 ports = [consts.INFLUXDB_DASHBOARD_PORT, consts.INFLUXDB_PORT]
201 port_bindings = {k: k for k in ports}
202 restart_policy = {"MaximumRetryCount": 0, "Name": "always"}
203 host_config = client.create_host_config(port_bindings=port_bindings,
204 restart_policy=restart_policy)
206 LOG.info('Creating container')
207 container = client.create_container(image='%s:%s' %
208 (consts.INFLUXDB_IMAGE,
209 consts.INFLUXDB_TAG),
213 host_config=host_config)
214 LOG.info('Starting container')
215 client.start(container)
218 def _config_influxdb(self):
220 client = influx.get_data_db_client()
221 client.create_user(consts.INFLUXDB_USER,
222 consts.INFLUXDB_PASS,
223 consts.INFLUXDB_DB_NAME)
224 client.create_database(consts.INFLUXDB_DB_NAME)
225 LOG.info('Success to config influxDB')
227 LOG.exception('Config influxdb failed')
229 def _change_output_to_influxdb(self, ip):
230 utils.makedirs(consts.CONF_DIR)
232 parser = configparser.ConfigParser()
233 LOG.info('Reading output sample configuration')
234 parser.read(consts.CONF_SAMPLE_FILE)
236 LOG.info('Set dispatcher to influxdb')
237 parser.set('DEFAULT', 'dispatcher', 'influxdb')
238 parser.set('dispatcher_influxdb', 'target',
239 'http://{}:{}'.format(ip, consts.INFLUXDB_PORT))
241 LOG.info('Writing to %s', consts.CONF_FILE)
242 with open(consts.CONF_FILE, 'w') as f:
245 def prepare_env(self, args):
246 task_id = str(uuid.uuid4())
248 thread = threading.Thread(target=self._prepare_env_daemon,
252 return result_handler(consts.API_SUCCESS, {'task_id': task_id})
254 def _already_source_openrc(self):
255 """Check if openrc is sourced already"""
256 return all(os.environ.get(k) for k in ['OS_AUTH_URL',
    def _prepare_env_daemon(self, task_id):
        # Worker: obtain/source an openrc file, then clean and load images,
        # recording progress through the async task record.
        # NOTE(review): several structural lines of this method (a try:
        # header matching the except below, the else branch for the
        # 'Openrc file exist' path, and the closing arguments of two calls)
        # are not visible in this chunk — confirm against the full file.
        self._create_task(task_id)

        self._create_directories()

        rc_file = consts.OPENRC

        LOG.info('Checkout Openrc Environment variable')
        if not self._already_source_openrc():
            LOG.info('Openrc variable not found in Environment')
            if not os.path.exists(rc_file):
                LOG.info('Openrc file not found')
                # NOTE(review): call truncated — default value and closing
                # paren missing in this chunk.
                installer_ip = os.environ.get('INSTALLER_IP',
                installer_type = os.environ.get('INSTALLER_TYPE', 'compass')
                LOG.info('Getting openrc file from %s', installer_type)
                # NOTE(review): call truncated — remaining arguments missing.
                self._get_remote_rc_file(rc_file,
                LOG.info('Source openrc file')
                self._source_file(rc_file)
                LOG.info('Appending external network')
                self._append_external_network(rc_file)
                LOG.info('Openrc file exist, source openrc file')
                self._source_file(rc_file)

        LOG.info('Cleaning images')

        LOG.info('Loading images')

        self._update_task_status(task_id)
        except Exception as e:
            self._update_task_error(task_id, str(e))
            LOG.exception('Prepare env failed')
300 def _create_directories(self):
301 utils.makedirs(consts.CONF_DIR)
303 def _source_file(self, rc_file):
304 utils.source_env(rc_file)
    def _get_remote_rc_file(self, rc_file, installer_ip, installer_type):
        # Fetch installer credentials into *rc_file* by running the releng
        # fetch script for the given installer type.
        # NOTE(review): the cmd list below looks truncated (the installer_ip
        # argument and closing bracket are not visible), and the matching
        # try/except header for the errno check is also missing from this
        # chunk — confirm against the full file.
        os_fetch_script = os.path.join(consts.RELENG_DIR, consts.FETCH_SCRIPT)

        cmd = [os_fetch_script, '-d', rc_file, '-i', installer_type,
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
        if p.returncode != 0:
            LOG.error('Failed to fetch credentials from installer')
        if e.errno != errno.EEXIST:
    def _append_external_network(self, rc_file):
        # Look up the first router-external neutron network and append an
        # EXTERNAL_NETWORK export line to the openrc file.
        # NOTE(review): the try: headers matching the StopIteration handler
        # and the errno check, plus the f.write call inside the with-block,
        # are not visible in this chunk — confirm against the full file.
        neutron_client = openstack_utils.get_neutron_client()
        networks = neutron_client.list_networks()['networks']
        ext_network = next(n['name']
                           for n in networks if n['router:external'])
        except StopIteration:
            LOG.warning("Can't find external network")

        cmd = 'export EXTERNAL_NETWORK=%s' % ext_network
        with open(rc_file, 'a') as f:
            if e.errno != errno.EEXIST:
339 def _clean_images(self):
340 cmd = [consts.CLEAN_IMAGES_SCRIPT]
341 p = subprocess.Popen(cmd, stdout=subprocess.PIPE, cwd=consts.REPOS_DIR)
342 output = p.communicate()[0]
345 def _load_images(self):
346 cmd = [consts.LOAD_IMAGES_SCRIPT]
347 p = subprocess.Popen(cmd, stdout=subprocess.PIPE, cwd=consts.REPOS_DIR)
348 output = p.communicate()[0]
351 def _create_task(self, task_id):
352 async_handler.insert({'status': 0, 'task_id': task_id})
354 def _update_task_status(self, task_id):
355 async_handler.update_attr(task_id, {'status': 1})
357 def _update_task_error(self, task_id, error):
358 async_handler.update_attr(task_id, {'status': 2, 'error': error})
360 def update_openrc(self, args):
362 openrc_vars = args['openrc']
364 return result_handler(consts.API_ERROR, 'openrc must be provided')
366 if not isinstance(openrc_vars, collections.Mapping):
367 return result_handler(consts.API_ERROR, 'args should be a dict')
369 lines = ['export {}={}\n'.format(k, v) for k, v in openrc_vars.items()]
370 LOG.debug('Writing: %s', ''.join(lines))
372 LOG.info('Writing openrc: Writing')
373 utils.makedirs(consts.CONF_DIR)
375 with open(consts.OPENRC, 'w') as f:
377 LOG.info('Writing openrc: Done')
379 LOG.info('Source openrc: Sourcing')
381 self._source_file(consts.OPENRC)
382 except Exception as e:
383 LOG.exception('Failed to source openrc')
384 return result_handler(consts.API_ERROR, str(e))
385 LOG.info('Source openrc: Done')
387 return result_handler(consts.API_SUCCESS, {'openrc': openrc_vars})
389 def upload_pod_file(self, args):
391 pod_file = args['file']
393 return result_handler(consts.API_ERROR, 'file must be provided')
395 LOG.info('Checking file')
396 data = yaml.safe_load(pod_file.read())
397 if not isinstance(data, collections.Mapping):
398 return result_handler(consts.API_ERROR, 'invalid yaml file')
400 LOG.info('Writing file')
401 with open(consts.POD_FILE, 'w') as f:
402 yaml.dump(data, f, default_flow_style=False)
403 LOG.info('Writing finished')
405 return result_handler(consts.API_SUCCESS, {'pod_info': data})
407 def update_pod_file(self, args):
409 pod_dic = args['pod']
411 return result_handler(consts.API_ERROR, 'pod must be provided')
413 if not isinstance(pod_dic, collections.Mapping):
414 return result_handler(consts.API_ERROR, 'pod should be a dict')
416 LOG.info('Writing file')
417 with open(consts.POD_FILE, 'w') as f:
418 yaml.dump(pod_dic, f, default_flow_style=False)
419 LOG.info('Writing finished')
421 return result_handler(consts.API_SUCCESS, {'pod_info': pod_dic})
    def update_hosts(self, hosts_ip):
        # Write host entries via a privileged helper script; hosts_ip is
        # expected to map hostnames to IP addresses.
        if not isinstance(hosts_ip, collections.Mapping):
            return result_handler(consts.API_ERROR, 'args should be a dict')
        LOG.info('Writing hosts: Writing')
        LOG.debug('Writing: %s', hosts_ip)
        # sudo is required because the helper edits a system file.
        cmd = ["sudo", "python", "write_hosts.py"]
        # NOTE(review): the os.path.join call below is truncated in this
        # chunk — the subdirectory argument and closing parentheses are not
        # visible; confirm the working directory against the full file.
        p = subprocess.Popen(cmd,
                             stdin=subprocess.PIPE,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE,
                             cwd=os.path.join(consts.REPOS_DIR,
        # The helper reads the JSON-encoded mapping from stdin.
        _, err = p.communicate(jsonutils.dumps(hosts_ip))
        if p.returncode != 0:
            return result_handler(consts.API_ERROR, err)
        LOG.info('Writing hosts: Done')
        return result_handler(consts.API_SUCCESS, 'success')