1 ##############################################################################
2 # Copyright (c) 2016 Huawei Technologies Co.,Ltd and others.
4 # All rights reserved. This program and the accompanying materials
5 # are made available under the terms of the Apache License, Version 2.0
6 # which accompanies this distribution, and is available at
7 # http://www.apache.org/licenses/LICENSE-2.0
8 ##############################################################################
9 from __future__ import absolute_import
21 from subprocess import PIPE
23 from six.moves import configparser
24 from oslo_serialization import jsonutils
25 from docker import Client
27 from api.database.v1.handlers import AsyncTaskHandler
28 from api.utils import influx
29 from api.utils.common import result_handler
30 from yardstick.common import constants as consts
31 from yardstick.common import utils as common_utils
32 from yardstick.common import openstack_utils
33 from yardstick.common.httpClient import HttpClient
# Module-level logger; DEBUG is forced so the asynchronous env operations
# below leave a full trace in the service log.
LOG = logging.getLogger(__name__)
LOG.setLevel(logging.DEBUG)

# Shared handler used to persist async task state (created / done / error)
# in the API database.
async_handler = AsyncTaskHandler()
def create_grafana(args):
    """API entry point: start asynchronous creation of a grafana container.

    Spawns a background thread running _create_grafana and immediately
    returns the generated task id so the caller can poll task status.
    """
    task_id = str(uuid.uuid4())

    # NOTE(review): thread.start() is not visible in this view — confirm the
    # thread is actually started on one of the elided lines.
    thread = threading.Thread(target=_create_grafana, args=(task_id,))

    return result_handler(consts.API_SUCCESS, {'task_id': task_id})
def _create_grafana(task_id):
    """Background worker: pull the grafana image if needed, create and start
    the container, then record success or failure on the async task.

    :param task_id: id of the async task record to update when done
    """
    client = Client(base_url=consts.DOCKER_URL)

        # NOTE(review): the matching `try:` for the except below is on an
        # elided line — the indented body here runs under it.
        LOG.info('Checking if grafana image exist')
        image = '{}:{}'.format(consts.GRAFANA_IMAGE, consts.GRAFANA_TAG)
        if not _check_image_exist(client, image):
            LOG.info('Grafana image not exist, start pulling')
            client.pull(consts.GRAFANA_IMAGE, consts.GRAFANA_TAG)

        LOG.info('Createing grafana container')
        _create_grafana_container(client)
        LOG.info('Grafana container is created')

        LOG.info('Creating data source for grafana')
        # NOTE(review): the _create_data_source() call appears elided here.

        LOG.info('Creating dashboard for grafana')
        # NOTE(review): the _create_dashboard() call appears elided here.

        _update_task_status(task_id)

    except Exception as e:
        # Record the failure on the task so API clients polling the task see it.
        _update_task_error(task_id, str(e))
        LOG.exception('Create grafana failed')
def _create_dashboard():
    """Post every *dashboard.json file in the repo to the grafana HTTP API."""
    url = 'http://admin:admin@%s:3000/api/dashboards/db' % consts.GRAFANA_IP
    path = os.path.join(consts.REPOS_DIR, 'dashboard', '*dashboard.json')

    # sorted() gives a deterministic creation order across runs.
    for i in sorted(glob.iglob(path)):
            # NOTE(review): the `with open(i) as f:` line appears elided here.
            data = jsonutils.load(f)
            HttpClient().post(url, data)
            # NOTE(review): inside an elided except clause — one failed
            # dashboard is logged but does not stop the loop.
            LOG.exception('Create dashboard %s failed', i)
def _create_data_source():
    """Register the influxdb instance as a grafana data source via HTTP API."""
    url = 'http://admin:admin@%s:3000/api/datasources' % consts.GRAFANA_IP
    # NOTE(review): the opening of the `data` dict literal and several of its
    # keys are on elided lines; only a fragment of the payload is visible.
        "url": "http://%s:8086" % consts.INFLUXDB_IP,
        "database": "yardstick",
        "basicAuthUser": "admin",
        "basicAuthPassword": "admin",
    # NOTE(review): runs under an elided `try:` line.
        HttpClient().post(url, data)
        LOG.exception('Create datasources failed')
def _create_grafana_container(client):
    """Create and start the grafana container, mapping its ports 1:1.

    :param client: docker Client connected to the local daemon
    """
    # NOTE(review): the `ports` list is defined on an elided line above.
    port_bindings = {k: k for k in ports}
    # Always restart so grafana survives daemon/host restarts.
    restart_policy = {"MaximumRetryCount": 0, "Name": "always"}
    host_config = client.create_host_config(port_bindings=port_bindings,
                                            restart_policy=restart_policy)

    LOG.info('Creating container')
    # NOTE(review): the intermediate create_container kwargs (tag, ports,
    # detach, ...) are on elided lines between these two.
    container = client.create_container(image='%s:%s' % (consts.GRAFANA_IMAGE,
                                        host_config=host_config)
    LOG.info('Starting container')
    client.start(container)
136 def _check_image_exist(client, t):
137 return any(t in a['RepoTags'][0] for a in client.images() if a['RepoTags'])
def create_influxdb(args):
    """API entry point: start asynchronous creation of an influxdb container.

    Spawns a background thread running _create_influxdb and immediately
    returns the generated task id so the caller can poll task status.
    """
    task_id = str(uuid.uuid4())

    # NOTE(review): thread.start() is not visible in this view — confirm the
    # thread is actually started on one of the elided lines.
    thread = threading.Thread(target=_create_influxdb, args=(task_id,))

    return result_handler(consts.API_SUCCESS, {'task_id': task_id})
def _create_influxdb(task_id):
    """Background worker: point yardstick output at influxdb, pull/create/start
    the influxdb container, then record success or failure on the async task.

    :param task_id: id of the async task record to update when done
    """
    _create_task(task_id)

    client = Client(base_url=consts.DOCKER_URL)

        # NOTE(review): the matching `try:` for the except below is on an
        # elided line — the indented body here runs under it.
        LOG.info('Changing output to influxdb')
        _change_output_to_influxdb()

        LOG.info('Checking if influxdb image exist')
        if not _check_image_exist(client, '%s:%s' % (consts.INFLUXDB_IMAGE,
                                                     consts.INFLUXDB_TAG)):
            LOG.info('Influxdb image not exist, start pulling')
            client.pull(consts.INFLUXDB_IMAGE, tag=consts.INFLUXDB_TAG)

        LOG.info('Createing influxdb container')
        _create_influxdb_container(client)
        LOG.info('Influxdb container is created')

        LOG.info('Config influxdb')
        # NOTE(review): the _config_influxdb() call appears elided here.

        _update_task_status(task_id)

    except Exception as e:
        # Record the failure on the task so API clients polling the task see it.
        _update_task_error(task_id, str(e))
        LOG.exception('Creating influxdb failed')
def _create_influxdb_container(client):
    """Create and start the influxdb container, mapping its ports 1:1.

    :param client: docker Client connected to the local daemon
    """
    # NOTE(review): the `ports` list is defined on an elided line above.
    port_bindings = {k: k for k in ports}
    # Always restart so influxdb survives daemon/host restarts.
    restart_policy = {"MaximumRetryCount": 0, "Name": "always"}
    host_config = client.create_host_config(port_bindings=port_bindings,
                                            restart_policy=restart_policy)

    LOG.info('Creating container')
    # NOTE(review): the intermediate create_container kwargs (ports, detach,
    # ...) are on elided lines between the image argument and host_config.
    container = client.create_container(image='%s:%s' % (consts.INFLUXDB_IMAGE,
                                                         consts.INFLUXDB_TAG),
                                        host_config=host_config)
    LOG.info('Starting container')
    client.start(container)
def _config_influxdb():
    """Create the yardstick user and database inside the influxdb instance."""
        # NOTE(review): the matching `try:` line is elided; this body runs
        # under it.
        client = influx.get_data_db_client()
        client.create_user(consts.INFLUXDB_USER,
                           consts.INFLUXDB_PASS,
                           consts.INFLUXDB_DB_NAME)
        client.create_database(consts.INFLUXDB_DB_NAME)
        LOG.info('Success to config influxDB')
        # NOTE(review): the `except` line is elided; failure is logged only,
        # not re-raised.
        LOG.exception('Config influxdb failed')
def _change_output_to_influxdb():
    """Rewrite the yardstick config so results are dispatched to influxdb.

    Reads the sample configuration, flips the dispatcher to 'influxdb' and
    points the target at the configured influxdb host.
    """
    common_utils.makedirs(consts.CONF_DIR)

    parser = configparser.ConfigParser()
    LOG.info('Reading output sample configuration')
    parser.read(consts.CONF_SAMPLE_FILE)

    LOG.info('Set dispatcher to influxdb')
    parser.set('DEFAULT', 'dispatcher', 'influxdb')
    parser.set('dispatcher_influxdb', 'target',
               'http://%s:8086' % consts.INFLUXDB_IP)

    LOG.info('Writing to %s', consts.CONF_FILE)
    with open(consts.CONF_FILE, 'w') as f:
        # NOTE(review): the `parser.write(f)` line appears elided here —
        # confirm the parsed config is actually written out.
def prepare_env(args):
    """API entry point: start asynchronous environment preparation.

    Spawns a background thread running _prepare_env_daemon and immediately
    returns the generated task id so the caller can poll task status.
    """
    task_id = str(uuid.uuid4())

    # NOTE(review): thread.start() is not visible in this view — confirm the
    # thread is actually started on one of the elided lines.
    thread = threading.Thread(target=_prepare_env_daemon, args=(task_id,))

    return result_handler(consts.API_SUCCESS, {'task_id': task_id})
238 def _already_source_openrc():
239 """Check if openrc is sourced already"""
240 return all(os.environ.get(k) for k in ['OS_AUTH_URL', 'OS_USERNAME',
241 'OS_PASSWORD', 'EXTERNAL_NETWORK'])
def _prepare_env_daemon(task_id):
    """Background worker: source openrc (fetching it from the installer if
    needed), refresh test images, then record success or failure on the task.

    :param task_id: id of the async task record to update when done
    """
    _create_task(task_id)

        # NOTE(review): the matching `try:` for the except below is on an
        # elided line — the indented body here runs under it.
        _create_directories()

        rc_file = consts.OPENRC

        LOG.info('Checkout Openrc Environment variable')
        if not _already_source_openrc():
            LOG.info('Openrc variable not found in Environment')
            if not os.path.exists(rc_file):
                LOG.info('Openrc file not found')
                # Fall back to fetching credentials from the installer node;
                # defaults target a compass deployment.
                installer_ip = os.environ.get('INSTALLER_IP', '192.168.200.2')
                installer_type = os.environ.get('INSTALLER_TYPE', 'compass')
                LOG.info('Getting openrc file from %s', installer_type)
                _get_remote_rc_file(rc_file, installer_ip, installer_type)
                LOG.info('Source openrc file')
                _source_file(rc_file)
                LOG.info('Appending external network')
                _append_external_network(rc_file)
        # NOTE(review): branch structure around the next two lines is unclear
        # from this view (an `else:` may be elided) — confirm against the
        # full file.
        LOG.info('Openrc file exist, source openrc file')
        _source_file(rc_file)

        LOG.info('Cleaning images')
        # NOTE(review): the image-cleaning call appears elided here.

        LOG.info('Loading images')
        # NOTE(review): the image-loading call appears elided here.

        _update_task_status(task_id)

    except Exception as e:
        # Record the failure on the task so API clients polling the task see it.
        _update_task_error(task_id, str(e))
        LOG.exception('Prepare env failed')
def _create_directories():
    """Ensure the yardstick configuration directory exists."""
    common_utils.makedirs(consts.CONF_DIR)
def _source_file(rc_file):
    """Load the environment variables exported by *rc_file* into os.environ."""
    common_utils.source_env(rc_file)
def _get_remote_rc_file(rc_file, installer_ip, installer_type):
    """Fetch OpenStack credentials from the installer via the releng script.

    :param rc_file: destination path for the fetched openrc file
    :param installer_ip: address of the installer node
    :param installer_type: installer flavour (e.g. compass, fuel)
    """
    os_fetch_script = os.path.join(consts.RELENG_DIR, consts.FETCH_SCRIPT)

        # NOTE(review): the matching `try:` line and the continuation of the
        # cmd list ('-a', installer_ip) appear elided here.
        cmd = [os_fetch_script, '-d', rc_file, '-i', installer_type,
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
        # NOTE(review): the p.communicate()/wait call is elided; returncode
        # is only valid after the process has finished.
        if p.returncode != 0:
            LOG.error('Failed to fetch credentials from installer')
        # NOTE(review): inside an elided `except OSError as e:` clause.
        if e.errno != errno.EEXIST:
def _append_external_network(rc_file):
    """Find the external neutron network and export it in *rc_file*.

    Appends 'export EXTERNAL_NETWORK=<name>' to the openrc file; logs a
    warning when no external network exists.
    """
    neutron_client = openstack_utils.get_neutron_client()
    networks = neutron_client.list_networks()['networks']
        # NOTE(review): the matching `try:` line is elided above.
        # First network flagged router:external wins.
        ext_network = next(n['name'] for n in networks if n['router:external'])
    except StopIteration:
        LOG.warning("Can't find external network")
        # NOTE(review): the following lines appear to belong to an elided
        # `else:` branch.
        cmd = 'export EXTERNAL_NETWORK=%s' % ext_network
            with open(rc_file, 'a') as f:
            # NOTE(review): inside an elided `except OSError as e:` clause;
            # the f.write(...) line is also elided.
            if e.errno != errno.EEXIST:
    # NOTE(review): the enclosing `def` line (image-cleaning helper) is elided
    # above — these are its body lines.
    cmd = [consts.CLEAN_IMAGES_SCRIPT]
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, cwd=consts.REPOS_DIR)
    # Block until the script finishes and capture its stdout.
    output = p.communicate()[0]
    # NOTE(review): the enclosing `def` line (image-loading helper) is elided
    # above — these are its body lines.
    cmd = [consts.LOAD_IMAGES_SCRIPT]
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, cwd=consts.REPOS_DIR)
    # Block until the script finishes and capture its stdout.
    output = p.communicate()[0]
def _create_task(task_id):
    """Persist a new async task record in the 'in progress' state (0)."""
    record = {'task_id': task_id, 'status': 0}
    async_handler.insert(record)
def _update_task_status(task_id):
    """Mark the async task identified by *task_id* as finished (status 1)."""
    update = {'status': 1}
    async_handler.update_attr(task_id, update)
def _update_task_error(task_id, error):
    """Mark the async task as failed (status 2) and record the error text."""
    update = {'error': error, 'status': 2}
    async_handler.update_attr(task_id, update)
def update_openrc(args):
    """API entry point: overwrite the openrc file with the given variables
    and source it into the service environment.

    :param args: dict expected to contain an 'openrc' mapping of var -> value
    :return: result_handler payload (API_SUCCESS with the vars, or API_ERROR)
    """
        # NOTE(review): the matching `try:`/`except KeyError:` lines are
        # elided around these two statements.
        openrc_vars = args['openrc']
        return result_handler(consts.API_ERROR, 'openrc must be provided')

    # NOTE(review): collections.Mapping is removed in Python 3.10+; the
    # modern spelling is collections.abc.Mapping — confirm target runtime.
    if not isinstance(openrc_vars, collections.Mapping):
        return result_handler(consts.API_ERROR, 'args should be a dict')

    lines = ['export {}={}\n'.format(k, v) for k, v in openrc_vars.items()]
    LOG.debug('Writing: %s', ''.join(lines))

    LOG.info('Writing openrc: Writing')
    common_utils.makedirs(consts.CONF_DIR)

    with open(consts.OPENRC, 'w') as f:
        # NOTE(review): the f.writelines(lines) call appears elided here.
    LOG.info('Writing openrc: Done')

    LOG.info('Source openrc: Sourcing')
        # NOTE(review): the matching `try:` line is elided above.
        _source_file(consts.OPENRC)
    except Exception as e:
        LOG.exception('Failed to source openrc')
        return result_handler(consts.API_ERROR, str(e))
    LOG.info('Source openrc: Done')

    return result_handler(consts.API_SUCCESS, {'openrc': openrc_vars})
def upload_pod_file(args):
    """API entry point: validate an uploaded pod file and save it as the
    active pod configuration.

    :param args: dict expected to contain a readable 'file' object
    :return: result_handler payload (API_SUCCESS with pod data, or API_ERROR)
    """
        # NOTE(review): the matching `try:`/`except KeyError:` lines are
        # elided around these two statements.
        pod_file = args['file']
        return result_handler(consts.API_ERROR, 'file must be provided')

    LOG.info('Checking file')
    # NOTE(review): yaml.load without an explicit Loader executes arbitrary
    # constructors on untrusted input — yaml.safe_load is the safe choice.
    data = yaml.load(pod_file.read())
    if not isinstance(data, collections.Mapping):
        return result_handler(consts.API_ERROR, 'invalid yaml file')

    LOG.info('Writing file')
    with open(consts.POD_FILE, 'w') as f:
        yaml.dump(data, f, default_flow_style=False)
    LOG.info('Writing finished')

    return result_handler(consts.API_SUCCESS, {'pod_info': data})
def update_pod_file(args):
    """API entry point: replace the active pod configuration with the given
    pod dict.

    :param args: dict expected to contain a 'pod' mapping
    :return: result_handler payload (API_SUCCESS with pod data, or API_ERROR)
    """
        # NOTE(review): the matching `try:`/`except KeyError:` lines are
        # elided around these two statements.
        pod_dic = args['pod']
        return result_handler(consts.API_ERROR, 'pod must be provided')

    # NOTE(review): collections.Mapping is removed in Python 3.10+; the
    # modern spelling is collections.abc.Mapping — confirm target runtime.
    if not isinstance(pod_dic, collections.Mapping):
        return result_handler(consts.API_ERROR, 'pod should be a dict')

    LOG.info('Writing file')
    with open(consts.POD_FILE, 'w') as f:
        yaml.dump(pod_dic, f, default_flow_style=False)
    LOG.info('Writing finished')

    return result_handler(consts.API_SUCCESS, {'pod_info': pod_dic})
def update_hosts(hosts_ip):
    """Write hostname-to-IP mappings to the system hosts file via the
    privileged write_hosts.py helper.

    :param hosts_ip: dict mapping hostnames to IP addresses; passed to the
        helper as JSON on stdin
    :return: result_handler payload — API_SUCCESS on success, API_ERROR with
        the helper's stderr (or a validation message) on failure

    Fixes: PEP 8 violations in the original (spaces around '=' in the `cwd`
    keyword argument, stray space before ':' in the returncode check).
    """
    # Guard clause: reject anything that is not a plain dict.
    if not isinstance(hosts_ip, dict):
        return result_handler(consts.API_ERROR, 'Error, args should be a dict')

    LOG.info('Writing hosts: Writing')
    LOG.debug('Writing: %s', hosts_ip)

    # The helper needs root to edit the hosts file, hence sudo; the mapping
    # is handed over on stdin as JSON.
    cmd = ["sudo", "python", "write_hosts.py"]
    p = subprocess.Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE,
                         cwd=os.path.join(consts.REPOS_DIR, "api/resources"))
    _, err = p.communicate(jsonutils.dumps(hosts_ip))
    if p.returncode != 0:
        # Surface the helper's stderr to the API caller.
        return result_handler(consts.API_ERROR, err)

    LOG.info('Writing hosts: Done')
    return result_handler(consts.API_SUCCESS, 'success')