1 ##############################################################################
2 # Copyright (c) 2016 Huawei Technologies Co.,Ltd and others.
4 # All rights reserved. This program and the accompanying materials
5 # are made available under the terms of the Apache License, Version 2.0
6 # which accompanies this distribution, and is available at
7 # http://www.apache.org/licenses/LICENSE-2.0
8 ##############################################################################
from __future__ import absolute_import

import collections
import errno
import glob
import logging
import os
import subprocess
import threading
import time
import uuid

import yaml
from six.moves import configparser
from oslo_serialization import jsonutils
from docker import Client

from api import ApiResource
from api.database.v1.handlers import AsyncTaskHandler
from api.utils import influx
from yardstick.common import constants as consts
from yardstick.common import openstack_utils
from yardstick.common import utils
from yardstick.common.httpClient import HttpClient
from yardstick.common.utils import result_handler
# Module-level logger; DEBUG is forced so every env operation triggered
# through the API is traceable in the service log.
LOG = logging.getLogger(__name__)
LOG.setLevel(logging.DEBUG)

# Shared handler used to persist async task status (0=running, 1=done, 2=error).
async_handler = AsyncTaskHandler()
class V1Env(ApiResource):
    """v1 ``/env`` API resource.

    POST requests name an action in the payload (create_grafana,
    create_influxdb, prepare_env, update_openrc, upload_pod_file,
    update_pod_file, update_hosts) which is dispatched to the matching
    method on this class.
    """

    def post(self):
        # NOTE(review): the ``def post`` line was dropped from the listing;
        # restored here — ApiResource routes to the action named in the body.
        return self._dispatch_post()
47 def create_grafana(self, args):
48 task_id = str(uuid.uuid4())
50 thread = threading.Thread(target=self._create_grafana, args=(task_id,))
53 return result_handler(consts.API_SUCCESS, {'task_id': task_id})
55 def _create_grafana(self, task_id):
56 self._create_task(task_id)
58 client = Client(base_url=consts.DOCKER_URL)
61 LOG.info('Checking if grafana image exist')
62 image = '{}:{}'.format(consts.GRAFANA_IMAGE, consts.GRAFANA_TAG)
63 if not self._check_image_exist(client, image):
64 LOG.info('Grafana image not exist, start pulling')
65 client.pull(consts.GRAFANA_IMAGE, consts.GRAFANA_TAG)
67 LOG.info('Createing grafana container')
68 self._create_grafana_container(client)
69 LOG.info('Grafana container is created')
73 LOG.info('Creating data source for grafana')
74 self._create_data_source()
76 LOG.info('Creating dashboard for grafana')
77 self._create_dashboard()
79 self._update_task_status(task_id)
81 except Exception as e:
82 self._update_task_error(task_id, str(e))
83 LOG.exception('Create grafana failed')
85 def _create_dashboard(self):
86 url = 'http://admin:admin@%s:3000/api/dashboards/db' % consts.GRAFANA_IP
87 path = os.path.join(consts.REPOS_DIR, 'dashboard', '*dashboard.json')
89 for i in sorted(glob.iglob(path)):
91 data = jsonutils.load(f)
93 HttpClient().post(url, data)
95 LOG.exception('Create dashboard %s failed', i)
98 def _create_data_source(self):
99 url = 'http://admin:admin@%s:3000/api/datasources' % consts.GRAFANA_IP
104 "url": "http://%s:8086" % consts.INFLUXDB_IP,
107 "database": "yardstick",
109 "basicAuthUser": "admin",
110 "basicAuthPassword": "admin",
114 HttpClient().post(url, data)
116 LOG.exception('Create datasources failed')
119 def _create_grafana_container(self, client):
121 port_bindings = {k: k for k in ports}
122 restart_policy = {"MaximumRetryCount": 0, "Name": "always"}
123 host_config = client.create_host_config(port_bindings=port_bindings,
124 restart_policy=restart_policy)
126 LOG.info('Creating container')
127 container = client.create_container(image='%s:%s' %
128 (consts.GRAFANA_IMAGE,
133 host_config=host_config)
134 LOG.info('Starting container')
135 client.start(container)
137 def _check_image_exist(self, client, t):
138 return any(t in a['RepoTags'][0]
139 for a in client.images() if a['RepoTags'])
141 def create_influxdb(self, args):
142 task_id = str(uuid.uuid4())
144 thread = threading.Thread(target=self._create_influxdb, args=(task_id,))
147 return result_handler(consts.API_SUCCESS, {'task_id': task_id})
149 def _create_influxdb(self, task_id):
150 self._create_task(task_id)
152 client = Client(base_url=consts.DOCKER_URL)
155 LOG.info('Changing output to influxdb')
156 self._change_output_to_influxdb()
158 LOG.info('Checking if influxdb image exist')
159 if not self._check_image_exist(client, '%s:%s' %
160 (consts.INFLUXDB_IMAGE,
161 consts.INFLUXDB_TAG)):
162 LOG.info('Influxdb image not exist, start pulling')
163 client.pull(consts.INFLUXDB_IMAGE, tag=consts.INFLUXDB_TAG)
165 LOG.info('Createing influxdb container')
166 self._create_influxdb_container(client)
167 LOG.info('Influxdb container is created')
171 LOG.info('Config influxdb')
172 self._config_influxdb()
174 self._update_task_status(task_id)
177 except Exception as e:
178 self._update_task_error(task_id, str(e))
179 LOG.exception('Creating influxdb failed')
181 def _create_influxdb_container(self, client):
184 port_bindings = {k: k for k in ports}
185 restart_policy = {"MaximumRetryCount": 0, "Name": "always"}
186 host_config = client.create_host_config(port_bindings=port_bindings,
187 restart_policy=restart_policy)
189 LOG.info('Creating container')
190 container = client.create_container(image='%s:%s' %
191 (consts.INFLUXDB_IMAGE,
192 consts.INFLUXDB_TAG),
196 host_config=host_config)
197 LOG.info('Starting container')
198 client.start(container)
200 def _config_influxdb(self):
202 client = influx.get_data_db_client()
203 client.create_user(consts.INFLUXDB_USER,
204 consts.INFLUXDB_PASS,
205 consts.INFLUXDB_DB_NAME)
206 client.create_database(consts.INFLUXDB_DB_NAME)
207 LOG.info('Success to config influxDB')
209 LOG.exception('Config influxdb failed')
211 def _change_output_to_influxdb(self):
212 utils.makedirs(consts.CONF_DIR)
214 parser = configparser.ConfigParser()
215 LOG.info('Reading output sample configuration')
216 parser.read(consts.CONF_SAMPLE_FILE)
218 LOG.info('Set dispatcher to influxdb')
219 parser.set('DEFAULT', 'dispatcher', 'influxdb')
220 parser.set('dispatcher_influxdb', 'target',
221 'http://%s:8086' % consts.INFLUXDB_IP)
223 LOG.info('Writing to %s', consts.CONF_FILE)
224 with open(consts.CONF_FILE, 'w') as f:
227 def prepare_env(self, args):
228 task_id = str(uuid.uuid4())
230 thread = threading.Thread(target=self._prepare_env_daemon,
234 return result_handler(consts.API_SUCCESS, {'task_id': task_id})
236 def _already_source_openrc(self):
237 """Check if openrc is sourced already"""
238 return all(os.environ.get(k) for k in ['OS_AUTH_URL',
243 def _prepare_env_daemon(self, task_id):
244 self._create_task(task_id)
247 self._create_directories()
249 rc_file = consts.OPENRC
251 LOG.info('Checkout Openrc Environment variable')
252 if not self._already_source_openrc():
253 LOG.info('Openrc variable not found in Environment')
254 if not os.path.exists(rc_file):
255 LOG.info('Openrc file not found')
256 installer_ip = os.environ.get('INSTALLER_IP',
258 installer_type = os.environ.get('INSTALLER_TYPE', 'compass')
259 LOG.info('Getting openrc file from %s', installer_type)
260 self._get_remote_rc_file(rc_file,
263 LOG.info('Source openrc file')
264 self._source_file(rc_file)
265 LOG.info('Appending external network')
266 self._append_external_network(rc_file)
267 LOG.info('Openrc file exist, source openrc file')
268 self._source_file(rc_file)
270 LOG.info('Cleaning images')
273 LOG.info('Loading images')
276 self._update_task_status(task_id)
278 except Exception as e:
279 self._update_task_error(task_id, str(e))
280 LOG.exception('Prepare env failed')
282 def _create_directories(self):
283 utils.makedirs(consts.CONF_DIR)
285 def _source_file(self, rc_file):
286 utils.source_env(rc_file)
288 def _get_remote_rc_file(self, rc_file, installer_ip, installer_type):
290 os_fetch_script = os.path.join(consts.RELENG_DIR, consts.FETCH_SCRIPT)
293 cmd = [os_fetch_script, '-d', rc_file, '-i', installer_type,
295 p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
298 if p.returncode != 0:
299 LOG.error('Failed to fetch credentials from installer')
301 if e.errno != errno.EEXIST:
304 def _append_external_network(self, rc_file):
305 neutron_client = openstack_utils.get_neutron_client()
306 networks = neutron_client.list_networks()['networks']
308 ext_network = next(n['name']
309 for n in networks if n['router:external'])
310 except StopIteration:
311 LOG.warning("Can't find external network")
313 cmd = 'export EXTERNAL_NETWORK=%s' % ext_network
315 with open(rc_file, 'a') as f:
318 if e.errno != errno.EEXIST:
321 def _clean_images(self):
322 cmd = [consts.CLEAN_IMAGES_SCRIPT]
323 p = subprocess.Popen(cmd, stdout=subprocess.PIPE, cwd=consts.REPOS_DIR)
324 output = p.communicate()[0]
327 def _load_images(self):
328 cmd = [consts.LOAD_IMAGES_SCRIPT]
329 p = subprocess.Popen(cmd, stdout=subprocess.PIPE, cwd=consts.REPOS_DIR)
330 output = p.communicate()[0]
333 def _create_task(self, task_id):
334 async_handler.insert({'status': 0, 'task_id': task_id})
336 def _update_task_status(self, task_id):
337 async_handler.update_attr(task_id, {'status': 1})
339 def _update_task_error(self, task_id, error):
340 async_handler.update_attr(task_id, {'status': 2, 'error': error})
342 def update_openrc(self, args):
344 openrc_vars = args['openrc']
346 return result_handler(consts.API_ERROR, 'openrc must be provided')
348 if not isinstance(openrc_vars, collections.Mapping):
349 return result_handler(consts.API_ERROR, 'args should be a dict')
351 lines = ['export {}={}\n'.format(k, v) for k, v in openrc_vars.items()]
352 LOG.debug('Writing: %s', ''.join(lines))
354 LOG.info('Writing openrc: Writing')
355 utils.makedirs(consts.CONF_DIR)
357 with open(consts.OPENRC, 'w') as f:
359 LOG.info('Writing openrc: Done')
361 LOG.info('Source openrc: Sourcing')
363 self._source_file(consts.OPENRC)
364 except Exception as e:
365 LOG.exception('Failed to source openrc')
366 return result_handler(consts.API_ERROR, str(e))
367 LOG.info('Source openrc: Done')
369 return result_handler(consts.API_SUCCESS, {'openrc': openrc_vars})
371 def upload_pod_file(self, args):
373 pod_file = args['file']
375 return result_handler(consts.API_ERROR, 'file must be provided')
377 LOG.info('Checking file')
378 data = yaml.load(pod_file.read())
379 if not isinstance(data, collections.Mapping):
380 return result_handler(consts.API_ERROR, 'invalid yaml file')
382 LOG.info('Writing file')
383 with open(consts.POD_FILE, 'w') as f:
384 yaml.dump(data, f, default_flow_style=False)
385 LOG.info('Writing finished')
387 return result_handler(consts.API_SUCCESS, {'pod_info': data})
389 def update_pod_file(self, args):
391 pod_dic = args['pod']
393 return result_handler(consts.API_ERROR, 'pod must be provided')
395 if not isinstance(pod_dic, collections.Mapping):
396 return result_handler(consts.API_ERROR, 'pod should be a dict')
398 LOG.info('Writing file')
399 with open(consts.POD_FILE, 'w') as f:
400 yaml.dump(pod_dic, f, default_flow_style=False)
401 LOG.info('Writing finished')
403 return result_handler(consts.API_SUCCESS, {'pod_info': pod_dic})
405 def update_hosts(self, hosts_ip):
406 if not isinstance(hosts_ip, collections.Mapping):
407 return result_handler(consts.API_ERROR, 'args should be a dict')
408 LOG.info('Writing hosts: Writing')
409 LOG.debug('Writing: %s', hosts_ip)
410 cmd = ["sudo", "python", "write_hosts.py"]
411 p = subprocess.Popen(cmd,
412 stdin=subprocess.PIPE,
413 stdout=subprocess.PIPE,
414 stderr=subprocess.PIPE,
415 cwd=os.path.join(consts.REPOS_DIR,
417 _, err = p.communicate(jsonutils.dumps(hosts_ip))
418 if p.returncode != 0:
419 return result_handler(consts.API_ERROR, err)
420 LOG.info('Writing hosts: Done')
421 return result_handler(consts.API_SUCCESS, 'success')