1 ##############################################################################
2 # Copyright (c) 2016 Huawei Technologies Co.,Ltd and others.
4 # All rights reserved. This program and the accompanying materials
5 # are made available under the terms of the Apache License, Version 2.0
6 # which accompanies this distribution, and is available at
7 # http://www.apache.org/licenses/LICENSE-2.0
8 ##############################################################################
from __future__ import absolute_import

import collections
import errno
import glob
import logging
import os
import subprocess
import threading
import time
import uuid

import yaml
from six.moves import configparser
from oslo_serialization import jsonutils
from docker import Client
from docker.errors import APIError
from requests.exceptions import HTTPError

from api.database.v1.handlers import AsyncTaskHandler
from api.utils import influx
from api import ApiResource
from yardstick.common import constants as consts
from yardstick.common import utils
from yardstick.common.utils import result_handler
from yardstick.common import openstack_utils
from yardstick.common.httpClient import HttpClient
from yardstick.common.yaml_loader import yaml_load
# Module-level logger; DEBUG level so container/IP details are recorded.
LOG = logging.getLogger(__name__)
LOG.setLevel(logging.DEBUG)

# Shared handler used to persist async task status rows (see _create_task,
# _update_task_status and _update_task_error below).
async_handler = AsyncTaskHandler()
class V1Env(ApiResource):
    """v1 API resource managing the yardstick runtime environment.

    Provides asynchronous creation of grafana/influxdb containers,
    openrc handling, pod-file upload and hosts-file updates.  Incoming
    POST payloads are routed to the matching method by the inherited
    ``_dispatch_post`` helper.
    """

    def post(self):
        # Route the POST body's "action" to the corresponding method.
        return self._dispatch_post()
49 def create_grafana(self, *args):
50 task_id = str(uuid.uuid4())
52 thread = threading.Thread(target=self._create_grafana, args=(task_id,))
55 return result_handler(consts.API_SUCCESS, {'task_id': task_id})
57 def _create_grafana(self, task_id):
58 self._create_task(task_id)
60 client = Client(base_url=consts.DOCKER_URL)
63 LOG.info('Checking if grafana image exist')
64 image = '{}:{}'.format(consts.GRAFANA_IMAGE, consts.GRAFANA_TAG)
65 if not self._check_image_exist(client, image):
66 LOG.info('Grafana image not exist, start pulling')
67 client.pull(consts.GRAFANA_IMAGE, consts.GRAFANA_TAG)
69 LOG.info('Createing grafana container')
70 container = self._create_grafana_container(client)
71 LOG.info('Grafana container is created')
75 container = client.inspect_container(container['Id'])
76 ip = container['NetworkSettings']['Networks']['bridge']['IPAddress']
77 LOG.debug('container ip is: %s', ip)
79 LOG.info('Creating data source for grafana')
80 self._create_data_source(ip)
82 LOG.info('Creating dashboard for grafana')
83 self._create_dashboard(ip)
85 self._update_task_status(task_id)
87 except (APIError, HTTPError) as e:
88 self._update_task_error(task_id, str(e))
89 LOG.exception('Create grafana failed')
91 def _create_dashboard(self, ip):
92 url = 'http://admin:admin@{}:{}/api/dashboards/db'.format(ip, consts.GRAFANA_PORT)
93 path = os.path.join(consts.REPOS_DIR, 'dashboard', 'opnfv_yardstick_tc*.json')
95 for i in sorted(glob.iglob(path)):
97 data = jsonutils.load(f)
99 HttpClient().post(url, {"dashboard": data})
101 LOG.exception('Create dashboard %s failed', i)
104 def _create_data_source(self, ip):
105 url = 'http://admin:admin@{}:{}/api/datasources'.format(ip, consts.GRAFANA_PORT)
106 influx_conf = utils.parse_ini_file(consts.CONF_FILE).get('dispatcher_influxdb', {})
112 "url": influx_conf.get('target', ''),
113 "password": influx_conf.get('password', ''),
114 "user": influx_conf.get('username', ''),
115 "database": "yardstick",
117 "basicAuthUser": "admin",
118 "basicAuthPassword": "admin",
122 HttpClient().post(url, data, timeout=10)
124 LOG.exception('Create datasources failed')
127 def _create_grafana_container(self, client):
128 ports = [consts.GRAFANA_PORT]
129 port_bindings = {consts.GRAFANA_PORT: consts.GRAFANA_MAPPING_PORT}
130 restart_policy = {"MaximumRetryCount": 0, "Name": "always"}
131 host_config = client.create_host_config(port_bindings=port_bindings,
132 restart_policy=restart_policy)
134 LOG.info('Creating container')
135 container = client.create_container(image='%s:%s' %
136 (consts.GRAFANA_IMAGE,
141 host_config=host_config)
142 LOG.info('Starting container')
143 client.start(container)
146 def _check_image_exist(self, client, t):
147 return any(t in a['RepoTags'][0]
148 for a in client.images() if a['RepoTags'])
150 def create_influxdb(self, *args):
151 task_id = str(uuid.uuid4())
153 thread = threading.Thread(target=self._create_influxdb, args=(task_id,))
156 return result_handler(consts.API_SUCCESS, {'task_id': task_id})
158 def _create_influxdb(self, task_id):
159 self._create_task(task_id)
161 client = Client(base_url=consts.DOCKER_URL)
164 LOG.info('Checking if influxdb image exist')
165 if not self._check_image_exist(client, '%s:%s' %
166 (consts.INFLUXDB_IMAGE,
167 consts.INFLUXDB_TAG)):
168 LOG.info('Influxdb image not exist, start pulling')
169 client.pull(consts.INFLUXDB_IMAGE, tag=consts.INFLUXDB_TAG)
171 LOG.info('Createing influxdb container')
172 container = self._create_influxdb_container(client)
173 LOG.info('Influxdb container is created')
177 container = client.inspect_container(container['Id'])
178 ip = container['NetworkSettings']['Networks']['bridge']['IPAddress']
179 LOG.debug('container ip is: %s', ip)
181 LOG.info('Changing output to influxdb')
182 self._change_output_to_influxdb(ip)
184 LOG.info('Config influxdb')
185 self._config_influxdb()
187 self._update_task_status(task_id)
190 except APIError as e:
191 self._update_task_error(task_id, str(e))
192 LOG.exception('Creating influxdb failed')
194 def _create_influxdb_container(self, client):
196 ports = [consts.INFLUXDB_DASHBOARD_PORT, consts.INFLUXDB_PORT]
197 port_bindings = {k: k for k in ports}
198 restart_policy = {"MaximumRetryCount": 0, "Name": "always"}
199 host_config = client.create_host_config(port_bindings=port_bindings,
200 restart_policy=restart_policy)
202 LOG.info('Creating container')
203 container = client.create_container(image='%s:%s' %
204 (consts.INFLUXDB_IMAGE,
205 consts.INFLUXDB_TAG),
209 host_config=host_config)
210 LOG.info('Starting container')
211 client.start(container)
214 def _config_influxdb(self):
216 client = influx.get_data_db_client()
217 client.create_user(consts.INFLUXDB_USER,
218 consts.INFLUXDB_PASS,
219 consts.INFLUXDB_DB_NAME)
220 client.create_database(consts.INFLUXDB_DB_NAME)
221 LOG.info('Success to config influxDB')
223 LOG.exception('Config influxdb failed')
225 def _change_output_to_influxdb(self, ip):
226 utils.makedirs(consts.CONF_DIR)
228 parser = configparser.ConfigParser()
229 LOG.info('Reading output sample configuration')
230 parser.read(consts.CONF_SAMPLE_FILE)
232 LOG.info('Set dispatcher to influxdb')
233 parser.set('DEFAULT', 'dispatcher', 'influxdb')
234 parser.set('dispatcher_influxdb', 'target',
235 'http://{}:{}'.format(ip, consts.INFLUXDB_PORT))
237 LOG.info('Writing to %s', consts.CONF_FILE)
238 with open(consts.CONF_FILE, 'w') as f:
241 def prepare_env(self, *args):
242 task_id = str(uuid.uuid4())
244 thread = threading.Thread(target=self._prepare_env_daemon,
248 return result_handler(consts.API_SUCCESS, {'task_id': task_id})
250 def _already_source_openrc(self):
251 """Check if openrc is sourced already"""
252 return all(os.environ.get(k) for k in ['OS_AUTH_URL',
257 def _prepare_env_daemon(self, task_id):
258 self._create_task(task_id)
261 self._create_directories()
263 rc_file = consts.OPENRC
265 LOG.info('Checkout Openrc Environment variable')
266 if not self._already_source_openrc():
267 LOG.info('Openrc variable not found in Environment')
268 if not os.path.exists(rc_file):
269 LOG.info('Openrc file not found')
270 installer_ip = os.environ.get('INSTALLER_IP',
272 installer_type = os.environ.get('INSTALLER_TYPE', 'compass')
273 LOG.info('Getting openrc file from %s', installer_type)
274 self._get_remote_rc_file(rc_file,
277 LOG.info('Source openrc file')
278 self._source_file(rc_file)
279 LOG.info('Appending external network')
280 self._append_external_network(rc_file)
281 LOG.info('Openrc file exist, source openrc file')
282 self._source_file(rc_file)
284 LOG.info('Cleaning images')
287 LOG.info('Loading images')
290 self._update_task_status(task_id)
292 except (subprocess.CalledProcessError, OSError) as e:
293 self._update_task_error(task_id, str(e))
294 LOG.exception('Prepare env failed')
296 def _create_directories(self):
297 utils.makedirs(consts.CONF_DIR)
299 def _source_file(self, rc_file):
300 utils.source_env(rc_file)
302 def _get_remote_rc_file(self, rc_file, installer_ip, installer_type):
304 os_fetch_script = os.path.join(consts.RELENG_DIR, consts.FETCH_SCRIPT)
307 cmd = [os_fetch_script, '-d', rc_file, '-i', installer_type,
309 p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
312 if p.returncode != 0:
313 LOG.error('Failed to fetch credentials from installer')
315 if e.errno != errno.EEXIST:
318 def _append_external_network(self, rc_file):
319 neutron_client = openstack_utils.get_neutron_client()
320 networks = neutron_client.list_networks()['networks']
322 ext_network = next(n['name']
323 for n in networks if n['router:external'])
324 except StopIteration:
325 LOG.warning("Can't find external network")
327 cmd = 'export EXTERNAL_NETWORK=%s' % ext_network
329 with open(rc_file, 'a') as f:
332 if e.errno != errno.EEXIST:
335 def _clean_images(self):
336 cmd = [consts.CLEAN_IMAGES_SCRIPT]
337 p = subprocess.Popen(cmd, stdout=subprocess.PIPE, cwd=consts.REPOS_DIR)
338 output = p.communicate()[0]
341 def _load_images(self):
342 cmd = [consts.LOAD_IMAGES_SCRIPT]
343 p = subprocess.Popen(cmd, stdout=subprocess.PIPE, cwd=consts.REPOS_DIR)
344 output = p.communicate()[0]
347 def _create_task(self, task_id):
348 async_handler.insert({'status': 0, 'task_id': task_id})
350 def _update_task_status(self, task_id):
351 async_handler.update_attr(task_id, {'status': 1})
353 def _update_task_error(self, task_id, error):
354 async_handler.update_attr(task_id, {'status': 2, 'error': error})
356 def update_openrc(self, args):
358 openrc_vars = args['openrc']
360 return result_handler(consts.API_ERROR, 'openrc must be provided')
362 if not isinstance(openrc_vars, collections.Mapping):
363 return result_handler(consts.API_ERROR, 'args should be a dict')
365 lines = ['export {}={}\n'.format(k, v) for k, v in openrc_vars.items()]
366 LOG.debug('Writing: %s', ''.join(lines))
368 LOG.info('Writing openrc: Writing')
369 utils.makedirs(consts.CONF_DIR)
371 with open(consts.OPENRC, 'w') as f:
373 LOG.info('Writing openrc: Done')
375 LOG.info('Source openrc: Sourcing')
377 self._source_file(consts.OPENRC)
378 except subprocess.CalledProcessError as e:
379 LOG.exception('Failed to source openrc')
380 return result_handler(consts.API_ERROR, str(e))
381 LOG.info('Source openrc: Done')
383 return result_handler(consts.API_SUCCESS, {'openrc': openrc_vars})
385 def upload_pod_file(self, args):
387 pod_file = args['file']
389 return result_handler(consts.API_ERROR, 'file must be provided')
391 LOG.info('Checking file')
392 data = yaml_load(pod_file.read())
393 if not isinstance(data, collections.Mapping):
394 return result_handler(consts.API_ERROR, 'invalid yaml file')
396 LOG.info('Writing file')
397 with open(consts.POD_FILE, 'w') as f:
398 yaml.dump(data, f, default_flow_style=False)
399 LOG.info('Writing finished')
401 return result_handler(consts.API_SUCCESS, {'pod_info': data})
403 def update_pod_file(self, args):
405 pod_dic = args['pod']
407 return result_handler(consts.API_ERROR, 'pod must be provided')
409 if not isinstance(pod_dic, collections.Mapping):
410 return result_handler(consts.API_ERROR, 'pod should be a dict')
412 LOG.info('Writing file')
413 with open(consts.POD_FILE, 'w') as f:
414 yaml.dump(pod_dic, f, default_flow_style=False)
415 LOG.info('Writing finished')
417 return result_handler(consts.API_SUCCESS, {'pod_info': pod_dic})
419 def update_hosts(self, hosts_ip):
420 if not isinstance(hosts_ip, collections.Mapping):
421 return result_handler(consts.API_ERROR, 'args should be a dict')
422 LOG.info('Writing hosts: Writing')
423 LOG.debug('Writing: %s', hosts_ip)
424 cmd = ["sudo", "python", "write_hosts.py"]
425 p = subprocess.Popen(cmd,
426 stdin=subprocess.PIPE,
427 stdout=subprocess.PIPE,
428 stderr=subprocess.PIPE,
429 cwd=os.path.join(consts.REPOS_DIR,
431 _, err = p.communicate(jsonutils.dumps(hosts_ip))
432 if p.returncode != 0:
433 return result_handler(consts.API_ERROR, err)
434 LOG.info('Writing hosts: Done')
435 return result_handler(consts.API_SUCCESS, 'success')