1 ##############################################################################
2 # Copyright (c) 2016 Huawei Technologies Co.,Ltd and others.
4 # All rights reserved. This program and the accompanying materials
5 # are made available under the terms of the Apache License, Version 2.0
6 # which accompanies this distribution, and is available at
7 # http://www.apache.org/licenses/LICENSE-2.0
8 ##############################################################################
9 from __future__ import absolute_import
22 from six.moves import configparser
23 from oslo_serialization import jsonutils
24 from docker import Client
26 from api.database.v1.handlers import AsyncTaskHandler
27 from api.utils import influx
28 from api import ApiResource
29 from yardstick.common import constants as consts
30 from yardstick.common import utils
31 from yardstick.common.utils import result_handler
32 from yardstick.common import openstack_utils
33 from yardstick.common.httpClient import HttpClient
34 from yardstick.common.yaml_loader import yaml_load
LOG = logging.getLogger(__name__)
# DEBUG is forced so every environment-preparation step is traceable in the
# service log regardless of the root logger's configuration.
LOG.setLevel(logging.DEBUG)

# Shared handler for persisting async-task status records; used by the
# _create_task/_update_task_status/_update_task_error helpers below.
async_handler = AsyncTaskHandler()
class V1Env(ApiResource):
    # V1 environment-management API resource: deploys Grafana/InfluxDB
    # containers, prepares the OpenStack environment, and manages the
    # openrc, pod, and hosts files.
    # NOTE(review): the `def post(self):` header appears to be elided from
    # this view; the statement below is its body, dispatching POST requests
    # by action name to the handlers defined in this class.
        return self._dispatch_post()
    def create_grafana(self, args):
        """Start Grafana deployment in a background thread.

        Returns immediately with a task_id the client can poll for the
        async task's status.  *args* is unused here.
        """
        task_id = str(uuid.uuid4())

        thread = threading.Thread(target=self._create_grafana, args=(task_id,))
        # NOTE(review): thread.start() is presumably invoked here (line
        # elided from this view) -- confirm against the full source.

        return result_handler(consts.API_SUCCESS, {'task_id': task_id})
    def _create_grafana(self, task_id):
        """Background worker: pull/create/start a Grafana container and
        configure its InfluxDB data source and dashboards, recording
        async-task success or failure under *task_id*.
        """
        self._create_task(task_id)

        client = Client(base_url=consts.DOCKER_URL)

            # NOTE(review): the opening `try:` of this handler is elided
            # from this view; the indented lines below belong to it.
            # Pull the image only when it is not already present locally.
            LOG.info('Checking if grafana image exist')
            image = '{}:{}'.format(consts.GRAFANA_IMAGE, consts.GRAFANA_TAG)
            if not self._check_image_exist(client, image):
                LOG.info('Grafana image not exist, start pulling')
                client.pull(consts.GRAFANA_IMAGE, consts.GRAFANA_TAG)

            LOG.info('Createing grafana container')
            container = self._create_grafana_container(client)
            LOG.info('Grafana container is created')

            # Re-inspect the container to learn its bridge-network IP.
            container = client.inspect_container(container['Id'])
            ip = container['NetworkSettings']['Networks']['bridge']['IPAddress']
            LOG.debug('container ip is: %s', ip)

            LOG.info('Creating data source for grafana')
            self._create_data_source(ip)

            LOG.info('Creating dashboard for grafana')
            self._create_dashboard(ip)

            self._update_task_status(task_id)

        except Exception as e:
            # Record the failure so the polling client can see it.
            self._update_task_error(task_id, str(e))
            LOG.exception('Create grafana failed')
    def _create_dashboard(self, ip):
        """POST every opnfv_yardstick_tc*.json dashboard to the Grafana
        instance at *ip* via its HTTP API (default admin credentials).
        """
        url = 'http://admin:admin@{}:{}/api/dashboards/db'.format(ip, consts.GRAFANA_PORT)
        path = os.path.join(consts.REPOS_DIR, 'dashboard', 'opnfv_yardstick_tc*.json')

        # Sorted so dashboards are created in a deterministic order.
        for i in sorted(glob.iglob(path)):
                # NOTE(review): the `with open(i) as f:` and `try:` lines
                # are elided from this view; `f` comes from that context.
                data = jsonutils.load(f)
                HttpClient().post(url, {"dashboard": data})
                # NOTE(review): the `except` header is elided above; this
                # logs a failed dashboard upload.
                LOG.exception('Create dashboard %s failed', i)
    def _create_data_source(self, ip):
        """Register the yardstick InfluxDB database as a Grafana data
        source, pulling credentials from the dispatcher_influxdb section
        of the yardstick config file.
        """
        url = 'http://admin:admin@{}:{}/api/datasources'.format(ip, consts.GRAFANA_PORT)
        influx_conf = utils.parse_ini_file(consts.CONF_FILE).get('dispatcher_influxdb', {})

            # NOTE(review): the opening of the `data = {` payload dict is
            # elided from this view; the entries below belong to it.
            "url": influx_conf.get('target', ''),
            "password": influx_conf.get('password', ''),
            "user": influx_conf.get('username', ''),
            "database": "yardstick",

            # Grafana's default admin credentials for basic auth.
            "basicAuthUser": "admin",
            "basicAuthPassword": "admin",

            # NOTE(review): a `try:` appears to be elided above this call.
            HttpClient().post(url, data, timeout=10)
            LOG.exception('Create datasources failed')
    def _create_grafana_container(self, client):
        """Create and start a Grafana container, mapping GRAFANA_PORT to
        GRAFANA_MAPPING_PORT on the host.

        Returns the container dict from create_container.
        """
        ports = [consts.GRAFANA_PORT]
        port_bindings = {consts.GRAFANA_PORT: consts.GRAFANA_MAPPING_PORT}
        # MaximumRetryCount 0 + "always" => docker restarts it indefinitely.
        restart_policy = {"MaximumRetryCount": 0, "Name": "always"}
        host_config = client.create_host_config(port_bindings=port_bindings,
                                                restart_policy=restart_policy)

        LOG.info('Creating container')
        container = client.create_container(image='%s:%s' %
                                            (consts.GRAFANA_IMAGE,
        # NOTE(review): further create_container arguments (tag, ports,
        # detach, ...) are elided from this view.
                                            host_config=host_config)
        LOG.info('Starting container')
        client.start(container)
144 def _check_image_exist(self, client, t):
145 return any(t in a['RepoTags'][0]
146 for a in client.images() if a['RepoTags'])
    def create_influxdb(self, args):
        """Start InfluxDB deployment in a background thread.

        Returns immediately with a task_id the client can poll for the
        async task's status.  *args* is unused here.
        """
        task_id = str(uuid.uuid4())

        thread = threading.Thread(target=self._create_influxdb, args=(task_id,))
        # NOTE(review): thread.start() is presumably invoked here (line
        # elided from this view) -- confirm against the full source.

        return result_handler(consts.API_SUCCESS, {'task_id': task_id})
    def _create_influxdb(self, task_id):
        """Background worker: pull/create/start an InfluxDB container,
        point the yardstick dispatcher at it, and create the yardstick
        database, recording async-task success or failure.
        """
        self._create_task(task_id)

        client = Client(base_url=consts.DOCKER_URL)

            # NOTE(review): the opening `try:` of this handler is elided
            # from this view; the indented lines below belong to it.
            LOG.info('Checking if influxdb image exist')
            if not self._check_image_exist(client, '%s:%s' %
                                           (consts.INFLUXDB_IMAGE,
                                            consts.INFLUXDB_TAG)):
                LOG.info('Influxdb image not exist, start pulling')
                client.pull(consts.INFLUXDB_IMAGE, tag=consts.INFLUXDB_TAG)

            LOG.info('Createing influxdb container')
            container = self._create_influxdb_container(client)
            LOG.info('Influxdb container is created')

            # Re-inspect the container to learn its bridge-network IP.
            container = client.inspect_container(container['Id'])
            ip = container['NetworkSettings']['Networks']['bridge']['IPAddress']
            LOG.debug('container ip is: %s', ip)

            LOG.info('Changing output to influxdb')
            self._change_output_to_influxdb(ip)

            LOG.info('Config influxdb')
            self._config_influxdb()

            self._update_task_status(task_id)

        except Exception as e:
            # Record the failure so the polling client can see it.
            self._update_task_error(task_id, str(e))
            LOG.exception('Creating influxdb failed')
    def _create_influxdb_container(self, client):
        """Create and start an InfluxDB container; the dashboard and API
        ports are published 1:1 on the host.

        Returns the container dict from create_container.
        """
        ports = [consts.INFLUXDB_DASHBOARD_PORT, consts.INFLUXDB_PORT]
        # Identity mapping: each container port on the same host port.
        port_bindings = {k: k for k in ports}
        # MaximumRetryCount 0 + "always" => docker restarts it indefinitely.
        restart_policy = {"MaximumRetryCount": 0, "Name": "always"}
        host_config = client.create_host_config(port_bindings=port_bindings,
                                                restart_policy=restart_policy)

        LOG.info('Creating container')
        container = client.create_container(image='%s:%s' %
                                            (consts.INFLUXDB_IMAGE,
                                             consts.INFLUXDB_TAG),
        # NOTE(review): further create_container arguments (ports, detach,
        # ...) are elided from this view.
                                            host_config=host_config)
        LOG.info('Starting container')
        client.start(container)
    def _config_influxdb(self):
        """Create the yardstick user and database in the freshly started
        InfluxDB instance; failures are logged rather than raised.
        """
            # NOTE(review): a `try:` header is elided above this line.
            client = influx.get_data_db_client()
            client.create_user(consts.INFLUXDB_USER,
                               consts.INFLUXDB_PASS,
                               consts.INFLUXDB_DB_NAME)
            client.create_database(consts.INFLUXDB_DB_NAME)
            LOG.info('Success to config influxDB')
            # NOTE(review): the `except` header is elided above this line.
            LOG.exception('Config influxdb failed')
    def _change_output_to_influxdb(self, ip):
        """Regenerate yardstick.conf from the sample configuration so
        that results are dispatched to the InfluxDB instance at *ip*.
        """
        utils.makedirs(consts.CONF_DIR)

        parser = configparser.ConfigParser()
        LOG.info('Reading output sample configuration')
        parser.read(consts.CONF_SAMPLE_FILE)

        LOG.info('Set dispatcher to influxdb')
        parser.set('DEFAULT', 'dispatcher', 'influxdb')
        parser.set('dispatcher_influxdb', 'target',
                   'http://{}:{}'.format(ip, consts.INFLUXDB_PORT))

        LOG.info('Writing to %s', consts.CONF_FILE)
        with open(consts.CONF_FILE, 'w') as f:
            # NOTE(review): parser.write(f) is presumably called here
            # (line elided from this view).
    def prepare_env(self, args):
        """Kick off environment preparation (openrc, directories, images)
        in a background thread and return a pollable task_id.
        """
        task_id = str(uuid.uuid4())

        thread = threading.Thread(target=self._prepare_env_daemon,
        # NOTE(review): the thread's args tuple and thread.start() are
        # elided from this view.

        return result_handler(consts.API_SUCCESS, {'task_id': task_id})
    def _already_source_openrc(self):
        """Check if openrc is sourced already"""
        # True only when every required OS_* variable is present and
        # non-empty in the environment (rest of the key list is elided
        # from this view).
        return all(os.environ.get(k) for k in ['OS_AUTH_URL',
    def _prepare_env_daemon(self, task_id):
        """Background worker: ensure config directories, obtain and source
        an openrc file (fetching it from the installer when missing), then
        clean and reload the test images; records async-task status.
        """
        self._create_task(task_id)

            # NOTE(review): the opening `try:` of this handler is elided
            # from this view; the indented lines below belong to it.
            self._create_directories()

            rc_file = consts.OPENRC

            LOG.info('Checkout Openrc Environment variable')
            if not self._already_source_openrc():
                LOG.info('Openrc variable not found in Environment')
                if not os.path.exists(rc_file):
                    LOG.info('Openrc file not found')
                    # Fall back to fetching credentials from the installer.
                    installer_ip = os.environ.get('INSTALLER_IP',
                    installer_type = os.environ.get('INSTALLER_TYPE', 'compass')
                    LOG.info('Getting openrc file from %s', installer_type)
                    self._get_remote_rc_file(rc_file,
                    LOG.info('Source openrc file')
                    self._source_file(rc_file)
                    LOG.info('Appending external network')
                    self._append_external_network(rc_file)
                # NOTE(review): an `else:` branch header appears to be
                # elided here -- the two lines below handle the case where
                # the openrc file already exists.
                LOG.info('Openrc file exist, source openrc file')
                self._source_file(rc_file)

            LOG.info('Cleaning images')

            LOG.info('Loading images')

            self._update_task_status(task_id)

        except Exception as e:
            # Record the failure so the polling client can see it.
            self._update_task_error(task_id, str(e))
            LOG.exception('Prepare env failed')
294 def _create_directories(self):
295 utils.makedirs(consts.CONF_DIR)
297 def _source_file(self, rc_file):
298 utils.source_env(rc_file)
    def _get_remote_rc_file(self, rc_file, installer_ip, installer_type):
        """Fetch openrc credentials from the installer node by running the
        releng fetch script, writing the result into *rc_file*.
        """
        os_fetch_script = os.path.join(consts.RELENG_DIR, consts.FETCH_SCRIPT)

            # NOTE(review): a `try:` header is elided above this line.
            cmd = [os_fetch_script, '-d', rc_file, '-i', installer_type,
            p = subprocess.Popen(cmd, stdout=subprocess.PIPE)

            if p.returncode != 0:
                LOG.error('Failed to fetch credentials from installer')

            # NOTE(review): the `except ... as e:` header is elided above;
            # only non-EEXIST errors appear to be treated as fatal.
            if e.errno != errno.EEXIST:
    def _append_external_network(self, rc_file):
        """Find the first neutron network flagged router:external and
        append an EXTERNAL_NETWORK export line to *rc_file*.
        """
        neutron_client = openstack_utils.get_neutron_client()
        networks = neutron_client.list_networks()['networks']
            # NOTE(review): a `try:` header is elided above this line.
            ext_network = next(n['name']
                               for n in networks if n['router:external'])
        except StopIteration:
            LOG.warning("Can't find external network")
            # NOTE(review): the branch header (likely `else:`) is elided;
            # the lines below run only when an external network was found.
            cmd = 'export EXTERNAL_NETWORK=%s' % ext_network
                with open(rc_file, 'a') as f:
                # NOTE(review): the f.write(...) call and the `except`
                # header are elided; EEXIST errors appear to be tolerated.
                if e.errno != errno.EEXIST:
    def _clean_images(self):
        """Run the repository's image clean-up script and capture stdout."""
        cmd = [consts.CLEAN_IMAGES_SCRIPT]
        # argv list (shell=False); executed from the repository root.
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, cwd=consts.REPOS_DIR)
        output = p.communicate()[0]
    def _load_images(self):
        """Run the repository's image load script and capture stdout."""
        cmd = [consts.LOAD_IMAGES_SCRIPT]
        # argv list (shell=False); executed from the repository root.
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, cwd=consts.REPOS_DIR)
        output = p.communicate()[0]
345 def _create_task(self, task_id):
346 async_handler.insert({'status': 0, 'task_id': task_id})
348 def _update_task_status(self, task_id):
349 async_handler.update_attr(task_id, {'status': 1})
351 def _update_task_error(self, task_id, error):
352 async_handler.update_attr(task_id, {'status': 2, 'error': error})
    def update_openrc(self, args):
        """Write the supplied openrc variables to the openrc file and
        source them into the current process environment.

        Expects args['openrc'] to be a mapping of variable name -> value;
        returns an API result dict on success or error.
        """
            # NOTE(review): a `try:` header is elided above this line.
            openrc_vars = args['openrc']
            # NOTE(review): the `except KeyError:` header is elided above.
            return result_handler(consts.API_ERROR, 'openrc must be provided')

        # NOTE(review): collections.Mapping was removed in Python 3.10;
        # collections.abc.Mapping is the modern spelling -- confirm the
        # supported interpreter before changing.
        if not isinstance(openrc_vars, collections.Mapping):
            return result_handler(consts.API_ERROR, 'args should be a dict')

        lines = ['export {}={}\n'.format(k, v) for k, v in openrc_vars.items()]
        LOG.debug('Writing: %s', ''.join(lines))

        LOG.info('Writing openrc: Writing')
        utils.makedirs(consts.CONF_DIR)

        with open(consts.OPENRC, 'w') as f:
        # NOTE(review): the f.writelines(lines) body is elided above.
        LOG.info('Writing openrc: Done')

        LOG.info('Source openrc: Sourcing')
            # NOTE(review): a `try:` header is elided above this line.
            self._source_file(consts.OPENRC)
        except Exception as e:
            LOG.exception('Failed to source openrc')
            return result_handler(consts.API_ERROR, str(e))
        LOG.info('Source openrc: Done')

        return result_handler(consts.API_SUCCESS, {'openrc': openrc_vars})
    def upload_pod_file(self, args):
        """Validate an uploaded pod YAML file and store it as the active
        pod file; returns the parsed pod info on success.
        """
            # NOTE(review): a `try:` header is elided above this line.
            pod_file = args['file']
            # NOTE(review): the `except KeyError:` header is elided above.
            return result_handler(consts.API_ERROR, 'file must be provided')

        LOG.info('Checking file')
        data = yaml_load(pod_file.read())
        # Reject anything that does not parse to a mapping.
        if not isinstance(data, collections.Mapping):
            return result_handler(consts.API_ERROR, 'invalid yaml file')

        LOG.info('Writing file')
        with open(consts.POD_FILE, 'w') as f:
            yaml.dump(data, f, default_flow_style=False)
        LOG.info('Writing finished')

        return result_handler(consts.API_SUCCESS, {'pod_info': data})
    def update_pod_file(self, args):
        """Replace the active pod file with the supplied pod mapping;
        returns the stored pod info on success.
        """
            # NOTE(review): a `try:` header is elided above this line.
            pod_dic = args['pod']
            # NOTE(review): the `except KeyError:` header is elided above.
            return result_handler(consts.API_ERROR, 'pod must be provided')

        # Reject anything that is not a mapping.
        if not isinstance(pod_dic, collections.Mapping):
            return result_handler(consts.API_ERROR, 'pod should be a dict')

        LOG.info('Writing file')
        with open(consts.POD_FILE, 'w') as f:
            yaml.dump(pod_dic, f, default_flow_style=False)
        LOG.info('Writing finished')

        return result_handler(consts.API_SUCCESS, {'pod_info': pod_dic})
    def update_hosts(self, hosts_ip):
        """Write the given hostname -> IP mapping into the hosts file via
        the privileged write_hosts.py helper script.

        Returns an API result dict; the helper's stderr is returned on
        failure.
        """
        if not isinstance(hosts_ip, collections.Mapping):
            return result_handler(consts.API_ERROR, 'args should be a dict')
        LOG.info('Writing hosts: Writing')
        LOG.debug('Writing: %s', hosts_ip)
        # The helper runs under sudo; the mapping is passed as JSON on its
        # stdin.
        cmd = ["sudo", "python", "write_hosts.py"]
        p = subprocess.Popen(cmd,
                             stdin=subprocess.PIPE,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE,
                             cwd=os.path.join(consts.REPOS_DIR,
        # NOTE(review): the rest of the cwd path argument is elided from
        # this view.
        # NOTE(review): on Python 3 communicate() requires bytes unless the
        # pipes are opened in text mode -- verify against the runtime.
        _, err = p.communicate(jsonutils.dumps(hosts_ip))
        if p.returncode != 0:
            return result_handler(consts.API_ERROR, err)
        LOG.info('Writing hosts: Done')
        return result_handler(consts.API_SUCCESS, 'success')