Add pod.yaml files for Apex
[yardstick.git] / api / resources / v1 / env.py
1 ##############################################################################
2 # Copyright (c) 2016 Huawei Technologies Co.,Ltd and others.
3 #
4 # All rights reserved. This program and the accompanying materials
5 # are made available under the terms of the Apache License, Version 2.0
6 # which accompanies this distribution, and is available at
7 # http://www.apache.org/licenses/LICENSE-2.0
8 ##############################################################################
9 from __future__ import absolute_import
10
11 import errno
12 import logging
13 import os
14 import subprocess
15 import threading
16 import time
17 import uuid
18 import glob
19 import yaml
20 import collections
21
22 from six.moves import configparser
23 from oslo_serialization import jsonutils
24 from docker import Client
25 from docker.errors import APIError
26 from requests.exceptions import HTTPError
27
28 from api.database.v1.handlers import AsyncTaskHandler
29 from api.utils import influx
30 from api import ApiResource
31 from yardstick.common import constants as consts
32 from yardstick.common import utils
33 from yardstick.common.utils import result_handler
34 from yardstick.common import openstack_utils
35 from yardstick.common.httpClient import HttpClient
36 from yardstick.common.yaml_loader import yaml_load
37
# Module-level logger; DEBUG is forced so container IPs and subprocess
# output emitted below are always captured regardless of root config.
LOG = logging.getLogger(__name__)
LOG.setLevel(logging.DEBUG)

# Shared DB handler used to record async task lifecycle
# (0 = created, 1 = finished, 2 = error — see _create_task/_update_task_*).
async_handler = AsyncTaskHandler()
42
43
class V1Env(ApiResource):
    """V1 API resource for managing the yardstick runtime environment.

    Exposes asynchronous creation of grafana and influxdb containers,
    full environment preparation (openrc sourcing, image loading), and
    synchronous helpers to update openrc, pod and hosts configuration.
    Long-running work runs in daemonless background threads and reports
    progress through the async task table (``async_handler``).
    """

    def post(self):
        """Dispatch POST payloads to the action named in the request."""
        return self._dispatch_post()

    def create_grafana(self, *args):
        """Kick off grafana container creation in a background thread.

        Returns:
            API_SUCCESS payload containing the ``task_id`` that callers
            poll for completion status.
        """
        task_id = str(uuid.uuid4())

        thread = threading.Thread(target=self._create_grafana, args=(task_id,))
        thread.start()

        return result_handler(consts.API_SUCCESS, {'task_id': task_id})

    def _create_grafana(self, task_id):
        """Pull/run the grafana container, then register datasource and dashboards.

        Updates the async task record to finished on success or to error
        (with the exception text) on docker/HTTP failure.
        """
        self._create_task(task_id)

        client = Client(base_url=consts.DOCKER_URL)

        try:
            LOG.info('Checking if grafana image exist')
            image = '{}:{}'.format(consts.GRAFANA_IMAGE, consts.GRAFANA_TAG)
            if not self._check_image_exist(client, image):
                LOG.info('Grafana image not exist, start pulling')
                client.pull(consts.GRAFANA_IMAGE, consts.GRAFANA_TAG)

            LOG.info('Creating grafana container')
            container = self._create_grafana_container(client)
            LOG.info('Grafana container is created')

            # Give the container a moment to come up and obtain a bridge IP.
            time.sleep(5)

            container = client.inspect_container(container['Id'])
            ip = container['NetworkSettings']['Networks']['bridge']['IPAddress']
            LOG.debug('container ip is: %s', ip)

            LOG.info('Creating data source for grafana')
            self._create_data_source(ip)

            LOG.info('Creating dashboard for grafana')
            self._create_dashboard(ip)

            self._update_task_status(task_id)
            LOG.info('Finished')
        except (APIError, HTTPError) as e:
            self._update_task_error(task_id, str(e))
            LOG.exception('Create grafana failed')

    def _create_dashboard(self, ip):
        """POST every bundled opnfv_yardstick_tc*.json dashboard to grafana.

        Raises:
            Exception: re-raised after logging if any single upload fails.
        """
        url = 'http://admin:admin@{}:{}/api/dashboards/db'.format(ip, consts.GRAFANA_PORT)
        path = os.path.join(consts.REPOS_DIR, 'dashboard', 'opnfv_yardstick_tc*.json')

        # Sorted so dashboards are installed in a deterministic order.
        for i in sorted(glob.iglob(path)):
            with open(i) as f:
                data = jsonutils.load(f)
            try:
                HttpClient().post(url, {"dashboard": data})
            except Exception:
                LOG.exception('Create dashboard %s failed', i)
                raise

    def _create_data_source(self, ip):
        """Register the yardstick influxdb database as grafana's default datasource.

        Credentials are read from the dispatcher_influxdb section of the
        yardstick config file; missing keys fall back to empty strings.
        """
        url = 'http://admin:admin@{}:{}/api/datasources'.format(ip, consts.GRAFANA_PORT)
        influx_conf = utils.parse_ini_file(consts.CONF_FILE).get('dispatcher_influxdb', {})

        data = {
            "name": "yardstick",
            "type": "influxdb",
            "access": "proxy",
            "url": influx_conf.get('target', ''),
            "password": influx_conf.get('password', ''),
            "user": influx_conf.get('username', ''),
            "database": "yardstick",
            "basicAuth": True,
            "basicAuthUser": "admin",
            "basicAuthPassword": "admin",
            "isDefault": True,
        }
        try:
            HttpClient().post(url, data, timeout=60)
        except Exception:
            LOG.exception('Create datasources failed')
            raise

    def _create_grafana_container(self, client):
        """Create and start the grafana container; return the container dict."""
        ports = [consts.GRAFANA_PORT]
        port_bindings = {consts.GRAFANA_PORT: consts.GRAFANA_MAPPING_PORT}
        # Always restart so grafana survives docker daemon restarts.
        restart_policy = {"MaximumRetryCount": 0, "Name": "always"}
        host_config = client.create_host_config(port_bindings=port_bindings,
                                                restart_policy=restart_policy)

        LOG.info('Creating container')
        container = client.create_container(image='%s:%s' %
                                            (consts.GRAFANA_IMAGE,
                                             consts.GRAFANA_TAG),
                                            ports=ports,
                                            detach=True,
                                            tty=True,
                                            host_config=host_config)
        LOG.info('Starting container')
        client.start(container)
        return container

    def _check_image_exist(self, client, t):
        """Return True if tag string *t* matches any local image repo tag.

        Scans every tag of every image (the previous implementation only
        looked at RepoTags[0], missing multi-tagged images whose matching
        tag is not listed first).
        """
        return any(t in tag
                   for image in client.images() if image['RepoTags']
                   for tag in image['RepoTags'])

    def create_influxdb(self, *args):
        """Kick off influxdb container creation in a background thread.

        Returns:
            API_SUCCESS payload containing the pollable ``task_id``.
        """
        task_id = str(uuid.uuid4())

        thread = threading.Thread(target=self._create_influxdb, args=(task_id,))
        thread.start()

        return result_handler(consts.API_SUCCESS, {'task_id': task_id})

    def _create_influxdb(self, task_id):
        """Pull/run the influxdb container, point the dispatcher at it, init the DB."""
        self._create_task(task_id)

        client = Client(base_url=consts.DOCKER_URL)

        try:
            LOG.info('Checking if influxdb image exist')
            if not self._check_image_exist(client, '%s:%s' %
                                           (consts.INFLUXDB_IMAGE,
                                            consts.INFLUXDB_TAG)):
                LOG.info('Influxdb image not exist, start pulling')
                client.pull(consts.INFLUXDB_IMAGE, tag=consts.INFLUXDB_TAG)

            LOG.info('Creating influxdb container')
            container = self._create_influxdb_container(client)
            LOG.info('Influxdb container is created')

            # Give the container a moment to come up and obtain a bridge IP.
            time.sleep(5)

            container = client.inspect_container(container['Id'])
            ip = container['NetworkSettings']['Networks']['bridge']['IPAddress']
            LOG.debug('container ip is: %s', ip)

            LOG.info('Changing output to influxdb')
            self._change_output_to_influxdb(ip)

            LOG.info('Config influxdb')
            self._config_influxdb()

            self._update_task_status(task_id)

            LOG.info('Finished')
        except APIError as e:
            self._update_task_error(task_id, str(e))
            LOG.exception('Creating influxdb failed')

    def _create_influxdb_container(self, client):
        """Create and start the influxdb container; return the container dict."""
        ports = [consts.INFLUXDB_DASHBOARD_PORT, consts.INFLUXDB_PORT]
        # Expose each port on the same host port number.
        port_bindings = {k: k for k in ports}
        restart_policy = {"MaximumRetryCount": 0, "Name": "always"}
        host_config = client.create_host_config(port_bindings=port_bindings,
                                                restart_policy=restart_policy)

        LOG.info('Creating container')
        container = client.create_container(image='%s:%s' %
                                            (consts.INFLUXDB_IMAGE,
                                             consts.INFLUXDB_TAG),
                                            ports=ports,
                                            detach=True,
                                            tty=True,
                                            host_config=host_config)
        LOG.info('Starting container')
        client.start(container)
        return container

    def _config_influxdb(self):
        """Create the yardstick user and database inside influxdb.

        HTTP failures are logged but not propagated — influxdb setup is
        best-effort at this stage.
        """
        try:
            client = influx.get_data_db_client()
            client.create_user(consts.INFLUXDB_USER,
                               consts.INFLUXDB_PASS,
                               consts.INFLUXDB_DB_NAME)
            client.create_database(consts.INFLUXDB_DB_NAME)
            LOG.info('Success to config influxDB')
        except HTTPError:
            LOG.exception('Config influxdb failed')

    def _change_output_to_influxdb(self, ip):
        """Rewrite yardstick.conf so results are dispatched to the new influxdb."""
        utils.makedirs(consts.CONF_DIR)

        parser = configparser.ConfigParser()
        LOG.info('Reading output sample configuration')
        parser.read(consts.CONF_SAMPLE_FILE)

        LOG.info('Set dispatcher to influxdb')
        parser.set('DEFAULT', 'dispatcher', 'influxdb')
        parser.set('dispatcher_influxdb', 'target',
                   'http://{}:{}'.format(ip, consts.INFLUXDB_PORT))

        LOG.info('Writing to %s', consts.CONF_FILE)
        with open(consts.CONF_FILE, 'w') as f:
            parser.write(f)

    def prepare_env(self, *args):
        """Kick off full environment preparation in a background thread.

        Returns:
            API_SUCCESS payload containing the pollable ``task_id``.
        """
        task_id = str(uuid.uuid4())

        thread = threading.Thread(target=self._prepare_env_daemon,
                                  args=(task_id,))
        thread.start()

        return result_handler(consts.API_SUCCESS, {'task_id': task_id})

    def _already_source_openrc(self):
        """Check if openrc is sourced already"""
        return all(os.environ.get(k) for k in ['OS_AUTH_URL',
                                               'OS_USERNAME',
                                               'OS_PASSWORD',
                                               'EXTERNAL_NETWORK'])

    def _prepare_env_daemon(self, task_id):
        """Ensure openrc credentials are sourced, then clean and load guest images.

        Fix: the "file exist" branch is now a real ``else`` — previously the
        log message and a second ``_source_file`` call ran unconditionally,
        so a freshly fetched rc file was sourced twice and the log claimed
        the file existed even when it had just been downloaded.
        """
        self._create_task(task_id)

        try:
            self._create_directories()

            rc_file = consts.OPENRC

            LOG.info('Checkout Openrc Environment variable')
            if not self._already_source_openrc():
                LOG.info('Openrc variable not found in Environment')
                if not os.path.exists(rc_file):
                    LOG.info('Openrc file not found')
                    installer_ip = os.environ.get('INSTALLER_IP',
                                                  '192.168.200.2')
                    installer_type = os.environ.get('INSTALLER_TYPE', 'compass')
                    LOG.info('Getting openrc file from %s', installer_type)
                    self._get_remote_rc_file(rc_file,
                                             installer_ip,
                                             installer_type)
                    LOG.info('Source openrc file')
                    self._source_file(rc_file)
                    LOG.info('Appending external network')
                    self._append_external_network(rc_file)
                else:
                    LOG.info('Openrc file exist, source openrc file')
                    self._source_file(rc_file)

            LOG.info('Cleaning images')
            self._clean_images()

            LOG.info('Loading images')
            self._load_images()

            self._update_task_status(task_id)
            LOG.info('Finished')
        except (subprocess.CalledProcessError, OSError) as e:
            self._update_task_error(task_id, str(e))
            LOG.exception('Prepare env failed')

    def _create_directories(self):
        """Make sure the yardstick config directory exists."""
        utils.makedirs(consts.CONF_DIR)

    def _source_file(self, rc_file):
        """Export the variables from *rc_file* into this process's environment."""
        utils.source_env(rc_file)

    def _get_remote_rc_file(self, rc_file, installer_ip, installer_type):
        """Fetch openrc from the installer node via the releng fetch script."""
        os_fetch_script = os.path.join(consts.RELENG_DIR, consts.FETCH_SCRIPT)

        try:
            cmd = [os_fetch_script, '-d', rc_file, '-i', installer_type,
                   '-a', installer_ip]
            p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
            p.communicate()

            if p.returncode != 0:
                LOG.error('Failed to fetch credentials from installer')
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise

    def _append_external_network(self, rc_file):
        """Append EXTERNAL_NETWORK (first router:external net) to *rc_file*."""
        neutron_client = openstack_utils.get_neutron_client()
        networks = neutron_client.list_networks()['networks']
        try:
            ext_network = next(n['name']
                               for n in networks if n['router:external'])
        except StopIteration:
            LOG.warning("Can't find external network")
        else:
            cmd = 'export EXTERNAL_NETWORK=%s' % ext_network
            try:
                with open(rc_file, 'a') as f:
                    f.write(cmd + '\n')
            except OSError as e:
                if e.errno != errno.EEXIST:
                    raise

    def _clean_images(self):
        """Run the repo's clean-images script, logging its stdout."""
        cmd = [consts.CLEAN_IMAGES_SCRIPT]
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, cwd=consts.REPOS_DIR)
        output = p.communicate()[0]
        LOG.debug(output)

    def _load_images(self):
        """Run the repo's load-images script, logging its stdout."""
        cmd = [consts.LOAD_IMAGES_SCRIPT]
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, cwd=consts.REPOS_DIR)
        output = p.communicate()[0]
        LOG.debug(output)

    def _create_task(self, task_id):
        """Record a new async task in status 0 (running)."""
        async_handler.insert({'status': 0, 'task_id': task_id})

    def _update_task_status(self, task_id):
        """Mark an async task as finished (status 1)."""
        async_handler.update_attr(task_id, {'status': 1})

    def _update_task_error(self, task_id, error):
        """Mark an async task as failed (status 2) with the error text."""
        async_handler.update_attr(task_id, {'status': 2, 'error': error})

    def update_openrc(self, args):
        """Write the given openrc variable mapping to disk and source it.

        Args:
            args: dict with an ``openrc`` key mapping variable names to values.

        Returns:
            API_SUCCESS with the written variables, or API_ERROR on bad
            input or failed sourcing.
        """
        try:
            openrc_vars = args['openrc']
        except KeyError:
            return result_handler(consts.API_ERROR, 'openrc must be provided')
        else:
            # NOTE(review): collections.Mapping was removed in Python 3.10;
            # switch to collections.abc.Mapping once py2 support is dropped.
            if not isinstance(openrc_vars, collections.Mapping):
                return result_handler(consts.API_ERROR, 'args should be a dict')

        lines = ['export {}={}\n'.format(k, v) for k, v in openrc_vars.items()]
        LOG.debug('Writing: %s', ''.join(lines))

        LOG.info('Writing openrc: Writing')
        utils.makedirs(consts.CONF_DIR)

        with open(consts.OPENRC, 'w') as f:
            f.writelines(lines)
        LOG.info('Writing openrc: Done')

        LOG.info('Source openrc: Sourcing')
        try:
            self._source_file(consts.OPENRC)
        except subprocess.CalledProcessError as e:
            LOG.exception('Failed to source openrc')
            return result_handler(consts.API_ERROR, str(e))
        LOG.info('Source openrc: Done')

        return result_handler(consts.API_SUCCESS, {'openrc': openrc_vars})

    def upload_pod_file(self, args):
        """Validate an uploaded pod file and persist it as the active pod.yaml.

        Args:
            args: dict with a ``file`` key holding a readable file object.
        """
        try:
            pod_file = args['file']
        except KeyError:
            return result_handler(consts.API_ERROR, 'file must be provided')

        LOG.info('Checking file')
        data = yaml_load(pod_file.read())
        if not isinstance(data, collections.Mapping):
            return result_handler(consts.API_ERROR, 'invalid yaml file')

        LOG.info('Writing file')
        with open(consts.POD_FILE, 'w') as f:
            yaml.dump(data, f, default_flow_style=False)
        LOG.info('Writing finished')

        return result_handler(consts.API_SUCCESS, {'pod_info': data})

    def update_pod_file(self, args):
        """Overwrite the active pod.yaml from a pod dict in the request body.

        Args:
            args: dict with a ``pod`` key holding the pod mapping.
        """
        try:
            pod_dic = args['pod']
        except KeyError:
            return result_handler(consts.API_ERROR, 'pod must be provided')
        else:
            if not isinstance(pod_dic, collections.Mapping):
                return result_handler(consts.API_ERROR, 'pod should be a dict')

        LOG.info('Writing file')
        with open(consts.POD_FILE, 'w') as f:
            yaml.dump(pod_dic, f, default_flow_style=False)
        LOG.info('Writing finished')

        return result_handler(consts.API_SUCCESS, {'pod_info': pod_dic})

    def update_hosts(self, hosts_ip):
        """Add the given name->IP mapping to /etc/hosts via a sudo helper script.

        Args:
            hosts_ip: mapping of hostnames to IP addresses, piped as JSON
                to write_hosts.py on stdin.
        """
        if not isinstance(hosts_ip, collections.Mapping):
            return result_handler(consts.API_ERROR, 'args should be a dict')
        LOG.info('Writing hosts: Writing')
        LOG.debug('Writing: %s', hosts_ip)
        cmd = ["sudo", "python", "write_hosts.py"]
        p = subprocess.Popen(cmd,
                             stdin=subprocess.PIPE,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE,
                             cwd=os.path.join(consts.REPOS_DIR,
                                              "api/resources"))
        # NOTE(review): on Python 3 communicate() expects bytes unless the
        # Popen uses universal_newlines/text mode — confirm before py3-only use.
        _, err = p.communicate(jsonutils.dumps(hosts_ip))
        if p.returncode != 0:
            return result_handler(consts.API_ERROR, err)
        LOG.info('Writing hosts: Done')
        return result_handler(consts.API_SUCCESS, 'success')