Merge "Dashboard for TC056"
[yardstick.git] / api / resources / v1 / env.py
1 ##############################################################################
2 # Copyright (c) 2016 Huawei Technologies Co.,Ltd and others.
3 #
4 # All rights reserved. This program and the accompanying materials
5 # are made available under the terms of the Apache License, Version 2.0
6 # which accompanies this distribution, and is available at
7 # http://www.apache.org/licenses/LICENSE-2.0
8 ##############################################################################
9 from __future__ import absolute_import
10
11 import errno
12 import logging
13
14 import ipaddress
15 import os
16 import subprocess
17 import threading
18 import time
19 import uuid
20 import glob
21
22 import six
23 import yaml
24 import collections
25
26 from six.moves import configparser
27 from oslo_serialization import jsonutils
28 from docker import Client
29 from docker.errors import APIError
30 from requests.exceptions import HTTPError
31
32 from api.database.v1.handlers import AsyncTaskHandler
33 from api.utils import influx
34 from api import ApiResource
35 from yardstick.common import constants as consts
36 from yardstick.common import utils
37 from yardstick.common.utils import result_handler
38 from yardstick.common import openstack_utils
39 from yardstick.common.httpClient import HttpClient
40 from yardstick.common.yaml_loader import yaml_load
41
# Module-level logger; pinned to DEBUG so container details (ids, ips)
# are always recorded for troubleshooting environment setup.
LOG = logging.getLogger(__name__)
LOG.setLevel(logging.DEBUG)

# Shared handler that persists async-task state (pending/done/error)
# so API clients can poll long-running environment actions by task id.
async_handler = AsyncTaskHandler()
46
47
class V1Env(ApiResource):
    """V1 REST resource managing the Yardstick runtime environment.

    Provides actions to create Grafana / InfluxDB containers, prepare the
    OpenStack environment (openrc credentials, guest images) and maintain
    the openrc / pod / hosts configuration files.

    Long-running actions are executed in a background thread and report
    progress through the async-task table: each action immediately returns
    a ``task_id`` the caller can poll (status 0 = running, 1 = done,
    2 = failed with an ``error`` string).
    """

    def post(self):
        # Dispatch the POST body's "action" field to the matching method
        # (create_grafana, create_influxdb, prepare_env, ...).
        return self._dispatch_post()

    def create_grafana(self, *args):
        """Kick off Grafana container creation in the background.

        Returns:
            API success payload containing the async ``task_id``.
        """
        task_id = str(uuid.uuid4())

        thread = threading.Thread(target=self._create_grafana, args=(task_id,))
        thread.start()

        return result_handler(consts.API_SUCCESS, {'task_id': task_id})

    def _create_grafana(self, task_id):
        """Worker: pull image, start Grafana and wire it to InfluxDB.

        Updates the async task to done on success, or to error on any
        Docker/HTTP failure.
        """
        self._create_task(task_id)

        client = Client(base_url=consts.DOCKER_URL)

        try:
            LOG.info('Checking if grafana image exist')
            image = '{}:{}'.format(consts.GRAFANA_IMAGE, consts.GRAFANA_TAG)
            if not self._check_image_exist(client, image):
                LOG.info('Grafana image not exist, start pulling')
                client.pull(consts.GRAFANA_IMAGE, consts.GRAFANA_TAG)

            LOG.info('Creating grafana container')
            container = self._create_grafana_container(client)
            LOG.info('Grafana container is created')

            # Give the container a moment to come up and obtain its
            # bridge-network address before we inspect it.
            time.sleep(5)

            container = client.inspect_container(container['Id'])
            ip = container['NetworkSettings']['Networks']['bridge']['IPAddress']
            LOG.debug('container ip is: %s', ip)

            LOG.info('Creating data source for grafana')
            self._create_data_source(ip)

            LOG.info('Creating dashboard for grafana')
            self._create_dashboard(ip)

            self._update_task_status(task_id)
            LOG.info('Finished')
        except (APIError, HTTPError) as e:
            self._update_task_error(task_id, str(e))
            LOG.exception('Create grafana failed')

    def _create_dashboard(self, ip):
        """Upload every bundled TC dashboard JSON to the Grafana API.

        Args:
            ip: address of the freshly started Grafana container.

        Raises:
            Exception: re-raised after logging if any upload fails.
        """
        url = 'http://admin:admin@{}:{}/api/dashboards/db'.format(ip, consts.GRAFANA_PORT)
        path = os.path.join(consts.REPOS_DIR, 'dashboard', 'opnfv_yardstick_tc*.json')

        # Sorted so dashboards are created in a deterministic TC order.
        for i in sorted(glob.iglob(path)):
            with open(i) as f:
                data = jsonutils.load(f)
            try:
                HttpClient().post(url, {"dashboard": data})
            except Exception:
                LOG.exception('Create dashboard %s failed', i)
                raise

    def _create_data_source(self, ip):
        """Register the Yardstick InfluxDB instance as a Grafana datasource.

        Credentials/target come from the dispatcher_influxdb section of the
        yardstick config file; missing keys fall back to empty strings.
        """
        url = 'http://admin:admin@{}:{}/api/datasources'.format(ip, consts.GRAFANA_PORT)
        influx_conf = utils.parse_ini_file(consts.CONF_FILE).get('dispatcher_influxdb', {})

        data = {
            "name": "yardstick",
            "type": "influxdb",
            "access": "proxy",
            "url": influx_conf.get('target', ''),
            "password": influx_conf.get('password', ''),
            "user": influx_conf.get('username', ''),
            "database": "yardstick",
            "basicAuth": True,
            "basicAuthUser": "admin",
            "basicAuthPassword": "admin",
            "isDefault": True,
        }
        try:
            # Generous timeout: grafana may still be initialising.
            HttpClient().post(url, data, timeout=60)
        except Exception:
            LOG.exception('Create datasources failed')
            raise

    def _create_grafana_container(self, client):
        """Create and start the Grafana container; return its description."""
        ports = [consts.GRAFANA_PORT]
        # Expose the grafana web port on the host mapping port.
        port_bindings = {consts.GRAFANA_PORT: consts.GRAFANA_MAPPING_PORT}
        restart_policy = {"MaximumRetryCount": 0, "Name": "always"}
        host_config = client.create_host_config(port_bindings=port_bindings,
                                                restart_policy=restart_policy)

        LOG.info('Creating container')
        container = client.create_container(image='%s:%s' %
                                            (consts.GRAFANA_IMAGE,
                                             consts.GRAFANA_TAG),
                                            ports=ports,
                                            detach=True,
                                            tty=True,
                                            host_config=host_config)
        LOG.info('Starting container')
        client.start(container)
        return container

    def _check_image_exist(self, client, t):
        """Return True if any local image's first RepoTag contains *t*."""
        return any(t in a['RepoTags'][0]
                   for a in client.images() if a['RepoTags'])

    def create_influxdb(self, *args):
        """Kick off InfluxDB container creation in the background.

        Returns:
            API success payload containing the async ``task_id``.
        """
        task_id = str(uuid.uuid4())

        thread = threading.Thread(target=self._create_influxdb, args=(task_id,))
        thread.start()

        return result_handler(consts.API_SUCCESS, {'task_id': task_id})

    def _create_influxdb(self, task_id):
        """Worker: pull image, start InfluxDB and point dispatchers at it."""
        self._create_task(task_id)

        client = Client(base_url=consts.DOCKER_URL)

        try:
            LOG.info('Checking if influxdb image exist')
            if not self._check_image_exist(client, '%s:%s' %
                                           (consts.INFLUXDB_IMAGE,
                                            consts.INFLUXDB_TAG)):
                LOG.info('Influxdb image not exist, start pulling')
                client.pull(consts.INFLUXDB_IMAGE, tag=consts.INFLUXDB_TAG)

            LOG.info('Creating influxdb container')
            container = self._create_influxdb_container(client)
            LOG.info('Influxdb container is created')

            # Give the container a moment to come up and obtain its
            # bridge-network address before we inspect it.
            time.sleep(5)

            container = client.inspect_container(container['Id'])
            ip = container['NetworkSettings']['Networks']['bridge']['IPAddress']
            LOG.debug('container ip is: %s', ip)

            LOG.info('Changing output to influxdb')
            self._change_output_to_influxdb(ip)

            LOG.info('Config influxdb')
            self._config_influxdb()

            self._update_task_status(task_id)

            LOG.info('Finished')
        except APIError as e:
            self._update_task_error(task_id, str(e))
            LOG.exception('Creating influxdb failed')

    def _create_influxdb_container(self, client):
        """Create and start the InfluxDB container; return its description."""
        ports = [consts.INFLUXDB_DASHBOARD_PORT, consts.INFLUXDB_PORT]
        # Identity mapping: each container port exposed on the same host port.
        port_bindings = {k: k for k in ports}
        restart_policy = {"MaximumRetryCount": 0, "Name": "always"}
        host_config = client.create_host_config(port_bindings=port_bindings,
                                                restart_policy=restart_policy)

        LOG.info('Creating container')
        container = client.create_container(image='%s:%s' %
                                            (consts.INFLUXDB_IMAGE,
                                             consts.INFLUXDB_TAG),
                                            ports=ports,
                                            detach=True,
                                            tty=True,
                                            host_config=host_config)
        LOG.info('Starting container')
        client.start(container)
        return container

    def _config_influxdb(self):
        """Create the yardstick user and database in the new InfluxDB.

        Best-effort: an HTTP failure is logged but not propagated, so the
        overall create-influxdb task still completes.
        """
        try:
            client = influx.get_data_db_client()
            client.create_user(consts.INFLUXDB_USER,
                               consts.INFLUXDB_PASS,
                               consts.INFLUXDB_DB_NAME)
            client.create_database(consts.INFLUXDB_DB_NAME)
            LOG.info('Success to config influxDB')
        except HTTPError:
            LOG.exception('Config influxdb failed')

    def _change_output_to_influxdb(self, ip):
        """Rewrite yardstick.conf so results are dispatched to InfluxDB.

        Starts from the sample configuration and overwrites the dispatcher
        and influxdb target, then writes the live config file.
        """
        utils.makedirs(consts.CONF_DIR)

        parser = configparser.ConfigParser()
        LOG.info('Reading output sample configuration')
        parser.read(consts.CONF_SAMPLE_FILE)

        LOG.info('Set dispatcher to influxdb')
        parser.set('DEFAULT', 'dispatcher', 'influxdb')
        parser.set('dispatcher_influxdb', 'target',
                   'http://{}:{}'.format(ip, consts.INFLUXDB_PORT))

        LOG.info('Writing to %s', consts.CONF_FILE)
        with open(consts.CONF_FILE, 'w') as f:
            parser.write(f)

    def prepare_env(self, *args):
        """Kick off environment preparation in the background.

        Returns:
            API success payload containing the async ``task_id``.
        """
        task_id = str(uuid.uuid4())

        thread = threading.Thread(target=self._prepare_env_daemon,
                                  args=(task_id,))
        thread.start()

        return result_handler(consts.API_SUCCESS, {'task_id': task_id})

    def _already_source_openrc(self):
        """Check if openrc is sourced already"""
        return all(os.environ.get(k) for k in ['OS_AUTH_URL',
                                               'OS_USERNAME',
                                               'OS_PASSWORD',
                                               'EXTERNAL_NETWORK'])

    def _prepare_env_daemon(self, task_id):
        """Worker: ensure openrc credentials are loaded and images prepared."""
        self._create_task(task_id)

        try:
            self._create_directories()

            rc_file = consts.OPENRC

            LOG.info('Checkout Openrc Environment variable')
            if not self._already_source_openrc():
                LOG.info('Openrc variable not found in Environment')
                if not os.path.exists(rc_file):
                    LOG.info('Openrc file not found')
                    installer_ip = os.environ.get('INSTALLER_IP',
                                                  '192.168.200.2')
                    # validate installer_ip is a valid ipaddress
                    installer_ip = str(
                        ipaddress.IPv4Address(six.u(installer_ip)))
                    installer_type = os.environ.get('INSTALLER_TYPE', 'compass')
                    LOG.info('Getting openrc file from %s', installer_type)
                    self._get_remote_rc_file(rc_file,
                                             installer_ip,
                                             installer_type)
                    LOG.info('Source openrc file')
                    self._source_file(rc_file)
                    LOG.info('Appending external network')
                    self._append_external_network(rc_file)
                else:
                    # BUGFIX: this log/source previously ran unconditionally
                    # after the fetch branch, sourcing a freshly fetched rc
                    # file a second time and logging 'file exist' even when
                    # it had just been downloaded.
                    LOG.info('Openrc file exist, source openrc file')
                    self._source_file(rc_file)

            LOG.info('Cleaning images')
            self._clean_images()

            LOG.info('Loading images')
            self._load_images()

            self._update_task_status(task_id)
            LOG.info('Finished')
        except (subprocess.CalledProcessError, OSError) as e:
            self._update_task_error(task_id, str(e))
            LOG.exception('Prepare env failed')

    def _create_directories(self):
        # Ensure the yardstick configuration directory exists.
        utils.makedirs(consts.CONF_DIR)

    def _source_file(self, rc_file):
        # Export the rc file's variables into this process's environment.
        utils.source_env(rc_file)

    def _get_remote_rc_file(self, rc_file, installer_ip, installer_type):
        """Fetch openrc from the installer node via the releng fetch script.

        A non-zero script exit is only logged; callers fall through and will
        fail later when the credentials are actually needed.
        """
        os_fetch_script = os.path.join(consts.RELENG_DIR, consts.FETCH_SCRIPT)

        try:
            cmd = [os_fetch_script, '-d', rc_file, '-i', installer_type,
                   '-a', installer_ip]
            p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
            p.communicate()

            if p.returncode != 0:
                LOG.error('Failed to fetch credentials from installer')
        except OSError as e:
            # Tolerate EEXIST only; anything else (e.g. missing script,
            # ENOENT) propagates to the caller's error handler.
            if e.errno != errno.EEXIST:
                raise

    def _append_external_network(self, rc_file):
        """Append EXTERNAL_NETWORK (first router:external net) to the rc file.

        Logs a warning and leaves the file untouched when no external
        network is found.
        """
        neutron_client = openstack_utils.get_neutron_client()
        networks = neutron_client.list_networks()['networks']
        try:
            ext_network = next(n['name']
                               for n in networks if n['router:external'])
        except StopIteration:
            LOG.warning("Can't find external network")
        else:
            cmd = 'export EXTERNAL_NETWORK=%s' % ext_network
            try:
                with open(rc_file, 'a') as f:
                    f.write(cmd + '\n')
            except OSError as e:
                if e.errno != errno.EEXIST:
                    raise

    def _clean_images(self):
        # Run the repo's clean-images script from the repository root.
        cmd = [consts.CLEAN_IMAGES_SCRIPT]
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, cwd=consts.REPOS_DIR)
        output = p.communicate()[0]
        LOG.debug(output)

    def _load_images(self):
        # Run the repo's load-images script from the repository root.
        cmd = [consts.LOAD_IMAGES_SCRIPT]
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, cwd=consts.REPOS_DIR)
        output = p.communicate()[0]
        LOG.debug(output)

    def _create_task(self, task_id):
        # status 0: task created / running.
        async_handler.insert({'status': 0, 'task_id': task_id})

    def _update_task_status(self, task_id):
        # status 1: task finished successfully.
        async_handler.update_attr(task_id, {'status': 1})

    def _update_task_error(self, task_id, error):
        # status 2: task failed; keep the error text for the poller.
        async_handler.update_attr(task_id, {'status': 2, 'error': error})

    def update_openrc(self, args):
        """Rewrite the openrc file from a mapping of variables and source it.

        Args:
            args: request payload; must contain an 'openrc' mapping.

        Returns:
            API success with the written variables, or API error on bad
            input / failed sourcing.
        """
        try:
            openrc_vars = args['openrc']
        except KeyError:
            return result_handler(consts.API_ERROR, 'openrc must be provided')
        else:
            # NOTE(review): collections.Mapping was removed in py3.10 in
            # favour of collections.abc.Mapping — confirm target runtime
            # before upgrading (file is still six/py2-compatible).
            if not isinstance(openrc_vars, collections.Mapping):
                return result_handler(consts.API_ERROR, 'args should be a dict')

        lines = ['export {}={}\n'.format(k, v) for k, v in openrc_vars.items()]
        LOG.debug('Writing: %s', ''.join(lines))

        LOG.info('Writing openrc: Writing')
        utils.makedirs(consts.CONF_DIR)

        with open(consts.OPENRC, 'w') as f:
            f.writelines(lines)
        LOG.info('Writing openrc: Done')

        LOG.info('Source openrc: Sourcing')
        try:
            self._source_file(consts.OPENRC)
        except subprocess.CalledProcessError as e:
            LOG.exception('Failed to source openrc')
            return result_handler(consts.API_ERROR, str(e))
        LOG.info('Source openrc: Done')

        return result_handler(consts.API_SUCCESS, {'openrc': openrc_vars})

    def upload_pod_file(self, args):
        """Validate an uploaded pod YAML file and persist it to POD_FILE.

        Args:
            args: request payload; must contain an uploaded 'file' object.
        """
        try:
            pod_file = args['file']
        except KeyError:
            return result_handler(consts.API_ERROR, 'file must be provided')

        LOG.info('Checking file')
        data = yaml_load(pod_file.read())
        if not isinstance(data, collections.Mapping):
            return result_handler(consts.API_ERROR, 'invalid yaml file')

        LOG.info('Writing file')
        with open(consts.POD_FILE, 'w') as f:
            yaml.dump(data, f, default_flow_style=False)
        LOG.info('Writing finished')

        return result_handler(consts.API_SUCCESS, {'pod_info': data})

    def update_pod_file(self, args):
        """Overwrite POD_FILE from a pod dict supplied in the payload."""
        try:
            pod_dic = args['pod']
        except KeyError:
            return result_handler(consts.API_ERROR, 'pod must be provided')
        else:
            if not isinstance(pod_dic, collections.Mapping):
                return result_handler(consts.API_ERROR, 'pod should be a dict')

        LOG.info('Writing file')
        with open(consts.POD_FILE, 'w') as f:
            yaml.dump(pod_dic, f, default_flow_style=False)
        LOG.info('Writing finished')

        return result_handler(consts.API_SUCCESS, {'pod_info': pod_dic})

    def update_hosts(self, hosts_ip):
        """Add name->ip entries to /etc/hosts via the write_hosts helper.

        The helper runs under sudo and reads the JSON mapping from stdin.
        """
        if not isinstance(hosts_ip, collections.Mapping):
            return result_handler(consts.API_ERROR, 'args should be a dict')
        LOG.info('Writing hosts: Writing')
        LOG.debug('Writing: %s', hosts_ip)
        cmd = ["sudo", "python", "write_hosts.py"]
        p = subprocess.Popen(cmd,
                             stdin=subprocess.PIPE,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE,
                             cwd=os.path.join(consts.REPOS_DIR,
                                              "api/resources"))
        # NOTE(review): communicate() is given a str; on py3 a binary stdin
        # pipe expects bytes — confirm runtime / add .encode() if py3-only.
        _, err = p.communicate(jsonutils.dumps(hosts_ip))
        if p.returncode != 0:
            return result_handler(consts.API_ERROR, err)
        LOG.info('Writing hosts: Done')
        return result_handler(consts.API_SUCCESS, 'success')