Prohibit the importation of a list of libraries
[yardstick.git] / api / resources / v1 / env.py
1 ##############################################################################
2 # Copyright (c) 2016 Huawei Technologies Co.,Ltd and others.
3 #
4 # All rights reserved. This program and the accompanying materials
5 # are made available under the terms of the Apache License, Version 2.0
6 # which accompanies this distribution, and is available at
7 # http://www.apache.org/licenses/LICENSE-2.0
8 ##############################################################################
9 from __future__ import absolute_import
10
11 import errno
12 import logging
13 import os
14 import subprocess
15 import threading
16 import time
17 import uuid
18 import glob
19 import yaml
20 import collections
21
22 from six.moves import configparser
23 from oslo_serialization import jsonutils
24 from docker import Client
25
26 from api.database.v1.handlers import AsyncTaskHandler
27 from api.utils import influx
28 from api import ApiResource
29 from yardstick.common import constants as consts
30 from yardstick.common import utils
31 from yardstick.common.utils import result_handler
32 from yardstick.common import openstack_utils
33 from yardstick.common.httpClient import HttpClient
34 from yardstick.common.yaml_loader import yaml_load
35
# Module-level logger; forced to DEBUG so progress of the background
# environment-setup tasks (grafana/influxdb/openrc) is always visible.
LOG = logging.getLogger(__name__)
LOG.setLevel(logging.DEBUG)

# Shared handler used to persist async task state (created/finished/error)
# in the API database; one instance is enough for this module.
async_handler = AsyncTaskHandler()
41
class V1Env(ApiResource):
    """v1 environment-management API resource.

    Exposes actions to prepare the yardstick runtime environment:
    spinning up grafana and influxdb containers, fetching/sourcing the
    OpenStack ``openrc`` credentials, and maintaining the pod file and
    hosts entries.  Long-running work runs in background threads whose
    progress is tracked through ``async_handler`` task records
    (status 0 = running, 1 = done, 2 = error).
    """

    # Mapping ABC in a py2/py3-portable way: ``collections.abc.Mapping``
    # on Python 3 (mandatory on 3.10+, where the alias on ``collections``
    # was removed), plain ``collections.Mapping`` on Python 2.
    _MAPPING = getattr(collections, 'abc', collections).Mapping

    def post(self):
        """Dispatch a POST request to the action named in the payload."""
        return self._dispatch_post()

    def create_grafana(self, args):
        """Kick off grafana container creation in a background thread.

        Returns immediately with a ``task_id`` the caller can poll.
        """
        task_id = str(uuid.uuid4())

        thread = threading.Thread(target=self._create_grafana, args=(task_id,))
        thread.start()

        return result_handler(consts.API_SUCCESS, {'task_id': task_id})

    def _create_grafana(self, task_id):
        """Background worker: pull/run grafana and wire it to influxdb.

        Any failure marks the task as errored with the exception text.
        """
        self._create_task(task_id)

        client = Client(base_url=consts.DOCKER_URL)

        try:
            LOG.info('Checking if grafana image exist')
            image = '{}:{}'.format(consts.GRAFANA_IMAGE, consts.GRAFANA_TAG)
            if not self._check_image_exist(client, image):
                LOG.info('Grafana image not exist, start pulling')
                client.pull(consts.GRAFANA_IMAGE, consts.GRAFANA_TAG)

            LOG.info('Creating grafana container')
            container = self._create_grafana_container(client)
            LOG.info('Grafana container is created')

            # Give the container time to come up and get a bridge address.
            time.sleep(5)

            container = client.inspect_container(container['Id'])
            ip = container['NetworkSettings']['Networks']['bridge']['IPAddress']
            LOG.debug('container ip is: %s', ip)

            LOG.info('Creating data source for grafana')
            self._create_data_source(ip)

            LOG.info('Creating dashboard for grafana')
            self._create_dashboard(ip)

            self._update_task_status(task_id)
            LOG.info('Finished')
        except Exception as e:
            self._update_task_error(task_id, str(e))
            LOG.exception('Create grafana failed')

    def _create_dashboard(self, ip):
        """POST every bundled opnfv_yardstick_tc* dashboard to grafana.

        :param ip: bridge-network IP of the grafana container.
        :raises: re-raises any HTTP error after logging which dashboard
                 file failed.
        """
        url = 'http://admin:admin@{}:{}/api/dashboards/db'.format(ip, consts.GRAFANA_PORT)
        path = os.path.join(consts.REPOS_DIR, 'dashboard', 'opnfv_yardstick_tc*.json')

        for i in sorted(glob.iglob(path)):
            with open(i) as f:
                data = jsonutils.load(f)
            try:
                HttpClient().post(url, {"dashboard": data})
            except Exception:
                LOG.exception('Create dashboard %s failed', i)
                raise

    def _create_data_source(self, ip):
        """Register the influxdb datasource in grafana at *ip*.

        Credentials/target come from the ``dispatcher_influxdb`` section
        of the yardstick config file; missing keys fall back to ''.
        """
        url = 'http://admin:admin@{}:{}/api/datasources'.format(ip, consts.GRAFANA_PORT)
        influx_conf = utils.parse_ini_file(consts.CONF_FILE).get('dispatcher_influxdb', {})

        data = {
            "name": "yardstick",
            "type": "influxdb",
            "access": "proxy",
            "url": influx_conf.get('target', ''),
            "password": influx_conf.get('password', ''),
            "user": influx_conf.get('username', ''),
            "database": "yardstick",
            "basicAuth": True,
            "basicAuthUser": "admin",
            "basicAuthPassword": "admin",
            "isDefault": True,
        }
        try:
            HttpClient().post(url, data, timeout=10)
        except Exception:
            LOG.exception('Create datasources failed')
            raise

    def _create_grafana_container(self, client):
        """Create and start the grafana container; return its descriptor."""
        ports = [consts.GRAFANA_PORT]
        port_bindings = {consts.GRAFANA_PORT: consts.GRAFANA_MAPPING_PORT}
        restart_policy = {"MaximumRetryCount": 0, "Name": "always"}
        host_config = client.create_host_config(port_bindings=port_bindings,
                                                restart_policy=restart_policy)

        LOG.info('Creating container')
        container = client.create_container(image='%s:%s' %
                                            (consts.GRAFANA_IMAGE,
                                             consts.GRAFANA_TAG),
                                            ports=ports,
                                            detach=True,
                                            tty=True,
                                            host_config=host_config)
        LOG.info('Starting container')
        client.start(container)
        return container

    def _check_image_exist(self, client, t):
        """Return True if any local image tag contains the string *t*.

        All tags of each image are scanned; the previous implementation
        looked only at ``RepoTags[0]``, missing images whose matching
        tag was not listed first.
        """
        return any(t in tag
                   for a in client.images() if a['RepoTags']
                   for tag in a['RepoTags'])

    def create_influxdb(self, args):
        """Kick off influxdb container creation in a background thread."""
        task_id = str(uuid.uuid4())

        thread = threading.Thread(target=self._create_influxdb, args=(task_id,))
        thread.start()

        return result_handler(consts.API_SUCCESS, {'task_id': task_id})

    def _create_influxdb(self, task_id):
        """Background worker: pull/run influxdb and point yardstick at it."""
        self._create_task(task_id)

        client = Client(base_url=consts.DOCKER_URL)

        try:
            LOG.info('Checking if influxdb image exist')
            if not self._check_image_exist(client, '%s:%s' %
                                           (consts.INFLUXDB_IMAGE,
                                            consts.INFLUXDB_TAG)):
                LOG.info('Influxdb image not exist, start pulling')
                client.pull(consts.INFLUXDB_IMAGE, tag=consts.INFLUXDB_TAG)

            LOG.info('Creating influxdb container')
            container = self._create_influxdb_container(client)
            LOG.info('Influxdb container is created')

            # Give the container time to come up and get a bridge address.
            time.sleep(5)

            container = client.inspect_container(container['Id'])
            ip = container['NetworkSettings']['Networks']['bridge']['IPAddress']
            LOG.debug('container ip is: %s', ip)

            LOG.info('Changing output to influxdb')
            self._change_output_to_influxdb(ip)

            LOG.info('Config influxdb')
            self._config_influxdb()

            self._update_task_status(task_id)

            LOG.info('Finished')
        except Exception as e:
            self._update_task_error(task_id, str(e))
            LOG.exception('Creating influxdb failed')

    def _create_influxdb_container(self, client):
        """Create and start the influxdb container; return its descriptor."""
        ports = [consts.INFLUXDB_DASHBOARD_PORT, consts.INFLUXDB_PORT]
        # Each exposed port is mapped 1:1 onto the host.
        port_bindings = {k: k for k in ports}
        restart_policy = {"MaximumRetryCount": 0, "Name": "always"}
        host_config = client.create_host_config(port_bindings=port_bindings,
                                                restart_policy=restart_policy)

        LOG.info('Creating container')
        container = client.create_container(image='%s:%s' %
                                            (consts.INFLUXDB_IMAGE,
                                             consts.INFLUXDB_TAG),
                                            ports=ports,
                                            detach=True,
                                            tty=True,
                                            host_config=host_config)
        LOG.info('Starting container')
        client.start(container)
        return container

    def _config_influxdb(self):
        """Create the yardstick user and database in influxdb.

        Best-effort: failures are logged but not propagated, matching
        the original behaviour.
        """
        try:
            client = influx.get_data_db_client()
            client.create_user(consts.INFLUXDB_USER,
                               consts.INFLUXDB_PASS,
                               consts.INFLUXDB_DB_NAME)
            client.create_database(consts.INFLUXDB_DB_NAME)
            LOG.info('Success to config influxDB')
        except Exception:
            LOG.exception('Config influxdb failed')

    def _change_output_to_influxdb(self, ip):
        """Rewrite yardstick.conf so results are dispatched to influxdb."""
        utils.makedirs(consts.CONF_DIR)

        parser = configparser.ConfigParser()
        LOG.info('Reading output sample configuration')
        parser.read(consts.CONF_SAMPLE_FILE)

        LOG.info('Set dispatcher to influxdb')
        parser.set('DEFAULT', 'dispatcher', 'influxdb')
        parser.set('dispatcher_influxdb', 'target',
                   'http://{}:{}'.format(ip, consts.INFLUXDB_PORT))

        LOG.info('Writing to %s', consts.CONF_FILE)
        with open(consts.CONF_FILE, 'w') as f:
            parser.write(f)

    def prepare_env(self, args):
        """Kick off full environment preparation in a background thread."""
        task_id = str(uuid.uuid4())

        thread = threading.Thread(target=self._prepare_env_daemon,
                                  args=(task_id,))
        thread.start()

        return result_handler(consts.API_SUCCESS, {'task_id': task_id})

    def _already_source_openrc(self):
        """Check if openrc is sourced already.

        True only when every required variable is present AND non-empty.
        """
        return all(os.environ.get(k) for k in ['OS_AUTH_URL',
                                               'OS_USERNAME',
                                               'OS_PASSWORD',
                                               'EXTERNAL_NETWORK'])

    def _prepare_env_daemon(self, task_id):
        """Background worker: ensure openrc is sourced and images loaded."""
        self._create_task(task_id)

        try:
            self._create_directories()

            rc_file = consts.OPENRC

            LOG.info('Checkout Openrc Environment variable')
            if not self._already_source_openrc():
                LOG.info('Openrc variable not found in Environment')
                if not os.path.exists(rc_file):
                    LOG.info('Openrc file not found')
                    installer_ip = os.environ.get('INSTALLER_IP',
                                                  '192.168.200.2')
                    installer_type = os.environ.get('INSTALLER_TYPE', 'compass')
                    LOG.info('Getting openrc file from %s', installer_type)
                    self._get_remote_rc_file(rc_file,
                                             installer_ip,
                                             installer_type)
                    LOG.info('Source openrc file')
                    self._source_file(rc_file)
                    LOG.info('Appending external network')
                    self._append_external_network(rc_file)
                # Deliberately unconditional: in the fetch path above the
                # file is sourced a second time so the just-appended
                # EXTERNAL_NETWORK export takes effect.
                LOG.info('Openrc file exist, source openrc file')
                self._source_file(rc_file)

            LOG.info('Cleaning images')
            self._clean_images()

            LOG.info('Loading images')
            self._load_images()

            self._update_task_status(task_id)
            LOG.info('Finished')
        except Exception as e:
            self._update_task_error(task_id, str(e))
            LOG.exception('Prepare env failed')

    def _create_directories(self):
        """Ensure the yardstick configuration directory exists."""
        utils.makedirs(consts.CONF_DIR)

    def _source_file(self, rc_file):
        """Export the variables from *rc_file* into this process's env."""
        utils.source_env(rc_file)

    def _get_remote_rc_file(self, rc_file, installer_ip, installer_type):
        """Fetch openrc from the installer via the releng fetch script.

        A non-zero exit is only logged; callers will then fail when they
        try to source the (missing) file.
        """
        os_fetch_script = os.path.join(consts.RELENG_DIR, consts.FETCH_SCRIPT)

        try:
            cmd = [os_fetch_script, '-d', rc_file, '-i', installer_type,
                   '-a', installer_ip]
            p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
            p.communicate()

            if p.returncode != 0:
                LOG.error('Failed to fetch credentials from installer')
        except OSError as e:
            # NOTE(review): EEXIST from Popen is unexpected here — this
            # mirrors the original tolerance; confirm the intent.
            if e.errno != errno.EEXIST:
                raise

    def _append_external_network(self, rc_file):
        """Append an EXTERNAL_NETWORK export for the first external net."""
        neutron_client = openstack_utils.get_neutron_client()
        networks = neutron_client.list_networks()['networks']
        try:
            ext_network = next(n['name']
                               for n in networks if n['router:external'])
        except StopIteration:
            LOG.warning("Can't find external network")
        else:
            cmd = 'export EXTERNAL_NETWORK=%s' % ext_network
            try:
                with open(rc_file, 'a') as f:
                    f.write(cmd + '\n')
            except OSError as e:
                if e.errno != errno.EEXIST:
                    raise

    def _clean_images(self):
        """Run the repo's clean-images script and log its output."""
        cmd = [consts.CLEAN_IMAGES_SCRIPT]
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, cwd=consts.REPOS_DIR)
        output = p.communicate()[0]
        LOG.debug(output)

    def _load_images(self):
        """Run the repo's load-images script and log its output."""
        cmd = [consts.LOAD_IMAGES_SCRIPT]
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, cwd=consts.REPOS_DIR)
        output = p.communicate()[0]
        LOG.debug(output)

    def _create_task(self, task_id):
        """Record a new async task in status 0 (running)."""
        async_handler.insert({'status': 0, 'task_id': task_id})

    def _update_task_status(self, task_id):
        """Mark an async task as finished (status 1)."""
        async_handler.update_attr(task_id, {'status': 1})

    def _update_task_error(self, task_id, error):
        """Mark an async task as failed (status 2) with the error text."""
        async_handler.update_attr(task_id, {'status': 2, 'error': error})

    def update_openrc(self, args):
        """Write the given openrc variables to disk and source them.

        :param args: dict with an 'openrc' mapping of variable -> value.
        """
        try:
            openrc_vars = args['openrc']
        except KeyError:
            return result_handler(consts.API_ERROR, 'openrc must be provided')
        else:
            if not isinstance(openrc_vars, self._MAPPING):
                return result_handler(consts.API_ERROR, 'args should be a dict')

        lines = ['export {}={}\n'.format(k, v) for k, v in openrc_vars.items()]
        LOG.debug('Writing: %s', ''.join(lines))

        LOG.info('Writing openrc: Writing')
        utils.makedirs(consts.CONF_DIR)

        with open(consts.OPENRC, 'w') as f:
            f.writelines(lines)
        LOG.info('Writing openrc: Done')

        LOG.info('Source openrc: Sourcing')
        try:
            self._source_file(consts.OPENRC)
        except Exception as e:
            LOG.exception('Failed to source openrc')
            return result_handler(consts.API_ERROR, str(e))
        LOG.info('Source openrc: Done')

        return result_handler(consts.API_SUCCESS, {'openrc': openrc_vars})

    def upload_pod_file(self, args):
        """Validate an uploaded pod yaml file and persist it."""
        try:
            pod_file = args['file']
        except KeyError:
            return result_handler(consts.API_ERROR, 'file must be provided')

        LOG.info('Checking file')
        data = yaml_load(pod_file.read())
        if not isinstance(data, self._MAPPING):
            return result_handler(consts.API_ERROR, 'invalid yaml file')

        LOG.info('Writing file')
        with open(consts.POD_FILE, 'w') as f:
            yaml.dump(data, f, default_flow_style=False)
        LOG.info('Writing finished')

        return result_handler(consts.API_SUCCESS, {'pod_info': data})

    def update_pod_file(self, args):
        """Overwrite the pod file with the given pod dict."""
        try:
            pod_dic = args['pod']
        except KeyError:
            return result_handler(consts.API_ERROR, 'pod must be provided')
        else:
            if not isinstance(pod_dic, self._MAPPING):
                return result_handler(consts.API_ERROR, 'pod should be a dict')

        LOG.info('Writing file')
        with open(consts.POD_FILE, 'w') as f:
            yaml.dump(pod_dic, f, default_flow_style=False)
        LOG.info('Writing finished')

        return result_handler(consts.API_SUCCESS, {'pod_info': pod_dic})

    def update_hosts(self, hosts_ip):
        """Write host/IP entries via the privileged write_hosts helper.

        :param hosts_ip: mapping of hostname -> IP, piped as JSON to the
                         helper script's stdin.
        """
        if not isinstance(hosts_ip, self._MAPPING):
            return result_handler(consts.API_ERROR, 'args should be a dict')
        LOG.info('Writing hosts: Writing')
        LOG.debug('Writing: %s', hosts_ip)
        cmd = ["sudo", "python", "write_hosts.py"]
        p = subprocess.Popen(cmd,
                             stdin=subprocess.PIPE,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE,
                             cwd=os.path.join(consts.REPOS_DIR,
                                              "api/resources"))
        payload = jsonutils.dumps(hosts_ip)
        if not isinstance(payload, bytes):
            # Python 3: a binary pipe expects bytes, not str.
            payload = payload.encode('utf-8')
        _, err = p.communicate(payload)
        if p.returncode != 0:
            return result_handler(consts.API_ERROR, err)
        LOG.info('Writing hosts: Done')
        return result_handler(consts.API_SUCCESS, 'success')