Add common openstack operation scenarios: network
[yardstick.git] / api / resources / v1 / env.py
1 ##############################################################################
2 # Copyright (c) 2016 Huawei Technologies Co.,Ltd and others.
3 #
4 # All rights reserved. This program and the accompanying materials
5 # are made available under the terms of the Apache License, Version 2.0
6 # which accompanies this distribution, and is available at
7 # http://www.apache.org/licenses/LICENSE-2.0
8 ##############################################################################
9 from __future__ import absolute_import
10
11 import errno
12 import logging
13 import os
14 import subprocess
15 import threading
16 import time
17 import uuid
18 import glob
19 import yaml
20 import collections
21
22 from six.moves import configparser
23 from oslo_serialization import jsonutils
24 from docker import Client
25
26 from api.database.v1.handlers import AsyncTaskHandler
27 from api.utils import influx
28 from api import ApiResource
29 from yardstick.common import constants as consts
30 from yardstick.common import utils
31 from yardstick.common.utils import result_handler
32 from yardstick.common import openstack_utils
33 from yardstick.common.httpClient import HttpClient
34
35
# Module-level logger; DEBUG is forced so every container-setup and
# env-preparation step is traceable in the API log.
LOG = logging.getLogger(__name__)
LOG.setLevel(logging.DEBUG)

# Shared handler persisting async task state in the API database
# (status 0 = running, 1 = finished, 2 = error — see _create_task /
# _update_task_status / _update_task_error below).
async_handler = AsyncTaskHandler()
40
41
class V1Env(ApiResource):
    """API v1 environment endpoints.

    Prepares the Yardstick test environment: creates Grafana and
    InfluxDB containers, fetches and sources the OpenStack openrc file,
    and manages openrc / pod / hosts configuration. Long-running
    operations run in a background thread and report progress through
    ``async_handler`` task records (0=running, 1=done, 2=error).
    """

    def post(self):
        # Route the POST payload to the action method named in the request.
        return self._dispatch_post()

    def create_grafana(self, args):
        """Start Grafana container creation in the background.

        Returns immediately with a ``task_id`` the caller can poll.
        """
        task_id = str(uuid.uuid4())

        thread = threading.Thread(target=self._create_grafana, args=(task_id,))
        thread.start()

        return result_handler(consts.API_SUCCESS, {'task_id': task_id})

    def _create_grafana(self, task_id):
        """Worker: pull/create/start Grafana and wire it to InfluxDB.

        Runs in a thread; failures are recorded on the task instead of
        being raised to the caller.
        """
        self._create_task(task_id)

        client = Client(base_url=consts.DOCKER_URL)

        try:
            LOG.info('Checking if grafana image exist')
            image = '{}:{}'.format(consts.GRAFANA_IMAGE, consts.GRAFANA_TAG)
            if not self._check_image_exist(client, image):
                LOG.info('Grafana image not exist, start pulling')
                client.pull(consts.GRAFANA_IMAGE, consts.GRAFANA_TAG)

            LOG.info('Creating grafana container')
            container = self._create_grafana_container(client)
            LOG.info('Grafana container is created')

            # Give the container a moment to come up and get an address.
            time.sleep(5)

            container = client.inspect_container(container['Id'])
            ip = container['NetworkSettings']['Networks']['bridge']['IPAddress']
            LOG.debug('container ip is: %s', ip)

            LOG.info('Creating data source for grafana')
            self._create_data_source(ip)

            LOG.info('Creating dashboard for grafana')
            self._create_dashboard(ip)

            self._update_task_status(task_id)
            LOG.info('Finished')
        except Exception as e:
            self._update_task_error(task_id, str(e))
            LOG.exception('Create grafana failed')

    def _create_dashboard(self, ip):
        """POST every ``*dashboard.json`` under the repo to Grafana at *ip*."""
        url = 'http://admin:admin@{}:{}/api/dashboards/db'.format(ip, consts.GRAFANA_PORT)
        path = os.path.join(consts.REPOS_DIR, 'dashboard', '*dashboard.json')

        # Sorted so dashboards are created in a deterministic order.
        for i in sorted(glob.iglob(path)):
            with open(i) as f:
                data = jsonutils.load(f)
            try:
                HttpClient().post(url, data)
            except Exception:
                LOG.exception('Create dashboard %s failed', i)
                raise

    def _create_data_source(self, ip):
        """Register the Yardstick InfluxDB database as a Grafana datasource.

        Raises if the influxdb target is missing from yardstick.conf or
        the Grafana API call fails.
        """
        url = 'http://admin:admin@{}:{}/api/datasources'.format(ip, consts.GRAFANA_PORT)
        influx_conf = utils.parse_ini_file(consts.CONF_FILE)

        try:
            influx_url = influx_conf['dispatcher_influxdb']['target']
        except KeyError:
            LOG.exception('influxdb url not set in yardstick.conf')
            raise

        data = {
            "name": "yardstick",
            "type": "influxdb",
            "access": "proxy",
            "url": influx_url,
            "password": "root",
            "user": "root",
            "database": "yardstick",
            "basicAuth": True,
            "basicAuthUser": "admin",
            "basicAuthPassword": "admin",
            "isDefault": False,
        }
        try:
            HttpClient().post(url, data)
        except Exception:
            LOG.exception('Create datasources failed')
            raise

    def _create_grafana_container(self, client):
        """Create and start the Grafana container; return its description."""
        ports = [consts.GRAFANA_PORT]
        port_bindings = {consts.GRAFANA_PORT: consts.GRAFANA_MAPPING_PORT}
        restart_policy = {"MaximumRetryCount": 0, "Name": "always"}
        host_config = client.create_host_config(port_bindings=port_bindings,
                                                restart_policy=restart_policy)

        LOG.info('Creating container')
        container = client.create_container(image='%s:%s' %
                                            (consts.GRAFANA_IMAGE,
                                             consts.GRAFANA_TAG),
                                            ports=ports,
                                            detach=True,
                                            tty=True,
                                            host_config=host_config)
        LOG.info('Starting container')
        client.start(container)
        return container

    def _check_image_exist(self, client, t):
        """Return True if the image reference *t* exists locally.

        Checks all repo tags for an exact match. The previous
        implementation did a substring test against only the first tag,
        which could both miss a match and false-positive on a longer
        tag containing *t*.
        """
        return any(t in image['RepoTags']
                   for image in client.images() if image['RepoTags'])

    def create_influxdb(self, args):
        """Start InfluxDB container creation in the background.

        Returns immediately with a ``task_id`` the caller can poll.
        """
        task_id = str(uuid.uuid4())

        thread = threading.Thread(target=self._create_influxdb, args=(task_id,))
        thread.start()

        return result_handler(consts.API_SUCCESS, {'task_id': task_id})

    def _create_influxdb(self, task_id):
        """Worker: pull/create/start InfluxDB and point the dispatcher at it."""
        self._create_task(task_id)

        client = Client(base_url=consts.DOCKER_URL)

        try:
            LOG.info('Checking if influxdb image exist')
            if not self._check_image_exist(client, '%s:%s' %
                                           (consts.INFLUXDB_IMAGE,
                                            consts.INFLUXDB_TAG)):
                LOG.info('Influxdb image not exist, start pulling')
                client.pull(consts.INFLUXDB_IMAGE, tag=consts.INFLUXDB_TAG)

            LOG.info('Creating influxdb container')
            container = self._create_influxdb_container(client)
            LOG.info('Influxdb container is created')

            # Give the container a moment to come up and get an address.
            time.sleep(5)

            container = client.inspect_container(container['Id'])
            ip = container['NetworkSettings']['Networks']['bridge']['IPAddress']
            LOG.debug('container ip is: %s', ip)

            LOG.info('Changing output to influxdb')
            self._change_output_to_influxdb(ip)

            LOG.info('Config influxdb')
            self._config_influxdb()

            self._update_task_status(task_id)

            LOG.info('Finished')
        except Exception as e:
            self._update_task_error(task_id, str(e))
            LOG.exception('Creating influxdb failed')

    def _create_influxdb_container(self, client):
        """Create and start the InfluxDB container; return its description."""
        ports = [consts.INFLUXDB_DASHBOARD_PORT, consts.INFLUXDB_PORT]
        # Publish each container port on the identical host port.
        port_bindings = {k: k for k in ports}
        restart_policy = {"MaximumRetryCount": 0, "Name": "always"}
        host_config = client.create_host_config(port_bindings=port_bindings,
                                                restart_policy=restart_policy)

        LOG.info('Creating container')
        container = client.create_container(image='%s:%s' %
                                            (consts.INFLUXDB_IMAGE,
                                             consts.INFLUXDB_TAG),
                                            ports=ports,
                                            detach=True,
                                            tty=True,
                                            host_config=host_config)
        LOG.info('Starting container')
        client.start(container)
        return container

    def _config_influxdb(self):
        """Best-effort: create the yardstick user and database in InfluxDB.

        Failures are logged but deliberately not raised — the container
        is already up and configuration can be retried.
        """
        try:
            client = influx.get_data_db_client()
            client.create_user(consts.INFLUXDB_USER,
                               consts.INFLUXDB_PASS,
                               consts.INFLUXDB_DB_NAME)
            client.create_database(consts.INFLUXDB_DB_NAME)
            LOG.info('Success to config influxDB')
        except Exception:
            LOG.exception('Config influxdb failed')

    def _change_output_to_influxdb(self, ip):
        """Rewrite yardstick.conf so results are dispatched to InfluxDB at *ip*."""
        utils.makedirs(consts.CONF_DIR)

        parser = configparser.ConfigParser()
        LOG.info('Reading output sample configuration')
        parser.read(consts.CONF_SAMPLE_FILE)

        LOG.info('Set dispatcher to influxdb')
        parser.set('DEFAULT', 'dispatcher', 'influxdb')
        parser.set('dispatcher_influxdb', 'target',
                   'http://{}:{}'.format(ip, consts.INFLUXDB_PORT))

        LOG.info('Writing to %s', consts.CONF_FILE)
        with open(consts.CONF_FILE, 'w') as f:
            parser.write(f)

    def prepare_env(self, args):
        """Start environment preparation in the background.

        Returns immediately with a ``task_id`` the caller can poll.
        """
        task_id = str(uuid.uuid4())

        thread = threading.Thread(target=self._prepare_env_daemon,
                                  args=(task_id,))
        thread.start()

        return result_handler(consts.API_SUCCESS, {'task_id': task_id})

    def _already_source_openrc(self):
        """Check if openrc is sourced already"""
        return all(os.environ.get(k) for k in ['OS_AUTH_URL',
                                               'OS_USERNAME',
                                               'OS_PASSWORD',
                                               'EXTERNAL_NETWORK'])

    def _prepare_env_daemon(self, task_id):
        """Worker: ensure openrc credentials are sourced, then reload images."""
        self._create_task(task_id)

        try:
            self._create_directories()

            rc_file = consts.OPENRC

            LOG.info('Checkout Openrc Environment variable')
            if not self._already_source_openrc():
                LOG.info('Openrc variable not found in Environment')
                if not os.path.exists(rc_file):
                    LOG.info('Openrc file not found')
                    installer_ip = os.environ.get('INSTALLER_IP',
                                                  '192.168.200.2')
                    installer_type = os.environ.get('INSTALLER_TYPE', 'compass')
                    LOG.info('Getting openrc file from %s', installer_type)
                    self._get_remote_rc_file(rc_file,
                                             installer_ip,
                                             installer_type)
                    # Source first so the neutron client used below has
                    # credentials, then append EXTERNAL_NETWORK to the file.
                    LOG.info('Source openrc file')
                    self._source_file(rc_file)
                    LOG.info('Appending external network')
                    self._append_external_network(rc_file)
                # (Re-)source so EXTERNAL_NETWORK appended above — or an
                # already-existing file — is exported into this process.
                LOG.info('Source openrc file')
                self._source_file(rc_file)

            LOG.info('Cleaning images')
            self._clean_images()

            LOG.info('Loading images')
            self._load_images()

            self._update_task_status(task_id)
            LOG.info('Finished')
        except Exception as e:
            self._update_task_error(task_id, str(e))
            LOG.exception('Prepare env failed')

    def _create_directories(self):
        # Ensure the yardstick configuration directory exists.
        utils.makedirs(consts.CONF_DIR)

    def _source_file(self, rc_file):
        # Export the shell variables from rc_file into os.environ.
        utils.source_env(rc_file)

    def _get_remote_rc_file(self, rc_file, installer_ip, installer_type):
        """Fetch openrc from the installer node via the releng fetch script."""
        os_fetch_script = os.path.join(consts.RELENG_DIR, consts.FETCH_SCRIPT)

        try:
            cmd = [os_fetch_script, '-d', rc_file, '-i', installer_type,
                   '-a', installer_ip]
            p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
            p.communicate()

            if p.returncode != 0:
                LOG.error('Failed to fetch credentials from installer')
        except OSError as e:
            # NOTE(review): EEXIST seems an odd errno to tolerate when
            # spawning a script — presumably kept for a historical corner
            # case; confirm before changing.
            if e.errno != errno.EEXIST:
                raise

    def _append_external_network(self, rc_file):
        """Find the router:external network and append it to *rc_file*.

        Requires OpenStack credentials to be sourced already. Logs a
        warning (without failing) when no external network is found.
        """
        neutron_client = openstack_utils.get_neutron_client()
        networks = neutron_client.list_networks()['networks']
        try:
            ext_network = next(n['name']
                               for n in networks if n['router:external'])
        except StopIteration:
            LOG.warning("Can't find external network")
        else:
            cmd = 'export EXTERNAL_NETWORK=%s' % ext_network
            try:
                with open(rc_file, 'a') as f:
                    f.write(cmd + '\n')
            except OSError as e:
                if e.errno != errno.EEXIST:
                    raise

    def _clean_images(self):
        """Run the repo's clean-images script and log its output."""
        cmd = [consts.CLEAN_IMAGES_SCRIPT]
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, cwd=consts.REPOS_DIR)
        output = p.communicate()[0]
        LOG.debug(output)

    def _load_images(self):
        """Run the repo's load-images script and log its output."""
        cmd = [consts.LOAD_IMAGES_SCRIPT]
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, cwd=consts.REPOS_DIR)
        output = p.communicate()[0]
        LOG.debug(output)

    def _create_task(self, task_id):
        # Record a new task as running (status 0).
        async_handler.insert({'status': 0, 'task_id': task_id})

    def _update_task_status(self, task_id):
        # Mark the task finished (status 1).
        async_handler.update_attr(task_id, {'status': 1})

    def _update_task_error(self, task_id, error):
        # Mark the task failed (status 2) with an error message.
        async_handler.update_attr(task_id, {'status': 2, 'error': error})

    def update_openrc(self, args):
        """Write the provided openrc variables to disk and source them.

        ``args['openrc']`` must be a mapping of variable name to value.
        """
        try:
            openrc_vars = args['openrc']
        except KeyError:
            return result_handler(consts.API_ERROR, 'openrc must be provided')
        else:
            # NOTE(review): collections.Mapping is removed in Python 3.10
            # (use collections.abc.Mapping); kept here for this file's
            # six-based py2/py3 compatibility.
            if not isinstance(openrc_vars, collections.Mapping):
                return result_handler(consts.API_ERROR, 'args should be a dict')

        lines = ['export {}={}\n'.format(k, v) for k, v in openrc_vars.items()]
        LOG.debug('Writing: %s', ''.join(lines))

        LOG.info('Writing openrc: Writing')
        utils.makedirs(consts.CONF_DIR)

        with open(consts.OPENRC, 'w') as f:
            f.writelines(lines)
        LOG.info('Writing openrc: Done')

        LOG.info('Source openrc: Sourcing')
        try:
            self._source_file(consts.OPENRC)
        except Exception as e:
            LOG.exception('Failed to source openrc')
            return result_handler(consts.API_ERROR, str(e))
        LOG.info('Source openrc: Done')

        return result_handler(consts.API_SUCCESS, {'openrc': openrc_vars})

    def upload_pod_file(self, args):
        """Validate an uploaded pod YAML file and persist it to POD_FILE."""
        try:
            pod_file = args['file']
        except KeyError:
            return result_handler(consts.API_ERROR, 'file must be provided')

        LOG.info('Checking file')
        data = yaml.safe_load(pod_file.read())
        if not isinstance(data, collections.Mapping):
            return result_handler(consts.API_ERROR, 'invalid yaml file')

        LOG.info('Writing file')
        with open(consts.POD_FILE, 'w') as f:
            yaml.dump(data, f, default_flow_style=False)
        LOG.info('Writing finished')

        return result_handler(consts.API_SUCCESS, {'pod_info': data})

    def update_pod_file(self, args):
        """Persist ``args['pod']`` (a mapping) as the pod YAML file."""
        try:
            pod_dic = args['pod']
        except KeyError:
            return result_handler(consts.API_ERROR, 'pod must be provided')
        else:
            if not isinstance(pod_dic, collections.Mapping):
                return result_handler(consts.API_ERROR, 'pod should be a dict')

        LOG.info('Writing file')
        with open(consts.POD_FILE, 'w') as f:
            yaml.dump(pod_dic, f, default_flow_style=False)
        LOG.info('Writing finished')

        return result_handler(consts.API_SUCCESS, {'pod_info': pod_dic})

    def update_hosts(self, hosts_ip):
        """Write host-name -> IP mappings via the privileged helper script.

        *hosts_ip* must be a mapping; it is serialized to JSON and piped
        to ``write_hosts.py`` on stdin.
        """
        if not isinstance(hosts_ip, collections.Mapping):
            return result_handler(consts.API_ERROR, 'args should be a dict')
        LOG.info('Writing hosts: Writing')
        LOG.debug('Writing: %s', hosts_ip)
        cmd = ["sudo", "python", "write_hosts.py"]
        p = subprocess.Popen(cmd,
                             stdin=subprocess.PIPE,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE,
                             cwd=os.path.join(consts.REPOS_DIR,
                                              "api/resources"))
        # Encode explicitly: without universal_newlines the pipe expects
        # bytes on Python 3 (passing str raised TypeError here before).
        _, err = p.communicate(jsonutils.dumps(hosts_ip).encode('utf-8'))
        if p.returncode != 0:
            return result_handler(consts.API_ERROR, err.decode('utf-8'))
        LOG.info('Writing hosts: Done')
        return result_handler(consts.API_SUCCESS, 'success')