api/resources/env_action.py
##############################################################################
# Copyright (c) 2016 Huawei Technologies Co.,Ltd and others.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
from __future__ import absolute_import

import errno
import logging
import os
import subprocess
import threading
import time
import uuid
import glob
import yaml
import collections

from six.moves import configparser
from oslo_serialization import jsonutils
from docker import Client

from api.database.v1.handlers import AsyncTaskHandler
from api.utils import influx
from api.utils.common import result_handler
from yardstick.common import constants as consts
from yardstick.common import utils as common_utils
from yardstick.common import openstack_utils
from yardstick.common.httpClient import HttpClient


LOG = logging.getLogger(__name__)
LOG.setLevel(logging.DEBUG)

async_handler = AsyncTaskHandler()


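# Grafana provisioning is asynchronous: the API call spawns a worker thread
# and returns a task_id immediately; callers poll the async task record for
# the outcome.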
def create_grafana(args):
    task_id = str(uuid.uuid4())

    thread = threading.Thread(target=_create_grafana, args=(task_id,))
    thread.start()

    return result_handler(consts.API_SUCCESS, {'task_id': task_id})


def _create_grafana(task_id):
    _create_task(task_id)

    client = Client(base_url=consts.DOCKER_URL)

    try:
        LOG.info('Checking if grafana image exists')
        image = '{}:{}'.format(consts.GRAFANA_IMAGE, consts.GRAFANA_TAG)
        if not _check_image_exist(client, image):
            LOG.info('Grafana image does not exist, start pulling')
            client.pull(consts.GRAFANA_IMAGE, consts.GRAFANA_TAG)

        LOG.info('Creating grafana container')
        _create_grafana_container(client)
        LOG.info('Grafana container is created')

        time.sleep(5)

        LOG.info('Creating data source for grafana')
        _create_data_source()

        LOG.info('Creating dashboard for grafana')
        _create_dashboard()

        _update_task_status(task_id)
        LOG.info('Finished')
    except Exception as e:
        _update_task_error(task_id, str(e))
        LOG.exception('Create grafana failed')


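# Post every *dashboard.json shipped under REPOS_DIR/dashboard to the Grafana
# dashboards API, using Grafana's default admin:admin credentials.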
def _create_dashboard():
    url = 'http://admin:admin@%s:3000/api/dashboards/db' % consts.GRAFANA_IP
    path = os.path.join(consts.REPOS_DIR, 'dashboard', '*dashboard.json')

    for i in sorted(glob.iglob(path)):
        with open(i) as f:
            data = jsonutils.load(f)
        try:
            HttpClient().post(url, data)
        except Exception:
            LOG.exception('Create dashboard %s failed', i)
            raise


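# Register the local InfluxDB instance as the "yardstick" datasource in
# Grafana so the dashboards have data to query.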
def _create_data_source():
    url = 'http://admin:admin@%s:3000/api/datasources' % consts.GRAFANA_IP
    data = {
        "name": "yardstick",
        "type": "influxdb",
        "access": "proxy",
        "url": "http://%s:8086" % consts.INFLUXDB_IP,
        "password": "root",
        "user": "root",
        "database": "yardstick",
        "basicAuth": True,
        "basicAuthUser": "admin",
        "basicAuthPassword": "admin",
        "isDefault": False,
    }
    try:
        HttpClient().post(url, data)
    except Exception:
        LOG.exception('Create datasources failed')
        raise


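# Run the Grafana container with port 3000 published 1:1 on the host and an
# "always" restart policy.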
def _create_grafana_container(client):
    ports = [3000]
    port_bindings = {k: k for k in ports}
    restart_policy = {"MaximumRetryCount": 0, "Name": "always"}
    host_config = client.create_host_config(port_bindings=port_bindings,
                                            restart_policy=restart_policy)

    LOG.info('Creating container')
    container = client.create_container(image='%s:%s' % (consts.GRAFANA_IMAGE,
                                                         consts.GRAFANA_TAG),
                                        ports=ports,
                                        detach=True,
                                        tty=True,
                                        host_config=host_config)
    LOG.info('Starting container')
    client.start(container)


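# Return True if any locally available image has a repo tag matching the
# requested reference.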
def _check_image_exist(client, image_ref):
    return any(image_ref in tag
               for image in client.images()
               for tag in (image['RepoTags'] or []))


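# API entry point: switch the dispatcher to InfluxDB and bring up an InfluxDB
# container in the background, returning a task_id for polling.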
def create_influxdb(args):
    task_id = str(uuid.uuid4())

    thread = threading.Thread(target=_create_influxdb, args=(task_id,))
    thread.start()

    return result_handler(consts.API_SUCCESS, {'task_id': task_id})


def _create_influxdb(task_id):
    _create_task(task_id)

    client = Client(base_url=consts.DOCKER_URL)

    try:
        LOG.info('Changing output to influxdb')
        _change_output_to_influxdb()

        LOG.info('Checking if influxdb image exists')
        if not _check_image_exist(client, '%s:%s' % (consts.INFLUXDB_IMAGE,
                                                     consts.INFLUXDB_TAG)):
            LOG.info('Influxdb image does not exist, start pulling')
            client.pull(consts.INFLUXDB_IMAGE, tag=consts.INFLUXDB_TAG)

        LOG.info('Creating influxdb container')
        _create_influxdb_container(client)
        LOG.info('Influxdb container is created')

        time.sleep(5)

        LOG.info('Configuring influxdb')
        _config_influxdb()

        _update_task_status(task_id)

        LOG.info('Finished')
    except Exception as e:
        _update_task_error(task_id, str(e))
        LOG.exception('Creating influxdb failed')


def _create_influxdb_container(client):

    ports = [8083, 8086]
    port_bindings = {k: k for k in ports}
    restart_policy = {"MaximumRetryCount": 0, "Name": "always"}
    host_config = client.create_host_config(port_bindings=port_bindings,
                                            restart_policy=restart_policy)

    LOG.info('Creating container')
    container = client.create_container(image='%s:%s' % (consts.INFLUXDB_IMAGE,
                                                         consts.INFLUXDB_TAG),
                                        ports=ports,
                                        detach=True,
                                        tty=True,
                                        host_config=host_config)
    LOG.info('Starting container')
    client.start(container)


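# Create the yardstick user and database on the freshly started InfluxDB
# instance.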
def _config_influxdb():
    try:
        client = influx.get_data_db_client()
        client.create_user(consts.INFLUXDB_USER,
                           consts.INFLUXDB_PASS,
                           consts.INFLUXDB_DB_NAME)
        client.create_database(consts.INFLUXDB_DB_NAME)
        LOG.info('Successfully configured influxDB')
    except Exception:
        LOG.exception('Config influxdb failed')


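# Rewrite the yardstick output configuration so results are dispatched to the
# local InfluxDB HTTP endpoint.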
def _change_output_to_influxdb():
    common_utils.makedirs(consts.CONF_DIR)

    parser = configparser.ConfigParser()
    LOG.info('Reading output sample configuration')
    parser.read(consts.CONF_SAMPLE_FILE)

    LOG.info('Setting dispatcher to influxdb')
    parser.set('DEFAULT', 'dispatcher', 'influxdb')
    parser.set('dispatcher_influxdb', 'target',
               'http://%s:8086' % consts.INFLUXDB_IP)

    LOG.info('Writing to %s', consts.CONF_FILE)
    with open(consts.CONF_FILE, 'w') as f:
        parser.write(f)


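# API entry point: prepare the test environment (openrc, guest images) in a
# background thread and return a task_id for polling.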
def prepare_env(args):
    task_id = str(uuid.uuid4())

    thread = threading.Thread(target=_prepare_env_daemon, args=(task_id,))
    thread.start()

    return result_handler(consts.API_SUCCESS, {'task_id': task_id})


def _already_source_openrc():
    """Check if openrc is sourced already"""
    return all(os.environ.get(k) for k in ['OS_AUTH_URL', 'OS_USERNAME',
                                           'OS_PASSWORD', 'EXTERNAL_NETWORK'])


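# Background worker: make sure OpenStack credentials are available (fetching
# an openrc file from the installer if necessary), then clean and reload the
# guest images used by the test cases.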
def _prepare_env_daemon(task_id):
    _create_task(task_id)

    try:
        _create_directories()

        rc_file = consts.OPENRC

        LOG.info('Checking for openrc environment variables')
        if not _already_source_openrc():
            LOG.info('Openrc variables not found in environment')
            if not os.path.exists(rc_file):
                LOG.info('Openrc file not found')
                installer_ip = os.environ.get('INSTALLER_IP', '192.168.200.2')
                installer_type = os.environ.get('INSTALLER_TYPE', 'compass')
                LOG.info('Getting openrc file from %s', installer_type)
                _get_remote_rc_file(rc_file, installer_ip, installer_type)
                LOG.info('Source openrc file')
                _source_file(rc_file)
                LOG.info('Appending external network')
                _append_external_network(rc_file)
            LOG.info('Sourcing openrc file')
            _source_file(rc_file)

        LOG.info('Cleaning images')
        _clean_images()

        LOG.info('Loading images')
        _load_images()

        _update_task_status(task_id)
        LOG.info('Finished')
    except Exception as e:
        _update_task_error(task_id, str(e))
        LOG.exception('Prepare env failed')


def _create_directories():
    common_utils.makedirs(consts.CONF_DIR)


def _source_file(rc_file):
    common_utils.source_env(rc_file)


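# Fetch the OpenStack credentials file from the installer node using the
# fetch script located at consts.FETCH_SCRIPT under consts.RELENG_DIR.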
def _get_remote_rc_file(rc_file, installer_ip, installer_type):

    os_fetch_script = os.path.join(consts.RELENG_DIR, consts.FETCH_SCRIPT)

    try:
        cmd = [os_fetch_script, '-d', rc_file, '-i', installer_type,
               '-a', installer_ip]
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
        p.communicate()

        if p.returncode != 0:
            LOG.error('Failed to fetch credentials from installer')
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise


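# Look up the first neutron network flagged router:external and append an
# EXTERNAL_NETWORK export to the openrc file so later sourcing picks it up.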
def _append_external_network(rc_file):
    neutron_client = openstack_utils.get_neutron_client()
    networks = neutron_client.list_networks()['networks']
    try:
        ext_network = next(n['name'] for n in networks if n['router:external'])
    except StopIteration:
        LOG.warning("Can't find external network")
    else:
        cmd = 'export EXTERNAL_NETWORK=%s' % ext_network
        try:
            with open(rc_file, 'a') as f:
                f.write(cmd + '\n')
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise


def _clean_images():
    cmd = [consts.CLEAN_IMAGES_SCRIPT]
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, cwd=consts.REPOS_DIR)
    output = p.communicate()[0]
    LOG.debug(output)


def _load_images():
    cmd = [consts.LOAD_IMAGES_SCRIPT]
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, cwd=consts.REPOS_DIR)
    output = p.communicate()[0]
    LOG.debug(output)


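# Async task bookkeeping: a task is inserted with status 0 when it starts,
# updated to 1 on success and to 2 (with an error message) on failure.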
def _create_task(task_id):
    async_handler.insert({'status': 0, 'task_id': task_id})


def _update_task_status(task_id):
    async_handler.update_attr(task_id, {'status': 1})


def _update_task_error(task_id, error):
    async_handler.update_attr(task_id, {'status': 2, 'error': error})


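# Overwrite the openrc file with the variables supplied by the caller and
# source it into the API process environment.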
def update_openrc(args):
    try:
        openrc_vars = args['openrc']
    except KeyError:
        return result_handler(consts.API_ERROR, 'openrc must be provided')
    else:
        if not isinstance(openrc_vars, collections.Mapping):
            return result_handler(consts.API_ERROR, 'openrc should be a dict')

    lines = ['export {}={}\n'.format(k, v) for k, v in openrc_vars.items()]
    LOG.debug('Writing: %s', ''.join(lines))

    LOG.info('Writing openrc: Writing')
    common_utils.makedirs(consts.CONF_DIR)

    with open(consts.OPENRC, 'w') as f:
        f.writelines(lines)
    LOG.info('Writing openrc: Done')

    LOG.info('Source openrc: Sourcing')
    try:
        _source_file(consts.OPENRC)
    except Exception as e:
        LOG.exception('Failed to source openrc')
        return result_handler(consts.API_ERROR, str(e))
    LOG.info('Source openrc: Done')

    return result_handler(consts.API_SUCCESS, {'openrc': openrc_vars})


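# Validate an uploaded pod file as YAML and persist it to consts.POD_FILE.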
def upload_pod_file(args):
    try:
        pod_file = args['file']
    except KeyError:
        return result_handler(consts.API_ERROR, 'file must be provided')

    LOG.info('Checking file')
    # safe_load prevents an uploaded file from instantiating arbitrary objects
    data = yaml.safe_load(pod_file.read())
    if not isinstance(data, collections.Mapping):
        return result_handler(consts.API_ERROR, 'invalid yaml file')

    LOG.info('Writing file')
    with open(consts.POD_FILE, 'w') as f:
        yaml.dump(data, f, default_flow_style=False)
    LOG.info('Writing finished')

    return result_handler(consts.API_SUCCESS, {'pod_info': data})


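# Persist pod information supplied directly as a dict in the request body.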
def update_pod_file(args):
    try:
        pod_dic = args['pod']
    except KeyError:
        return result_handler(consts.API_ERROR, 'pod must be provided')
    else:
        if not isinstance(pod_dic, collections.Mapping):
            return result_handler(consts.API_ERROR, 'pod should be a dict')

    LOG.info('Writing file')
    with open(consts.POD_FILE, 'w') as f:
        yaml.dump(pod_dic, f, default_flow_style=False)
    LOG.info('Writing finished')

    return result_handler(consts.API_SUCCESS, {'pod_info': pod_dic})


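# Append "<ip> <hostname>" entries for the given mapping to the hosts file at
# consts.ETC_HOSTS.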
def update_hosts(hosts_ip):
    if not isinstance(hosts_ip, dict):
        return result_handler(consts.API_ERROR, 'Error, args should be a dict')
    LOG.info('Writing hosts: Writing')
    hosts_list = ['\n{} {}'.format(ip, host_name)
                  for host_name, ip in hosts_ip.items()]
    LOG.debug('Writing: %s', hosts_list)
    with open(consts.ETC_HOSTS, 'a') as f:
        f.writelines(hosts_list)
    LOG.info('Writing hosts: Done')
    return result_handler(consts.API_SUCCESS, 'success')