# Open storperf testcase to huawei-pod2
# yardstick.git / api / resources / env_action.py
1 ##############################################################################
2 # Copyright (c) 2016 Huawei Technologies Co.,Ltd and others.
3 #
4 # All rights reserved. This program and the accompanying materials
5 # are made available under the terms of the Apache License, Version 2.0
6 # which accompanies this distribution, and is available at
7 # http://www.apache.org/licenses/LICENSE-2.0
8 ##############################################################################
9 from __future__ import absolute_import
10
11 import errno
12 import logging
13 import os
14 import subprocess
15 import threading
16 import time
17 import uuid
18 import glob
19 import yaml
20 import collections
21 from subprocess import PIPE
22
23 from six.moves import configparser
24 from oslo_serialization import jsonutils
25 from docker import Client
26
27 from api.database.v1.handlers import AsyncTaskHandler
28 from api.utils import influx
29 from api.utils.common import result_handler
30 from yardstick.common import constants as consts
31 from yardstick.common import utils as common_utils
32 from yardstick.common import openstack_utils
33 from yardstick.common.httpClient import HttpClient
34
35
36 LOG = logging.getLogger(__name__)
37 LOG.setLevel(logging.DEBUG)
38
39 async_handler = AsyncTaskHandler()
40
41
def create_grafana(args):
    """Start grafana container creation in the background.

    Returns immediately with a generated task id; the worker thread
    records progress in the async task table.
    """
    task_id = str(uuid.uuid4())

    worker = threading.Thread(target=_create_grafana, args=(task_id,))
    worker.start()

    return result_handler(consts.API_SUCCESS, {'task_id': task_id})
49
50
def _create_grafana(task_id):
    """Worker: pull the grafana image if needed, start the container and
    configure its data source and dashboards.

    The async task record for *task_id* is marked success or error on exit.
    """
    _create_task(task_id)

    client = Client(base_url=consts.DOCKER_URL)

    try:
        LOG.info('Checking if grafana image exist')
        image = '{}:{}'.format(consts.GRAFANA_IMAGE, consts.GRAFANA_TAG)
        if not _check_image_exist(client, image):
            LOG.info('Grafana image not exist, start pulling')
            client.pull(consts.GRAFANA_IMAGE, consts.GRAFANA_TAG)

        # typo fix: "Createing" -> "Creating"
        LOG.info('Creating grafana container')
        _create_grafana_container(client)
        LOG.info('Grafana container is created')

        # Give grafana's HTTP API a moment to come up before configuring it.
        time.sleep(5)

        LOG.info('Creating data source for grafana')
        _create_data_source()

        LOG.info('Creating dashboard for grafana')
        _create_dashboard()

        _update_task_status(task_id)
        LOG.info('Finished')
    except Exception as e:
        _update_task_error(task_id, str(e))
        LOG.exception('Create grafana failed')
80
81
def _create_dashboard():
    """POST every ``*dashboard.json`` under REPOS_DIR/dashboard to grafana."""
    url = 'http://admin:admin@%s:3000/api/dashboards/db' % consts.GRAFANA_IP
    pattern = os.path.join(consts.REPOS_DIR, 'dashboard', '*dashboard.json')

    for dashboard_path in sorted(glob.iglob(pattern)):
        with open(dashboard_path) as f:
            payload = jsonutils.load(f)
        try:
            HttpClient().post(url, payload)
        except Exception:
            LOG.exception('Create dashboard %s failed', dashboard_path)
            raise
94
95
def _create_data_source():
    """Register the yardstick influxdb database as a grafana data source."""
    url = 'http://admin:admin@%s:3000/api/datasources' % consts.GRAFANA_IP
    influx_url = 'http://%s:8086' % consts.INFLUXDB_IP
    data = dict(name="yardstick",
                type="influxdb",
                access="proxy",
                url=influx_url,
                password="root",
                user="root",
                database="yardstick",
                basicAuth=True,
                basicAuthUser="admin",
                basicAuthPassword="admin",
                isDefault=False)
    try:
        HttpClient().post(url, data)
    except Exception:
        LOG.exception('Create datasources failed')
        raise
116
117
def _create_grafana_container(client):
    """Create and start a grafana container, mapping port 3000 to the host."""
    ports = [3000]
    bindings = dict((port, port) for port in ports)
    restart = {"MaximumRetryCount": 0, "Name": "always"}
    host_config = client.create_host_config(port_bindings=bindings,
                                            restart_policy=restart)

    LOG.info('Creating container')
    image = '%s:%s' % (consts.GRAFANA_IMAGE, consts.GRAFANA_TAG)
    container = client.create_container(image=image,
                                        ports=ports,
                                        detach=True,
                                        tty=True,
                                        host_config=host_config)
    LOG.info('Starting container')
    client.start(container)
134
135
136 def _check_image_exist(client, t):
137     return any(t in a['RepoTags'][0] for a in client.images() if a['RepoTags'])
138
139
def create_influxdb(args):
    """Start influxdb container creation in the background.

    Returns immediately with a generated task id; the worker thread
    records progress in the async task table.
    """
    task_id = str(uuid.uuid4())

    worker = threading.Thread(target=_create_influxdb, args=(task_id,))
    worker.start()

    return result_handler(consts.API_SUCCESS, {'task_id': task_id})
147
148
def _create_influxdb(task_id):
    """Worker: switch yardstick output to influxdb, pull/start the influxdb
    container and create its user and database.

    The async task record for *task_id* is marked success or error on exit.
    """
    _create_task(task_id)

    client = Client(base_url=consts.DOCKER_URL)

    try:
        LOG.info('Changing output to influxdb')
        _change_output_to_influxdb()

        LOG.info('Checking if influxdb image exist')
        if not _check_image_exist(client, '%s:%s' % (consts.INFLUXDB_IMAGE,
                                                     consts.INFLUXDB_TAG)):
            LOG.info('Influxdb image not exist, start pulling')
            client.pull(consts.INFLUXDB_IMAGE, tag=consts.INFLUXDB_TAG)

        # typo fix: "Createing" -> "Creating"
        LOG.info('Creating influxdb container')
        _create_influxdb_container(client)
        LOG.info('Influxdb container is created')

        # Give influxdb a moment to come up before creating user/database.
        time.sleep(5)

        LOG.info('Config influxdb')
        _config_influxdb()

        _update_task_status(task_id)

        LOG.info('Finished')
    except Exception as e:
        _update_task_error(task_id, str(e))
        LOG.exception('Creating influxdb failed')
179
180
def _create_influxdb_container(client):
    """Create and start an influxdb container, mapping ports 8083/8086."""
    ports = [8083, 8086]
    bindings = dict((port, port) for port in ports)
    restart = {"MaximumRetryCount": 0, "Name": "always"}
    host_config = client.create_host_config(port_bindings=bindings,
                                            restart_policy=restart)

    LOG.info('Creating container')
    image = '%s:%s' % (consts.INFLUXDB_IMAGE, consts.INFLUXDB_TAG)
    container = client.create_container(image=image,
                                        ports=ports,
                                        detach=True,
                                        tty=True,
                                        host_config=host_config)
    LOG.info('Starting container')
    client.start(container)
198
199
def _config_influxdb():
    """Create the yardstick user and database inside influxdb.

    Best-effort: any failure is logged and swallowed so that container
    setup continues even if influxdb is not ready yet.
    """
    try:
        client = influx.get_data_db_client()
        client.create_user(consts.INFLUXDB_USER,
                           consts.INFLUXDB_PASS,
                           consts.INFLUXDB_DB_NAME)
        client.create_database(consts.INFLUXDB_DB_NAME)
        LOG.info('Success to config influxDB')
    except Exception:
        LOG.exception('Config influxdb failed')
210
211
def _change_output_to_influxdb():
    """Point the yardstick dispatcher at the local influxdb instance.

    Reads the sample configuration, switches the dispatcher to influxdb
    and writes the result to CONF_FILE.
    """
    common_utils.makedirs(consts.CONF_DIR)

    config = configparser.ConfigParser()
    LOG.info('Reading output sample configuration')
    config.read(consts.CONF_SAMPLE_FILE)

    LOG.info('Set dispatcher to influxdb')
    config.set('DEFAULT', 'dispatcher', 'influxdb')
    target = 'http://%s:8086' % consts.INFLUXDB_IP
    config.set('dispatcher_influxdb', 'target', target)

    LOG.info('Writing to %s', consts.CONF_FILE)
    with open(consts.CONF_FILE, 'w') as f:
        config.write(f)
227
228
def prepare_env(args):
    """Start environment preparation in the background.

    Returns immediately with a generated task id; the worker thread
    records progress in the async task table.
    """
    task_id = str(uuid.uuid4())

    worker = threading.Thread(target=_prepare_env_daemon, args=(task_id,))
    worker.start()

    return result_handler(consts.API_SUCCESS, {'task_id': task_id})
236
237
238 def _already_source_openrc():
239     """Check if openrc is sourced already"""
240     return all(os.environ.get(k) for k in ['OS_AUTH_URL', 'OS_USERNAME',
241                                            'OS_PASSWORD', 'EXTERNAL_NETWORK'])
242
243
def _prepare_env_daemon(task_id):
    """Worker: ensure openrc credentials are available and load test images.

    The async task record for *task_id* is marked success or error on exit.
    Log-message fix: the old code printed 'Openrc file exist, source openrc
    file' even on the path where the file had just been fetched.
    """
    _create_task(task_id)

    try:
        _create_directories()

        rc_file = consts.OPENRC

        LOG.info('Checkout Openrc Environment variable')
        if not _already_source_openrc():
            LOG.info('Openrc variable not found in Environment')
            if not os.path.exists(rc_file):
                LOG.info('Openrc file not found')
                installer_ip = os.environ.get('INSTALLER_IP', '192.168.200.2')
                installer_type = os.environ.get('INSTALLER_TYPE', 'compass')
                LOG.info('Getting openrc file from %s', installer_type)
                _get_remote_rc_file(rc_file, installer_ip, installer_type)
                # Source now so the neutron client below has credentials.
                LOG.info('Source openrc file')
                _source_file(rc_file)
                LOG.info('Appending external network')
                _append_external_network(rc_file)
            # (Re-)source so EXTERNAL_NETWORK is picked up as well.
            LOG.info('Source openrc file')
            _source_file(rc_file)

        LOG.info('Cleaning images')
        _clean_images()

        LOG.info('Loading images')
        _load_images()

        _update_task_status(task_id)
        LOG.info('Finished')
    except Exception as e:
        _update_task_error(task_id, str(e))
        LOG.exception('Prepare env failed')
279
280
def _create_directories():
    """Ensure the yardstick configuration directory exists."""
    common_utils.makedirs(consts.CONF_DIR)
283
284
def _source_file(rc_file):
    """Load the shell variables exported by *rc_file* into os.environ."""
    common_utils.source_env(rc_file)
287
288
def _get_remote_rc_file(rc_file, installer_ip, installer_type):
    """Fetch openrc from the installer node using the releng fetch script.

    A non-zero exit status is only logged; the caller decides how to
    proceed with a missing credentials file.
    """
    fetch_script = os.path.join(consts.RELENG_DIR, consts.FETCH_SCRIPT)

    try:
        cmd = [fetch_script, '-d', rc_file, '-i', installer_type,
               '-a', installer_ip]
        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
        proc.communicate()

        if proc.returncode != 0:
            LOG.error('Failed to fetch credentials from installer')
    except OSError as e:
        # NOTE(review): tolerating EEXIST here looks odd for a Popen call;
        # kept as-is to preserve existing behaviour — confirm intent.
        if e.errno != errno.EEXIST:
            raise
304
305
def _append_external_network(rc_file):
    """Discover the external neutron network and export it in *rc_file*.

    Appends ``export EXTERNAL_NETWORK=<name>``; logs a warning and returns
    if no external network is found.
    """
    neutron = openstack_utils.get_neutron_client()
    networks = neutron.list_networks()['networks']
    external_names = (n['name'] for n in networks if n['router:external'])
    try:
        ext_network = next(external_names)
    except StopIteration:
        LOG.warning("Can't find external network")
        return

    cmd = 'export EXTERNAL_NETWORK=%s' % ext_network
    try:
        with open(rc_file, 'a') as f:
            f.write(cmd + '\n')
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise
321
322
def _clean_images():
    """Run the image cleanup script from the repo root and log its output."""
    proc = subprocess.Popen([consts.CLEAN_IMAGES_SCRIPT],
                            stdout=subprocess.PIPE,
                            cwd=consts.REPOS_DIR)
    stdout, _ = proc.communicate()
    LOG.debug(stdout)
328
329
def _load_images():
    """Run the image loading script from the repo root and log its output."""
    proc = subprocess.Popen([consts.LOAD_IMAGES_SCRIPT],
                            stdout=subprocess.PIPE,
                            cwd=consts.REPOS_DIR)
    stdout, _ = proc.communicate()
    LOG.debug(stdout)
335
336
def _create_task(task_id):
    """Insert a new async task record with status 0 (in progress)."""
    async_handler.insert({'status': 0, 'task_id': task_id})
339
340
def _update_task_status(task_id):
    """Mark the async task record as finished successfully (status 1)."""
    async_handler.update_attr(task_id, {'status': 1})
343
344
def _update_task_error(task_id, error):
    """Mark the async task record as failed (status 2) with an error text."""
    async_handler.update_attr(task_id, {'status': 2, 'error': error})
347
348
def update_openrc(args):
    """Overwrite the openrc file with the supplied variables and source it.

    *args* must contain an 'openrc' mapping of variable names to values.
    Returns an API error result when input is missing/invalid or sourcing
    fails, otherwise echoes the variables back on success.
    """
    if 'openrc' not in args:
        return result_handler(consts.API_ERROR, 'openrc must be provided')
    openrc_vars = args['openrc']
    if not isinstance(openrc_vars, collections.Mapping):
        return result_handler(consts.API_ERROR, 'args should be a dict')

    lines = ['export {}={}\n'.format(k, v) for k, v in openrc_vars.items()]
    LOG.debug('Writing: %s', ''.join(lines))

    LOG.info('Writing openrc: Writing')
    common_utils.makedirs(consts.CONF_DIR)

    with open(consts.OPENRC, 'w') as f:
        f.writelines(lines)
    LOG.info('Writing openrc: Done')

    LOG.info('Source openrc: Sourcing')
    try:
        _source_file(consts.OPENRC)
    except Exception as e:
        LOG.exception('Failed to source openrc')
        return result_handler(consts.API_ERROR, str(e))
    LOG.info('Source openrc: Done')

    return result_handler(consts.API_SUCCESS, {'openrc': openrc_vars})
377
378
def upload_pod_file(args):
    """Validate an uploaded pod file and persist it to POD_FILE.

    *args* must contain a 'file' object with a read() method. Returns an
    API error result when the file is missing or its content is not a
    YAML mapping; on success the parsed pod info is echoed back.
    """
    try:
        pod_file = args['file']
    except KeyError:
        return result_handler(consts.API_ERROR, 'file must be provided')

    LOG.info('Checking file')
    # safe_load: the content arrives via an API upload (untrusted input);
    # plain yaml.load would permit arbitrary Python object construction.
    data = yaml.safe_load(pod_file.read())
    if not isinstance(data, collections.Mapping):
        return result_handler(consts.API_ERROR, 'invalid yaml file')

    LOG.info('Writing file')
    with open(consts.POD_FILE, 'w') as f:
        yaml.dump(data, f, default_flow_style=False)
    LOG.info('Writing finished')

    return result_handler(consts.API_SUCCESS, {'pod_info': data})
396
397
def update_pod_file(args):
    """Replace the POD_FILE content with the supplied pod mapping.

    *args* must contain a 'pod' mapping. Returns an API error result when
    input is missing or not a mapping; echoes the pod info on success.
    """
    if 'pod' not in args:
        return result_handler(consts.API_ERROR, 'pod must be provided')
    pod_dic = args['pod']
    if not isinstance(pod_dic, collections.Mapping):
        return result_handler(consts.API_ERROR, 'pod should be a dict')

    LOG.info('Writing file')
    with open(consts.POD_FILE, 'w') as f:
        yaml.dump(pod_dic, f, default_flow_style=False)
    LOG.info('Writing finished')

    return result_handler(consts.API_SUCCESS, {'pod_info': pod_dic})
413
414
def update_hosts(hosts_ip):
    """Write host entries via the write_hosts.py helper script.

    *hosts_ip* is a dict serialized to JSON and fed to the helper on
    stdin (run with sudo from api/resources). Returns the helper's stderr
    as an API error on non-zero exit.

    Fix: communicate() on a binary-mode pipe requires bytes on Python 3,
    so the JSON payload is explicitly encoded; also PEP8 spacing cleanup.
    """
    if not isinstance(hosts_ip, dict):
        return result_handler(consts.API_ERROR, 'Error, args should be a dict')
    LOG.info('Writing hosts: Writing')
    LOG.debug('Writing: %s', hosts_ip)
    cmd = ["sudo", "python", "write_hosts.py"]
    p = subprocess.Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE,
                         cwd=os.path.join(consts.REPOS_DIR, "api/resources"))
    _, err = p.communicate(jsonutils.dumps(hosts_ip).encode('utf-8'))
    if p.returncode != 0:
        return result_handler(consts.API_ERROR, err)
    LOG.info('Writing hosts: Done')
    return result_handler(consts.API_SUCCESS, 'success')