1 # -*- coding: utf-8 -*-
3 # Licensed under the Apache License, Version 2.0 (the "License"); you may
4 # not use this file except in compliance with the License. You may obtain
5 # a copy of the License at
7 # http://www.apache.org/licenses/LICENSE-2.0
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
11 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
12 # License for the specific language governing permissions and limitations
15 """Classes used by collectd.py"""
23 from opnfv.deployment import factory
# Private SSH key used to reach the installer / overcloud nodes as root.
ID_RSA_PATH = '/root/.ssh/id_rsa'
# Helper script that distributes SSH keys to the barometer nodes.
SSH_KEYS_SCRIPT = '/home/opnfv/barometer/baro_utils/get_ssh_keys.sh'
# Default collectd plugin polling interval (seconds), used when a plugin's
# config file does not specify an Interval of its own.
DEF_PLUGIN_INTERVAL = 10
# Main collectd configuration file and its include directory on the nodes.
COLLECTD_CONF = '/etc/collectd.conf'
COLLECTD_CONF_DIR = '/etc/collectd/collectd.conf.d'
# Where collectd notifications get dumped, and the script that dumps them.
NOTIFICATION_FILE = '/var/log/python-notifications.dump'
COLLECTD_NOTIFICATION = '/etc/collectd_notification_dump.py'
# Apex installer IP taken from the INSTALLER_IP environment variable.
# NOTE(review): os.environ.get('INSTALLER_IP') would avoid the subshell and
# the trailing newline check_output returns — TODO confirm callers' usage.
APEX_IP = subprocess.check_output("echo $INSTALLER_IP", shell=True)
APEX_USER_STACK = 'stack'        # user on the Apex undercloud host
APEX_PKEY = '/root/.ssh/id_rsa'  # private key for Apex access
38 """Node configuration class"""
    def __init__(self, attrs):
        """Initialize a Node from one parsed row of the node-list table.

        attrs -- list of stripped column values split from a table row
        """
        self.__null = attrs[0]  # leading empty column of the table row
        # NOTE(review): assignment of attrs[1] is not visible in this view
        # of the file — presumably the node ID; confirm against full source.
        self.__name = attrs[2]  # node name column
        # empty status column is normalized to None
        self.__status = attrs[3] if attrs[3] else None
        self.__taskState = attrs[4]  # task state column
        self.__pwrState = attrs[5]   # power state column
        # drop the leading '<network>=' prefix from the address column
        self.__ip = re.sub('^[a-z]+=', '', attrs[6])
57 """Get node IP address"""
66 handler = factory.Factory.get_handler('apex',
70 nodes = handler.get_nodes()
74 class ConfigServer(object):
75 """Class to get env configuration"""
    def __init__(self, host, user, logger, priv_key=None):
        """Open an SSH session to the installer and discover the node list.

        host -- installer host to connect to
        user -- SSH user name
        logger -- logger instance used for all diagnostics
        priv_key -- optional private key; ID_RSA_PATH is the fallback

        Raises IOError when the private key file does not exist.

        NOTE(review): several original lines are missing from this view
        (e.g. self.__host/self.__user/self.__passwd assignments and the
        'attempt' counter initialization/increment); gaps annotated below.
        """
        self.__priv_key = priv_key
        self.__logger = logger
        self.__private_key_file = ID_RSA_PATH
        if not os.path.isfile(self.__private_key_file):
            # NOTE(review): bare string below looks like the tail of a
            # logger.error(...) call whose opening line is not visible.
            "Private key file '{}'".format(self.__private_key_file)
            raise IOError("Private key file '{}' not found.".format(
                self.__private_key_file))
        # get list of available nodes
        ssh, sftp = self.__open_sftp_session(
            self.__host, self.__user, self.__passwd)
        fuel_node_passed = False
        # retry the node listing up to 10 times before giving up
        while (attempt <= 10) and not fuel_node_passed:
            stdin, stdout, stderr = ssh.exec_command(
                "source stackrc; nova list")
            stderr_lines = stderr.readlines()
            # anything on stderr is treated as a failed attempt and logged
            self.__logger.warning(
                "'fuel node' command failed (try {}):".format(attempt))
            for line in stderr_lines:
                self.__logger.debug(line.strip())
            fuel_node_passed = True
            # NOTE(review): tail of a logger.info(...) call; opening missing.
            "'fuel node' command passed (try {})".format(attempt))
        if not fuel_node_passed:
            # NOTE(review): tails of a logger.error(...) call and a raise
            # statement; their opening lines are not visible here.
            "'fuel node' command failed. This was the last try.")
            "'fuel node' command failed. This was the last try.")
        node_table = stdout.readlines()\
        # skip table title and parse table values
        for entry in node_table[3:]:
            if entry[0] == '+' or entry[0] == '\n':
                # NOTE(review): separator/blank rows presumably 'continue'
                # here, and the line below is the tail of a
                # self.__nodes.append(...) call — openings not visible.
                Node([str(x.strip(' \n')) for x in entry.split('|')]))
130 def get_controllers(self):
131 # Get list of controllers
132 print self.__nodes[0]._Node__ip
134 [node for node in self.__nodes if 'controller' in node.get_name()])
136 def get_computes(self):
137 # Get list of computes
139 [node for node in self.__nodes if 'compute' in node.get_name()])
    def __open_sftp_session(self, host, user, passwd=None):
        # Connect to given host.
        """Open SSH and SFTP sessions to a host.

        Keyword arguments:
        host -- host to connect
        user -- user name to use
        passwd -- password to use

        Return tuple of SSH and SFTP client instances.

        NOTE(review): the opening of the ssh.connect(...) call and the
        final return statement are not visible in this view.
        """
        ssh = paramiko.SSHClient()
        # accept unknown host keys automatically (lab environment)
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        # try a direct access using password or private key
        if not passwd and not self.__priv_key:
            # no credentials supplied -> load the default private key file
            self.__priv_key = paramiko.RSAKey.from_private_key_file(
                self.__private_key_file)
        # connect to the server
        # NOTE(review): line below is the argument tail of ssh.connect(...).
        host, username=user, password=passwd, pkey=self.__priv_key)
        sftp = ssh.open_sftp()
        # return SFTP client instance
    def get_plugin_interval(self, compute, plugin):
        """Find the plugin interval in collectd configuration.

        Keyword arguments:
        compute -- compute node instance
        plugin -- plug-in name

        If found, return interval value, otherwise the default value"""
        default_interval = DEF_PLUGIN_INTERVAL
        compute_name = compute.get_name()
        nodes = get_apex_nodes()
        # NOTE(review): the 'for node in nodes:' header is not visible here.
        if compute_name == node.get_dict()['name']:
            stdout = node.run_cmd(
                'cat /etc/collectd/collectd.conf.d/{}.conf'.format(plugin))
            for line in stdout.split('\n'):
                if 'Interval' in line:
                    # line = line.strip('Interval')
                    # NOTE(review): parsing and returning of the found
                    # interval value is not visible in this view.
        return default_interval
    def get_plugin_config_values(self, compute, plugin, parameter):
        """Get parameter values from collectd config file.

        Keyword arguments:
        compute -- compute node instance
        plugin -- plug-in name
        parameter -- plug-in parameter

        Return list of found values."""
        compute_name = compute.get_name()
        nodes = get_apex_nodes()
        # NOTE(review): the 'for node in nodes:' header is not visible here.
        if compute_name == node.get_dict()['name']:
            stdout = node.run_cmd(
                'cat /etc/collectd/collectd.conf.d/{}.conf' .format(plugin))
            for line in stdout.split('\n'):
                # return everything after the keyword for known parameters;
                # NOTE(review): the 'parameter' argument is not consulted by
                # the visible matching — TODO confirm against full source.
                if 'Interfaces' in line:
                    return line.split(' ', 1)[1]
                elif 'Bridges' in line:
                    return line.split(' ', 1)[1]
                elif 'Cores' in line:
                    return line.split(' ', 1)[1]
        # NOTE(review): the default_values assignment is not visible here.
        return default_values
220 def execute_command(self, command, host_ip=None, ssh=None):
221 """Execute command on node and return list of lines of standard output.
225 host_ip -- IP of the node
226 ssh -- existing open SSH session to use
228 One of host_ip or ssh must not be None. If both are not None,
229 existing ssh session is used.
231 if host_ip is None and ssh is None:
232 raise ValueError('One of host_ip or ssh must not be None.')
234 ssh, sftp = self.__open_sftp_session(host_ip, 'root', 'opnfvapex')
235 stdin, stdout, stderr = ssh.exec_command(command)
236 return stdout.readlines()
    def get_ovs_interfaces(self, compute):
        """Get list of configured OVS interfaces

        Keyword arguments:
        compute -- compute node instance

        NOTE(review): the visible command ('ovs-vsctl list-br') lists
        bridges, and the return of the parsed output is not visible in
        this view — confirm against full source.
        """
        compute_name = compute.get_name()
        nodes = get_apex_nodes()
        # NOTE(review): the 'for node in nodes:' header is not visible here.
        if compute_name == node.get_dict()['name']:
            stdout = node.run_cmd('sudo ovs-vsctl list-br')
    def is_gnocchi_running(self, controller):
        """Check whether Gnocchi is running on controller.

        Keyword arguments:
        controller -- controller node instance

        Return boolean value whether Gnocchi is running.
        """
        gnocchi_present = False
        controller_name = controller.get_name()
        nodes = get_apex_nodes()
        # NOTE(review): the 'for node in nodes:' header is not visible here.
        if controller_name == node.get_dict()['name']:
            # NOTE(review): line below is the argument tail of a call that
            # copies the OpenStack credentials file; opening line missing.
            '/home/opnfv/functest/conf/openstack.creds',
            # query the service catalog for a gnocchi entry
            stdout = node.run_cmd(
                "source overcloudrc.v3;"
                + "openstack catalog list | grep gnocchi")
            if 'gnocchi' in stdout:
                gnocchi_present = True
        return gnocchi_present
    def is_aodh_running(self, controller):
        """Check whether aodh service is running on controller

        Keyword arguments:
        controller -- controller node instance

        NOTE(review): the aodh_present flag setup and the return statement
        are not visible in this view.
        """
        controller_name = controller.get_name()
        nodes = get_apex_nodes()
        # NOTE(review): the 'for node in nodes:' header is not visible here.
        if controller_name == node.get_dict()['name']:
            # NOTE(review): line below is the argument tail of a call that
            # copies the OpenStack credentials file; opening line missing.
            '/home/opnfv/functest/conf/openstack.creds',
            # query the service catalog for an aodh entry
            stdout = node.run_cmd(
                "source overcloudrc.v3;"
                + "openstack catalog list | grep aodh")
    def is_mcelog_installed(self, compute, package):
        """Check whether package exists on compute node.

        Keyword arguments:
        compute -- compute node instance
        package -- Linux package to search for

        Return boolean value whether package is installed.

        NOTE(review): the visible grep is hard-coded to 'mcelog' and does
        not use the 'package' argument — TODO confirm against full source.
        """
        compute_name = compute.get_name()
        nodes = get_apex_nodes()
        # NOTE(review): the 'for node in nodes:' header is not visible here.
        if compute_name == node.get_dict()['name']:
            stdout = node.run_cmd(
                'yum list installed | grep mcelog')
            if 'mcelog' in stdout:
                # NOTE(review): True/False returns are not visible here.
    def is_libpqos_on_node(self, compute):
        """Check whether libpqos is present on compute node"""
        # NOTE(review): early lines and the return statements of this
        # method are not visible in this view.
        compute_name = compute.get_name()
        nodes = get_apex_nodes()
        # NOTE(review): the 'for node in nodes:' header is not visible here.
        if compute_name == node.get_dict()['name']:
            # look for the library in the conventional install location
            stdout = node.run_cmd('ls /usr/local/lib/ | grep libpqos')
            if 'libpqos' in stdout:
                # NOTE(review): True/False returns are not visible here.
    def check_aodh_plugin_included(self, compute):
        """Check if aodh plugin is included in collectd.conf file.
        If not, try to enable it.

        Keyword arguments:
        compute -- compute node instance

        Return boolean value whether AODH plugin is included
        or it's enabling was successful.
        """
        compute_name = compute.get_name()
        nodes = get_apex_nodes()
        # NOTE(review): the 'for node in nodes:' header is not visible here.
        if compute_name == node.get_dict()['name']:
            # list the collectd include dir and look for aodh.conf
            aodh_conf = node.run_cmd('ls /etc/collectd/collectd.conf.d')
            if 'aodh.conf' not in aodh_conf:
                # NOTE(review): tail of a logger call; opening line missing.
                "AODH Plugin not included in compute node")
                # NOTE(review): tail of a logger.info(...).format(...) call;
                # opening line and format arguments not visible here.
                "AODH plugin present in compute node {}" .format(
    def check_gnocchi_plugin_included(self, compute):
        """Check if gnocchi plugin is included in collectd.conf file.
        If not, try to enable it.

        Keyword arguments:
        compute -- compute node instance

        Return boolean value whether gnocchi plugin is included
        or it's enabling was successful.
        """
        compute_name = compute.get_name()
        nodes = get_apex_nodes()
        # NOTE(review): the 'for node in nodes:' header is not visible here.
        if compute_name == node.get_dict()['name']:
            # list the collectd include dir and look for the ceilometer conf
            gnocchi_conf = node.run_cmd('ls /etc/collectd/collectd.conf.d')
            if 'collectd-ceilometer-plugin.conf' not in gnocchi_conf:
                self.__logger.info("Gnocchi Plugin not included")
                # NOTE(review): tail of a logger.info(...).format(...) call;
                # opening line missing. The 'Gnochi' typo is in a runtime
                # string, so it is left unchanged here.
                "Gnochi plugin available in compute node {}" .format(
            self, compute, plugins, error_plugins, create_backup=True):
        # Enable plugins on compute node; install csv.conf into the collectd
        # include directory. Return boolean indicating success.
        # NOTE(review): the 'def enable_plugins(' opening line and several
        # body lines are not visible in this view; 'error_plugins' is
        # presumably a list collecting enable failures and 'create_backup'
        # presumably backs up the existing config — TODO confirm.
        """Enable plugins on compute node

        Keyword arguments:
        compute -- compute node instance
        plugins -- list of plugins to be enabled

        Return boolean value indicating whether function was successful.
        """
        plugins = sorted(plugins)
        compute_name = compute.get_name()
        nodes = get_apex_nodes()
        # NOTE(review): the 'for node in nodes:' header is not visible here.
        if compute_name == node.get_dict()['name']:
            # NOTE(review): lines below are the argument tails of a call
            # that uploads csv.conf and of a command moving it into the
            # collectd include directory; opening lines are not visible.
            '/usr/local/lib/python2.7/dist-packages/baro_tests/'
            + 'csv.conf', 'csv.conf')
            + '/etc/collectd/collectd.conf.d/csv.conf')
    def restart_collectd(self, compute):
        """Restart collectd on compute node.

        Keyword arguments:
        compute -- compute node instance

        Return tuple with boolean indicating success and list of warnings
        received during collectd start.
        """
        compute_name = compute.get_name()
        nodes = get_apex_nodes()

        def get_collectd_processes(compute_node):
            """Get number of running collectd processes.

            Keyword arguments:
            compute_node -- node instance in which to check

            NOTE(review): the counting and return of the pgrep output are
            not visible in this view.
            """
            stdout = compute_node.run_cmd("pgrep collectd")

        # NOTE(review): the 'for node in nodes:' header is not visible here.
        if compute_name == node.get_dict()['name']:
            # node.run_cmd('su; "opnfvapex"')
            self.__logger.info('Stopping collectd service...')
            node.run_cmd('sudo systemctl stop collectd')
            # NOTE(review): a sleep/wait between stop and this check is
            # presumably here — not visible in this view.
            if get_collectd_processes(node):
                self.__logger.error('Collectd is still running...')
            self.__logger.info('Starting collectd service...')
            stdout = node.run_cmd('sudo systemctl start collectd')
            # NOTE(review): line below is the tail of a 'warning = [...]'
            # comprehension collecting WARN lines; opening line missing.
            output.strip() for output in stdout if 'WARN: ' in output]
            if get_collectd_processes(node) == 0:
                self.__logger.error('Collectd is still not running...')
                return False, warning
    # Verify that collectd samples reach AODH: on each controller, list
    # alarms matching the criteria, read an alarm timestamp, wait, re-read,
    # and compare the two timestamps.
    # NOTE(review): part of the signature (after 'logger,') and several
    # body lines (credential-copy opening, the sleep, call closings) are
    # not visible in this view; gaps are annotated below.
    def test_plugins_with_aodh(
            self, compute, plugin_interval, logger,
        nodes = get_apex_nodes()
        # NOTE(review): the 'for node in nodes:' header is not visible here.
        if node.is_controller():
            self.__logger.info('Getting AODH Alarm list on {}' .format(
                (node.get_dict()['name'])))
            # NOTE(review): argument tail of a credentials-copy call.
            '/home/opnfv/functest/conf/openstack.creds',
            stdout = node.run_cmd(
                "source overcloudrc.v3;"
                + "aodh alarm list | grep {0} | grep {1}"
                .format(criteria_list, compute))
            for line in stdout.splitlines():
                # strip the table's column separators before splitting
                line = line.replace('|', "")
                metric_id = line.split()[0]
                stdout = node.run_cmd(
                    'source overcloudrc.v3; aodh alarm show {}' .format(
                # NOTE(review): format argument/closing of the call above
                # is not visible in this view.
                # skip the table header and footer rows
                for line in stdout.splitlines()[3: -1]:
                    line = line.replace('|', "")
                    if line.split()[0] == 'timestamp':
                        # timestamp captured before waiting
                        timestamps1 = line.split()[1]
                        self.__logger.info("timestamp_before = {}" .format(
                # NOTE(review): a ~12 second sleep is presumably here.
                stdout = node.run_cmd(
                    "source overcloudrc.v3; aodh alarm show {}" .format(
                for line in stdout.splitlines()[3:-1]:
                    line = line.replace('|', "")
                    if line.split()[0] == 'timestamp':
                        # timestamp captured after waiting
                        timestamps2 = line.split()[1]
                        self.__logger.info("timestamp_after = {}" .format(
                # unchanged timestamp means no new samples arrived
                if timestamps1 == timestamps2:
                    # NOTE(review): tail of a logger/return statement.
                    "Data not updated after interval of 12 seconds")
                    self.__logger.info("PASS")
    # Verify that collectd samples reach Gnocchi: on each controller, list
    # metrics matching the criteria, read the newest measure timestamp,
    # wait, re-read, and compare the two timestamps.
    # NOTE(review): part of the signature (after 'logger,'), several body
    # lines, and the method's tail after "PASS" are not visible in this
    # view; gaps are annotated below.
    def test_plugins_with_gnocchi(
            self, compute, plugin_interval, logger,
        nodes = get_apex_nodes()
        # NOTE(review): the 'for node in nodes:' header is not visible here.
        if node.is_controller():
            self.__logger.info('Getting gnocchi metric list on {}' .format(
                (node.get_dict()['name'])))
            # NOTE(review): argument tail of a credentials-copy call.
            '/home/opnfv/functest/conf/openstack.creds',
            stdout = node.run_cmd(
                "source overcloudrc.v3;"
                + "gnocchi metric list | grep {0} | grep {1}"
                .format(criteria_list, compute))
            for line in stdout.splitlines():
                # strip the table's column separators before splitting
                line = line.replace('|', "")
                metric_id = line.split()[0]
                stdout = node.run_cmd(
                    'source overcloudrc.v3;gnocchi measures show {}'.format(
                # skip the table header and footer rows
                for line in stdout.splitlines()[3: -1]:
                    # NOTE(review): guard lines before this are missing.
                    timestamps1 = line.replace('|', "")
                    timestamps1 = timestamps1.split()[0]
                # NOTE(review): a sleep between the two reads is presumably
                # here — not visible in this view.
                stdout = node.run_cmd(
                    "source overcloudrc.v3;gnocchi measures show {}".format(
                for line in stdout.splitlines()[3:-1]:
                    timestamps2 = line.replace('|', "")
                    timestamps2 = timestamps2.split()[0]
                # unchanged timestamp means no new measures arrived
                if timestamps1 == timestamps2:
                    self.__logger.info("Data not updated after 12 seconds")
                    self.__logger.info("PASS")