# Commit subject: Adding improvements to SNMP test cases
# Source: barometer.git / baro_tests / config_server.py
1 # -*- coding: utf-8 -*-
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License"); you may
4 # not use this file except in compliance with the License. You may obtain
5 # a copy of the License at
6 #
7 #      http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
11 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
12 # License for the specific language governing permissions and limitations
13 # under the License.
14
15 """Classes used by collectd.py"""
16
17 import paramiko
18 import time
19 import os.path
20 import os
21 import re
22 import subprocess
23 from opnfv.deployment import factory
ID_RSA_PATH = '/root/.ssh/id_rsa'
SSH_KEYS_SCRIPT = '/home/opnfv/barometer/baro_utils/get_ssh_keys.sh'
DEF_PLUGIN_INTERVAL = 10
COLLECTD_CONF = '/etc/collectd.conf'
COLLECTD_CONF_DIR = '/etc/collectd/collectd.conf.d'
NOTIFICATION_FILE = '/var/log/python-notifications.dump'
COLLECTD_NOTIFICATION = '/etc/collectd_notification_dump.py'
# Read the installer IP straight from the environment instead of shelling
# out to `echo $INSTALLER_IP` with shell=True; the subprocess form also
# left a trailing newline on the value.
APEX_IP = os.environ.get('INSTALLER_IP', '')
APEX_USER = 'root'
APEX_USER_STACK = 'stack'
APEX_PKEY = '/root/.ssh/id_rsa'
35
36
class Node(object):
    """Node configuration parsed from one row of `nova list` output.

    `attrs` is one table row split on '|', e.g.
    ['', '<id>', '<name>', '<status>', '<task state>', '<power state>',
     'ctlplane=192.0.2.1'].
    """
    def __init__(self, attrs):
        self.__null = attrs[0]
        self.__id = attrs[1]
        self.__name = attrs[2]
        self.__status = attrs[3] if attrs[3] else None
        self.__taskState = attrs[4]
        self.__pwrState = attrs[5]
        # Strip the network-name prefix, e.g. 'ctlplane=10.0.0.1' -> '10.0.0.1'
        self.__ip = re.sub('^[a-z]+=', '', attrs[6])
        # BUG FIX: __roles was never assigned, so get_roles() always raised
        # AttributeError. `nova list` does not report roles, so default to
        # an empty list.
        self.__roles = []

    def get_name(self):
        """Get node name"""
        return self.__name

    def get_id(self):
        """Get node ID"""
        return self.__id

    def get_ip(self):
        """Get node IP address"""
        return self.__ip

    def get_roles(self):
        """Get node roles (empty list; not reported by `nova list`)"""
        return self.__roles
63
64
def get_apex_nodes():
    """Return the node list of the Apex deployment via the OPNFV handler."""
    deployment = factory.Factory.get_handler(
        'apex', APEX_IP, APEX_USER_STACK, APEX_PKEY)
    return deployment.get_nodes()
72
73
74 class ConfigServer(object):
75     """Class to get env configuration"""
76     def __init__(self, host, user, logger, priv_key=None):
77         self.__host = host
78         self.__user = user
79         self.__passwd = None
80         self.__priv_key = priv_key
81         self.__nodes = list()
82         self.__logger = logger
83
84         self.__private_key_file = ID_RSA_PATH
85         if not os.path.isfile(self.__private_key_file):
86             self.__logger.error(
87                 "Private key file '{}'".format(self.__private_key_file)
88                 + " not found.")
89             raise IOError("Private key file '{}' not found.".format(
90                 self.__private_key_file))
91
92         # get list of available nodes
93         ssh, sftp = self.__open_sftp_session(
94             self.__host, self.__user, self.__passwd)
95         attempt = 1
96         fuel_node_passed = False
97
98         while (attempt <= 10) and not fuel_node_passed:
99             stdin, stdout, stderr = ssh.exec_command(
100                 "source stackrc; nova list")
101             stderr_lines = stderr.readlines()
102             if stderr_lines:
103                 self.__logger.warning(
104                     "'Apex node' command failed (try {}):".format(attempt))
105                 for line in stderr_lines:
106                     self.__logger.debug(line.strip())
107             else:
108                 fuel_node_passed = True
109                 if attempt > 1:
110                     self.__logger.info(
111                         "'Apex node' command passed (try {})".format(attempt))
112             attempt += 1
113         if not fuel_node_passed:
114             self.__logger.error(
115                 "'Apex node' command failed. This was the last try.")
116             raise OSError(
117                 "'Apex node' command failed. This was the last try.")
118         node_table = stdout.readlines()\
119
120         # skip table title and parse table values
121
122         for entry in node_table[3:]:
123             if entry[0] == '+' or entry[0] == '\n':
124                 print entry
125                 pass
126             else:
127                 self.__nodes.append(
128                     Node([str(x.strip(' \n')) for x in entry.split('|')]))
129
130     def get_controllers(self):
131         # Get list of controllers
132         print self.__nodes[0]._Node__ip
133         return (
134             [node for node in self.__nodes if 'controller' in node.get_name()])
135
136     def get_computes(self):
137         # Get list of computes
138         return (
139             [node for node in self.__nodes if 'compute' in node.get_name()])
140
141     def get_nodes(self):
142         # Get list of nodes
143         return self.__nodes
144
145     def __open_sftp_session(self, host, user, passwd=None):
146         # Connect to given host.
147         """Keyword arguments:
148         host -- host to connect
149         user -- user to use
150         passwd -- password to use
151
152         Return tuple of SSH and SFTP client instances.
153         """
154         # create SSH client
155         ssh = paramiko.SSHClient()
156         ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
157
158         # try a direct access using password or private key
159         if not passwd and not self.__priv_key:
160             # get private key
161             self.__priv_key = paramiko.RSAKey.from_private_key_file(
162                 self.__private_key_file)
163
164         # connect to the server
165         ssh.connect(
166             host, username=user, password=passwd, pkey=self.__priv_key)
167         sftp = ssh.open_sftp()
168
169         # return SFTP client instance
170         return ssh, sftp
171
172     def get_plugin_interval(self, compute, plugin):
173         """Find the plugin interval in collectd configuration.
174
175         Keyword arguments:
176         compute -- compute node instance
177         plugin -- plug-in name
178
179         If found, return interval value, otherwise the default value"""
180         default_interval = DEF_PLUGIN_INTERVAL
181         compute_name = compute.get_name()
182         nodes = get_apex_nodes()
183         for node in nodes:
184             if compute_name == node.get_dict()['name']:
185                 stdout = node.run_cmd(
186                     'cat /etc/collectd/collectd.conf.d/{}.conf'.format(plugin))
187                 for line in stdout.split('\n'):
188                     if 'Interval' in line:
189                         return 1
190         return default_interval
191
192     def get_plugin_config_values(self, compute, plugin, parameter):
193         """Get parameter values from collectd config file.
194
195         Keyword arguments:
196         compute -- compute node instance
197         plugin -- plug-in name
198         parameter -- plug-in parameter
199
200         Return list of found values."""
201         default_values = []
202         compute_name = compute.get_name()
203         nodes = get_apex_nodes()
204         for node in nodes:
205             if compute_name == node.get_dict()['name']:
206                 stdout = node.run_cmd(
207                     'cat /etc/collectd/collectd.conf.d/{}.conf' .format(plugin))
208                 for line in stdout.split('\n'):
209                     if 'Interfaces' in line:
210                         return line.split(' ', 1)[1]
211                     elif 'Bridges' in line:
212                         return line.split(' ', 1)[1]
213                     elif 'Cores' in line:
214                         return line.split(' ', 1)[1]
215                     else:
216                         pass
217         return default_values
218
219     def execute_command(self, command, host_ip=None, ssh=None):
220         """Execute command on node and return list of lines of standard output.
221
222         Keyword arguments:
223         command -- command
224         host_ip -- IP of the node
225         ssh -- existing open SSH session to use
226
227         One of host_ip or ssh must not be None. If both are not None,
228         existing ssh session is used.
229         """
230         if host_ip is None and ssh is None:
231             raise ValueError('One of host_ip or ssh must not be None.')
232         if ssh is None:
233             ssh, sftp = self.__open_sftp_session(host_ip, 'root', 'opnfvapex')
234         stdin, stdout, stderr = ssh.exec_command(command)
235         return stdout.readlines()
236
237     def get_ovs_interfaces(self, compute):
238         """Get list of configured OVS interfaces
239
240         Keyword arguments:
241         compute -- compute node instance
242         """
243         compute_name = compute.get_name()
244         nodes = get_apex_nodes()
245         for node in nodes:
246             if compute_name == node.get_dict()['name']:
247                 stdout = node.run_cmd('sudo ovs-vsctl list-br')
248         return stdout
249
250     def is_gnocchi_running(self, controller):
251         """Check whether Gnocchi is running on controller.
252
253         Keyword arguments:
254         controller -- controller node instance
255
256         Return boolean value whether Gnocchi is running.
257         """
258         gnocchi_present = False
259         controller_name = controller.get_name()
260         nodes = get_apex_nodes()
261         for node in nodes:
262             if controller_name == node.get_dict()['name']:
263                 node.put_file(
264                     '/home/opnfv/functest/conf/openstack.creds',
265                     'overcloudrc.v3')
266                 stdout = node.run_cmd(
267                     "source overcloudrc.v3;"
268                     + "openstack catalog list | grep gnocchi")
269                 if 'gnocchi' in stdout:
270                     gnocchi_present = True
271         return gnocchi_present
272
273     def is_aodh_running(self, controller):
274         """Check whether aodh service is running on controller
275         """
276         aodh_present = False
277         controller_name = controller.get_name()
278         nodes = get_apex_nodes()
279         for node in nodes:
280             if controller_name == node.get_dict()['name']:
281                 node.put_file(
282                     '/home/opnfv/functest/conf/openstack.creds',
283                     'overcloudrc.v3')
284                 stdout = node.run_cmd(
285                     "source overcloudrc.v3;"
286                     + "openstack catalog list | grep aodh")
287                 if 'aodh' in stdout:
288                     aodh_present = True
289         return aodh_present
290
291     def is_mcelog_installed(self, compute, package):
292         """Check whether package exists on compute node.
293
294         Keyword arguments:
295         compute -- compute node instance
296         package -- Linux package to search for
297
298         Return boolean value whether package is installed.
299         """
300         compute_name = compute.get_name()
301         nodes = get_apex_nodes()
302         for node in nodes:
303             if compute_name == node.get_dict()['name']:
304                 stdout = node.run_cmd(
305                     'yum list installed | grep mcelog')
306                 if 'mcelog' in stdout:
307                     return 1
308                 else:
309                     return 0
310
311     def is_libpqos_on_node(self, compute):
312         """Check whether libpqos is present on compute node"""
313
314         compute_name = compute.get_name()
315         nodes = get_apex_nodes()
316         for node in nodes:
317             if compute_name == node.get_dict()['name']:
318                 stdout = node.run_cmd('ls /usr/local/lib/ | grep libpqos')
319                 if 'libpqos' in stdout:
320                     return True
321         return False
322
323     def check_aodh_plugin_included(self, compute):
324         """Check if aodh plugin is included in collectd.conf file.
325         If not, try to enable it.
326
327         Keyword arguments:
328         compute -- compute node instance
329
330         Return boolean value whether AODH plugin is included
331         or it's enabling was successful.
332         """
333         compute_name = compute.get_name()
334         nodes = get_apex_nodes()
335         for node in nodes:
336             if compute_name == node.get_dict()['name']:
337                 aodh_conf = node.run_cmd('ls /etc/collectd/collectd.conf.d')
338                 if 'aodh.conf' not in aodh_conf:
339                     self.__logger.info(
340                         "AODH Plugin not included in compute node")
341                     return False
342                 else:
343                     self.__logger.info(
344                         "AODH plugin present in compute node {}" .format(
345                             compute_name))
346                     return True
347         return True
348
349     def check_gnocchi_plugin_included(self, compute):
350         """Check if gnocchi plugin is included in collectd.conf file.
351         If not, try to enable it.
352
353         Keyword arguments:
354         compute -- compute node instance
355
356         Return boolean value whether gnocchi plugin is included
357         or it's enabling was successful.
358         """
359         compute_name = compute.get_name()
360         nodes = get_apex_nodes()
361         for node in nodes:
362             if compute_name == node.get_dict()['name']:
363                 gnocchi_conf = node.run_cmd('ls /etc/collectd/collectd.conf.d')
364                 if 'collectd-ceilometer-plugin.conf' not in gnocchi_conf:
365                     self.__logger.info("Gnocchi Plugin not included")
366                     return False
367                 else:
368                     self.__logger.info(
369                         "Gnochi plugin available in compute node {}" .format(
370                             compute_name))
371                     return True
372         return True
373
374     def check_snmp_plugin_included(self, compute):
375         """Check if SNMP plugin is active in compute node.
376         """
377         snmp_mib = '/usr/share/snmp/mibs/Intel-Rdt.txt'
378         snmp_string = 'INTEL-RDT-MIB::intelRdt'
379         compute_name = compute.get_name()
380         nodes = get_apex_nodes()
381         for node in nodes:
382             if compute_name == node.get_dict()['name']:
383                 stdout = node.run_cmd(
384                     'snmpwalk -v2c -m {0} -c public localhost {1}' .format(
385                         snmp_mib, snmp_string))
386                 self.__logger.info("snmp output = {}" .format(stdout))
387                 if 'OID' in stdout:
388                     return False
389                 else:
390                     return True
391
392     def enable_plugins(
393             self, compute, plugins, error_plugins, create_backup=True):
394         """Enable plugins on compute node
395
396         Keyword arguments:
397         compute -- compute node instance
398         plugins -- list of plugins to be enabled
399
400         Return boolean value indicating whether function was successful.
401         """
402         plugins = sorted(plugins)
403         compute_name = compute.get_name()
404         nodes = get_apex_nodes()
405         for node in nodes:
406             if compute_name == node.get_dict()['name']:
407                 node.put_file(
408                     '/usr/local/lib/python2.7/dist-packages/baro_tests/'
409                     + 'csv.conf', 'csv.conf')
410                 node.run_cmd(
411                     'sudo cp csv.conf '
412                     + '/etc/collectd/collectd.conf.d/csv.conf')
413         return True
414
415     def restart_collectd(self, compute):
416         """Restart collectd on compute node.
417
418         Keyword arguments:
419         compute -- compute node instance
420
421         Retrun tuple with boolean indicating success and list of warnings
422         received during collectd start.
423         """
424         compute_name = compute.get_name()
425         nodes = get_apex_nodes()
426
427         def get_collectd_processes(compute_node):
428             """Get number of running collectd processes.
429
430             Keyword arguments:
431             ssh_session -- instance of SSH session in which to check
432                 for processes
433             """
434             stdout = compute_node.run_cmd("pgrep collectd")
435             return len(stdout)
436
437         for node in nodes:
438             if compute_name == node.get_dict()['name']:
439                 # node.run_cmd('su; "opnfvapex"')
440                 self.__logger.info('Stopping collectd service...')
441                 node.run_cmd('sudo systemctl stop collectd')
442                 time.sleep(10)
443                 if get_collectd_processes(node):
444                     self.__logger.error('Collectd is still running...')
445                     return False, []
446                 self.__logger.info('Starting collectd service...')
447                 stdout = node.run_cmd('sudo systemctl start collectd')
448                 time.sleep(10)
449                 warning = [
450                     output.strip() for output in stdout if 'WARN: ' in output]
451                 if get_collectd_processes(node) == 0:
452                     self.__logger.error('Collectd is still not running...')
453                     return False, warning
454         return True, warning
455
    def test_plugins_with_aodh(
            self, compute, plugin_interval, logger,
            criteria_list=[]):
        """Check that an AODH alarm's timestamp advances over time.

        Keyword arguments:
        compute -- compute node name used to filter `aodh alarm list`
        plugin_interval -- plug-in collection interval (currently unused)
        logger -- logger instance (unused; self.__logger is used instead)
        criteria_list -- grep pattern for `aodh alarm list`

        Returns True when the first alarm's timestamp changed after a
        12-second wait, False when it did not; returns None implicitly
        when no controller or no alarm line matched.
        NOTE(review): the mutable default `criteria_list=[]` is shared
        between calls; it is not mutated here, but verify callers.
        """

        metric_id = {}
        timestamps1 = {}
        timestamps2 = {}
        nodes = get_apex_nodes()
        for node in nodes:
            if node.is_controller():
                self.__logger.info('Getting AODH Alarm list on {}' .format(
                    (node.get_dict()['name'])))
                # Credentials must be on the controller to run the aodh CLI.
                node.put_file(
                    '/home/opnfv/functest/conf/openstack.creds',
                    'overcloudrc.v3')
                stdout = node.run_cmd(
                    "source overcloudrc.v3;"
                    + "aodh alarm list | grep {0} | grep {1}"
                    .format(criteria_list, compute))
                for line in stdout.splitlines():
                    # Table rows look like '| <id> | ... |'; strip the pipes
                    # and take the first column as the alarm id.
                    line = line.replace('|', "")
                    metric_id = line.split()[0]
                    stdout = node.run_cmd(
                        'source overcloudrc.v3; aodh alarm show {}' .format(
                            metric_id))
                    # First snapshot of the alarm's 'timestamp' field
                    # (skip the 3 header lines and trailing border).
                    for line in stdout.splitlines()[3: -1]:
                        line = line.replace('|', "")
                        if line.split()[0] == 'timestamp':
                            timestamps1 = line.split()[1]
                        else:
                            pass
                    time.sleep(12)
                    # Second snapshot after the wait; a changed value means
                    # samples are still being published.
                    stdout = node.run_cmd(
                        "source overcloudrc.v3; aodh alarm show {}" .format(
                            metric_id))
                    for line in stdout.splitlines()[3:-1]:
                        line = line.replace('|', "")
                        if line.split()[0] == 'timestamp':
                            timestamps2 = line.split()[1]
                        else:
                            pass
                    if timestamps1 == timestamps2:
                        self.__logger.info(
                            "Data not updated after interval of 12 seconds")
                        return False
                    else:
                        self.__logger.info("PASS")
                        return True
504
    def test_plugins_with_gnocchi(
            self, compute, plugin_interval, logger,
            criteria_list=[]):
        """Check that a Gnocchi metric keeps receiving new measures.

        Keyword arguments:
        compute -- compute node name used to filter `gnocchi metric list`
        plugin_interval -- plug-in collection interval (currently unused)
        logger -- logger instance (unused; self.__logger is used instead)
        criteria_list -- grep pattern for `gnocchi metric list`

        Returns True when the metric's last measure timestamp changed
        after a 10-second wait, False when it did not; returns None
        implicitly when no controller or no metric line matched.
        NOTE(review): the mutable default `criteria_list=[]` is shared
        between calls; it is not mutated here, but verify callers.
        """

        metric_id = {}
        timestamps1 = {}
        timestamps2 = {}
        nodes = get_apex_nodes()
        for node in nodes:
            if node.is_controller():
                self.__logger.info('Getting gnocchi metric list on {}' .format(
                    (node.get_dict()['name'])))
                # Credentials must be on the controller to run the
                # gnocchi CLI.
                node.put_file(
                    '/home/opnfv/functest/conf/openstack.creds',
                    'overcloudrc.v3')
                stdout = node.run_cmd(
                    "source overcloudrc.v3;"
                    + "gnocchi metric list | grep {0} | grep {1}"
                    .format(criteria_list, compute))
                for line in stdout.splitlines():
                    # Table rows look like '| <id> | ... |'; strip the pipes
                    # and take the first column as the metric id.
                    line = line.replace('|', "")
                    metric_id = line.split()[0]
                    stdout = node.run_cmd(
                        'source overcloudrc.v3;gnocchi measures show {}'.format(
                            metric_id))
                    # First snapshot: keep the timestamp of the last data
                    # row (border rows start with '+').
                    for line in stdout.splitlines()[3: -1]:
                        if line[0] == '+':
                            pass
                        else:
                            timestamps1 = line.replace('|', "")
                            timestamps1 = timestamps1.split()[0]
                    time.sleep(10)
                    # Second snapshot after the wait; a changed value means
                    # new measures arrived.
                    stdout = node.run_cmd(
                        "source overcloudrc.v3;gnocchi measures show {}".format(
                            metric_id))
                    for line in stdout.splitlines()[3:-1]:
                        if line[0] == '+':
                            pass
                        else:
                            timestamps2 = line.replace('|', "")
                            timestamps2 = timestamps2.split()[0]
                    if timestamps1 == timestamps2:
                        self.__logger.info("Data not updated after 12 seconds")
                        return False
                    else:
                        self.__logger.info("PASS")
                        return True
552
553     def test_plugins_with_snmp(
554             self, compute, plugin_interval, logger, plugin, snmp_mib_files=[],
555             snmp_mib_strings=[], snmp_in_commands=[]):
556
557         if plugin == 'hugepages' or 'intel_rdt' or 'mcelog':
558             nodes = get_apex_nodes()
559             for node in nodes:
560                 if compute == node.get_dict()['name']:
561                     stdout = node.run_cmd(
562                         'snmpwalk -v2c -m {0} -c public localhost {1}' .format(
563                             snmp_mib_files, snmp_mib_strings))
564                     self.__logger.info("{}" .format(stdout))
565                     if stdout is None:
566                         self.__logger.info("No output from snmpwalk")
567                         return False
568                     elif 'OID' in stdout:
569                         self.__logger.info("SNMP query failed")
570                         return False
571                     else:
572                         counter1 = stdout.split()[3]
573                     time.sleep(10)
574                     stdout = node.run_cmd(
575                         'snmpwalk -v2c -m {0} -c public localhost {1}' .format(
576                             snmp_mib_files, snmp_mib_strings))
577                     self.__logger.info("{}" .format(stdout))
578                     if stdout is None:
579                         self.__logger.info("No output from snmpwalk")
580                     elif 'OID' in stdout:
581                         self.__logger.info(
582                             "SNMP query failed during second check")
583                         self.__logger.info("waiting for 10 sec")
584                         time.sleep(10)
585                     stdout = node.run_cmd(
586                         'snmpwalk -v2c -m {0} -c public localhost {1}' .format(
587                             snmp_mib_files, snmp_mib_strings))
588                     self.__logger.info("{}" .format(stdout))
589                     if stdout is None:
590                         self.__logger.info("No output from snmpwalk")
591                     elif 'OID' in stdout:
592                         self.__logger.info("SNMP query failed again")
593                         self.__logger.info("Failing this test case")
594                         return False
595                     else:
596                         counter2 = stdout.split()[3]
597
598                     if counter1 == counter2:
599                         return False
600                     else:
601                         return True
602         else:
603             return False