X-Git-Url: https://gerrit.opnfv.org/gerrit/gitweb?a=blobdiff_plain;f=functest%2Fci%2Frun_tests.py;h=b95e1008b42a87401738b96a7bffb8b867e04283;hb=7f3ab61c2a072350f55a22bcd6a995ee18a3ba3e;hp=af45c3211da3b461e11ba46e502808f2f36f1021;hpb=a28e2b40877f022f6cc8bbc3ad9b586ef3dd126c;p=functest.git

diff --git a/functest/ci/run_tests.py b/functest/ci/run_tests.py
old mode 100755
new mode 100644
index af45c3211..b95e1008b
--- a/functest/ci/run_tests.py
+++ b/functest/ci/run_tests.py
@@ -14,6 +14,7 @@ import importlib
 import logging
 import logging.config
 import os
+import pkg_resources
 import re
 import sys
@@ -201,7 +202,7 @@ class Runner(object):
         _tiers = tb.TierBuilder(
             CONST.__getattribute__('INSTALLER_TYPE'),
             CONST.__getattribute__('DEPLOY_SCENARIO'),
-            CONST.__getattribute__("functest_testcases_yaml"))
+            pkg_resources.resource_filename('functest', 'ci/testcases.yaml'))
 
         if kwargs['noclean']:
             self.clean_flag = False
@@ -251,25 +252,27 @@ class Runner(object):
             msg.add_row([env_var, CONST.__getattribute__(env_var)])
         logger.info("Deployment description: \n\n%s\n", msg)
 
-        msg = prettytable.PrettyTable(
-            header_style='upper', padding_width=5,
-            field_names=['test case', 'project', 'tier', 'duration', 'result'])
-        for test_case in self.executed_test_cases:
-            result = 'PASS' if(test_case.is_successful(
-            ) == test_case.EX_OK) else 'FAIL'
-            msg.add_row([test_case.case_name, test_case.project_name,
-                         _tiers.get_tier_name(test_case.case_name),
-                         test_case.get_duration(), result])
-        logger.info("FUNCTEST REPORT: \n\n%s\n", msg)
+        if len(self.executed_test_cases) > 1:
+            msg = prettytable.PrettyTable(
+                header_style='upper', padding_width=5,
+                field_names=['test case', 'project', 'tier',
+                             'duration', 'result'])
+            for test_case in self.executed_test_cases:
+                result = 'PASS' if(test_case.is_successful(
+                ) == test_case.EX_OK) else 'FAIL'
+                msg.add_row([test_case.case_name, test_case.project_name,
+                             _tiers.get_tier_name(test_case.case_name),
+                             test_case.get_duration(), result])
+            logger.info("FUNCTEST REPORT: \n\n%s\n", msg)
 
         logger.info("Execution exit value: %s" % self.overall_result)
         return self.overall_result
 
 
-if __name__ == '__main__':
-    logging.config.fileConfig(
-        CONST.__getattribute__('dir_functest_logging_cfg'))
+def main():
+    logging.config.fileConfig(pkg_resources.resource_filename(
+        'functest', 'ci/logging.ini'))
     parser = RunTestsParser()
     args = parser.parse_args(sys.argv[1:])
     runner = Runner()
-    sys.exit(runner.main(**args).value)
+    return runner.main(**args).value
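
Note on the change (reviewer sketch, not part of the diff): the patch replaces CONST-based file paths with pkg_resources lookups and turns the old if __name__ == '__main__' block into a main() that returns the exit value. The snippet below is a minimal sketch of how those pieces are expected to be used; the console_scripts wiring and entry-point name are assumptions, since setup.py is not shown in this change.

    # Reviewer sketch, assumed usage only.
    import pkg_resources

    # resource_filename() yields an on-disk path for a data file shipped
    # inside the installed 'functest' package, regardless of install layout.
    testcases_yaml = pkg_resources.resource_filename(
        'functest', 'ci/testcases.yaml')
    logging_ini = pkg_resources.resource_filename(
        'functest', 'ci/logging.ini')
    print(testcases_yaml)
    print(logging_ini)

    # Assumed setuptools wiring (hypothetical, not shown in this diff):
    #   entry_points={'console_scripts': [
    #       'run_tests = functest.ci.run_tests:main']}
    # A generated console script calls sys.exit(main()), which is why main()
    # now returns the exit value instead of calling sys.exit() itself.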