Add deny_skipping parameter 21/73421/9
author    Bellengé Maxime <maxime.bellenge@orange.com>
          Thu, 30 Jun 2022 09:56:51 +0000 (11:56 +0200)
committer Cédric Ollivier <cedric.ollivier@orange.com>
          Thu, 7 Jul 2022 13:07:09 +0000 (15:07 +0200)
It allows skipped tests to be taken into account, or not, in the global result.
It also adds unit tests to cover it.

It should be noted that it follows the deny_skipping
model proposed by Functest (tempest).

Change-Id: I15fa7a3946c6e3b2ae190e4f8abf3b9361a391a4
Signed-off-by: Cédric Ollivier <cedric.ollivier@orange.com>
xtesting/core/robotframework.py
xtesting/tests/unit/core/test_robotframework.py
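
A minimal usage sketch, for context only (not part of the change): the
case_name and project_name values below are placeholders, and the comments
describe the intended effect of the new keyword.

    from xtesting.core.robotframework import RobotFramework

    # deny_skipping defaults to False, which keeps the previous behaviour
    # of counting skipped tests as successes in the final score.
    robot = RobotFramework(case_name='robot', project_name='xtesting',
                           deny_skipping=True)
    # With deny_skipping=True, parse_results() only counts passed tests
    # when computing robot.result.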

diff --git a/xtesting/core/robotframework.py b/xtesting/core/robotframework.py
index 775ed1c..2952de6 100644
@@ -56,6 +56,7 @@ class RobotFramework(testcase.TestCase):
     def __init__(self, **kwargs):
         super().__init__(**kwargs)
         self.xml_file = os.path.join(self.res_dir, 'output.xml')
+        self.deny_skipping = kwargs.get("deny_skipping", False)
 
     def parse_results(self):
         """Parse output.xml and get the details in it."""
@@ -63,9 +64,15 @@ class RobotFramework(testcase.TestCase):
         visitor = ResultVisitor()
         result.visit(visitor)
         try:
-            self.result = 100 * (
-                result.suite.statistics.passed /
-                result.suite.statistics.total)
+            if self.deny_skipping:
+                self.result = 100 * (
+                    result.suite.statistics.passed /
+                    result.suite.statistics.total)
+            else:
+                self.result = 100 * ((
+                    result.suite.statistics.passed +
+                    result.suite.statistics.skipped) /
+                    result.suite.statistics.total)
         except ZeroDivisionError:
             self.__logger.error("No test has been run")
         self.start_time = timestamp_to_secs(result.suite.starttime)
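
In short, the success rate computed above follows this rule; the helper
below is a hypothetical standalone sketch using plain integers instead of
robot statistics objects, and is not part of the patch:

    def success_rate(passed, skipped, total, deny_skipping=False):
        """Percentage of successful tests, mirroring parse_results()."""
        if deny_skipping:
            # skipped tests are not counted as passed
            return 100 * passed / total
        # skipped tests are counted as successes (previous behaviour)
        return 100 * (passed + skipped) / total

    # 4 passed and 1 skipped out of 5 tests:
    # success_rate(4, 1, 5, deny_skipping=True)   -> 80.0
    # success_rate(4, 1, 5, deny_skipping=False)  -> 100.0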
diff --git a/xtesting/tests/unit/core/test_robotframework.py b/xtesting/tests/unit/core/test_robotframework.py
index f36625e..bbd99f5 100644
@@ -86,25 +86,42 @@ class ParseResultTesting(unittest.TestCase):
                              {'description': config['name'], 'tests': []})
 
     def test_null_passed(self):
-        self._config.update({'statistics.passed': 0,
+        self._config.update({'statistics.skipped': 0,
+                             'statistics.passed': 0,
                              'statistics.total': 20})
         self._test_result(self._config, 0)
 
     def test_no_test(self):
-        self._config.update({'statistics.passed': 20,
+        self._config.update({'statistics.skipped': 0,
+                             'statistics.passed': 20,
                              'statistics.total': 0})
         self._test_result(self._config, 0)
 
     def test_half_success(self):
-        self._config.update({'statistics.passed': 10,
+        self._config.update({'statistics.skipped': 0,
+                             'statistics.passed': 10,
                              'statistics.total': 20})
         self._test_result(self._config, 50)
 
     def test_success(self):
-        self._config.update({'statistics.passed': 20,
+        self._config.update({'statistics.skipped': 0,
+                             'statistics.passed': 20,
                              'statistics.total': 20})
         self._test_result(self._config, 100)
 
+    def test_skip_excluded(self):
+        self.test.deny_skipping = True
+        self._config.update({'statistics.skipped': 1,
+                             'statistics.passed': 4,
+                             'statistics.total': 5})
+        self._test_result(self._config, 80)
+
+    def test_skip_included(self):
+        self._config.update({'statistics.skipped': 1,
+                             'statistics.passed': 4,
+                             'statistics.total': 5})
+        self._test_result(self._config, 100)
+
 
 class GenerateReportTesting(unittest.TestCase):
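
For reference, the behaviour asserted by the two new tests can be
reproduced in a self-contained sketch. It reuses the mock-based pattern of
ParseResultTesting above; the class name, timestamps, and statistics values
are illustrative, and robot.api.ExecutionResult is assumed to be looked up
at call time (as in the existing tests):

    import unittest
    from unittest import mock

    from xtesting.core import robotframework


    class DenySkippingSketch(unittest.TestCase):

        config = {'name': 'dummy',
                  'starttime': '20220630 10:00:00.000',
                  'endtime': '20220630 10:00:01.000',
                  'statistics.passed': 4,
                  'statistics.skipped': 1,
                  'statistics.total': 5}

        def _parse(self, deny_skipping):
            # Build the runner with the new keyword and feed parse_results()
            # with a mocked Robot Framework execution result.
            test = robotframework.RobotFramework(
                case_name='robot', project_name='xtesting',
                deny_skipping=deny_skipping)
            suite = mock.Mock()
            suite.configure_mock(**self.config)
            with mock.patch('robot.api.ExecutionResult',
                            return_value=mock.Mock(suite=suite)):
                test.parse_results()
            return test.result

        def test_skip_excluded(self):
            # 4 passed out of 5: the skipped test lowers the score to 80%
            self.assertEqual(self._parse(True), 80)

        def test_skip_included(self):
            # 4 passed + 1 skipped out of 5: the score stays at 100%
            self.assertEqual(self._parse(False), 100)


    if __name__ == '__main__':
        unittest.main()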