Adding first patch for behave feature 06/68206/3
author Deepak Chandella <deepak.chandella@orange.com>
Fri, 5 Jul 2019 17:01:36 +0000 (22:31 +0530)
committer Cédric Ollivier <cedric.ollivier@orange.com>
Thu, 11 Jul 2019 09:35:54 +0000 (11:35 +0200)
Change-Id: Ic975c301103b49cdec2bd26746b708388f21e892
Signed-off-by: Deepak Chandella <deepak.chandella@orange.com>
15 files changed:
ansible/site.yml
api/apidoc/xtesting.core.behaveframework.rst [new file with mode: 0644]
api/apidoc/xtesting.core.rst
docker/testcases.yaml
requirements.txt
setup.cfg
tox.ini
upper-constraints.txt
xtesting/ci/testcases.yaml
xtesting/core/behaveframework.py [new file with mode: 0644]
xtesting/core/robotframework.py
xtesting/samples/features/hello.feature [new file with mode: 0644]
xtesting/samples/features/steps/hello.py [new file with mode: 0644]
xtesting/tests/unit/core/test_behaveframework.py [new file with mode: 0644]
xtesting/tests/unit/core/test_robotframework.py

diff --git a/ansible/site.yml b/ansible/site.yml
index 4c1f495..934ef83 100644
@@ -1,4 +1,13 @@
 ---
 - hosts: 127.0.0.1
   roles:
-    - collivier.xtesting
+    - role: collivier.xtesting
+      suites:
+        - container: xtesting
+          tests:
+            - first
+            - second
+            - third
+            - fourth
+            - fifth
+            - sixth
diff --git a/api/apidoc/xtesting.core.behaveframework.rst b/api/apidoc/xtesting.core.behaveframework.rst
new file mode 100644
index 0000000..1d048c9
--- /dev/null
@@ -0,0 +1,7 @@
+xtesting\.core\.behaveframework module
+======================================
+
+.. automodule:: xtesting.core.behaveframework
+    :members:
+    :undoc-members:
+    :show-inheritance:
diff --git a/api/apidoc/xtesting.core.rst b/api/apidoc/xtesting.core.rst
index ca38ef3..f02824a 100644
@@ -11,6 +11,7 @@ Submodules
 
 .. toctree::
 
+   xtesting.core.behaveframework
    xtesting.core.feature
    xtesting.core.robotframework
    xtesting.core.testcase
diff --git a/docker/testcases.yaml b/docker/testcases.yaml
index 0fc5048..4acb3eb 100644
@@ -64,3 +64,18 @@ tiers:
                         variable:
                             - 'var01:foo'
                             - 'var02:bar'
+
+            -
+                case_name: sixth
+                project_name: xtesting
+                criteria: 100
+                blocking: false
+                clean_flag: false
+                description: ''
+                run:
+                    name: 'behaveframework'
+                    args:
+                        suites:
+                            - /usr/lib/python3.6/site-packages/xtesting/samples/features/
+                        tags:
+                            - foo
diff --git a/requirements.txt b/requirements.txt
index 6be9227..2344827 100644
@@ -7,6 +7,7 @@ PyYAML # MIT
 enum34;python_version=='2.7' or python_version=='2.6' or python_version=='3.3' # BSD
 requests!=2.20.0 # Apache-2.0
 robotframework>=3.0
+behave>=1.2.6
 mock # BSD
 PrettyTable<0.8 # BSD
 six # MIT
diff --git a/setup.cfg b/setup.cfg
index 279e46a..76d0b88 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -26,6 +26,7 @@ console_scripts =
 xtesting.testcase =
     bashfeature = xtesting.core.feature:BashFeature
     robotframework = xtesting.core.robotframework:RobotFramework
+    behaveframework = xtesting.core.behaveframework:BehaveFramework
     unit = xtesting.core.unit:Suite
     first = xtesting.samples.first:Test
     second = xtesting.samples.second:Test
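
The new behaveframework entry point is what lets testcases.yaml refer to the runner by name. As a hedged illustration (xtesting's own dispatch code may differ), an xtesting.testcase entry point can be resolved at run time like this:

    # Illustrative sketch only: resolving the entry point by name with
    # pkg_resources; xtesting's internal loader may work differently.
    import pkg_resources

    entry = next(pkg_resources.iter_entry_points(
        'xtesting.testcase', 'behaveframework'))
    cls = entry.load()  # xtesting.core.behaveframework:BehaveFramework
    case = cls(case_name='sixth', project_name='xtesting')
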
diff --git a/tox.ini b/tox.ini
index c80bcb4..6dcf9fc 100644
--- a/tox.ini
+++ b/tox.ini
@@ -32,7 +32,8 @@ commands = flake8
 basepython = python2.7
 whitelist_externals = bash
 commands =
-  pylint --disable=locally-disabled --ignore-imports=y --reports=n xtesting
+  pylint --min-similarity-lines=10 \
+    --disable=locally-disabled --ignore-imports=y --reports=n xtesting
 
 [testenv:yamllint]
 basepython = python2.7
diff --git a/upper-constraints.txt b/upper-constraints.txt
index 8b12ecc..920bb47 100644
@@ -1,2 +1,3 @@
 robotframework===3.1.1
 bandit===1.1.0
+behave===1.2.6
diff --git a/xtesting/ci/testcases.yaml b/xtesting/ci/testcases.yaml
index d3df1ef..9de9c4b 100644
@@ -65,3 +65,19 @@ tiers:
                         variable:
                             - 'var01:foo'
                             - 'var02:bar'
+
+            -
+                case_name: sixth
+                project_name: xtesting
+                enabled: false
+                criteria: 100
+                blocking: false
+                clean_flag: false
+                description: ''
+                run:
+                    name: 'behaveframework'
+                    args:
+                        suites:
+                            - /usr/lib/python3.6/site-packages/xtesting/samples/features/
+                        tags:
+                            - foo
diff --git a/xtesting/core/behaveframework.py b/xtesting/core/behaveframework.py
new file mode 100644
index 0000000..d8a61ef
--- /dev/null
@@ -0,0 +1,120 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2019 Orange and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+
+"""Define classes required to run any Behave test suites."""
+
+from __future__ import division
+
+import logging
+import os
+import time
+import json
+
+from behave.__main__ import main as behave_main
+
+from xtesting.core import testcase
+
+__author__ = "Deepak Chandella <deepak.chandella@orange.com>"
+
+
+class BehaveFramework(testcase.TestCase):
+    """BehaveFramework runner."""
+    # pylint: disable=too-many-instance-attributes
+
+    __logger = logging.getLogger(__name__)
+    dir_results = "/var/lib/xtesting/results"
+
+    def __init__(self, **kwargs):
+        super(BehaveFramework, self).__init__(**kwargs)
+        self.res_dir = os.path.join(self.dir_results, self.case_name)
+        self.json_file = os.path.join(self.res_dir, 'output.json')
+        self.total_tests = 0
+        self.pass_tests = 0
+        self.fail_tests = 0
+        self.skip_tests = 0
+        self.response = None
+
+    def parse_results(self):
+        """Parse output.json and get the details in it."""
+
+        try:
+            with open(self.json_file) as stream_:
+                self.response = json.load(stream_)
+        except IOError:
+            self.__logger.error("Error reading the file %s", self.json_file)
+
+        try:
+            if self.response:
+                self.total_tests = len(self.response)
+                for item in self.response:
+                    if item['status'] == 'passed':
+                        self.pass_tests += 1
+                    elif item['status'] == 'failed':
+                        self.fail_tests += 1
+                    elif item['status'] == 'skipped':
+                        self.skip_tests += 1
+        except KeyError:
+            self.__logger.error("Error in json - %s", self.response)
+
+        try:
+            self.result = 100 * (
+                self.pass_tests / self.total_tests)
+        except ZeroDivisionError:
+            self.__logger.error("No test has been run")
+
+        self.details = {}
+        self.details['total_tests'] = self.total_tests
+        self.details['pass_tests'] = self.pass_tests
+        self.details['fail_tests'] = self.fail_tests
+        self.details['skip_tests'] = self.skip_tests
+        self.details['tests'] = self.response
+
+    def run(self, **kwargs):
+        """Run the BehaveFramework feature files
+
+        Here are the steps:
+           * create the output directories if required,
+           * run behave features with parameters
+           * get the results in output.json,
+
+        Args:
+            kwargs: Arbitrary keyword arguments.
+
+        Returns:
+            EX_OK if all suites ran well.
+            EX_RUN_ERROR otherwise.
+        """
+        try:
+            suites = kwargs["suites"]
+            tags = kwargs.get("tags", [])
+        except KeyError:
+            self.__logger.exception("Mandatory args were not passed")
+            return self.EX_RUN_ERROR
+        if not os.path.exists(self.res_dir):
+            try:
+                os.makedirs(self.res_dir)
+            except Exception:  # pylint: disable=broad-except
+                self.__logger.exception("Cannot create %s", self.res_dir)
+                return self.EX_RUN_ERROR
+        config = ['--tags='+','.join(tags),
+                  '--format=json',
+                  '--outfile='+self.json_file]
+        for feature in suites:
+            config.append(feature)
+        self.start_time = time.time()
+        behave_main(config)
+        self.stop_time = time.time()
+
+        try:
+            self.parse_results()
+            self.__logger.info("Results were successfully parsed")
+        except Exception:  # pylint: disable=broad-except
+            self.__logger.exception("Cannot parse results")
+            return self.EX_RUN_ERROR
+        return self.EX_OK
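
In practice the runner consumes the same arguments as the 'sixth' entries in testcases.yaml. A minimal standalone sketch, assuming the sample features directory is reachable from the current working directory:

    # Minimal sketch: the suites/tags kwargs mirror the YAML 'args'
    # above; the relative path is an assumption for a source checkout.
    from xtesting.core.behaveframework import BehaveFramework

    case = BehaveFramework(case_name='sixth', project_name='xtesting')
    if case.run(suites=['xtesting/samples/features/'],
                tags=['foo']) == case.EX_OK:
        print(case.result, case.details['total_tests'])
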
diff --git a/xtesting/core/robotframework.py b/xtesting/core/robotframework.py
index 2791b55..3cb0ad3 100644
@@ -11,7 +11,6 @@
 
 from __future__ import division
 
-import errno
 import logging
 import os
 
@@ -110,15 +109,12 @@ class RobotFramework(testcase.TestCase):
         except KeyError:
             self.__logger.exception("Mandatory args were not passed")
             return self.EX_RUN_ERROR
-        try:
-            os.makedirs(self.res_dir)
-        except OSError as ex:
-            if ex.errno != errno.EEXIST:
+        if not os.path.exists(self.res_dir):
+            try:
+                os.makedirs(self.res_dir)
+            except Exception:  # pylint: disable=broad-except
                 self.__logger.exception("Cannot create %s", self.res_dir)
                 return self.EX_RUN_ERROR
-        except Exception:  # pylint: disable=broad-except
-            self.__logger.exception("Cannot create %s", self.res_dir)
-            return self.EX_RUN_ERROR
         stream = StringIO()
         robot.run(*suites, variable=variable, variablefile=variablefile,
                   include=include, output=self.xml_file, log='NONE',
diff --git a/xtesting/samples/features/hello.feature b/xtesting/samples/features/hello.feature
new file mode 100644
index 0000000..7975d28
--- /dev/null
@@ -0,0 +1,7 @@
+Feature: showing off behave
+
+  @foo
+  Scenario: run a simple test
+     Given we have behave installed
+      When we implement a test
+      Then behave will test it for us!
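
With --format=json, behave serializes each feature with a status field, which is exactly what parse_results() counts. A trimmed, hypothetical output.json for the feature above (the real file carries many more keys per feature):

    import json

    # Hypothetical, trimmed behave JSON output for the sample feature.
    sample = json.loads(
        '[{"name": "showing off behave", "status": "passed"}]')
    passed = sum(1 for item in sample if item['status'] == 'passed')
    assert passed == 1  # result would be 100 * 1 / 1 = 100
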
diff --git a/xtesting/samples/features/steps/hello.py b/xtesting/samples/features/steps/hello.py
new file mode 100644
index 0000000..8d78016
--- /dev/null
@@ -0,0 +1,16 @@
+from behave import when, then, step
+
+
+@step('we have behave installed')
+def step_impl_installation(context):
+    pass
+
+
+@when('we implement a test')
+def step_impl_test(context):
+    assert True is not False
+
+
+@then('behave will test it for us!')
+def step_impl_verify(context):
+    assert context.failed is False
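
For a quick local check outside xtesting, the same behave_main() call the runner builds can be driven directly; the output path below is an arbitrary assumption:

    # Standalone smoke run mirroring the config list that
    # BehaveFramework.run() assembles; /tmp/output.json is arbitrary.
    from behave.__main__ import main as behave_main

    behave_main(['--tags=foo', '--format=json',
                 '--outfile=/tmp/output.json',
                 'xtesting/samples/features/'])
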
diff --git a/xtesting/tests/unit/core/test_behaveframework.py b/xtesting/tests/unit/core/test_behaveframework.py
new file mode 100644
index 0000000..f18cac0
--- /dev/null
@@ -0,0 +1,162 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2019 Orange and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+
+"""Define the classes required to fully cover behave."""
+
+import logging
+import unittest
+
+import mock
+from xtesting.core import behaveframework
+
+__author__ = "Deepak Chandella <deepak.chandella@orange.com>"
+
+
+class ParseResultTesting(unittest.TestCase):
+
+    """The class testing BehaveFramework.parse_results()."""
+    # pylint: disable=missing-docstring
+
+    _response = [{'status': 'passed'}]
+
+    def setUp(self):
+        self.test = behaveframework.BehaveFramework(
+            case_name='behave', project_name='xtesting')
+
+    def test_raises_exc_open(self):
+        self.test.json_file = 'dummy_file'
+        self.test.response = self._response
+        with mock.patch('six.moves.builtins.open',
+                        mock.mock_open()) as mock_file:
+            mock_file.side_effect = IOError()
+            self.test.parse_results()  # IOError is caught and logged
+        mock_file.assert_called_once_with('dummy_file')
+
+    def test_raises_exc_key(self):
+        with mock.patch('six.moves.builtins.open', mock.mock_open()), \
+                mock.patch('json.load', return_value=[{'foo': 'bar'}]):
+            self.test.parse_results()  # KeyError is caught and logged
+
+    def test_raises_exc_zerodivision(self):
+        with mock.patch('six.moves.builtins.open', mock.mock_open()), \
+                mock.patch('json.load', mock.Mock(return_value=[])):
+            self.test.parse_results()  # ZeroDivisionError caught, logged
+
+    def _test_result(self, response, result):
+        with mock.patch('six.moves.builtins.open', mock.mock_open()), \
+                mock.patch('json.load', mock.Mock(return_value=response)):
+            self.test.parse_results()
+            self.assertEqual(self.test.result, result)
+
+    def test_null_passed(self):
+        data = [{'status': 'dummy'}]
+        self._test_result(data, 0)
+
+    def test_half_success(self):
+        data = [{'status': 'passed'}, {'status': 'failed'}]
+        self._test_result(data, 50)
+
+    def test_success(self):
+        data = [{'status': 'passed'}, {'status': 'passed'}]
+        self._test_result(data, 100)
+
+    def test_count(self):
+        self._response.extend([{'status': 'failed'}, {'status': 'skipped'}])
+        with mock.patch('six.moves.builtins.open', mock.mock_open()), \
+                mock.patch('json.load', mock.Mock(
+                    return_value=self._response)):
+            self.test.parse_results()
+            self.assertEqual(self.test.details['pass_tests'], 1)
+            self.assertEqual(self.test.details['fail_tests'], 1)
+            self.assertEqual(self.test.details['skip_tests'], 1)
+            self.assertEqual(self.test.details['total_tests'], 3)
+
+
+class RunTesting(unittest.TestCase):
+
+    """The class testing BehaveFramework.run()."""
+    # pylint: disable=missing-docstring
+
+    suites = ["foo"]
+    tags = []
+
+    def setUp(self):
+        self.test = behaveframework.BehaveFramework(
+            case_name='behave', project_name='xtesting')
+
+    def test_exc_key_error(self):
+        self.assertEqual(self.test.run(), self.test.EX_RUN_ERROR)
+
+    @mock.patch('xtesting.core.behaveframework.behave_main')
+    def _test_makedirs_exc(self, *args):
+        with mock.patch.object(self.test, 'parse_results') as mock_method:
+            self.assertEqual(
+                self.test.run(
+                    suites=self.suites, tags=self.tags),
+                self.test.EX_RUN_ERROR)
+            args[0].assert_not_called()
+            mock_method.assert_not_called()
+
+    @mock.patch('os.makedirs', side_effect=Exception)
+    @mock.patch('os.path.exists', return_value=False)
+    def test_makedirs_exc(self, *args):
+        self._test_makedirs_exc()
+        args[0].assert_called_once_with(self.test.res_dir)
+        args[1].assert_called_once_with(self.test.res_dir)
+
+    @mock.patch('xtesting.core.behaveframework.behave_main')
+    def _test_makedirs(self, *args):
+        with mock.patch.object(self.test, 'parse_results') as mock_method:
+            self.assertEqual(
+                self.test.run(suites=self.suites, tags=self.tags),
+                self.test.EX_OK)
+            args[0].assert_called_once_with(
+                ['--tags=',
+                 '--format=json',
+                 '--outfile={}'.format(self.test.json_file),
+                 'foo'])
+            mock_method.assert_called_once_with()
+
+    @mock.patch('os.makedirs')
+    @mock.patch('os.path.exists', return_value=False)
+    def test_makedirs(self, *args):
+        self._test_makedirs()
+        args[0].assert_called_once_with(self.test.res_dir)
+        args[1].assert_called_once_with(self.test.res_dir)
+
+    @mock.patch('os.makedirs')
+    @mock.patch('os.path.exists', return_value=True)
+    def test_makedirs_oserror17(self, *args):
+        self._test_makedirs()
+        args[0].assert_called_once_with(self.test.res_dir)
+        args[1].assert_not_called()
+
+    @mock.patch('os.makedirs')
+    @mock.patch('xtesting.core.behaveframework.behave_main')
+    def _test_parse_results(self, status, *args):
+        self.assertEqual(
+            self.test.run(
+                suites=self.suites, tags=self.tags),
+            status)
+        args[0].assert_called_once_with(
+            ['--tags=',
+             '--format=json',
+             '--outfile={}'.format(self.test.json_file),
+             'foo'])
+        args[1].assert_called_once_with(self.test.res_dir)
+
+    def test_parse_results_exc(self):
+        with mock.patch.object(self.test, 'parse_results',
+                               side_effect=Exception) as mock_method:
+            self._test_parse_results(self.test.EX_RUN_ERROR)
+            mock_method.assert_called_once_with()
+
+if __name__ == "__main__":
+    logging.disable(logging.CRITICAL)
+    unittest.main(verbosity=2)
diff --git a/xtesting/tests/unit/core/test_robotframework.py b/xtesting/tests/unit/core/test_robotframework.py
index 398cf87..19c4e0f 100644
@@ -9,7 +9,6 @@
 
 """Define the classes required to fully cover robot."""
 
-import errno
 import logging
 import os
 import unittest
@@ -194,14 +193,11 @@ class RunTesting(unittest.TestCase):
             mmethod.asser_not_called()
 
     @mock.patch('os.makedirs', side_effect=Exception)
+    @mock.patch('os.path.exists', return_value=False)
     def test_makedirs_exc(self, *args):
         self._test_makedirs_exc()
         args[0].assert_called_once_with(self.test.res_dir)
-
-    @mock.patch('os.makedirs', side_effect=OSError)
-    def test_makedirs_oserror(self, *args):
-        self._test_makedirs_exc()
-        args[0].assert_called_once_with(self.test.res_dir)
+        args[1].assert_called_once_with(self.test.res_dir)
 
     @mock.patch('robot.run')
     def _test_makedirs(self, *args):
@@ -218,15 +214,19 @@ class RunTesting(unittest.TestCase):
             mock_method.assert_called_once_with()
             mmethod.assert_called_once_with()
 
-    @mock.patch('os.makedirs', side_effect=OSError(errno.EEXIST, ''))
+    @mock.patch('os.makedirs')
+    @mock.patch('os.path.exists', return_value=True)
     def test_makedirs_oserror17(self, *args):
         self._test_makedirs()
         args[0].assert_called_once_with(self.test.res_dir)
+        args[1].assert_not_called()
 
     @mock.patch('os.makedirs')
+    @mock.patch('os.path.exists', return_value=False)
     def test_makedirs(self, *args):
         self._test_makedirs()
         args[0].assert_called_once_with(self.test.res_dir)
+        args[1].assert_called_once_with(self.test.res_dir)
 
     @mock.patch('os.makedirs')
     @mock.patch('robot.run')