Merge "[fuel] Skip test_server_basic_ops tempest test"
author Jose Lausuch <jose.lausuch@ericsson.com>
Thu, 23 Mar 2017 14:00:27 +0000 (14:00 +0000)
committer Gerrit Code Review <gerrit@opnfv.org>
Thu, 23 Mar 2017 14:00:27 +0000 (14:00 +0000)
50 files changed:
.gitmodules [new file with mode: 0644]
docker/Dockerfile
docker/Dockerfile.aarch64
docs/com/README.txt [deleted file]
docs/com/pres/Summit/Berlin-2016/conversation.html
docs/com/pres/Summit/Berlin-2016/summit-Berlin.html
docs/com/pres/Summit/Berlin-2016/testapi.html
docs/com/pres/dashboard/dashboard_status.html
docs/com/pres/reveal.js [new submodule]
functest/ci/__init__.py [changed mode: 0755->0644]
functest/ci/check_os.sh
functest/ci/config_functest.yaml
functest/ci/generate_report.py
functest/ci/prepare_env.py
functest/ci/testcases.yaml
functest/ci/tier_builder.py [changed mode: 0755->0644]
functest/ci/tier_handler.py [changed mode: 0755->0644]
functest/core/vnf_base.py
functest/opnfv_tests/features/copper.py [changed mode: 0755->0644]
functest/opnfv_tests/features/doctor.py [changed mode: 0755->0644]
functest/opnfv_tests/features/domino.py [changed mode: 0755->0644]
functest/opnfv_tests/features/odl_sfc.py
functest/opnfv_tests/features/promise.py [changed mode: 0755->0644]
functest/opnfv_tests/features/sdnvpn.py [changed mode: 0755->0644]
functest/opnfv_tests/features/security_scan.py [changed mode: 0755->0644]
functest/opnfv_tests/mano/orchestra.py [changed mode: 0755->0644]
functest/opnfv_tests/openstack/healthcheck/healthcheck.sh [changed mode: 0755->0644]
functest/opnfv_tests/openstack/refstack_client/defcore.txt [moved from functest/opnfv_tests/openstack/refstack_client/defcore_201608.txt with 99% similarity]
functest/opnfv_tests/openstack/refstack_client/refstack_client.py
functest/opnfv_tests/openstack/tempest/conf_utils.py
functest/opnfv_tests/vnf/ims/cloudify_ims.py
functest/opnfv_tests/vnf/ims/cloudify_ims.yaml
functest/opnfv_tests/vnf/ims/opera_ims.py [changed mode: 0644->0755]
functest/opnfv_tests/vnf/ims/orchestra_ims.py [changed mode: 0644->0755]
functest/opnfv_tests/vnf/ims/orchestra_ims.yaml
functest/opnfv_tests/vnf/router/__init__.py [changed mode: 0755->0644]
functest/opnfv_tests/vnf/router/vyos_vrouter.py [changed mode: 0755->0644]
functest/tests/unit/ci/test_generate_report.py [new file with mode: 0644]
functest/tests/unit/ci/test_run_tests.py [new file with mode: 0644]
functest/tests/unit/ci/test_tier_builder.py [new file with mode: 0644]
functest/tests/unit/ci/test_tier_handler.py [new file with mode: 0644]
functest/tests/unit/core/test_vnf_base.py
functest/tests/unit/opnfv_tests/openstack/refstack_client/test_refstack_client.py
functest/tests/unit/utils/test_functest_utils.py
functest/tests/unit/utils/test_openstack_utils.py
functest/utils/decorators.py
functest/utils/functest_logger.py [changed mode: 0755->0644]
functest/utils/functest_utils.py
functest/utils/openstack_utils.py [changed mode: 0755->0644]
kingbird_requirements.txt [new file with mode: 0644]

diff --git a/.gitmodules b/.gitmodules
new file mode 100644 (file)
index 0000000..78668cf
--- /dev/null
@@ -0,0 +1,3 @@
+[submodule "docs/com/pres/reveal.js"]
+       path = docs/com/pres/reveal.js
+       url = https://github.com/hakimel/reveal.js.git
index 30c31da..4c0995b 100644 (file)
@@ -120,8 +120,20 @@ RUN cd ${REPOS_DIR}/barometer \
     && pip install .
 
 RUN find ${FUNCTEST_REPO_DIR} -name "*.py" \
-    -not -path "*tests/unit*" |xargs grep __main__ |cut -d\: -f 1 |xargs chmod -c 755 \
-    && find ${FUNCTEST_REPO_DIR} -name "*.sh" |xargs grep \#\! |cut -d\:  -f 1 |xargs chmod -c 755
+    -not -path "*tests/unit*" \
+    -not -path "*functest_venv*" \
+    |xargs grep -L __main__ |cut -d\: -f 1 |xargs chmod -c 644 \
+    && find ${FUNCTEST_REPO_DIR} -name "*.sh" \
+    -not -path "*functest_venv*" \
+    |xargs grep -L \#\! |cut -d\:  -f 1 |xargs chmod -c 644
+
+RUN find ${FUNCTEST_REPO_DIR} -name "*.py" \
+    -not -path "*tests/unit*" \
+    -not -path "*functest_venv*" \
+    |xargs grep __main__ |cut -d\: -f 1 |xargs chmod -c 755 \
+    && find ${FUNCTEST_REPO_DIR} -name "*.sh" \
+    -not -path "*functest_venv*" \
+    |xargs grep \#\! |cut -d\:  -f 1 |xargs chmod -c 755
 
 RUN /bin/bash ${REPOS_DIR}/parser/tests/parser_install.sh ${REPOS_DIR}
 RUN ${REPOS_DIR}/rally/install_rally.sh --yes
@@ -144,6 +156,7 @@ RUN cd ${REPOS_DIR}/bgpvpn && pip install -e .
 
 # Kingbird integration
 RUN cd ${REPOS_DIR}/kingbird && pip install -e .
+RUN cd ${FUNCTEST_REPO_DIR} && pip install -r kingbird_requirements.txt
 
 # refstack-client integration
 RUN cd ${REPOS_DIR}/refstack-client && ./setup_env -t ${REFSTACK_TAG}
index da1ce2d..60f72a2 100644 (file)
@@ -15,6 +15,7 @@ LABEL version="0.1" description="OPNFV Functest Aarch64 Docker container"
 ARG BRANCH=master
 ARG RALLY_TAG=0.8.1
 ARG TEMPEST_TAG=15.0.0
+ARG REFSTACK_TAG=15.0.0
 ARG ODL_TAG=release/beryllium-sr4
 ARG OPENSTACK_TAG=stable/mitaka
 ARG KINGBIRD_TAG=0.2.2
@@ -93,6 +94,7 @@ RUN git clone --depth 1 -b $OPENSTACK_TAG https://github.com/openstack/networkin
 RUN git clone --depth 1 -b $KINGBIRD_TAG https://github.com/openstack/kingbird.git ${REPOS_DIR}/kingbird
 RUN git clone --depth 1 -b $RALLY_TAG https://github.com/openstack/rally.git ${REPOS_DIR}/rally
 RUN git clone --depth 1 -b $TEMPEST_TAG https://github.com/openstack/tempest.git ${REPOS_DIR}/tempest
+RUN git clone https://github.com/openstack/refstack-client ${REPOS_DIR}/refstack-client
 
 # other repositories
 RUN git clone --depth 1 -b $ODL_TAG https://git.opendaylight.org/gerrit/p/integration/test.git ${REPOS_DIR}/odl_test
@@ -110,8 +112,20 @@ RUN cd ${RELENG_MODULE_DIR} \
     && pip install -e .
 
 RUN find ${FUNCTEST_REPO_DIR} -name "*.py" \
-    -not -path "*tests/unit*" |xargs grep __main__ |cut -d\: -f 1 |xargs chmod -c 755 \
-    && find ${FUNCTEST_REPO_DIR} -name "*.sh" |xargs grep \#\! |cut -d\:  -f 1 |xargs chmod -c 755
+    -not -path "*tests/unit*" \
+    -not -path "*functest_venv*" \
+    |xargs grep -L __main__ |cut -d\: -f 1 |xargs chmod -c 644 \
+    && find ${FUNCTEST_REPO_DIR} -name "*.sh" \
+    -not -path "*functest_venv*" \
+    |xargs grep -L \#\! |cut -d\:  -f 1 |xargs chmod -c 644
+
+RUN find ${FUNCTEST_REPO_DIR} -name "*.py" \
+    -not -path "*tests/unit*" \
+    -not -path "*functest_venv*" \
+    |xargs grep __main__ |cut -d\: -f 1 |xargs chmod -c 755 \
+    && find ${FUNCTEST_REPO_DIR} -name "*.sh" \
+    -not -path "*functest_venv*" \
+    |xargs grep \#\! |cut -d\:  -f 1 |xargs chmod -c 755
 
 RUN /bin/bash ${REPOS_DIR}/parser/tests/parser_install.sh ${REPOS_DIR}
 RUN ${REPOS_DIR}/rally/install_rally.sh --yes
@@ -135,6 +149,9 @@ RUN cd ${REPOS_DIR}/bgpvpn && pip install -e .
 # Kingbird integration
 RUN cd ${REPOS_DIR}/kingbird && pip install -e .
 
+# refstack-client integration
+RUN cd ${REPOS_DIR}/refstack-client && ./setup_env -t ${REFSTACK_TAG}
+
 RUN /bin/bash -c ". /etc/profile.d/rvm.sh \
     && cd ${REPOS_VNFS_DIR}/vims-test \
     && rvm autolibs enable"
diff --git a/docs/com/README.txt b/docs/com/README.txt
deleted file mode 100644 (file)
index 62d616b..0000000
+++ /dev/null
@@ -1,9 +0,0 @@
-This com folder contains the images, html and css files used to create
-communication based on reveal.js
-All the files are licensed under Creative Commons Attribution 4.0
-International License.
-.. http://creativecommons.org/licenses/by/4.0
-
-You can download reveal.js at:  https://github.com/hakimel/reveal.js/
-Then you must put images and css on existing directory and add the pres
-directory
index b56b1e1..356c2ad 100755 (executable)
 
                <meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no, minimal-ui">
 
-               <link rel="stylesheet" href="../../../css/reveal.css">
+               <link rel="stylesheet" href="../../reveal.js/css/reveal.css">
                <link rel="stylesheet" href="../../../css/theme/OPNFV-Berlin.css" id="theme">
 
                <!-- Code syntax highlighting -->
-               <link rel="stylesheet" href="../../../lib/css/zenburn.css">
+               <link rel="stylesheet" href="../../reveal.js/lib/css/zenburn.css">
 
                <!-- Printing and PDF exports -->
                <script>
                        var link = document.createElement( 'link' );
                        link.rel = 'stylesheet';
                        link.type = 'text/css';
-                       link.href = window.location.search.match( /print-pdf/gi ) ? '../../../css/print/pdf.css' : '../../../css/print/paper.css';
+                       link.href = window.location.search.match( /print-pdf/gi ) ? '../../reveal.js/css/print/pdf.css' : '../../reveal.js/css/print/paper.css';
                        document.getElementsByTagName( 'head' )[0].appendChild( link );
                </script>
 
                </div>
                </div>
 
-               <script src="../../../lib/js/head.min.js"></script>
-               <script src="../../../js/reveal.js"></script>
+               <script src="../../reveal.js/lib/js/head.min.js"></script>
+               <script src="../../reveal.js/js/reveal.js"></script>
 
                <script>
 
 
                                // Optional reveal.js plugins
                                dependencies: [
-                                       { src: '../../../lib/js/classList.js', condition: function() { return !document.body.classList; } },
-                                       { src: '../../../plugin/markdown/marked.js', condition: function() { return !!document.querySelector( '[data-markdown]' ); } },
-                                       { src: '../../../plugin/markdown/markdown.js', condition: function() { return !!document.querySelector( '[data-markdown]' ); } },
-                                       { src: '../../../plugin/highlight/highlight.js', async: true, condition: function() { return !!document.querySelector( 'pre code' ); }, callback: function() { hljs.initHighlightingOnLoad(); } },
-                                       { src: '../../../plugin/zoom-js/zoom.js', async: true },
-                                       { src: '../../../plugin/notes/notes.js', async: true }
+                                       { src: '../../reveal.js/lib/js/classList.js', condition: function() { return !document.body.classList; } },
+                                       { src: '../../reveal.js/plugin/markdown/marked.js', condition: function() { return !!document.querySelector( '[data-markdown]' ); } },
+                                       { src: '../../reveal.js/plugin/markdown/markdown.js', condition: function() { return !!document.querySelector( '[data-markdown]' ); } },
+                                       { src: '../../reveal.js/plugin/highlight/highlight.js', async: true, condition: function() { return !!document.querySelector( 'pre code' ); }, callback: function() { hljs.initHighlightingOnLoad(); } },
+                                       { src: '../../reveal.js/plugin/zoom-js/zoom.js', async: true },
+                                       { src: '../../reveal.js/plugin/notes/notes.js', async: true }
                                ]
                        });
 
index 8369443..97fa66c 100755 (executable)
 
                <meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no, minimal-ui">
 
-               <link rel="stylesheet" href="../../../css/reveal.css">
+               <link rel="stylesheet" href="../../reveal.js/css/reveal.css">
                <link rel="stylesheet" href="../../../css/theme/OPNFV-Berlin.css" id="theme">
 
                <!-- Code syntax highlighting -->
-               <link rel="stylesheet" href="../../../lib/css/zenburn.css">
+               <link rel="stylesheet" href="../../reveal.js/lib/css/zenburn.css">
 
                <!-- Printing and PDF exports -->
                <script>
                        var link = document.createElement( 'link' );
                        link.rel = 'stylesheet';
                        link.type = 'text/css';
-                       link.href = window.location.search.match( /print-pdf/gi ) ? '../../../css/print/pdf.css' : '../../../css/print/paper.css';
+                       link.href = window.location.search.match( /print-pdf/gi ) ? '../../reveal.js/css/print/pdf.css' : '../../reveal.js/css/print/paper.css';
                        document.getElementsByTagName( 'head' )[0].appendChild( link );
                </script>
 
                <!--[if lt IE 9]>
-               <script src="lib/js/html5shiv.js"></script>
+               <script src="../../reveal.js/lib/js/html5shiv.js"></script>
                <![endif]-->
        </head>
 
 
                        </div>
             <div class='footer'>
-                                <img src="../../../img/logo-OPNFV-Berlin.png" alt="OPNFV logo"> 
+                                <img src="../../../img/logo-OPNFV-Berlin.png" alt="OPNFV logo">
                </div>
                </div>
 
-               <script src="../../../lib/js/head.min.js"></script>
-               <script src="../../../js/reveal.js"></script>
+               <script src="../../reveal.js/lib/js/head.min.js"></script>
+               <script src="../../reveal.js/js/reveal.js"></script>
 
                <script>
 
 
                                // Optional reveal.js plugins
                                dependencies: [
-                                       { src: '../../../lib/js/classList.js', condition: function() { return !document.body.classList; } },
-                                       { src: '../../../plugin/markdown/marked.js', condition: function() { return !!document.querySelector( '[data-markdown]' ); } },
-                                       { src: '../../../plugin/markdown/markdown.js', condition: function() { return !!document.querySelector( '[data-markdown]' ); } },
-                                       { src: '../../../plugin/highlight/highlight.js', async: true, condition: function() { return !!document.querySelector( 'pre code' ); }, callback: function() { hljs.initHighlightingOnLoad(); } },
-                                       { src: '../../../plugin/zoom-js/zoom.js', async: true },
-                                       { src: '../../../plugin/notes/notes.js', async: true }
+                                       { src: '../../reveal.js/lib/js/classList.js', condition: function() { return !document.body.classList; } },
+                                       { src: '../../reveal.js/plugin/markdown/marked.js', condition: function() { return !!document.querySelector( '[data-markdown]' ); } },
+                                       { src: '../../reveal.js/plugin/markdown/markdown.js', condition: function() { return !!document.querySelector( '[data-markdown]' ); } },
+                                       { src: '../../reveal.js/plugin/highlight/highlight.js', async: true, condition: function() { return !!document.querySelector( 'pre code' ); }, callback: function() { hljs.initHighlightingOnLoad(); } },
+                                       { src: '../../reveal.js/plugin/zoom-js/zoom.js', async: true },
+                                       { src: '../../reveal.js/plugin/notes/notes.js', async: true }
                                ]
                        });
 
index 16f97c4..c40637c 100755 (executable)
 
                <meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no, minimal-ui">
 
-               <link rel="stylesheet" href="../../../css/reveal.css">
+               <link rel="stylesheet" href="../../reveal.js/css/reveal.css">
                <link rel="stylesheet" href="../../../css/theme/OPNFV-Berlin.css" id="theme">
 
                <!-- Code syntax highlighting -->
-               <link rel="stylesheet" href="../../../lib/css/zenburn.css">
+               <link rel="stylesheet" href="../../reveal.js/lib/css/zenburn.css">
 
                <!-- Printing and PDF exports -->
                <script>
                        var link = document.createElement( 'link' );
                        link.rel = 'stylesheet';
                        link.type = 'text/css';
-                       link.href = window.location.search.match( /print-pdf/gi ) ? '../../../css/print/pdf.css' : '../../../css/print/paper.css';
+                       link.href = window.location.search.match( /print-pdf/gi ) ? '../../reveal.js/css/print/pdf.css' : '../../reveal.js/css/print/paper.css';
                        document.getElementsByTagName( 'head' )[0].appendChild( link );
                </script>
 
@@ -242,8 +242,8 @@ OK
                        </div>
                </div>
 
-               <script src="../../../lib/js/head.min.js"></script>
-               <script src="../../../js/reveal.js"></script>
+               <script src="../../reveal.js/lib/js/head.min.js"></script>
+               <script src="../../reveal.js/js/reveal.js"></script>
 
                <script>
 
@@ -259,12 +259,12 @@ OK
 
                                // Optional reveal.js plugins
                                dependencies: [
-                                       { src: '../../../lib/js/classList.js', condition: function() { return !document.body.classList; } },
-                                       { src: '../../../plugin/markdown/marked.js', condition: function() { return !!document.querySelector( '[data-markdown]' ); } },
-                                       { src: '../../../plugin/markdown/markdown.js', condition: function() { return !!document.querySelector( '[data-markdown]' ); } },
-                                       { src: '../../../plugin/highlight/highlight.js', async: true, condition: function() { return !!document.querySelector( 'pre code' ); }, callback: function() { hljs.initHighlightingOnLoad(); } },
-                                       { src: '../../../plugin/zoom-js/zoom.js', async: true },
-                                       { src: '../../../plugin/notes/notes.js', async: true }
+                                       { src: '../../reveal.js/lib/js/classList.js', condition: function() { return !document.body.classList; } },
+                                       { src: '../../reveal.js/plugin/markdown/marked.js', condition: function() { return !!document.querySelector( '[data-markdown]' ); } },
+                                       { src: '../../reveal.js/plugin/markdown/markdown.js', condition: function() { return !!document.querySelector( '[data-markdown]' ); } },
+                                       { src: '../../reveal.js/plugin/highlight/highlight.js', async: true, condition: function() { return !!document.querySelector( 'pre code' ); }, callback: function() { hljs.initHighlightingOnLoad(); } },
+                                       { src: '../../reveal.js/plugin/zoom-js/zoom.js', async: true },
+                                       { src: '../../reveal.js/plugin/notes/notes.js', async: true }
                                ]
                        });
 
index 7d46a74..1321afa 100755 (executable)
 
                <meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no, minimal-ui">
 
-               <link rel="stylesheet" href="../../css/reveal.css">
+               <link rel="stylesheet" href="../reveal.js/css/reveal.css">
                <link rel="stylesheet" href="../../css/theme/OPNFV.css" id="theme">
 
                <!-- Code syntax highlighting -->
-               <link rel="stylesheet" href="../../lib/css/zenburn.css">
+               <link rel="stylesheet" href="../reveal.js/lib/css/zenburn.css">
 
                <!-- Printing and PDF exports -->
                <script>
                        var link = document.createElement( 'link' );
                        link.rel = 'stylesheet';
                        link.type = 'text/css';
-                       link.href = window.location.search.match( /print-pdf/gi ) ? '../../css/print/pdf.css' : '../../css/print/paper.css';
+                       link.href = window.location.search.match( /print-pdf/gi ) ? '../reveal.js/css/print/pdf.css' : '../reveal.js/css/print/paper.css';
                        document.getElementsByTagName( 'head' )[0].appendChild( link );
                </script>
 
                </div>
                </div>
 
-               <script src="../../lib/js/head.min.js"></script>
-               <script src="../../js/reveal.js"></script>
+               <script src="../reveal.js/lib/js/head.min.js"></script>
+               <script src="../reveal.js/js/reveal.js"></script>
 
                <script>
 
 
                                // Optional reveal.js plugins
                                dependencies: [
-                                       { src: '../../lib/js/classList.js', condition: function() { return !document.body.classList; } },
-                                       { src: '../../plugin/markdown/marked.js', condition: function() { return !!document.querySelector( '[data-markdown]' ); } },
-                                       { src: '../../plugin/markdown/markdown.js', condition: function() { return !!document.querySelector( '[data-markdown]' ); } },
-                                       { src: '../../plugin/highlight/highlight.js', async: true, condition: function() { return !!document.querySelector( 'pre code' ); }, callback: function() { hljs.initHighlightingOnLoad(); } },
-                                       { src: '../../plugin/zoom-js/zoom.js', async: true },
-                                       { src: '../../plugin/notes/notes.js', async: true }
+                                       { src: '../reveal.js/lib/js/classList.js', condition: function() { return !document.body.classList; } },
+                                       { src: '../reveal.js/plugin/markdown/marked.js', condition: function() { return !!document.querySelector( '[data-markdown]' ); } },
+                                       { src: '../reveal.js/plugin/markdown/markdown.js', condition: function() { return !!document.querySelector( '[data-markdown]' ); } },
+                                       { src: '../reveal.js/plugin/highlight/highlight.js', async: true, condition: function() { return !!document.querySelector( 'pre code' ); }, callback: function() { hljs.initHighlightingOnLoad(); } },
+                                       { src: '../reveal.js/plugin/zoom-js/zoom.js', async: true },
+                                       { src: '../reveal.js/plugin/notes/notes.js', async: true }
                                ]
                        });
 
diff --git a/docs/com/pres/reveal.js b/docs/com/pres/reveal.js
new file mode 160000 (submodule)
index 0000000..a349ff4
--- /dev/null
@@ -0,0 +1 @@
+Subproject commit a349ff43c58c23f9c837b8ea9b5fc7d4761b8de3
old mode 100755 (executable)
new mode 100644 (file)
index 2c5c021..3920b7a 100755 (executable)
@@ -26,6 +26,11 @@ verify_connectivity() {
     return 1
 }
 
+verify_SSL_connectivity() {
+    openssl s_client -connect $1:$2 &>/dev/null
+    return $?
+}
+
 check_service() {
     local service cmd
     service=$1
@@ -63,10 +68,16 @@ fi
 
 echo "Checking OpenStack endpoints:"
 publicURL=$(openstack catalog show  identity |awk '/public/ {print $4}')
-publicIP=$(echo $publicURL|sed 's/^.*http\:\/\///'|sed 's/.[^:]*$//')
+publicIP=$(echo $publicURL|sed 's/^.*http.*\:\/\///'|sed 's/.[^:]*$//')
 publicPort=$(echo $publicURL|sed 's/^.*://'|sed 's/\/.*$//')
-echo ">>Verifying connectivity to the public endpoint $publicIP:$publicPort..."
-verify_connectivity $publicIP $publicPort
+https_enabled=$(echo $publicURL | grep 'https')
+if [[ -n $https_enabled ]]; then
+    echo ">>Verifying SSL connectivity to the public endpoint $publicIP:$publicPort..."
+    verify_SSL_connectivity $publicIP $publicPort
+else
+    echo ">>Verifying connectivity to the public endpoint $publicIP:$publicPort..."
+    verify_connectivity $publicIP $publicPort
+fi
 RETVAL=$?
 if [ $RETVAL -ne 0 ]; then
     echo "ERROR: Cannot talk to the public endpoint $publicIP:$publicPort ."
@@ -81,10 +92,16 @@ if [ -z ${adminURL} ]; then
     openstack catalog show identity
     exit 1
 fi
-adminIP=$(echo $adminURL|sed 's/^.*http\:\/\///'|sed 's/.[^:]*$//')
+adminIP=$(echo $adminURL|sed 's/^.*http.*\:\/\///'|sed 's/.[^:]*$//')
 adminPort=$(echo $adminURL|sed 's/^.*://'|sed 's/.[^\/]*$//')
-echo ">>Verifying connectivity to the admin endpoint $adminIP:$adminPort..."
-verify_connectivity $adminIP $adminPort
+https_enabled=$(echo $adminURL | grep 'https')
+if [[ -n $https_enabled ]]; then
+    echo ">>Verifying SSL connectivity to the admin endpoint $adminIP:$adminPort..."
+    verify_SSL_connectivity $adminIP $adminPort
+else
+    echo ">>Verifying connectivity to the admin endpoint $adminIP:$adminPort..."
+    verify_connectivity $adminIP $adminPort
+fi
 RETVAL=$?
 if [ $RETVAL -ne 0 ]; then
     echo "ERROR: Cannot talk to the admin endpoint $adminIP:$adminPort ."
index 00e44ad..78f6257 100755 (executable)
@@ -125,7 +125,7 @@ rally:
 
 refstack:
     tempest_conf_path: openstack/refstack_client/tempest.conf
-    defcore_list: openstack/refstack_client/defcore_201608.txt
+    defcore_list: openstack/refstack_client/defcore.txt
 
 vnf:
     aaa:
@@ -200,4 +200,6 @@ example:
     sg_desc: Example Security group
 
 results:
+    # you can also set a dir (e.g. /home/opnfv/db) to dump results
+    # test_db_url: file:///home/opnfv/db
     test_db_url: http://testresults.opnfv.org/test/api/v1
index 89d8fc6..3872a07 100755 (executable)
@@ -26,7 +26,7 @@ COL_5_LEN = 75
 logger = ft_logger.Logger("generate_report").getLogger()
 
 
-def init(tiers_to_run):
+def init(tiers_to_run=[]):
     test_cases_arr = []
     for tier in tiers_to_run:
         for test in tier.get_tests():
@@ -91,7 +91,7 @@ def print_separator(char="=", delimiter="+"):
     return str
 
 
-def main(args):
+def main(args=[]):
     executed_test_cases = args
 
     if CONST.IS_CI_RUN:
index 724ea14..e9a470f 100755 (executable)
@@ -312,7 +312,7 @@ def install_tempest():
             logger.debug("Tempest %s does not exist" %
                          CONST.tempest_deployment_name)
             cmd = ("rally verify create-verifier --source {0} "
-                   "--name {1} --type tempest"
+                   "--name {1} --type tempest --system-wide"
                    .format(CONST.dir_repo_tempest,
                            CONST.tempest_deployment_name))
             error_msg = "Problem while installing Tempest."
index bfbc3fd..5f54b97 100755 (executable)
@@ -168,7 +168,7 @@ tiers:
             -
                 name: odl_netvirt
                 criteria: 'success_rate == 100%'
-                blocking: true
+                blocking: false
                 clean_flag: false
                 description: >-
                     Test Suite for the OpenDaylight SDN Controller when
@@ -435,20 +435,6 @@ tiers:
 #                run:
 #                    module: 'functest.opnfv_tests.openstack.tempest.tempest'
 #                    class: 'TempestFullParallel'
-            -
-                name: tempest_defcore
-                criteria: 'success_rate == 100%'
-                blocking: false
-                clean_flag: false
-                description: >-
-                    This is the set of Tempest test cases created by OpenStack
-                    Interop Working Group for certification purposes.
-                dependencies:
-                    installer: ''
-                    scenario: 'nosdn-nofeature-ha'
-                run:
-                    module: 'functest.opnfv_tests.openstack.tempest.tempest'
-                    class: 'TempestDefcore'
             -
                 name: tempest_custom
                 criteria: 'success_rate == 100%'
old mode 100755 (executable)
new mode 100644 (file)
old mode 100755 (executable)
new mode 100644 (file)
index 0300dd2..f5e8605 100644 (file)
@@ -52,8 +52,13 @@ class VnfOnBoardingBase(base.TestcaseBase):
     def execute(self):
         self.start_time = time.time()
         # Prepare the test (Create Tenant, User, ...)
-        self.logger.info("Create VNF Onboarding environment")
-        self.prepare()
+        try:
+            self.logger.info("Create VNF Onboarding environment")
+            self.prepare()
+        except Exception:
+            self.logger.error("Error during VNF Onboarding environment " +
+                              "creation", exc_info=True)
+            return base.TestcaseBase.EX_TESTCASE_FAILED
 
         # Deploy orchestrator
         try:
@@ -179,11 +184,11 @@ class VnfOnBoardingBase(base.TestcaseBase):
     # TODO see how to use built-in exception from releng module
     def deploy_vnf(self):
         self.logger.error("VNF must be deployed")
-        return base.TestcaseBase.EX_TESTCASE_FAILED
+        raise Exception("VNF not deployed")
 
     def test_vnf(self):
         self.logger.error("VNF must be tested")
-        return base.TestcaseBase.EX_TESTCASE_FAILED
+        raise Exception("VNF not tested")
 
     def clean(self):
         self.logger.info("test cleaning")
@@ -232,4 +237,4 @@ class VnfOnBoardingBase(base.TestcaseBase):
         self.details[part]['status'] = 'FAIL'
         self.details[part]['result'] = error_msg
         self.logger.error("Step failure:{}".format(error_msg))
-        return base.TestcaseBase.EX_TESTCASE_FAILED
+        raise Exception(error_msg)
old mode 100755 (executable)
new mode 100644 (file)
old mode 100755 (executable)
new mode 100644 (file)
old mode 100755 (executable)
new mode 100644 (file)
index 3b68d42..431cd47 100644 (file)
@@ -8,13 +8,15 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 #
 import functest.core.feature_base as base
+from sfc.tests.functest import run_tests
 
 
 class OpenDaylightSFC(base.FeatureBase):
 
     def __init__(self):
         super(OpenDaylightSFC, self).__init__(project='sfc',
-                                              case='functest-odl-sfc"',
+                                              case='functest-odl-sfc',
                                               repo='dir_repo_sfc')
-        dir_sfc_functest = '{}/sfc/tests/functest'.format(self.repo)
-        self.cmd = 'cd %s && python ./run_tests.py' % dir_sfc_functest
+
+    def execute(self):
+        return run_tests.main()
old mode 100755 (executable)
new mode 100644 (file)
old mode 100755 (executable)
new mode 100644 (file)
old mode 100755 (executable)
new mode 100644 (file)
old mode 100755 (executable)
new mode 100644 (file)
@@ -77,7 +77,7 @@ tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_u
 tempest.api.compute.test_quotas.QuotasTestJSON.test_get_default_quotas[id-9bfecac7-b966-4f47-913f-1a9e2c12134a]
 tempest.api.compute.test_quotas.QuotasTestJSON.test_get_quotas[id-f1ef0a97-dbbb-4cca-adc5-c9fbc4f76107]
 tempest.api.compute.test_versions.TestVersions.test_list_api_versions[id-6c0a0990-43b6-4529-9b61-5fd8daf7c55c]
-tempest.api.compute.volumes.test_attach_volume.AttachVolumeTestJSON.test_attach_detach_volume[id-52e9045a-e90d-4c0d-9087-79d657faffff]
+tempest.api.compute.volumes.test_attach_volume.AttachVolumeTestJSON.test_attach_detach_volume[id-52e9045a-e90d-4c0d-9087-79d657faffff]
 tempest.api.compute.volumes.test_attach_volume.AttachVolumeTestJSON.test_list_get_volume_attachments[id-7fa563fe-f0f7-43eb-9e22-a1ece036b513]
 tempest.api.identity.v3.TestApiDiscovery.test_api_media_types[id-657c1970-4722-4189-8831-7325f3bc4265]
 tempest.api.identity.v3.TestApiDiscovery.test_api_version_resources[id-b9232f5e-d9e5-4d97-b96c-28d3db4de1bd]
index d388dcd..7d4c568 100755 (executable)
@@ -7,7 +7,10 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 import argparse
 import os
+import re
 import sys
+import subprocess
+import time
 
 from functest.core import testcase_base
 from functest.opnfv_tests.openstack.tempest import conf_utils
@@ -24,6 +27,7 @@ class RefstackClient(testcase_base.TestcaseBase):
 
     def __init__(self):
         super(RefstackClient, self).__init__()
+        self.case_name = "refstack_defcore"
         self.FUNCTEST_TEST = CONST.dir_functest_test
         self.CONF_PATH = CONST.refstack_tempest_conf_path
         self.DEFCORE_LIST = CONST.refstack_defcore_list
@@ -63,7 +67,80 @@ class RefstackClient(testcase_base.TestcaseBase):
                "cd -;".format(CONST.dir_refstack_client,
                               self.confpath,
                               self.defcorelist))
-        ft_utils.execute_command(cmd)
+        logger.info("Starting Refstack_defcore test case: '%s'." % cmd)
+
+        header = ("Tempest environment:\n"
+                  "  Installer: %s\n  Scenario: %s\n  Node: %s\n  Date: %s\n" %
+                  (CONST.INSTALLER_TYPE,
+                   CONST.DEPLOY_SCENARIO,
+                   CONST.NODE_NAME,
+                   time.strftime("%a %b %d %H:%M:%S %Z %Y")))
+
+        f_stdout = open(
+            os.path.join(conf_utils.REFSTACK_RESULTS_DIR,
+                         "refstack.log"), 'w+')
+        f_stderr = open(
+            os.path.join(conf_utils.REFSTACK_RESULTS_DIR,
+                         "refstack-error.log"), 'w+')
+        f_env = open(os.path.join(conf_utils.REFSTACK_RESULTS_DIR,
+                                  "environment.log"), 'w+')
+        f_env.write(header)
+
+        p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
+                             stderr=f_stderr, bufsize=1)
+
+        with p.stdout:
+            for line in iter(p.stdout.readline, b''):
+                if 'Tests' in line:
+                    break
+                logger.info(line.replace('\n', ''))
+                f_stdout.write(line)
+        p.wait()
+
+        f_stdout.close()
+        f_stderr.close()
+        f_env.close()
+
+    def parse_refstack_result(self):
+        try:
+            with open(os.path.join(conf_utils.REFSTACK_RESULTS_DIR,
+                                   "refstack.log"), 'r') as logfile:
+                output = logfile.read()
+
+            for match in re.findall("Ran: (\d+) tests in (\d+\.\d{4}) sec.",
+                                    output):
+                num_tests = match[0]
+            for match in re.findall("- Passed: (\d+)", output):
+                num_success = match
+            for match in re.findall("- Skipped: (\d+)", output):
+                num_skipped = match
+            for match in re.findall("- Failed: (\d+)", output):
+                num_failures = match
+            success_testcases = ""
+            for match in re.findall(r"\{0\}(.*?)[. ]*ok", output):
+                success_testcases += match + ", "
+            failed_testcases = ""
+            for match in re.findall(r"\{0\}(.*?)[. ]*FAILED", output):
+                failed_testcases += match + ", "
+            skipped_testcases = ""
+            for match in re.findall(r"\{0\}(.*?)[. ]*SKIPPED:", output):
+                skipped_testcases += match + ", "
+
+            num_executed = int(num_tests) - int(num_skipped)
+            success_rate = 100 * int(num_success) / int(num_executed)
+
+            self.details = {"tests": int(num_tests),
+                            "failures": int(num_failures),
+                            "success": success_testcases,
+                            "errors": failed_testcases,
+                            "skipped": skipped_testcases}
+        except Exception:
+            success_rate = 0
+
+        self.criteria = ft_utils.check_success_rate(
+            self.case_name, success_rate)
+        logger.info("Testcase %s success_rate is %s%%, is marked as %s"
+                    % (self.case_name, success_rate, self.criteria))
 
     def defcore_env_prepare(self):
         try:
@@ -80,14 +157,21 @@ class RefstackClient(testcase_base.TestcaseBase):
         return res
 
     def run(self):
+        self.start_time = time.time()
+
+        if not os.path.exists(conf_utils.REFSTACK_RESULTS_DIR):
+            os.makedirs(conf_utils.REFSTACK_RESULTS_DIR)
+
         try:
             self.defcore_env_prepare()
             self.run_defcore_default()
+            self.parse_refstack_result()
             res = testcase_base.TestcaseBase.EX_OK
         except Exception as e:
             logger.error('Error with run: %s', e)
             res = testcase_base.TestcaseBase.EX_RUN_ERROR
 
+        self.stop_time = time.time()
         return res
 
     def main(self, **kwargs):
index 1854997..a21322d 100644 (file)
@@ -35,6 +35,8 @@ TEMPEST_DEFCORE = os.path.join(REPO_PATH, TEMPEST_TEST_LIST_DIR,
                                'defcore_req.txt')
 TEMPEST_RAW_LIST = os.path.join(TEMPEST_RESULTS_DIR, 'test_raw_list.txt')
 TEMPEST_LIST = os.path.join(TEMPEST_RESULTS_DIR, 'test_list.txt')
+REFSTACK_RESULTS_DIR = os.path.join(CONST.dir_results,
+                                    'refstack')
 
 CI_INSTALLER_TYPE = CONST.INSTALLER_TYPE
 CI_INSTALLER_IP = CONST.INSTALLER_IP
index 2ced92e..f7dfd53 100644 (file)
@@ -203,7 +203,7 @@ class ImsVnf(vnf_base.VnfOnBoardingBase):
         flavor_exist, flavor_id = os_utils.get_or_create_flavor(
             "m1.small",
             self.vnf['requirements']['ram_min'],
-            '20',
+            '30',
             '1',
             public=True)
         self.logger.debug("Flavor id: %s" % flavor_id)
@@ -261,6 +261,9 @@ class ImsVnf(vnf_base.VnfOnBoardingBase):
         dns_ip = dep_outputs.json()['outputs']['dns_ip']
         ellis_ip = dep_outputs.json()['outputs']['ellis_ip']
 
+        self.logger.debug("DNS ip : %s" % dns_ip)
+        self.logger.debug("ELLIS ip : %s" % ellis_ip)
+
         ellis_url = "http://" + ellis_ip + "/"
         url = ellis_url + "accounts"
 
@@ -270,9 +273,11 @@ class ImsVnf(vnf_base.VnfOnBoardingBase):
                   "signup_code": "secret"}
 
         rq = requests.post(url, data=params)
-        i = 20
+        i = 30
         while rq.status_code != 201 and i > 0:
             rq = requests.post(url, data=params)
+            self.logger.debug("Account creation http status code: %s"
+                              % rq.status_code)
             i = i - 1
             time.sleep(10)
 
@@ -281,8 +286,7 @@ class ImsVnf(vnf_base.VnfOnBoardingBase):
             rq = requests.post(url, data=params)
             cookies = rq.cookies
         else:
-            self.step_failure("Unable to create an account for number" +
-                              " provision: %s" % rq.json()['reason'])
+            self.step_failure("Unable to create an account")
 
         url = ellis_url + "accounts/" + params['email'] + "/numbers"
         if cookies != "":
@@ -290,6 +294,8 @@ class ImsVnf(vnf_base.VnfOnBoardingBase):
             i = 24
             while rq.status_code != 200 and i > 0:
                 rq = requests.post(url, cookies=cookies)
+                self.logger.debug("Number creation http status code: %s"
+                                  % rq.status_code)
                 i = i - 1
                 time.sleep(25)
 
index b84ef8f..74b9e95 100644 (file)
@@ -6,7 +6,7 @@ cloudify:
         url: https://github.com/boucherv-orange/cloudify-manager-blueprints.git
         branch: '3.3.1-build'
     requirements:
-        ram_min: 4000
+        ram_min: 4096
         os_image: centos_7
     inputs:
       keystone_username: ""
@@ -29,7 +29,7 @@ clearwater:
         branch: stable
     deployment_name: clearwater-opnfv
     requirements:
-        ram_min: 2000
+        ram_min: 2048
         os_image: ubuntu_14.04
     inputs:
         image_id: ''
old mode 100644 (file)
new mode 100755 (executable)
old mode 100644 (file)
new mode 100755 (executable)
index 42b218e..d13fe8f
@@ -245,7 +245,7 @@ class ImsVnf(vnf_base.VnfOnBoardingBase):
                          % (self.imagename, network_id, userdata))
 
         instance = os_utils.create_instance_and_wait_for_active(
-            "m1.medium",
+            "orchestra",
             os_utils.get_image_id(glance_client, self.imagename),
             network_id,
             "orchestra-openbaton",
@@ -308,12 +308,15 @@ class ImsVnf(vnf_base.VnfOnBoardingBase):
         if self.ob_projectid == "":
             self.step_failure("Default project id was not found!")
 
+        creds = os_utils.get_credentials()
+        self.logger.info("PoP creds: %s" % creds)
+
         vim_json = {
             "name": "vim-instance",
-            "authUrl": os_utils.get_credentials().get("auth_url"),
-            "tenant": os_utils.get_credentials().get("tenant_name"),
-            "username": os_utils.get_credentials().get("username"),
-            "password": os_utils.get_credentials().get("password"),
+            "authUrl": creds.get("auth_url"),
+            "tenant": os.environ.get("OS_PROJECT_ID"),
+            "username": creds.get("username"),
+            "password": creds.get("password"),
             "securityGroups": [
                 "default",
                 "orchestra-sec-group"
index 86d6e60..5923a77 100644 (file)
@@ -2,8 +2,8 @@ tenant_images:
     ubuntu_14.04: http://cloud-images.ubuntu.com/trusty/current/trusty-server-cloudimg-amd64-disk1.img
     openims: http://marketplace.openbaton.org:8082/api/v1/images/52e2ccc0-1dce-4663-894d-28aab49323aa/img
 openbaton:
-    bootstrap_link: http://get.openbaton.org/bootstrap
-    bootstrap_config_link: http://get.openbaton.org/bootstrap-config-file
+    bootstrap_link: http://get.openbaton.org/bootstraps/bootstrap_3.2.0_opnfv/bootstrap
+    bootstrap_config_link: http://get.openbaton.org/bootstraps/bootstrap_3.2.0_opnfv/bootstrap-config-file
     marketplace_link: http://marketplace.openbaton.org:8082/api/v1/nsds/fokus/OpenImsCore/3.2.0/json
     imagename: ubuntu_14.04
 vIMS:
old mode 100755 (executable)
new mode 100644 (file)
old mode 100755 (executable)
new mode 100644 (file)
diff --git a/functest/tests/unit/ci/test_generate_report.py b/functest/tests/unit/ci/test_generate_report.py
new file mode 100644 (file)
index 0000000..2225586
--- /dev/null
@@ -0,0 +1,129 @@
+#!/usr/bin/env python
+
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+
+import logging
+import unittest
+import urllib2
+
+import mock
+
+from functest.ci import generate_report as gen_report
+from functest.tests.unit import test_utils
+from functest.utils import functest_utils as ft_utils
+from functest.utils.constants import CONST
+
+
+class GenerateReportTesting(unittest.TestCase):
+
+    logging.disable(logging.CRITICAL)
+
+    def test_init(self):
+        test_array = gen_report.init()
+        self.assertEqual(test_array, [])
+
+    @mock.patch('functest.ci.generate_report.urllib2.urlopen',
+                side_effect=urllib2.URLError('no host given'))
+    def test_get_results_from_db_fail(self, mock_method):
+        url = "%s/results?build_tag=%s" % (ft_utils.get_db_url(),
+                                           CONST.BUILD_TAG)
+        self.assertIsNone(gen_report.get_results_from_db())
+        mock_method.assert_called_once_with(url)
+
+    @mock.patch('functest.ci.generate_report.urllib2.urlopen',
+                return_value={'results': []})
+    def test_get_results_from_db_success(self, mock_method):
+        url = "%s/results?build_tag=%s" % (ft_utils.get_db_url(),
+                                           CONST.BUILD_TAG)
+        self.assertEqual(gen_report.get_results_from_db(), None)
+        mock_method.assert_called_once_with(url)
+
+    def test_get_data(self):
+        self.assertIsInstance(gen_report.get_data({'result': ''}, ''), dict)
+
+    def test_print_line_with_ci_run(self):
+        CONST.IS_CI_RUN = True
+        w1 = 'test_print_line'
+        test_str = ("| %s| %s| %s| %s| %s|\n"
+                    % (w1.ljust(gen_report.COL_1_LEN - 1),
+                       ''.ljust(gen_report.COL_2_LEN - 1),
+                       ''.ljust(gen_report.COL_3_LEN - 1),
+                       ''.ljust(gen_report.COL_4_LEN - 1),
+                       ''.ljust(gen_report.COL_5_LEN - 1)))
+        self.assertEqual(gen_report.print_line(w1), test_str)
+
+    def test_print_line_without_ci_run(self):
+        CONST.IS_CI_RUN = False
+        w1 = 'test_print_line'
+        test_str = ("| %s| %s| %s| %s|\n"
+                    % (w1.ljust(gen_report.COL_1_LEN - 1),
+                       ''.ljust(gen_report.COL_2_LEN - 1),
+                       ''.ljust(gen_report.COL_3_LEN - 1),
+                       ''.ljust(gen_report.COL_4_LEN - 1)))
+        self.assertEqual(gen_report.print_line(w1), test_str)
+
+    def test_print_line_no_column_with_ci_run(self):
+        CONST.IS_CI_RUN = True
+        TOTAL_LEN = gen_report.COL_1_LEN + gen_report.COL_2_LEN
+        TOTAL_LEN += gen_report.COL_3_LEN + gen_report.COL_4_LEN + 2
+        TOTAL_LEN += gen_report.COL_5_LEN + 1
+        test_str = ("| %s|\n" % 'test'.ljust(TOTAL_LEN))
+        self.assertEqual(gen_report.print_line_no_columns('test'), test_str)
+
+    def test_print_line_no_column_without_ci_run(self):
+        CONST.IS_CI_RUN = False
+        TOTAL_LEN = gen_report.COL_1_LEN + gen_report.COL_2_LEN
+        TOTAL_LEN += gen_report.COL_3_LEN + gen_report.COL_4_LEN + 2
+        test_str = ("| %s|\n" % 'test'.ljust(TOTAL_LEN))
+        self.assertEqual(gen_report.print_line_no_columns('test'), test_str)
+
+    def test_print_separator_with_ci_run(self):
+        CONST.IS_CI_RUN = True
+        test_str = ("+" + "=" * gen_report.COL_1_LEN +
+                    "+" + "=" * gen_report.COL_2_LEN +
+                    "+" + "=" * gen_report.COL_3_LEN +
+                    "+" + "=" * gen_report.COL_4_LEN +
+                    "+" + "=" * gen_report.COL_5_LEN)
+        test_str += '+\n'
+        self.assertEqual(gen_report.print_separator(), test_str)
+
+    def test_print_separator_without_ci_run(self):
+        CONST.IS_CI_RUN = False
+        test_str = ("+" + "=" * gen_report.COL_1_LEN +
+                    "+" + "=" * gen_report.COL_2_LEN +
+                    "+" + "=" * gen_report.COL_3_LEN +
+                    "+" + "=" * gen_report.COL_4_LEN)
+        test_str += "+\n"
+        self.assertEqual(gen_report.print_separator(), test_str)
+
+    @mock.patch('functest.ci.generate_report.logger.info')
+    def test_main_with_ci_run(self, mock_method):
+        CONST.IS_CI_RUN = True
+        gen_report.main()
+        mock_method.assert_called_once_with(test_utils.SubstrMatch('URL'))
+
+    @mock.patch('functest.ci.generate_report.logger.info')
+    def test_main_with_ci_loop(self, mock_method):
+        CONST.CI_LOOP = 'daily'
+        gen_report.main()
+        mock_method.assert_called_once_with(test_utils.SubstrMatch('CI LOOP'))
+
+    @mock.patch('functest.ci.generate_report.logger.info')
+    def test_main_with_scenario(self, mock_method):
+        CONST.DEPLOY_SCENARIO = 'test_scenario'
+        gen_report.main()
+        mock_method.assert_called_once_with(test_utils.SubstrMatch('SCENARIO'))
+
+    @mock.patch('functest.ci.generate_report.logger.info')
+    def test_main_with_build_tag(self, mock_method):
+        CONST.BUILD_TAG = 'test_build_tag'
+        gen_report.main()
+        mock_method.assert_called_once_with(test_utils.
+                                            SubstrMatch('BUILD TAG'))
+
+
+if __name__ == "__main__":
+    unittest.main(verbosity=2)
diff --git a/functest/tests/unit/ci/test_run_tests.py b/functest/tests/unit/ci/test_run_tests.py
new file mode 100644 (file)
index 0000000..0214061
--- /dev/null
@@ -0,0 +1,192 @@
+#!/usr/bin/env python
+
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+
+
+import unittest
+import logging
+
+import mock
+
+from functest.ci import run_tests
+from functest.utils.constants import CONST
+
+
+class RunTestsTesting(unittest.TestCase):
+
+    logging.disable(logging.CRITICAL)
+
+    def setUp(self):
+        self.sep = 'test_sep'
+        self.creds = {'OS_AUTH_URL': 'http://test_ip:test_port/v2.0',
+                      'OS_USERNAME': 'test_os_username',
+                      'OS_TENANT_NAME': 'test_tenant',
+                      'OS_PASSWORD': 'test_password'}
+        self.test = {'test_name': 'test_name'}
+        self.tier = mock.Mock()
+        attrs = {'get_name.return_value': 'test_tier',
+                 'get_tests.return_value': ['test1', 'test2'],
+                 'get_ci_loop.return_value': 'test_ci_loop',
+                 'get_test_names.return_value': ['test1', 'test2']}
+        self.tier.configure_mock(**attrs)
+
+        self.tiers = mock.Mock()
+        attrs = {'get_tiers.return_value': [self.tier]}
+        self.tiers.configure_mock(**attrs)
+
+    @mock.patch('functest.ci.run_tests.logger.info')
+    def test_print_separator(self, mock_logger_info):
+        run_tests.print_separator(self.sep)
+        mock_logger_info.assert_called_once_with(self.sep * 44)
+
+    @mock.patch('functest.ci.run_tests.logger.error')
+    def test_source_rc_file_missing_file(self, mock_logger_error):
+        with mock.patch('functest.ci.run_tests.os.path.isfile',
+                        return_value=False), \
+                self.assertRaises(Exception):
+            run_tests.source_rc_file()
+
+    @mock.patch('functest.ci.run_tests.logger.debug')
+    def test_source_rc_file_default(self, mock_logger_debug):
+        with mock.patch('functest.ci.run_tests.os.path.isfile',
+                        return_value=True), \
+            mock.patch('functest.ci.run_tests.os_utils.source_credentials',
+                       return_value=self.creds):
+            run_tests.source_rc_file()
+
+    @mock.patch('functest.ci.run_tests.os_snapshot.main')
+    def test_generate_os_snapshot(self, mock_os_snap):
+        run_tests.generate_os_snapshot()
+        self.assertTrue(mock_os_snap.called)
+
+    @mock.patch('functest.ci.run_tests.os_clean.main')
+    def test_cleanup(self, mock_os_clean):
+        run_tests.cleanup()
+        self.assertTrue(mock_os_clean.called)
+
+    def test_update_test_info(self):
+        run_tests.GlobalVariables.EXECUTED_TEST_CASES = [self.test]
+        run_tests.update_test_info('test_name',
+                                   'test_result',
+                                   'test_duration')
+        exp = self.test
+        exp.update({"result": 'test_result',
+                    "duration": 'test_duration'})
+        self.assertEqual(run_tests.GlobalVariables.EXECUTED_TEST_CASES,
+                         [exp])
+
+    def test_get_run_dict_if_defined_default(self):
+        mock_obj = mock.Mock()
+        with mock.patch('functest.ci.run_tests.'
+                        'ft_utils.get_dict_by_test',
+                        return_value={'run': mock_obj}):
+            self.assertEqual(run_tests.get_run_dict('test_name'),
+                             mock_obj)
+
+    @mock.patch('functest.ci.run_tests.logger.error')
+    def test_get_run_dict_if_defined_missing_config_option(self,
+                                                           mock_logger_error):
+        with mock.patch('functest.ci.run_tests.'
+                        'ft_utils.get_dict_by_test',
+                        return_value=None):
+            testname = 'test_name'
+            self.assertEqual(run_tests.get_run_dict(testname),
+                             None)
+            mock_logger_error.assert_called_once_with("Cannot get {}'s config "
+                                                      "options"
+                                                      .format(testname))
+
+        with mock.patch('functest.ci.run_tests.'
+                        'ft_utils.get_dict_by_test',
+                        return_value={}):
+            testname = 'test_name'
+            self.assertEqual(run_tests.get_run_dict(testname),
+                             None)
+
+    @mock.patch('functest.ci.run_tests.logger.exception')
+    def test_get_run_dict_if_defined_exception(self,
+                                               mock_logger_except):
+        with mock.patch('functest.ci.run_tests.'
+                        'ft_utils.get_dict_by_test',
+                        side_effect=Exception):
+            testname = 'test_name'
+            self.assertEqual(run_tests.get_run_dict(testname),
+                             None)
+            mock_logger_except.assert_called_once_with("Cannot get {}'s config"
+                                                       " options"
+                                                       .format(testname))
+
+    def test_run_tests_import_test_class_exception(self):
+        mock_test = mock.Mock()
+        args = {'get_name': 'test_name',
+                'needs_clean': False}
+        mock_test.configure_mock(**args)
+        with mock.patch('functest.ci.run_tests.print_separator'),\
+            mock.patch('functest.ci.run_tests.source_rc_file'), \
+            mock.patch('functest.ci.run_tests.get_run_dict',
+                       return_value=None), \
+                self.assertRaises(Exception) as context:
+            run_tests.run_test(mock_test, 'tier_name')
+            msg = "Cannot import the class for the test case."
+            self.assertTrue(msg in context)
+
+    @mock.patch('functest.ci.run_tests.logger.info')
+    def test_run_tier_default(self, mock_logger_info):
+        with mock.patch('functest.ci.run_tests.print_separator'), \
+                mock.patch('functest.ci.run_tests.run_test') as mock_method:
+            run_tests.run_tier(self.tier)
+            mock_method.assert_any_call('test1', 'test_tier')
+            mock_method.assert_any_call('test2', 'test_tier')
+            self.assertTrue(mock_logger_info.called)
+
+    @mock.patch('functest.ci.run_tests.logger.info')
+    def test_run_tier_missing_test(self, mock_logger_info):
+        with mock.patch('functest.ci.run_tests.print_separator'):
+            self.tier.get_tests.return_value = None
+            self.assertEqual(run_tests.run_tier(self.tier), 0)
+            self.assertTrue(mock_logger_info.called)
+
+    @mock.patch('functest.ci.run_tests.logger.info')
+    def test_run_all_default(self, mock_logger_info):
+        with mock.patch('functest.ci.run_tests.run_tier') as mock_method, \
+            mock.patch('functest.ci.run_tests.generate_report.init'), \
+                mock.patch('functest.ci.run_tests.generate_report.main'):
+            CONST.CI_LOOP = 'test_ci_loop'
+            run_tests.run_all(self.tiers)
+            mock_method.assert_any_call(self.tier)
+            self.assertTrue(mock_logger_info.called)
+
+    @mock.patch('functest.ci.run_tests.logger.info')
+    def test_run_all__missing_tier(self, mock_logger_info):
+        with mock.patch('functest.ci.run_tests.generate_report.init'), \
+                mock.patch('functest.ci.run_tests.generate_report.main'):
+            CONST.CI_LOOP = 'loop_re_not_available'
+            run_tests.run_all(self.tiers)
+            self.assertTrue(mock_logger_info.called)
+
+    def test_main_failed(self):
+        kwargs = {'test': 'test_name', 'noclean': True, 'report': True}
+        mock_obj = mock.Mock()
+        args = {'get_tier.return_value': False,
+                'get_test.return_value': False}
+        mock_obj.configure_mock(**args)
+
+        with mock.patch('functest.ci.run_tests.tb.TierBuilder'), \
+            mock.patch('functest.ci.run_tests.source_rc_file',
+                       side_effect=Exception):
+            self.assertEqual(run_tests.main(**kwargs),
+                             run_tests.Result.EX_ERROR)
+
+        with mock.patch('functest.ci.run_tests.tb.TierBuilder',
+                        return_value=mock_obj), \
+            mock.patch('functest.ci.run_tests.source_rc_file',
+                       side_effect=Exception):
+            self.assertEqual(run_tests.main(**kwargs),
+                             run_tests.Result.EX_ERROR)
+
+
+if __name__ == "__main__":
+    unittest.main(verbosity=2)
diff --git a/functest/tests/unit/ci/test_tier_builder.py b/functest/tests/unit/ci/test_tier_builder.py
new file mode 100644 (file)
index 0000000..48c94a5
--- /dev/null
@@ -0,0 +1,83 @@
+#!/usr/bin/env python
+
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+
+import logging
+import unittest
+
+import mock
+
+from functest.ci import tier_builder
+
+
+class TierBuilderTesting(unittest.TestCase):
+
+    logging.disable(logging.CRITICAL)
+
+    def setUp(self):
+        self.dependency = {'installer': 'test_installer',
+                           'scenario': 'test_scenario'}
+
+        self.testcase = {'dependencies': self.dependency,
+                         'name': 'test_name',
+                         'criteria': 'test_criteria',
+                         'blocking': 'test_blocking',
+                         'clean_flag': 'test_clean_flag',
+                         'description': 'test_desc'}
+
+        self.dic_tier = {'name': 'test_tier',
+                         'order': 'test_order',
+                         'ci_loop': 'test_ci_loop',
+                         'description': 'test_desc',
+                         'testcases': [self.testcase]}
+
+        self.mock_yaml = mock.Mock()
+        attrs = {'get.return_value': [self.dic_tier]}
+        self.mock_yaml.configure_mock(**attrs)
+
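+        # stub out yaml loading and open() so no real testcases file is read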
+        with mock.patch('functest.ci.tier_builder.yaml.safe_load',
+                        return_value=self.mock_yaml), \
+                mock.patch('__builtin__.open', mock.mock_open()):
+            self.tierbuilder = tier_builder.TierBuilder('test_installer',
+                                                        'test_scenario',
+                                                        'testcases_file')
+        self.tier_obj = self.tierbuilder.tier_objects[0]
+
+    def test_get_tiers(self):
+        self.assertEqual(self.tierbuilder.get_tiers(),
+                         [self.tier_obj])
+
+    def test_get_tier_names(self):
+        self.assertEqual(self.tierbuilder.get_tier_names(),
+                         ['test_tier'])
+
+    def test_get_tier_present_tier(self):
+        self.assertEqual(self.tierbuilder.get_tier('test_tier'),
+                         self.tier_obj)
+
+    def test_get_tier_missing_tier(self):
+        self.assertEqual(self.tierbuilder.get_tier('test_tier2'),
+                         None)
+
+    def test_get_test_present_test(self):
+        self.assertEqual(self.tierbuilder.get_test('test_name'),
+                         self.tier_obj.get_test('test_name'))
+
+    def test_get_test_missing_test(self):
+        self.assertEqual(self.tierbuilder.get_test('test_name2'),
+                         None)
+
+    def test_get_tests_present_tier(self):
+        self.assertEqual(self.tierbuilder.get_tests('test_tier'),
+                         self.tier_obj.tests_array)
+
+    def test_get_tests_missing_tier(self):
+        self.assertEqual(self.tierbuilder.get_tests('test_tier2'),
+                         None)
+
+
+if __name__ == "__main__":
+    unittest.main(verbosity=2)
diff --git a/functest/tests/unit/ci/test_tier_handler.py b/functest/tests/unit/ci/test_tier_handler.py
new file mode 100644 (file)
index 0000000..01d99d7
--- /dev/null
@@ -0,0 +1,128 @@
+#!/usr/bin/env python
+
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+
+import logging
+import unittest
+
+import mock
+
+from functest.ci import tier_handler
+
+
+class TierHandlerTesting(unittest.TestCase):
+
+    logging.disable(logging.CRITICAL)
+
+    def setUp(self):
+        self.test = mock.Mock()
+        attrs = {'get_name.return_value': 'test_name'}
+        self.test.configure_mock(**attrs)
+
+        self.mock_depend = mock.Mock()
+        attrs = {'get_scenario.return_value': 'test_scenario',
+                 'get_installer.return_value': 'test_installer'}
+        self.mock_depend.configure_mock(**attrs)
+
+        self.tier = tier_handler.Tier('test_tier',
+                                      'test_order',
+                                      'test_ci_loop',
+                                      description='test_desc')
+        self.testcase = tier_handler.TestCase('test_name',
+                                              self.mock_depend,
+                                              'test_criteria',
+                                              'test_blocking',
+                                              'test_clean_flag',
+                                              description='test_desc')
+
+        self.dependency = tier_handler.Dependency('test_installer',
+                                                  'test_scenario')
+
+    def test_add_test(self):
+        self.tier.add_test(self.test)
+        self.assertEqual(self.tier.tests_array,
+                         [self.test])
+
+    def test_get_tests(self):
+        self.tier.tests_array = [self.test]
+        self.assertEqual(self.tier.get_tests(),
+                         [self.test])
+
+    def test_get_test_names(self):
+        self.tier.tests_array = [self.test]
+        self.assertEqual(self.tier.get_test_names(),
+                         ['test_name'])
+
+    def test_get_test(self):
+        self.tier.tests_array = [self.test]
+        with mock.patch.object(self.tier, 'is_test',
+                               return_value=True):
+            self.assertEqual(self.tier.get_test('test_name'),
+                             self.test)
+
+    def test_get_test_missing_test(self):
+        self.tier.tests_array = [self.test]
+        with mock.patch.object(self.tier, 'is_test',
+                               return_value=False):
+            self.assertEqual(self.tier.get_test('test_name'),
+                             None)
+
+    def test_get_name(self):
+        self.assertEqual(self.tier.get_name(),
+                         'test_tier')
+
+    def test_get_order(self):
+        self.assertEqual(self.tier.get_order(),
+                         'test_order')
+
+    def test_get_ci_loop(self):
+        self.assertEqual(self.tier.get_ci_loop(),
+                         'test_ci_loop')
+
+    def test_testcase_is_none_present_item(self):
+        self.assertEqual(tier_handler.TestCase.is_none("item"),
+                         False)
+
+    def test_testcase_is_none_missing_item(self):
+        self.assertEqual(tier_handler.TestCase.is_none(None),
+                         True)
+
+    def test_testcase_is_compatible(self):
+        self.assertEqual(self.testcase.is_compatible('test_installer',
+                                                     'test_scenario'),
+                         True)
+
+    def test_testcase_is_compatible_missing_installer_scenario(self):
+        self.assertEqual(self.testcase.is_compatible('missing_installer',
+                                                     'test_scenario'),
+                         False)
+        self.assertEqual(self.testcase.is_compatible('test_installer',
+                                                     'missing_scenario'),
+                         False)
+
+    def test_testcase_get_name(self):
+        self.assertEqual(self.testcase.get_name(),
+                         'test_name')
+
+    def test_testcase_get_criteria(self):
+        self.assertEqual(self.testcase.get_criteria(),
+                         'test_criteria')
+
+    def test_testcase_is_blocking(self):
+        self.assertEqual(self.testcase.is_blocking(),
+                         'test_blocking')
+
+    def test_dependency_get_installer(self):
+        self.assertEqual(self.dependency.get_installer(),
+                         'test_installer')
+
+    def test_dependency_get_scenario(self):
+        self.assertEqual(self.dependency.get_scenario(),
+                         'test_scenario')
+
+
+if __name__ == "__main__":
+    unittest.main(verbosity=2)
index 25a74b7..1680f03 100644 (file)
@@ -8,11 +8,9 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 
 import logging
-import mock
 import unittest
 
 from functest.core import vnf_base
-from functest.core import testcase_base
 
 
 class VnfBaseTesting(unittest.TestCase):
@@ -37,17 +35,15 @@ class VnfBaseTesting(unittest.TestCase):
                                           "result": "",
                                           "duration": 5}}
 
-    @mock.patch('logging.Logger.error')
-    def test_deploy_vnf_unimplemented(self, mock):
-        self.assertEqual(self.test.deploy_vnf(),
-                         testcase_base.TestcaseBase.EX_TESTCASE_FAILED)
-        mock.assert_called_with('VNF must be deployed')
-
-    @mock.patch('logging.Logger.error')
-    def test_test_vnf_unimplemented(self, mock):
-        self.assertEqual(self.test.test_vnf(),
-                         testcase_base.TestcaseBase.EX_TESTCASE_FAILED)
-        mock.assert_called_with('VNF must be tested')
+    def test_deploy_vnf_unimplemented(self):
+        with self.assertRaises(Exception) as context:
+            self.test.deploy_vnf()
+        self.assertTrue('VNF not deployed' in context.exception)
+
+    def test_test_vnf_unimplemented(self):
+        with self.assertRaises(Exception) as context:
+            self.test.test_vnf()
+        self.assertTrue('VNF not tested' in context.exception)
 
     def test_parse_results(self):
         self.assertNotEqual(self.test.parse_results(), 0)
index 4eb5a25..4e83f6b 100644 (file)
@@ -38,17 +38,6 @@ class OSRefstackClientTesting(unittest.TestCase):
             self.refstackclient.source_venv()
             m.assert_any_call(cmd)
 
-    def test_run_defcore_default(self):
-        with mock.patch('functest.opnfv_tests.openstack.refstack_client.'
-                        'refstack_client.ft_utils.execute_command') as m:
-            cmd = ("cd {0};"
-                   "./refstack-client test -c {1} -v --test-list {2};"
-                   "cd -;".format(CONST.dir_refstack_client,
-                                  self._config,
-                                  self._testlist))
-            self.refstackclient.run_defcore_default()
-            m.assert_any_call(cmd)
-
     def test_run_defcore(self):
         config = 'tempest.conf'
         testlist = 'testlist'
index bb83601..eb241e5 100644 (file)
@@ -33,6 +33,7 @@ class FunctestUtilsTesting(unittest.TestCase):
         self.installer = 'test_installer'
         self.scenario = 'test_scenario'
         self.build_tag = 'jenkins-functest-fuel-opnfv-jump-2-daily-master-190'
+        self.build_tag_week = 'jenkins-functest-fuel-baremetal-weekly-master-8'
         self.version = 'master'
         self.node_name = 'test_node_name'
         self.project = 'test_project'
@@ -56,6 +57,7 @@ class FunctestUtilsTesting(unittest.TestCase):
         self.testcase_dict = {'name': 'testname', 'criteria': self.criteria}
         self.parameter = 'general.openstack.image_name'
         self.config_yaml = 'test_config_yaml-'
+        self.db_url_env = 'http://foo/testdb'
         self.file_yaml = {'general': {'openstack': {'image_name':
                                                     'test_image_name'}}}
 
@@ -151,10 +153,20 @@ class FunctestUtilsTesting(unittest.TestCase):
                              self.scenario)
 
     @mock.patch('functest.utils.functest_utils.get_build_tag')
-    def test_get_version_default(self, mock_get_build_tag):
+    def test_get_version_daily_job(self, mock_get_build_tag):
         mock_get_build_tag.return_value = self.build_tag
         self.assertEqual(functest_utils.get_version(), self.version)
 
+    @mock.patch('functest.utils.functest_utils.get_build_tag')
+    def test_get_version_weekly_job(self, mock_get_build_tag):
+        mock_get_build_tag.return_value = self.build_tag_week
+        self.assertEqual(functest_utils.get_version(), self.version)
+
+    @mock.patch('functest.utils.functest_utils.get_build_tag')
+    def test_get_version_with_dummy_build_tag(self, mock_get_build_tag):
+        mock_get_build_tag.return_value = 'whatever'
+        self.assertEqual(functest_utils.get_version(), 'unknown')
+
     @mock.patch('functest.utils.functest_utils.get_build_tag')
     def test_get_version_unknown(self, mock_get_build_tag):
         mock_get_build_tag.return_value = "unknown_build_tag"
@@ -196,8 +208,17 @@ class FunctestUtilsTesting(unittest.TestCase):
             self.assertEqual(functest_utils.get_build_tag(),
                              self.build_tag)
 
+    def test_get_db_url_env_var(self):
+        with mock.patch.dict(os.environ,
+                             {'TEST_DB_URL': self.db_url_env,
+                              'CONFIG_FUNCTEST_YAML':
+                              "./functest/ci/config_functest.yaml"},
+                             clear=True):
+            self.assertEqual(functest_utils.get_db_url(),
+                             self.db_url_env)
+
     @mock.patch('functest.utils.functest_utils.get_functest_config')
-    def test_get_db_url(self, mock_get_functest_config):
+    def test_get_db_url_default(self, mock_get_functest_config):
         mock_get_functest_config.return_value = self.db_url
         self.assertEqual(functest_utils.get_db_url(), self.db_url)
         mock_get_functest_config.assert_called_once_with('results.test_db_url')
@@ -274,25 +295,6 @@ class FunctestUtilsTesting(unittest.TestCase):
     def test_push_results_to_db_missing_buildtag(self):
         self._test_push_results_to_db_missing_env('BUILD_TAG')
 
-    def test_push_results_to_db_incorrect_buildtag(self):
-        dic = self._get_env_dict(None)
-        dic['BUILD_TAG'] = 'incorrect_build_tag'
-        with mock.patch('functest.utils.functest_utils.get_db_url',
-                        return_value=self.db_url), \
-                mock.patch.dict(os.environ,
-                                dic,
-                                clear=True), \
-                mock.patch('functest.utils.functest_utils.logger.error') \
-                as mock_logger_error:
-            self.assertFalse(functest_utils.
-                             push_results_to_db(self.project, self.case_name,
-                                                self.start_date,
-                                                self.stop_date,
-                                                self.criteria, self.details))
-            mock_logger_error.assert_called_once_with("Please fix BUILD_TAG"
-                                                      " env var: incorrect_"
-                                                      "build_tag")
-
     def test_push_results_to_db_request_post_failed(self):
         dic = self._get_env_dict(None)
         with mock.patch('functest.utils.functest_utils.get_db_url',
index ef3764c..f51a499 100644 (file)
@@ -28,7 +28,8 @@ class OSUtilsTesting(unittest.TestCase):
                 'OS_PROJECT_DOMAIN_NAME': os_prefix + 'project_domain_name',
                 'OS_PROJECT_NAME': os_prefix + 'project_name',
                 'OS_ENDPOINT_TYPE': os_prefix + 'endpoint_type',
-                'OS_REGION_NAME': os_prefix + 'region_name'}
+                'OS_REGION_NAME': os_prefix + 'region_name',
+                'OS_CACERT': os_prefix + 'https_cacert'}
 
     def _get_os_env_vars(self):
         return {'username': 'test_username', 'password': 'test_password',
@@ -37,7 +38,8 @@ class OSUtilsTesting(unittest.TestCase):
                 'project_domain_name': 'test_project_domain_name',
                 'project_name': 'test_project_name',
                 'endpoint_type': 'test_endpoint_type',
-                'region_name': 'test_region_name'}
+                'region_name': 'test_region_name',
+                'https_cacert': 'test_https_cacert'}
 
     def setUp(self):
         self.env_vars = ['OS_AUTH_URL', 'OS_USERNAME', 'OS_PASSWORD']
@@ -299,7 +301,7 @@ class OSUtilsTesting(unittest.TestCase):
                          'OS_PROJECT_DOMAIN_NAME'])
         self.assertEqual(openstack_utils.get_rc_env_vars(), exp_resp)
 
-    @mock.patch('functest.utils.openstack_utils.get_rc_env_vars')
+    @mock.patch('functest.utils.openstack_utils')
     def test_check_credentials_missing_env(self, mock_get_rc_env):
         exp_resp = self.env_vars
         exp_resp.extend(['OS_TENANT_NAME'])
index 99bcef3..276235d 100644 (file)
@@ -1,6 +1,8 @@
 #!/usr/bin/env python
 
+import errno
 import mock
+import os
 import requests.sessions
 import urlparse
 
@@ -10,7 +12,12 @@ def can_dump_request_to_file(method):
     def dump_preparedrequest(request, **kwargs):
         parseresult = urlparse.urlparse(request.url)
         if parseresult.scheme == "file":
-            with open(parseresult.path.replace('/results', ''), 'a') as f:
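+            # create the dump directory first, tolerating an existing one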
+            try:
+                os.makedirs(parseresult.path)
+            except OSError as e:
+                if e.errno != errno.EEXIST:
+                    raise
+            with open(os.path.join(parseresult.path, 'dump.txt'), 'a') as f:
                 headers = ""
                 for key in request.headers:
                     headers += key + " " + request.headers[key] + "\n"
old mode 100755 (executable)
new mode 100644 (file)
index 022211c..555e9c2
@@ -28,13 +28,24 @@ import json
 
 from functest.utils.constants import CONST
 
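+# third-party loggers capped at WARNING to keep functest output readable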
+ignore = ["paramiko",
+          "stevedore.extension",
+          "keystoneauth.session",
+          "keystoneauth.identity.v3.base",
+          "novaclient.v2.client",
+          "neutronclient.v2_0.client",
+          "glanceclient.common.http",
+          "cinderclient.v2.client",
+          "cinderclient.client"]
+
 
 class Logger(object):
 
     def __init__(self, logger_name):
         self.setup_logging()
         self.logger = logging.getLogger(logger_name)
-        logging.getLogger("paramiko").setLevel(logging.WARNING)
+        for module_name in ignore:
+            logging.getLogger(module_name).setLevel(logging.WARNING)
 
     def getLogger(self):
         return self.logger
index dbed811..7cc5029 100644 (file)
@@ -111,12 +111,13 @@ def get_version():
     # if launched through CI the build tag has the following format
     # jenkins-<project>-<installer>-<pod>-<job>-<branch>-<id>
     # e.g. jenkins-functest-fuel-opnfv-jump-2-daily-master-190
+    # jenkins-functest-fuel-baremetal-weekly-master-8
     # use regex to match branch info
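+    # both example tags above resolve to the branch "master"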
-    rule = "daily-(.+?)-[0-9]*"
+    rule = "(dai|week)ly-(.+?)-[0-9]*"
     build_tag = get_build_tag()
     m = re.search(rule, build_tag)
     if m:
-        return m.group(1)
+        return m.group(2)
     else:
         return "unknown"
 
@@ -151,7 +152,13 @@ def get_db_url():
     """
     Returns DB URL
     """
-    return get_functest_config('results.test_db_url')
+    # TODO use CONST mechanism
+    try:
+        # if TEST_DB_URL declared in env variable, use it!
+        db_url = os.environ['TEST_DB_URL']
+    except KeyError:
+        db_url = get_functest_config('results.test_db_url')
+    return db_url
 
 
 def logger_test_results(project, case_name, status, details):
@@ -200,13 +207,7 @@ def push_results_to_db(project, case_name,
     except KeyError as e:
         logger.error("Please set env var: " + str(e))
         return False
-    rule = "daily-(.+?)-[0-9]*"
-    m = re.search(rule, build_tag)
-    if m:
-        version = m.group(1)
-    else:
-        logger.error("Please fix BUILD_TAG env var: " + build_tag)
-        return False
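+    # version is derived from the build tag (daily and weekly jobs)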
+    version = get_version()
     test_start = dt.fromtimestamp(start_date).strftime('%Y-%m-%d %H:%M:%S')
     test_stop = dt.fromtimestamp(stop_date).strftime('%Y-%m-%d %H:%M:%S')
 
old mode 100755 (executable)
new mode 100644 (file)
index e33af63..ffc870f
@@ -82,7 +82,8 @@ def get_env_cred_dict():
         'OS_PROJECT_DOMAIN_NAME': 'project_domain_name',
         'OS_PROJECT_NAME': 'project_name',
         'OS_ENDPOINT_TYPE': 'endpoint_type',
-        'OS_REGION_NAME': 'region_name'
+        'OS_REGION_NAME': 'region_name',
+        'OS_CACERT': 'https_cacert'
     }
     return env_cred_dict
 
@@ -149,6 +150,11 @@ def get_credentials_for_rally():
     if region_name is not None:
         cred_key = env_cred_dict.get('OS_REGION_NAME')
         rally_conf[cred_key] = region_name
+
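+    # forward the CA bundle so rally can verify TLS endpoints as well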
+    cacert = os.getenv('OS_CACERT')
+    if cacert is not None:
+        cred_key = env_cred_dict.get('OS_CACERT')
+        rally_conf[cred_key] = cacert
     return rally_conf
 
 
@@ -168,7 +174,14 @@ def get_endpoint(service_type, endpoint_type='publicURL'):
 
 def get_session(other_creds={}):
     auth = get_session_auth(other_creds)
-    return session.Session(auth=auth)
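+    # verify TLS connections against the CA bundle given by OS_CACERT, if set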
+    cacert = os.getenv('OS_CACERT')
+    if cacert is not None:
+        if not os.path.isfile(cacert):
+            raise Exception("The 'OS_CACERT' environment"
+                            "variable is set to %s but the file"
+                            "does not exist.", cacert)
+
+    return session.Session(auth=auth, verify=cacert)
 
 
 # *********************************************
diff --git a/kingbird_requirements.txt b/kingbird_requirements.txt
new file mode 100644 (file)
index 0000000..adf1082
--- /dev/null
@@ -0,0 +1,15 @@
+#
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+ddt==1.1.1
+oslosphinx==4.11.0
+oslotest==2.14.0
+pylint==1.4.5
+requests-mock==1.3.0
+tempest-lib==1.0.0
+testresources==2.0.1
+testscenarios==0.5.0