Merge "Update Path to Anteater for Weekly Security Scan"
author:    Trevor Bramwell <tbramwell@linuxfoundation.org>
           Thu, 13 Jul 2017 21:45:41 +0000 (21:45 +0000)
committer: Gerrit Code Review <gerrit@opnfv.org>
           Thu, 13 Jul 2017 21:45:41 +0000 (21:45 +0000)
27 files changed:
jjb/armband/armband-ci-jobs.yml
jjb/armband/armband-deploy.sh
jjb/armband/armband-download-artifact.sh
jjb/armband/build.sh
jjb/compass4nfv/compass-ci-jobs.yml
jjb/fuel/fuel-daily-jobs.yml
jjb/releng/automate.yml
jjb/releng/docker-deploy.sh
jjb/yardstick/yardstick-daily-jobs.yml
jjb/yardstick/yardstick-daily.sh
prototypes/xci/playbooks/configure-opnfvhost.yml
prototypes/xci/var/Debian.yml
prototypes/xci/var/RedHat.yml
prototypes/xci/var/Suse.yml
utils/test/testapi/.gitignore [new file with mode: 0644]
utils/test/testapi/opnfv_testapi/tests/unit/resources/__init__.py [new file with mode: 0644]
utils/test/testapi/opnfv_testapi/tests/unit/resources/scenario-c1.json [moved from utils/test/testapi/opnfv_testapi/tests/unit/scenario-c1.json with 100% similarity]
utils/test/testapi/opnfv_testapi/tests/unit/resources/scenario-c2.json [moved from utils/test/testapi/opnfv_testapi/tests/unit/scenario-c2.json with 100% similarity]
utils/test/testapi/opnfv_testapi/tests/unit/resources/test_base.py [moved from utils/test/testapi/opnfv_testapi/tests/unit/test_base.py with 97% similarity]
utils/test/testapi/opnfv_testapi/tests/unit/resources/test_fake_pymongo.py [moved from utils/test/testapi/opnfv_testapi/tests/unit/test_fake_pymongo.py with 100% similarity]
utils/test/testapi/opnfv_testapi/tests/unit/resources/test_pod.py [moved from utils/test/testapi/opnfv_testapi/tests/unit/test_pod.py with 97% similarity]
utils/test/testapi/opnfv_testapi/tests/unit/resources/test_project.py [moved from utils/test/testapi/opnfv_testapi/tests/unit/test_project.py with 98% similarity]
utils/test/testapi/opnfv_testapi/tests/unit/resources/test_result.py [moved from utils/test/testapi/opnfv_testapi/tests/unit/test_result.py with 99% similarity]
utils/test/testapi/opnfv_testapi/tests/unit/resources/test_scenario.py [moved from utils/test/testapi/opnfv_testapi/tests/unit/test_scenario.py with 99% similarity]
utils/test/testapi/opnfv_testapi/tests/unit/resources/test_testcase.py [moved from utils/test/testapi/opnfv_testapi/tests/unit/test_testcase.py with 99% similarity]
utils/test/testapi/opnfv_testapi/tests/unit/resources/test_token.py [moved from utils/test/testapi/opnfv_testapi/tests/unit/test_token.py with 98% similarity]
utils/test/testapi/opnfv_testapi/tests/unit/resources/test_version.py [moved from utils/test/testapi/opnfv_testapi/tests/unit/test_version.py with 94% similarity]

diff --git a/jjb/armband/armband-ci-jobs.yml b/jjb/armband/armband-ci-jobs.yml
index 55d8ff9..e7de319 100644 (file)
             name: GS_URL
             default: artifacts.opnfv.org/$PROJECT{gs-pathname}
             description: "URL to Google Storage."
+        - string:
+            name: SSH_KEY
+            default: "/tmp/mcp.rsa"
+            description: "Path to private SSH key to access environment nodes. For MCP deployments only."
 
 ########################
 # trigger macros
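The new SSH_KEY parameter is only meant for MCP-based deployments; a job step consuming it might reference the key roughly as in the hypothetical snippet below (the salt master host "cfg01" and the "ubuntu" user are illustrative assumptions, not part of this change):

    # hypothetical: reach an MCP environment node with the job-provided key
    ssh -o StrictHostKeyChecking=no -i "${SSH_KEY:-/tmp/mcp.rsa}" ubuntu@cfg01 "salt '*' test.ping"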
diff --git a/jjb/armband/armband-deploy.sh b/jjb/armband/armband-deploy.sh
index e445e08..9964ed5 100755 (executable)
@@ -2,7 +2,7 @@
 # SPDX-license-identifier: Apache-2.0
 ##############################################################################
 # Copyright (c) 2016 Ericsson AB and others.
-#           (c) 2016 Enea Software AB
+#           (c) 2017 Enea Software AB
 # All rights reserved. This program and the accompanying materials
 # are made available under the terms of the Apache License, Version 2.0
 # which accompanies this distribution, and is available at
@@ -13,16 +13,18 @@ set -o pipefail
 
 export TERM="vt220"
 
-# source the file so we get OPNFV vars
-source latest.properties
+if [[ "$BRANCH" != 'master' ]]; then
+    # source the file so we get OPNFV vars
+    source latest.properties
 
-# echo the info about artifact that is used during the deployment
-echo "Using ${OPNFV_ARTIFACT_URL/*\/} for deployment"
+    # echo the info about artifact that is used during the deployment
+    echo "Using ${OPNFV_ARTIFACT_URL/*\/} for deployment"
+fi
 
 if [[ "$JOB_NAME" =~ "merge" ]]; then
     # set simplest scenario for virtual deploys to run for merges
     DEPLOY_SCENARIO="os-nosdn-nofeature-ha"
-else
+elif [[ "$BRANCH" != 'master' ]]; then
     # for non-merge deployments
     # checkout the commit that was used for building the downloaded artifact
     # to make sure the ISO and deployment mechanism uses same versions
@@ -102,7 +104,7 @@ echo "--------------------------------------------------------"
 echo "Scenario: $DEPLOY_SCENARIO"
 echo "Lab: $LAB_NAME"
 echo "POD: $POD_NAME"
-echo "ISO: ${OPNFV_ARTIFACT_URL/*\/}"
+[[ "$BRANCH" != 'master' ]] && echo "ISO: ${OPNFV_ARTIFACT_URL/*\/}"
 echo
 echo "Starting the deployment using $INSTALLER_TYPE. This could take some time..."
 echo "--------------------------------------------------------"
diff --git a/jjb/armband/armband-download-artifact.sh b/jjb/armband/armband-download-artifact.sh
index e2dd097..4f83305 100755 (executable)
@@ -2,6 +2,7 @@
 # SPDX-license-identifier: Apache-2.0
 ##############################################################################
 # Copyright (c) 2016 Ericsson AB and others.
+#           (c) 2017 Enea AB
 # All rights reserved. This program and the accompanying materials
 # are made available under the terms of the Apache License, Version 2.0
 # which accompanies this distribution, and is available at
@@ -10,6 +11,9 @@
 set -o errexit
 set -o pipefail
 
+# disable Fuel ISO download for master branch
+[[ "$BRANCH" == 'master' ]] && exit 0
+
 echo "Host info: $(hostname) $(hostname -I)"
 
 # Configurable environment variables:
diff --git a/jjb/armband/build.sh b/jjb/armband/build.sh
index a71cf11..29c01bb 100755 (executable)
@@ -2,12 +2,21 @@
 # SPDX-license-identifier: Apache-2.0
 ##############################################################################
 # Copyright (c) 2016 Ericsson AB and others.
-# Copyright (c) 2016 Enea AB.
+# Copyright (c) 2017 Enea AB.
 # All rights reserved. This program and the accompanying materials
 # are made available under the terms of the Apache License, Version 2.0
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+
+# disable Armband iso build for master branch
+if [[ "$BRANCH" == 'master' ]]; then
+    touch $WORKSPACE/.noupload
+    echo "--------------------------------------------------------"
+    echo "Done!"
+    exit 0
+fi
+
 set -o errexit
 set -o nounset
 set -o pipefail
diff --git a/jjb/compass4nfv/compass-ci-jobs.yml b/jjb/compass4nfv/compass-ci-jobs.yml
index ddd90b6..3335391 100644 (file)
             steps:
                 - trigger-builds:
                     - project: 'dovetail-compass-{pod}-proposed_tests-{stream}'
+                      current-parameters: false
+                      predefined-parameters:
+                        DEPLOY_SCENARIO={scenario}
+                      block: true
+                      same-node: true
+                      block-thresholds:
+                        build-step-failure-threshold: 'never'
+                        failure-threshold: 'never'
+                        unstable-threshold: 'FAILURE'
 
 - job-template:
     name: 'compass-deploy-{pod}-daily-{stream}'
diff --git a/jjb/fuel/fuel-daily-jobs.yml b/jjb/fuel/fuel-daily-jobs.yml
index 6867708..7a57cb5 100644 (file)
                         build-step-failure-threshold: 'never'
                         failure-threshold: 'never'
                         unstable-threshold: 'FAILURE'
+        # ZTE pod1 weekly (Saturday), os-odl_l2-nofeature-ha, run against master and danube
+        - conditional-step:
+            condition-kind: and
+            condition-operands:
+                - condition-kind: regex-match
+                  regex: os-odl_l2-nofeature-ha
+                  label: '{scenario}'
+                - condition-kind: regex-match
+                  regex: zte-pod1
+                  label: '{pod}'
+                - condition-kind: day-of-week
+                  day-selector: select-days
+                  days:
+                      SAT: true
+                  use-build-time: true
+            steps:
+                - trigger-builds:
+                    - project: 'dovetail-fuel-zte-pod1-proposed_tests-{stream}'
+                      current-parameters: false
+                      predefined-parameters:
+                        DEPLOY_SCENARIO={scenario}
+                      block: true
+                      same-node: true
+                      block-thresholds:
+                        build-step-failure-threshold: 'never'
+                        failure-threshold: 'never'
+                        unstable-threshold: 'FAILURE'
 
     publishers:
         - email:
 - trigger:
     name: 'fuel-os-odl_l2-nofeature-noha-virtual-daily-master-trigger'
     triggers:
-        - timed: '' # '35 15 * * *'
+        - timed: '35 15 * * *'
 - trigger:
     name: 'fuel-os-odl_l3-nofeature-noha-virtual-daily-master-trigger'
     triggers:
-        - timed: '' # '5 18 * * *'
+        - timed: '5 18 * * *'
 - trigger:
     name: 'fuel-os-onos-sfc-noha-virtual-daily-master-trigger'
     triggers:
 - trigger:
     name: 'fuel-os-nosdn-ovs-noha-virtual-daily-master-trigger'
     triggers:
-        - timed: '' # '5 9 * * *'
+        - timed: '5 9 * * *'
 - trigger:
     name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-virtual-daily-master-trigger'
     triggers:
diff --git a/jjb/releng/automate.yml b/jjb/releng/automate.yml
index 17b6fa7..db67264 100644 (file)
     builders:
         - shell: |
             cd ./utils/test/{module}/
-            bash run_test.sh
+            tox
             cp *.xml $WORKSPACE
 
     publishers:
-        - junit:
-            results: nosetests.xml
-        - cobertura:
-            report-file: "coverage.xml"
-            only-stable: "true"
-            health-auto-update: "false"
-            stability-auto-update: "false"
-            zoom-coverage-chart: "true"
-            targets:
-                - files:
-                    healthy: 10
-                    unhealthy: 20
-                    failing: 30
-                - method:
-                    healthy: 50
-                    unhealthy: 40
-                    failing: 30
+        - publish-coverage
 
 - job-template:
     name: '{module}-automate-{stream}'
     name: 'testapi-automate-docker-deploy-macro'
     builders:
         - shell: |
-            bash ./jjb/releng/docker-deploy.sh 'sudo docker run -dti -p 8082:8000 -e mongodb_url=mongodb://172.17.0.1:27017 -e base_url=http://testresults.opnfv.org/test opnfv/testapi' "http://testresults.opnfv.org/test/swagger/APIs"
+            bash ./jjb/releng/docker-deploy.sh "sudo docker run -dti -p 8082:8000
+            -e mongodb_url=mongodb://172.17.0.1:27017
+            -e base_url=http://testresults.opnfv.org/test opnfv/testapi" \
+            "http://testresults.opnfv.org/test/swagger/APIs" "testapi"
+
 - builder:
     name: 'reporting-automate-docker-deploy-macro'
     builders:
         - shell: |
-            bash ./jjb/releng/docker-deploy.sh 'sudo docker run -itd -p 8084:8000 opnfv/reporting' "http://testresults.opnfv.org/reporting2/reporting/index.html"
+            bash ./jjb/releng/docker-deploy.sh "sudo docker run -itd -p 8084:8000 opnfv/reporting" \
+            "http://testresults.opnfv.org/reporting2/reporting/index.html" "reporting"
 
 - builder:
     name: mongodb-backup
diff --git a/jjb/releng/docker-deploy.sh b/jjb/releng/docker-deploy.sh
index b3b930f..2a3e078 100644 (file)
@@ -19,6 +19,7 @@
 # Assigning Variables
 command=$1
 url=$2
+module=$3
 
 function check() {
 
@@ -38,24 +39,26 @@ function check() {
 }
 
 echo "Getting contianer Id of the currently running one"
-contId=$(sudo docker ps | grep "opnfv/testapi:latest" | awk '{print $1}')
+contId=$(sudo docker ps | grep "opnfv/${module}:latest" | awk '{print $1}')
+
+echo $contId
 
 echo "Pulling the latest image"
-sudo docker pull opnfv/testapi:latest
+sudo docker pull opnfv/${module}:latest
 
-echo "Deleting old containers of opnfv/testapi:old"
-sudo docker ps -a | grep "opnfv/testapi" | grep "old" | awk '{print $1}' | xargs -r sudo docker rm -f
+echo "Deleting old containers of opnfv/${module}:old"
+sudo docker ps -a | grep "opnfv/${module}" | grep "old" | awk '{print $1}' | xargs -r sudo docker rm -f
 
-echo "Deleting old images of opnfv/testapi:latest"
-sudo docker images | grep "opnfv/testapi" | grep "old" | awk '{print $3}' | xargs -r sudo docker rmi -f
+echo "Deleting old images of opnfv/${module}:latest"
+sudo docker images | grep "opnfv/${module}" | grep "old" | awk '{print $3}' | xargs -r sudo docker rmi -f
 
 
 if [[ -z "$contId" ]]
 then
-    echo "No running testapi container"
+    echo "No running ${module} container"
 
-    echo "Removing stopped testapi containers in the previous iterations"
-    sudo docker ps -f status=exited | grep "opnfv_testapi" | awk '{print $1}' | xargs -r sudo docker rm -f
+    echo "Removing stopped ${module} containers in the previous iterations"
+    sudo docker ps -f status=exited | grep "opnfv_${module}" | awk '{print $1}' | xargs -r sudo docker rm -f
 else
     echo $contId
 
@@ -70,13 +73,13 @@ else
     fi
 
     echo "Changing current image tag to old"
-    sudo docker tag "$currImgId" opnfv/testapi:old
+    sudo docker tag "$currImgId" opnfv/${module}:old
 
-    echo "Removing stopped testapi containers in the previous iteration"
-    sudo docker ps -f status=exited | grep "opnfv_testapi" | awk '{print $1}' | xargs -r sudo docker rm -f
+    echo "Removing stopped ${module} containers in the previous iteration"
+    sudo docker ps -f status=exited | grep "opnfv_${module}" | awk '{print $1}' | xargs -r sudo docker rm -f
 
-    echo "Renaming the running container name to opnfv_testapi as to identify it."
-    sudo docker rename $contId opnfv_testapi
+    echo "Renaming the running container name to opnfv_${module} as to identify it."
+    sudo docker rename $contId opnfv_${module}
 
     echo "Stop the currently running container"
     sudo docker stop $contId
@@ -86,10 +89,10 @@ echo "Running a container with the new image"
 $command:latest
 
 if check; then
-    echo "TestResults Hosted."
+    echo "TestResults Module Hosted."
 else
-    echo "TestResults Hosting Failed"
-    if [[ $(sudo docker images | grep "opnfv/testapi" | grep "old" | awk '{print $3}') ]]; then
+    echo "TestResults Module Failed"
+    if [[ $(sudo docker images | grep "opnfv/${module}" | grep "old" | awk '{print $3}') ]]; then
         echo "Running old Image"
         $command:old
         exit 1
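With the module argument added above, the script's calling convention becomes "docker-deploy.sh <docker-run-command> <check-url> <module>". For example, the testapi macro in automate.yml now invokes it roughly as follows (values taken from the diff above):

    bash ./jjb/releng/docker-deploy.sh \
        "sudo docker run -dti -p 8082:8000 \
         -e mongodb_url=mongodb://172.17.0.1:27017 \
         -e base_url=http://testresults.opnfv.org/test opnfv/testapi" \
        "http://testresults.opnfv.org/test/swagger/APIs" "testapi"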
diff --git a/jjb/yardstick/yardstick-daily-jobs.yml b/jjb/yardstick/yardstick-daily-jobs.yml
index ff1d47e..4dcface 100644 (file)
 # that have been switched using labels for slaves
 #--------------------------------
     pod:
+# apex CI PODs
+        - virtual:
+            slave-label: apex-virtual-master
+            installer: apex
+            auto-trigger-name: 'daily-trigger-disabled'
+            <<: *master
+        - baremetal:
+            slave-label: apex-baremetal-master
+            installer: apex
+            auto-trigger-name: 'daily-trigger-disabled'
+            <<: *master
+        - virtual:
+            slave-label: apex-virtual-danube
+            installer: apex
+            auto-trigger-name: 'daily-trigger-disabled'
+            <<: *danube
+        - baremetal:
+            slave-label: apex-baremetal-danube
+            installer: apex
+            auto-trigger-name: 'daily-trigger-disabled'
+            <<: *danube
 # fuel CI PODs
         - baremetal:
             slave-label: fuel-baremetal
             installer: joid
             auto-trigger-name: 'daily-trigger-disabled'
             <<: *danube
-
 # compass CI PODs
         - baremetal:
             slave-label: compass-baremetal
             auto-trigger-name: 'daily-trigger-disabled'
             <<: *danube
 #--------------------------------
-#    Installers not using labels
-#            CI PODs
-# This section should only contain the installers
-# that have not been switched using labels for slaves
-#--------------------------------
-        - lf-pod1:
-            slave-label: '{pod}'
-            installer: apex
-            auto-trigger-name: 'daily-trigger-disabled'
-            <<: *master
-        - lf-pod1:
-            slave-label: '{pod}'
-            installer: apex
-            auto-trigger-name: 'daily-trigger-disabled'
-            <<: *danube
-#--------------------------------
 #        None-CI PODs
 #--------------------------------
         - orange-pod1:
         - description-setter:
             description: "POD: $NODE_NAME"
         - 'yardstick-cleanup'
-        #- 'yardstick-fetch-os-creds'
+        - 'yardstick-fetch-os-creds'
         - 'yardstick-{testsuite}'
         - 'yardstick-store-results'
 
 # parameter macros
 ########################
 - parameter:
-    name: 'yardstick-params-fuel-baremetal'
+    name: 'yardstick-params-apex-virtual-master'
     parameters:
         - string:
             name: YARDSTICK_DB_BACKEND
             default: '-i 104.197.68.199:8086'
             description: 'Arguments to use in order to choose the backend DB'
 - parameter:
-    name: 'yardstick-params-fuel-virtual'
+    name: 'yardstick-params-apex-baremetal-master'
     parameters:
         - string:
             name: YARDSTICK_DB_BACKEND
             default: '-i 104.197.68.199:8086'
             description: 'Arguments to use in order to choose the backend DB'
 - parameter:
-    name: 'yardstick-params-armband-baremetal'
+    name: 'yardstick-params-apex-virtual-danube'
     parameters:
         - string:
             name: YARDSTICK_DB_BACKEND
             default: '-i 104.197.68.199:8086'
             description: 'Arguments to use in order to choose the backend DB'
 - parameter:
-    name: 'yardstick-params-armband-virtual'
+    name: 'yardstick-params-apex-baremetal-danube'
     parameters:
         - string:
             name: YARDSTICK_DB_BACKEND
             default: '-i 104.197.68.199:8086'
             description: 'Arguments to use in order to choose the backend DB'
 - parameter:
-    name: 'yardstick-params-arm-virtual1'
+    name: 'yardstick-params-fuel-baremetal'
     parameters:
         - string:
             name: YARDSTICK_DB_BACKEND
             default: '-i 104.197.68.199:8086'
             description: 'Arguments to use in order to choose the backend DB'
 - parameter:
-    name: 'yardstick-params-joid-baremetal'
+    name: 'yardstick-params-fuel-virtual'
     parameters:
         - string:
             name: YARDSTICK_DB_BACKEND
             default: '-i 104.197.68.199:8086'
             description: 'Arguments to use in order to choose the backend DB'
 - parameter:
-    name: 'yardstick-params-joid-virtual'
+    name: 'yardstick-params-armband-baremetal'
     parameters:
         - string:
             name: YARDSTICK_DB_BACKEND
             default: '-i 104.197.68.199:8086'
             description: 'Arguments to use in order to choose the backend DB'
 - parameter:
-    name: 'yardstick-params-intel-pod8'
+    name: 'yardstick-params-armband-virtual'
     parameters:
         - string:
             name: YARDSTICK_DB_BACKEND
             default: '-i 104.197.68.199:8086'
             description: 'Arguments to use in order to choose the backend DB'
 - parameter:
-    name: 'yardstick-params-lf-pod1'
+    name: 'yardstick-params-arm-virtual1'
     parameters:
         - string:
             name: YARDSTICK_DB_BACKEND
             default: '-i 104.197.68.199:8086'
             description: 'Arguments to use in order to choose the backend DB'
-
 - parameter:
-    name: 'yardstick-params-lf-pod2'
+    name: 'yardstick-params-joid-baremetal'
+    parameters:
+        - string:
+            name: YARDSTICK_DB_BACKEND
+            default: '-i 104.197.68.199:8086'
+            description: 'Arguments to use in order to choose the backend DB'
+- parameter:
+    name: 'yardstick-params-joid-virtual'
+    parameters:
+        - string:
+            name: YARDSTICK_DB_BACKEND
+            default: '-i 104.197.68.199:8086'
+            description: 'Arguments to use in order to choose the backend DB'
+- parameter:
+    name: 'yardstick-params-intel-pod8'
     parameters:
         - string:
             name: YARDSTICK_DB_BACKEND
             default: '-i 104.197.68.199:8086'
             description: 'Arguments to use in order to choose the backend DB'
-
 - parameter:
     name: 'yardstick-params-compass-baremetal'
     parameters:
diff --git a/jjb/yardstick/yardstick-daily.sh b/jjb/yardstick/yardstick-daily.sh
index 1c2abad..cf37ac2 100755 (executable)
@@ -2,9 +2,9 @@
 set -e
 [[ $CI_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
 
-# labconfig is used only for joid
-labconfig=""
+rc_file_vol=""
 sshkey=""
+
 if [[ ${INSTALLER_TYPE} == 'apex' ]]; then
     instack_mac=$(sudo virsh domiflist undercloud | grep default | \
                   grep -Eo "[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+")
@@ -15,12 +15,16 @@ if [[ ${INSTALLER_TYPE} == 'apex' ]]; then
         sudo iptables -D FORWARD -o virbr0 -j REJECT --reject-with icmp-port-unreachable
         sudo iptables -D FORWARD -i virbr0 -j REJECT --reject-with icmp-port-unreachable
     fi
-elif [[ ${INSTALLER_TYPE} == 'joid' ]]; then
+fi
+
+if [[ ${INSTALLER_TYPE} == 'joid' ]]; then
     # If production lab then creds may be retrieved dynamically
     # creds are on the jumphost, always in the same folder
-    labconfig="-v $LAB_CONFIG/admin-openrc:/etc/yardstick/openstack.creds"
+    rc_file_vol="-v $LAB_CONFIG/admin-openrc:/etc/yardstick/openstack.creds"
     # If dev lab, credentials may not be the default ones, just provide a path to put them into docker
     # replace the default one by the customized one provided by jenkins config
+else
+    rc_file_vol="-v ${HOME}/opnfv-openrc.sh:/etc/yardstick/openstack.creds"
 fi
 
 # Set iptables rule to allow forwarding return traffic for container
@@ -46,7 +50,7 @@ sudo rm -rf ${dir_result}/*
 map_log_dir="-v ${dir_result}:/tmp/yardstick"
 
 # Run docker
-cmd="sudo docker run ${opts} ${envs} ${labconfig} ${map_log_dir} ${sshkey} opnfv/yardstick:${DOCKER_TAG} \
+cmd="sudo docker run ${opts} ${envs} ${rc_file_vol} ${map_log_dir} ${sshkey} opnfv/yardstick:${DOCKER_TAG} \
     exec_tests.sh ${YARDSTICK_DB_BACKEND} ${YARDSTICK_SCENARIO_SUITE_NAME}"
 echo "Yardstick: Running docker cmd: ${cmd}"
 ${cmd}
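Net effect of the rc_file_vol change: for any installer other than joid (apex included), the OpenStack credentials are now mounted from the jumphost home directory rather than a joid-only lab config path, so the composed command expands roughly to the following (variable values as set earlier in the script):

    sudo docker run ${opts} ${envs} \
        -v ${HOME}/opnfv-openrc.sh:/etc/yardstick/openstack.creds \
        -v ${dir_result}:/tmp/yardstick ${sshkey} \
        opnfv/yardstick:${DOCKER_TAG} \
        exec_tests.sh ${YARDSTICK_DB_BACKEND} ${YARDSTICK_SCENARIO_SUITE_NAME}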
diff --git a/prototypes/xci/playbooks/configure-opnfvhost.yml b/prototypes/xci/playbooks/configure-opnfvhost.yml
index 8656ff9..a7ce521 100644 (file)
       command: "/bin/bash ./scripts/bootstrap-ansible.sh"
       args:
         chdir: "{{OPENSTACK_OSA_PATH}}"
+    - name: install python Crypto module
+      package:
+        name: "{{ python_crypto_package_name }}"
+    - name: install PyYAML
+      pip:
+        name: pyyaml
+        state: present
     - name: generate password token
       command: "python pw-token-gen.py --file {{OPENSTACK_OSA_ETC_PATH}}/user_secrets.yml"
       args:
diff --git a/prototypes/xci/var/Debian.yml b/prototypes/xci/var/Debian.yml
index d13d080..33f1105 100644 (file)
@@ -9,3 +9,4 @@
 ##############################################################################
 # this is the interface the VM nodes are connected to libvirt network "default"
 interface: "ens3"
+python_crypto_package_name: python-crypto
diff --git a/prototypes/xci/var/RedHat.yml b/prototypes/xci/var/RedHat.yml
index 6d03e0f..eae7d12 100644 (file)
@@ -8,3 +8,4 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 # this is placeholder and left blank intentionally to complete later on
+python_crypto_package_name: python-crypto
diff --git a/prototypes/xci/var/Suse.yml b/prototypes/xci/var/Suse.yml
index 6d03e0f..9674ed2 100644 (file)
@@ -8,3 +8,4 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 # this is placeholder and left blank intentionally to complete later on
+python_crypto_package_name: python-pycrypto
diff --git a/utils/test/testapi/.gitignore b/utils/test/testapi/.gitignore
new file mode 100644 (file)
index 0000000..c7b63b5
--- /dev/null
@@ -0,0 +1,4 @@
+AUTHORS
+ChangeLog
+setup.cfg-e
+
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/resources/__init__.py b/utils/test/testapi/opnfv_testapi/tests/unit/resources/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/test_base.py b/utils/test/testapi/opnfv_testapi/tests/unit/resources/test_base.py
@@ -17,7 +17,7 @@ from opnfv_testapi.resources import models
 from opnfv_testapi.tests.unit import fake_pymongo
 
 config.Config.CONFIG = path.join(path.dirname(__file__),
-                                 '../../../etc/config.ini')
+                                 '../../../../etc/config.ini')
 
 
 class TestBase(testing.AsyncHTTPTestCase):
@@ -42,7 +42,7 @@ class TestBase(testing.AsyncHTTPTestCase):
         from opnfv_testapi.cmd import server
         server.parse_config([
             '--config-file',
-            path.join(path.dirname(__file__), 'common/normal.ini')
+            path.join(path.dirname(__file__), path.pardir, 'common/normal.ini')
         ])
         self.db_patcher = mock.patch('opnfv_testapi.cmd.server.get_db',
                                      self._fake_pymongo)
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/test_pod.py b/utils/test/testapi/opnfv_testapi/tests/unit/resources/test_pod.py
@@ -12,7 +12,7 @@ import unittest
 from opnfv_testapi.common import message
 from opnfv_testapi.resources import pod_models
 from opnfv_testapi.tests.unit import executor
-from opnfv_testapi.tests.unit import test_base as base
+from opnfv_testapi.tests.unit.resources import test_base as base
 
 
 class TestPodBase(base.TestBase):
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/test_project.py b/utils/test/testapi/opnfv_testapi/tests/unit/resources/test_project.py
@@ -4,7 +4,7 @@ import unittest
 from opnfv_testapi.common import message
 from opnfv_testapi.resources import project_models
 from opnfv_testapi.tests.unit import executor
-from opnfv_testapi.tests.unit import test_base as base
+from opnfv_testapi.tests.unit.resources import test_base as base
 
 
 class TestProjectBase(base.TestBase):
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/test_result.py b/utils/test/testapi/opnfv_testapi/tests/unit/resources/test_result.py
@@ -7,17 +7,17 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 import copy
-from datetime import datetime, timedelta
 import httplib
 import unittest
+from datetime import datetime, timedelta
 
 from opnfv_testapi.common import message
 from opnfv_testapi.resources import pod_models
 from opnfv_testapi.resources import project_models
 from opnfv_testapi.resources import result_models
 from opnfv_testapi.resources import testcase_models
-from opnfv_testapi.tests.unit import test_base as base
 from opnfv_testapi.tests.unit import executor
+from opnfv_testapi.tests.unit.resources import test_base as base
 
 
 class Details(object):
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/test_scenario.py b/utils/test/testapi/opnfv_testapi/tests/unit/resources/test_scenario.py
@@ -1,13 +1,13 @@
-from copy import deepcopy
-from datetime import datetime
 import functools
 import httplib
 import json
 import os
+from copy import deepcopy
+from datetime import datetime
 
-from opnfv_testapi.common import message
 import opnfv_testapi.resources.scenario_models as models
-from opnfv_testapi.tests.unit import test_base as base
+from opnfv_testapi.common import message
+from opnfv_testapi.tests.unit.resources import test_base as base
 
 
 class TestScenarioBase(base.TestBase):
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/test_testcase.py b/utils/test/testapi/opnfv_testapi/tests/unit/resources/test_testcase.py
@@ -13,8 +13,8 @@ import unittest
 from opnfv_testapi.common import message
 from opnfv_testapi.resources import project_models
 from opnfv_testapi.resources import testcase_models
-from opnfv_testapi.tests.unit import test_base as base
 from opnfv_testapi.tests.unit import executor
+from opnfv_testapi.tests.unit.resources import test_base as base
 
 
 class TestCaseBase(base.TestBase):
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/test_token.py b/utils/test/testapi/opnfv_testapi/tests/unit/resources/test_token.py
@@ -13,7 +13,7 @@ from opnfv_testapi.resources import project_models
 from opnfv_testapi.router import url_mappings
 from opnfv_testapi.tests.unit import executor
 from opnfv_testapi.tests.unit import fake_pymongo
-from opnfv_testapi.tests.unit import test_base as base
+from opnfv_testapi.tests.unit.resources import test_base as base
 
 
 class TestToken(base.TestBase):
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/test_version.py b/utils/test/testapi/opnfv_testapi/tests/unit/resources/test_version.py
@@ -11,7 +11,7 @@ import unittest
 
 from opnfv_testapi.resources import models
 from opnfv_testapi.tests.unit import executor
-from opnfv_testapi.tests.unit import test_base as base
+from opnfv_testapi.tests.unit.resources import test_base as base
 
 
 class TestVersionBase(base.TestBase):