Merge "initial JJB for orchestra"
author    Trevor Bramwell <tbramwell@linuxfoundation.org>
Wed, 26 Jul 2017 21:26:35 +0000 (21:26 +0000)
committer Gerrit Code Review <gerrit@opnfv.org>
Wed, 26 Jul 2017 21:26:35 +0000 (21:26 +0000)
35 files changed:
jjb/apex/apex.yml
jjb/apex/apex.yml.j2
jjb/armband/armband-deploy.sh
jjb/compass4nfv/compass-ci-jobs.yml
jjb/compass4nfv/compass-deploy.sh
jjb/doctor/doctor.yml
jjb/dovetail/dovetail-ci-jobs.yml
jjb/functest/functest-alpine.sh
jjb/functest/functest-daily-jobs.yml
jjb/global/installer-params.yml
jjb/global/slave-params.yml
jjb/joid/joid-daily-jobs.yml
jjb/releng/opnfv-docker-arm.yml
jjb/releng/opnfv-docker.sh
jjb/releng/opnfv-docker.yml
jjb/yardstick/yardstick-daily.sh
utils/test/testapi/3rd_party/static/testapi-ui/app.js
utils/test/testapi/opnfv_testapi/cmd/server.py
utils/test/testapi/opnfv_testapi/common/check.py
utils/test/testapi/opnfv_testapi/common/config.py
utils/test/testapi/opnfv_testapi/db/__init__.py [new file with mode: 0644]
utils/test/testapi/opnfv_testapi/db/api.py [new file with mode: 0644]
utils/test/testapi/opnfv_testapi/resources/handlers.py
utils/test/testapi/opnfv_testapi/resources/result_handlers.py
utils/test/testapi/opnfv_testapi/router/url_mappings.py
utils/test/testapi/opnfv_testapi/tests/unit/common/test_config.py
utils/test/testapi/opnfv_testapi/tests/unit/conftest.py [new file with mode: 0644]
utils/test/testapi/opnfv_testapi/tests/unit/fake_pymongo.py
utils/test/testapi/opnfv_testapi/tests/unit/resources/test_base.py
utils/test/testapi/opnfv_testapi/tests/unit/resources/test_result.py
utils/test/testapi/opnfv_testapi/tests/unit/resources/test_token.py
utils/test/testapi/opnfv_testapi/ui/auth/sign.py
utils/test/testapi/opnfv_testapi/ui/auth/user.py
utils/test/testapi/opnfv_testapi/ui/root.py
utils/test/testapi/run_test.sh [deleted file]

diff --git a/jjb/apex/apex.yml b/jjb/apex/apex.yml
index 2b69d59..fd4dc9f 100644 (file)
                 - 'apex-deploy.*'
                 - 'functest.*'
                 - 'yardstick.*'
+                - 'dovetail.*'
         - throttle:
             max-per-node: 1
             max-total: 10
                   kill-phase-on: NEVER
                   abort-all-job: false
                   git-revision: false
+        - multijob:
+            name: Dovetail
+            condition: ALWAYS
+            projects:
+                - name: 'dovetail-apex-baremetal-proposed_tests-{scenario_stream}'
+                  node-parameters: true
+                  current-parameters: false
+                  predefined-parameters:
+                    DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+                  kill-phase-on: NEVER
+                  enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-(nosdn-nofeature|nosdn-kvm|odl_l3-fdio)-ha/"
+                  abort-all-job: false
+                  git-revision: false
         - conditional-step:
             condition-kind: current-status
             condition-worst: SUCCESS
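
A note on the Groovy `enable-condition` above: `==~` is a full-match operator, so the Dovetail phase fires only for the three listed HA scenarios (the same hunk also lands in apex.yml.j2 below). An illustrative restatement in Python, for clarity only, not part of the commit:

    import re

    # Same pattern as the enable-condition; Groovy's ==~ requires a full
    # match, so anchor with \Z instead of using re.search.
    SCENARIOS = re.compile(r'os-(nosdn-nofeature|nosdn-kvm|odl_l3-fdio)-ha\Z')

    def dovetail_enabled(deploy_scenario):
        return SCENARIOS.match(deploy_scenario) is not None

    assert dovetail_enabled('os-nosdn-kvm-ha')
    assert not dovetail_enabled('os-odl_l2-nofeature-ha')
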
diff --git a/jjb/apex/apex.yml.j2 b/jjb/apex/apex.yml.j2
index 06cc2ca..d7b67c3 100644 (file)
                 - 'apex-deploy.*'
                 - 'functest.*'
                 - 'yardstick.*'
+                - 'dovetail.*'
         - throttle:
             max-per-node: 1
             max-total: 10
                   kill-phase-on: NEVER
                   abort-all-job: false
                   git-revision: false
+        - multijob:
+            name: Dovetail
+            condition: ALWAYS
+            projects:
+                - name: 'dovetail-apex-baremetal-proposed_tests-{scenario_stream}'
+                  node-parameters: true
+                  current-parameters: false
+                  predefined-parameters:
+                    DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+                  kill-phase-on: NEVER
+                  enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-(nosdn-nofeature|nosdn-kvm|odl_l3-fdio)-ha/"
+                  abort-all-job: false
+                  git-revision: false
         - conditional-step:
             condition-kind: current-status
             condition-worst: SUCCESS
diff --git a/jjb/armband/armband-deploy.sh b/jjb/armband/armband-deploy.sh
index 9964ed5..358b55b 100755 (executable)
@@ -70,10 +70,6 @@ if [[ $LAB_CONFIG_URL =~ ^(git|ssh):// ]]; then
     fi
 fi
 
-if [[ "$NODE_NAME" =~ "virtual" ]]; then
-    POD_NAME="virtual_kvm"
-fi
-
 # releng wants us to use nothing else but opnfv.iso for now. We comply.
 ISO_FILE=$WORKSPACE/opnfv.iso
 
diff --git a/jjb/compass4nfv/compass-ci-jobs.yml b/jjb/compass4nfv/compass-ci-jobs.yml
index 467e168..310347d 100644 (file)
         stream: master
         branch: '{stream}'
         gs-pathname: ''
+        ppa-pathname: '/{stream}'
         disabled: false
         openstack-version: ocata
     danube: &danube
         stream: danube
         branch: 'stable/{stream}'
         gs-pathname: '/{stream}'
+        ppa-pathname: '/{stream}'
         disabled: false
         openstack-version: newton
 #--------------------------------
         - 'os-nosdn-openo-ha':
             disabled: false
             auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
+        - 'os-odl-sfc-ha':
+            disabled: false
+            auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
+        - 'os-nosdn-dpdk-ha':
+            disabled: false
+            auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
+        - 'k8-nosdn-nofeature-ha':
+            disabled: false
+            auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
 
 
     jobs:
         - compass-ci-parameter:
             installer: '{installer}'
             gs-pathname: '{gs-pathname}'
+            ppa-pathname: '{ppa-pathname}'
         - string:
             name: DEPLOY_SCENARIO
             default: '{scenario}'
         - compass-ci-parameter:
             installer: '{installer}'
             gs-pathname: '{gs-pathname}'
+            ppa-pathname: '{ppa-pathname}'
         - '{slave-label}-defaults'
         - '{installer}-defaults'
 
     builders:
         - description-setter:
             description: "POD: $NODE_NAME"
-        - shell:
-            !include-raw-escape: ./compass-download-artifact.sh
-        - shell:
-            !include-raw-escape: ./compass-deploy.sh
+        - conditional-step:
+            condition-kind: regex-match
+            regex: master
+            label: '{stream}'
+            steps:
+                - shell:
+                    !include-raw-escape: ./compass-build.sh
+                - shell:
+                    !include-raw-escape: ./compass-deploy.sh
+        - conditional-step:
+            condition-kind: regex-match
+            regex: danube
+            label: '{stream}'
+            steps:
+                - shell:
+                    !include-raw-escape: ./compass-download-artifact.sh
+                - shell:
+                    !include-raw-escape: ./compass-deploy.sh
+
 
 ########################
 # parameter macros
             name: GS_URL
             default: '$GS_BASE{gs-pathname}'
             description: "URL to Google Storage."
+        - string:
+            name: CACHE_DIRECTORY
+            default: "$HOME/opnfv/cache/$PROJECT{gs-pathname}"
+            description: "Directory where the cache to be used during the build is located."
+        - string:
+            name: PPA_REPO
+            default: "http://artifacts.opnfv.org/compass4nfv/package{ppa-pathname}"
+        - string:
+            name: PPA_CACHE
+            default: "$WORKSPACE/work/repo/"
 
 ########################
 # trigger macros
     name: 'compass-os-nosdn-kvm-ha-baremetal-centos-master-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'compass-os-nosdn-dpdk-ha-baremetal-centos-master-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'compass-os-odl-sfc-ha-baremetal-centos-master-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'compass-k8-nosdn-nofeature-ha-baremetal-centos-master-trigger'
+    triggers:
+        - timed: ''
 
 - trigger:
     name: 'compass-os-nosdn-nofeature-ha-baremetal-master-trigger'
     name: 'compass-os-nosdn-kvm-ha-baremetal-master-trigger'
     triggers:
         - timed: '0 14 * * *'
+- trigger:
+    name: 'compass-os-nosdn-dpdk-ha-baremetal-master-trigger'
+    triggers:
+        - timed: '0 16 * * *'
+- trigger:
+    name: 'compass-k8-nosdn-nofeature-ha-baremetal-master-trigger'
+    triggers:
+        - timed: '0 20 * * *'
+- trigger:
+    name: 'compass-os-odl-sfc-ha-baremetal-master-trigger'
+    triggers:
+        - timed: '0 4 * * *'
+
 
 - trigger:
     name: 'compass-os-nosdn-nofeature-ha-baremetal-danube-trigger'
     name: 'compass-os-nosdn-kvm-ha-baremetal-danube-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'compass-os-nosdn-dpdk-ha-baremetal-danube-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'compass-k8-nosdn-nofeature-ha-baremetal-danube-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'compass-os-odl-sfc-ha-baremetal-danube-trigger'
+    triggers:
+        - timed: ''
+
 
 - trigger:
     name: 'compass-os-nosdn-nofeature-ha-virtual-master-trigger'
     name: 'compass-os-nosdn-kvm-ha-virtual-master-trigger'
     triggers:
         - timed: '0 23 * * *'
+- trigger:
+    name: 'compass-os-nosdn-dpdk-ha-virtual-master-trigger'
+    triggers:
+        - timed: '0 17 * * *'
+- trigger:
+    name: 'compass-k8-nosdn-nofeature-ha-virtual-master-trigger'
+    triggers:
+        - timed: '0 18 * * *'
+- trigger:
+    name: 'compass-os-odl-sfc-ha-virtual-master-trigger'
+    triggers:
+        - timed: '0 16 * * *'
 
 - trigger:
     name: 'compass-os-nosdn-nofeature-ha-virtual-danube-trigger'
     name: 'compass-os-nosdn-kvm-ha-virtual-danube-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'compass-os-nosdn-dpdk-ha-virtual-danube-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'compass-os-odl-sfc-ha-virtual-danube-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'compass-k8-nosdn-nofeature-ha-virtual-danube-trigger'
+    triggers:
+        - timed: ''
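
The conditional-step builders added above split the compass4nfv daily flow by branch; a minimal Python sketch of that dispatch (illustrative only):

    def compass_builder_scripts(stream):
        # regex-match 'master' on label '{stream}': build from source, then deploy
        if stream == 'master':
            return ['compass-build.sh', 'compass-deploy.sh']
        # regex-match 'danube' on label '{stream}': download the artifact, then deploy
        if stream == 'danube':
            return ['compass-download-artifact.sh', 'compass-deploy.sh']
        return []
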
diff --git a/jjb/compass4nfv/compass-deploy.sh b/jjb/compass4nfv/compass-deploy.sh
index 2668ccd..7a5af5f 100644 (file)
@@ -6,24 +6,23 @@ echo "Starting the deployment on baremetal environment using $INSTALLER_TYPE. Th
 echo "--------------------------------------------------------"
 echo
 
-# source the properties file so we get OPNFV vars
-source $BUILD_DIRECTORY/latest.properties
-
-# echo the info about artifact that is used during the deployment
-echo "Using ${OPNFV_ARTIFACT_URL/*\/} for deployment"
-
-if [[ ! "$JOB_NAME" =~ (verify|merge) ]]; then
-    # for none-merge deployments
-    # checkout the commit that was used for building the downloaded artifact
-    # to make sure the ISO and deployment mechanism uses same versions
-    echo "Checking out $OPNFV_GIT_SHA1"
-    git checkout $OPNFV_GIT_SHA1 --quiet
-fi
-
 echo 1 > /proc/sys/vm/drop_caches
 
 export CONFDIR=$WORKSPACE/deploy/conf
 if [[ "$BRANCH" = 'stable/danube' ]]; then
+    # source the properties file so we get OPNFV vars
+    source $BUILD_DIRECTORY/latest.properties
+    # echo the info about artifact that is used during the deployment
+    echo "Using ${OPNFV_ARTIFACT_URL/*\/} for deployment"
+
+    if [[ ! "$JOB_NAME" =~ (verify|merge) ]]; then
+        # for non-merge deployments
+        # checkout the commit that was used for building the downloaded artifact
+        # to make sure the ISO and deployment mechanism use the same versions
+        echo "Checking out $OPNFV_GIT_SHA1"
+        git checkout $OPNFV_GIT_SHA1 --quiet
+    fi
+
     export ISO_URL=file://$BUILD_DIRECTORY/compass.iso
 else
     export ISO_URL=file://$BUILD_DIRECTORY/compass.tar.gz
@@ -40,6 +39,8 @@ elif [[ "${DEPLOY_SCENARIO}" =~ "-onos" ]]; then
     export NETWORK_CONF_FILE=network_onos.yml
 elif [[ "${DEPLOY_SCENARIO}" =~ "-openo" ]]; then
     export NETWORK_CONF_FILE=network_openo.yml
+elif [[ "${DEPLOY_SCENARIO}" =~ "-dpdk" ]]; then
+    export NETWORK_CONF_FILE=network_dpdk.yml
 else
     export NETWORK_CONF_FILE=network.yml
 fi
diff --git a/jjb/doctor/doctor.yml b/jjb/doctor/doctor.yml
index 5bb8f74..c6b2cb6 100644 (file)
             wget https://git.opnfv.org/functest/plain/functest/ci/download_images.sh -O functest/ci/download_images.sh
         - 'functest-suite-builder'
         - shell: |
-            functest_log="$HOME/opnfv/functest/results/{stream}/{project}.log"
+            functest_log="$HOME/opnfv/functest/results/{stream}/$FUNCTEST_SUITE_NAME.log"
             # NOTE: checking the test result, as the previous job could return
            #       0 regardless of the result of the doctor test scenario.
             grep -e ' OK$' $functest_log || exit 1
         - archive:
             artifacts: 'tests/*.log'
         - archive:
-            artifacts: 'functest_results/{project}.log'
+            artifacts: 'functest_results/$FUNCTEST_SUITE_NAME.log'
 
 
 #####################################
diff --git a/jjb/dovetail/dovetail-ci-jobs.yml b/jjb/dovetail/dovetail-ci-jobs.yml
index 43978f6..bcda2b7 100644 (file)
@@ -25,7 +25,7 @@
         branch: 'stable/{stream}'
         dovetail-branch: master
         gs-pathname: '/{stream}'
-        docker-tag: 'cvp.0.2.0'
+        docker-tag: 'cvp.0.3.0'
 
 #-----------------------------------
 # POD, PLATFORM, AND BRANCH MAPPING
 # that have not been switched using labels for slaves
 #--------------------------------
 #apex PODs
-        - lf-pod1:
-            slave-label: '{pod}'
+        - virtual:
+            slave-label: apex-virtual-master
             SUT: apex
             auto-trigger-name: 'daily-trigger-disabled'
             <<: *master
-        - lf-pod1:
-            slave-label: '{pod}'
+        - baremetal:
+            slave-label: apex-baremetal-master
+            SUT: apex
+            auto-trigger-name: 'daily-trigger-disabled'
+            <<: *master
+        - virtual:
+            slave-label: apex-virtual-danube
+            SUT: apex
+            auto-trigger-name: 'daily-trigger-disabled'
+            <<: *danube
+        - baremetal:
+            slave-label: apex-baremetal-danube
             SUT: apex
             auto-trigger-name: 'daily-trigger-disabled'
             <<: *danube
             <<: *danube
 #--------------------------------
     testsuite:
-        - 'debug'
         - 'compliance_set'
         - 'proposed_tests'
 
diff --git a/jjb/functest/functest-alpine.sh b/jjb/functest/functest-alpine.sh
index 512a01e..9084cca 100644 (file)
@@ -64,7 +64,7 @@ if [[ ${INSTALLER_TYPE} == 'compass' && ${DEPLOY_SCENARIO} == *'os-nosdn-openo-h
     envs=${env}" -e OPENO_MSB_ENDPOINT=${openo_msb_endpoint}"
 fi
 
-volumes="${images_vol} ${results_vol} ${sshkey_vol} ${rc_file_vol}"
+volumes="${images_vol} ${results_vol} ${sshkey_vol} ${rc_file_vol} ${cacert_file_vol}"
 
 
 tiers=(healthcheck smoke)
diff --git a/jjb/functest/functest-daily-jobs.yml b/jjb/functest/functest-daily-jobs.yml
index cc9bac0..f14ca75 100644 (file)
             slave-label: '{alpine-pod}'
             installer: fuel
             <<: *master
+        - huawei-virtual5:
+            slave-label: '{alpine-pod}'
+            installer: compass
+            <<: *master
 
     testsuite:
         - 'suite':
diff --git a/jjb/global/installer-params.yml b/jjb/global/installer-params.yml
index ee154af..5e07a11 100644 (file)
@@ -75,8 +75,8 @@
             description: 'Model to deploy (os|k8)'
         - string:
             name: OS_RELEASE
-            default: 'newton'
-            description: 'OpenStack release (mitaka|newton)'
+            default: 'ocata'
+            description: 'OpenStack release (mitaka|newton|ocata)'
         - string:
             name: EXTERNAL_NETWORK
             default: ext-net
diff --git a/jjb/global/slave-params.yml b/jjb/global/slave-params.yml
index 3694c0b..7a257f1 100644 (file)
         - string:
             name: GIT_BASE
             default: https://gerrit.opnfv.org/gerrit/$PROJECT
+- parameter:
+    name: 'huawei-virtual5-defaults'
+    parameters:
+        - label:
+            name: SLAVE_LABEL
+            default: 'huawei-virtual5'
+        - string:
+            name: GIT_BASE
+            default: https://gerrit.opnfv.org/gerrit/$PROJECT
 - parameter:
     name: 'huawei-virtual7-defaults'
     parameters:
diff --git a/jjb/joid/joid-daily-jobs.yml b/jjb/joid/joid-daily-jobs.yml
index 13ea9b3..1ff260a 100644 (file)
         branch: '{stream}'
         disabled: false
         gs-pathname: ''
-    danube: &danube
-        stream: danube
-        branch: 'stable/{stream}'
-        disabled: false
-        gs-pathname: '/{stream}'
 #--------------------------------
 # POD, INSTALLER, AND BRANCH MAPPING
 #--------------------------------
         - virtual:
             slave-label: joid-virtual
             <<: *master
-        - baremetal:
-            slave-label: joid-baremetal
-            <<: *danube
-        - virtual:
-            slave-label: joid-virtual
-            <<: *danube
 #--------------------------------
#        Non-CI PODs
 #--------------------------------
@@ -62,7 +51,7 @@
         - 'os-nosdn-lxd-noha':
             auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
         - 'os-odl_l2-nofeature-ha':
-            auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
+            auto-trigger-name: 'daily-trigger-disabled'
         - 'os-onos-nofeature-ha':
             auto-trigger-name: 'daily-trigger-disabled'
         - 'os-odl_l2-nofeature-noha':
         - 'os-ocl-nofeature-noha':
             auto-trigger-name: 'daily-trigger-disabled'
         - 'k8-nosdn-nofeature-noha':
-            auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
+            auto-trigger-name: 'daily-trigger-disabled'
         - 'k8-nosdn-lb-noha':
             auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
+        - 'k8-ovn-lb-noha':
+            auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
 
     jobs:
         - 'joid-{scenario}-{pod}-daily-{stream}'
     name: 'joid-os-nosdn-nofeature-ha-cengn-pod1-master-trigger'
     triggers:
         - timed: ''
-# os-nosdn-nofeature-ha trigger - branch: danube
-- trigger:
-    name: 'joid-os-nosdn-nofeature-ha-baremetal-danube-trigger'
-    triggers:
-        - timed: '0 2 * * *'
-- trigger:
-    name: 'joid-os-nosdn-nofeature-ha-virtual-danube-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'joid-os-nosdn-nofeature-ha-orange-pod1-danube-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'joid-os-nosdn-nofeature-ha-cengn-pod1-danube-trigger'
-    triggers:
-        - timed: ''
 # os-odl_l2-nofeature-ha trigger - branch: master
 - trigger:
     name: 'joid-os-odl_l2-nofeature-ha-baremetal-master-trigger'
     name: 'joid-os-odl_l2-nofeature-ha-cengn-pod1-master-trigger'
     triggers:
         - timed: ''
-# os-odl_l2-nofeature-ha trigger - branch: danube
-- trigger:
-    name: 'joid-os-odl_l2-nofeature-ha-baremetal-danube-trigger'
-    triggers:
-        - timed: '0 7 * * *'
-- trigger:
-    name: 'joid-os-odl_l2-nofeature-ha-virtual-danube-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'joid-os-odl_l2-nofeature-ha-orange-pod1-danube-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'joid-os-odl_l2-nofeature-ha-cengn-pod1-danube-trigger'
-    triggers:
-        - timed: ''
 # os-onos-nofeature-ha trigger - branch: master
 - trigger:
     name: 'joid-os-onos-nofeature-ha-baremetal-master-trigger'
     name: 'joid-os-onos-nofeature-ha-cengn-pod1-master-trigger'
     triggers:
         - timed: ''
-# os-onos-nofeature-ha trigger - branch: danube
-- trigger:
-    name: 'joid-os-onos-nofeature-ha-baremetal-danube-trigger'
-    triggers:
-        - timed: '0 12 * * *'
-- trigger:
-    name: 'joid-os-onos-nofeature-ha-virtual-danube-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'joid-os-onos-nofeature-ha-orange-pod1-danube-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'joid-os-onos-nofeature-ha-cengn-pod1-danube-trigger'
-    triggers:
-        - timed: ''
 # os-onos-sfc-ha trigger - branch: master
 - trigger:
     name: 'joid-os-onos-sfc-ha-baremetal-master-trigger'
     name: 'joid-os-onos-sfc-ha-cengn-pod1-master-trigger'
     triggers:
         - timed: ''
-# os-onos-sfc-ha trigger - branch: danube
-- trigger:
-    name: 'joid-os-onos-sfc-ha-baremetal-danube-trigger'
-    triggers:
-        - timed: '0 17 * * *'
-- trigger:
-    name: 'joid-os-onos-sfc-ha-virtual-danube-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'joid-os-onos-sfc-ha-orange-pod1-danube-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'joid-os-onos-sfc-ha-cengn-pod1-danube-trigger'
-    triggers:
-        - timed: ''
 # os-nosdn-lxd-noha trigger - branch: master
 - trigger:
     name: 'joid-os-nosdn-lxd-noha-baremetal-master-trigger'
     name: 'joid-os-nosdn-lxd-noha-cengn-pod1-master-trigger'
     triggers:
         - timed: ''
-# os-nosdn-lxd-noha trigger - branch: danube
-- trigger:
-    name: 'joid-os-nosdn-lxd-noha-baremetal-danube-trigger'
-    triggers:
-        - timed: '0 22 * * *'
-- trigger:
-    name: 'joid-os-nosdn-lxd-noha-virtual-danube-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'joid-os-nosdn-lxd-noha-orange-pod1-danube-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'joid-os-nosdn-lxd-noha-cengn-pod1-danube-trigger'
-    triggers:
-        - timed: ''
 # os-nosdn-lxd-ha trigger - branch: master
 - trigger:
     name: 'joid-os-nosdn-lxd-ha-baremetal-master-trigger'
     name: 'joid-os-nosdn-lxd-ha-cengn-pod1-master-trigger'
     triggers:
         - timed: ''
-# os-nosdn-lxd-ha trigger - branch: danube
-- trigger:
-    name: 'joid-os-nosdn-lxd-ha-baremetal-danube-trigger'
-    triggers:
-        - timed: '0 10 * * *'
-- trigger:
-    name: 'joid-os-nosdn-lxd-ha-virtual-danube-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'joid-os-nosdn-lxd-ha-orange-pod1-danube-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'joid-os-nosdn-lxd-ha-cengn-pod1-danube-trigger'
-    triggers:
-        - timed: ''
 # os-nosdn-nofeature-noha trigger - branch: master
 - trigger:
     name: 'joid-os-nosdn-nofeature-noha-baremetal-master-trigger'
     name: 'joid-os-nosdn-nofeature-noha-cengn-pod1-master-trigger'
     triggers:
         - timed: ''
-# os-nosdn-nofeature-noha trigger - branch: danube
-- trigger:
-    name: 'joid-os-nosdn-nofeature-noha-baremetal-danube-trigger'
-    triggers:
-        - timed: '0 4 * * *'
-- trigger:
-    name: 'joid-os-nosdn-nofeature-noha-virtual-danube-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'joid-os-nosdn-nofeature-noha-orange-pod1-danube-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'joid-os-nosdn-nofeature-noha-cengn-pod1-danube-trigger'
-    triggers:
-        - timed: ''
 # k8-nosdn-nofeature-noha trigger - branch: master
 - trigger:
     name: 'joid-k8-nosdn-nofeature-noha-baremetal-master-trigger'
     name: 'joid-k8-nosdn-nofeature-noha-cengn-pod1-master-trigger'
     triggers:
         - timed: ''
-# k8-nosdn-nofeature-noha trigger - branch: danube
-- trigger:
-    name: 'joid-k8-nosdn-nofeature-noha-baremetal-danube-trigger'
-    triggers:
-        - timed: '0 15 * * *'
-- trigger:
-    name: 'joid-k8-nosdn-nofeature-noha-virtual-danube-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'joid-k8-nosdn-nofeature-noha-orange-pod1-danube-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'joid-k8-nosdn-nofeature-noha-cengn-pod1-danube-trigger'
-    triggers:
-        - timed: ''
 # k8-nosdn-lb-noha trigger - branch: master
 - trigger:
     name: 'joid-k8-nosdn-lb-noha-baremetal-master-trigger'
     name: 'joid-k8-nosdn-lb-noha-cengn-pod1-master-trigger'
     triggers:
         - timed: ''
-# k8-nosdn-lb-noha trigger - branch: danube
+# k8-ovn-lb-noha trigger - branch: master
 - trigger:
-    name: 'joid-k8-nosdn-lb-noha-baremetal-danube-trigger'
+    name: 'joid-k8-ovn-lb-noha-baremetal-master-trigger'
     triggers:
-        - timed: '0 20 * * *'
+        - timed: '5 17 * * *'
 - trigger:
-    name: 'joid-k8-nosdn-lb-noha-virtual-danube-trigger'
+    name: 'joid-k8-ovn-lb-noha-virtual-master-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'joid-k8-nosdn-lb-noha-orange-pod1-danube-trigger'
+    name: 'joid-k8-ovn-lb-noha-orange-pod1-master-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'joid-k8-nosdn-lb-noha-cengn-pod1-danube-trigger'
+    name: 'joid-k8-ovn-lb-noha-cengn-pod1-master-trigger'
     triggers:
         - timed: ''
diff --git a/jjb/releng/opnfv-docker-arm.yml b/jjb/releng/opnfv-docker-arm.yml
index 417fc70..7502b17 100644 (file)
             name: RELEASE_VERSION
             default: ""
             description: "Release version, e.g. 1.0, 2.0, 3.0"
+        - string:
+            name: DOCKER_DIR
+            default: "docker"
+            description: "Directory containing files needed by the Dockerfile"
         - string:
             name: DOCKERFILE
             default: "Dockerfile.aarch64"
diff --git a/jjb/releng/opnfv-docker.sh b/jjb/releng/opnfv-docker.sh
index ebd0c9f..0de3df2 100644 (file)
@@ -54,7 +54,7 @@ if [[ -n "$(docker images | grep $DOCKER_REPO_NAME)" ]]; then
     done
 fi
 
-cd $WORKSPACE/docker
+cd $WORKSPACE/$DOCKER_DIR
 HOST_ARCH=$(uname -m)
 if [ ! -f "${DOCKERFILE}" ]; then
     # If this is expected to be a Dockerfile for other arch than x86
diff --git a/jjb/releng/opnfv-docker.yml b/jjb/releng/opnfv-docker.yml
index 095ba41..fa9c441 100644 (file)
     other-receivers: &other-receivers
         receivers: ''
 
-    project:
+    dockerfile: "Dockerfile"
+    dockerdir: "docker"
+
+    # This is the dockerhub repo the image will be pushed to as
+    # 'opnfv/{dockerrepo}'. See: DOCKER_REPO_NAME parameter.
+    # 'project' is the OPNFV repo we expect to contain the Dockerfile
+    dockerrepo:
         # projects with jobs for master
         - 'releng-anteater':
+            project: 'releng-anteater'
             <<: *master
             <<: *other-receivers
         - 'bottlenecks':
+            project: 'bottlenecks'
             <<: *master
             <<: *other-receivers
         - 'cperf':
+            project: 'cperf'
             <<: *master
             <<: *other-receivers
         - 'dovetail':
+            project: 'dovetail'
             <<: *master
             <<: *other-receivers
         - 'functest':
+            project: 'functest'
             <<: *master
             <<: *functest-receivers
         - 'qtip':
+            project: 'qtip'
             <<: *master
             <<: *other-receivers
-        - 'storperf':
+        - 'storperf-master':
+            project: 'storperf'
+            dockerdir: 'docker/storperf-master'
+            <<: *master
+            <<: *other-receivers
+        - 'storperf-httpfrontend':
+            project: 'storperf'
+            dockerdir: 'docker/storperf-httpfrontend'
+            <<: *master
+            <<: *other-receivers
+        - 'storperf-reporting':
+            project: 'storperf'
+            dockerdir: 'docker/storperf-reporting'
             <<: *master
             <<: *other-receivers
         - 'yardstick':
+            project: 'yardstick'
             <<: *master
             <<: *other-receivers
         # projects with jobs for stable
         - 'bottlenecks':
+            project: 'bottlenecks'
             <<: *danube
             <<: *other-receivers
         - 'functest':
+            project: 'functest'
             <<: *danube
             <<: *functest-receivers
         - 'qtip':
+            project: 'qtip'
             <<: *danube
             <<: *other-receivers
         - 'storperf':
+            project: 'storperf'
             <<: *danube
             <<: *other-receivers
         - 'yardstick':
+            project: 'yardstick'
             <<: *danube
             <<: *other-receivers
 
     jobs:
-        - '{project}-docker-build-push-{stream}'
+        - "{dockerrepo}-docker-build-push-{stream}"
 
 
 - project:
 
     name: opnfv-monitor-docker        # projects which only monitor dedicated file or path
 
+    dockerfile: "Dockerfile"
+    dockerdir: "docker"
+
     project:
         # projects with jobs for master
         - 'daisy':
+            dockerrepo: 'daisy'
             <<: *master
         - 'escalator':
+            dockerrepo: 'escalator'
             <<: *master
 
     jobs:
 # job templates
 ########################
 - job-template:
-    name: '{project}-docker-build-push-{stream}'
+    name: '{dockerrepo}-docker-build-push-{stream}'
 
     disabled: '{obj:disabled}'
 
             description: "To enable/disable pushing the image to Dockerhub."
         - string:
             name: DOCKER_REPO_NAME
-            default: "opnfv/{project}"
+            default: "opnfv/{dockerrepo}"
             description: "Dockerhub repo to be pushed to."
+        - string:
+            name: DOCKER_DIR
+            default: "{dockerdir}"
+            description: "Directory containing files needed by the Dockerfile"
         - string:
             name: COMMIT_ID
             default: ""
             description: "Release version, e.g. 1.0, 2.0, 3.0"
         - string:
             name: DOCKERFILE
-            default: "Dockerfile"
+            default: "{dockerfile}"
             description: "Dockerfile to use for creating the image."
 
     scm:
diff --git a/jjb/yardstick/yardstick-daily.sh b/jjb/yardstick/yardstick-daily.sh
index cf37ac2..56d0874 100755 (executable)
@@ -3,6 +3,7 @@ set -e
 [[ $CI_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
 
 rc_file_vol=""
+cacert_file_vol=""
 sshkey=""
 
 if [[ ${INSTALLER_TYPE} == 'apex' ]]; then
@@ -23,6 +24,10 @@ if [[ ${INSTALLER_TYPE} == 'joid' ]]; then
     rc_file_vol="-v $LAB_CONFIG/admin-openrc:/etc/yardstick/openstack.creds"
     # If dev lab, credentials may not be the default ones, just provide a path to put them into docker
     # replace the default one by the customized one provided by jenkins config
+elif [[ ${INSTALLER_TYPE} == 'compass' && ${BRANCH} == 'master' ]]; then
+    cacert_file_vol="-v ${HOME}/os_cacert:/etc/yardstick/os_cacert"
+    echo "export OS_CACERT=/etc/yardstick/os_cacert" >> ${HOME}/opnfv-openrc.sh
+    rc_file_vol="-v ${HOME}/opnfv-openrc.sh:/etc/yardstick/openstack.creds"
 else
     rc_file_vol="-v ${HOME}/opnfv-openrc.sh:/etc/yardstick/openstack.creds"
 fi
@@ -50,7 +55,7 @@ sudo rm -rf ${dir_result}/*
 map_log_dir="-v ${dir_result}:/tmp/yardstick"
 
 # Run docker
-cmd="sudo docker run ${opts} ${envs} ${rc_file_vol} ${map_log_dir} ${sshkey} opnfv/yardstick:${DOCKER_TAG} \
+cmd="sudo docker run ${opts} ${envs} ${rc_file_vol} ${cacert_file_vol} ${map_log_dir} ${sshkey} opnfv/yardstick:${DOCKER_TAG} \
     exec_tests.sh ${YARDSTICK_DB_BACKEND} ${YARDSTICK_SCENARIO_SUITE_NAME}"
 echo "Yardstick: Running docker cmd: ${cmd}"
 ${cmd}
diff --git a/utils/test/testapi/3rd_party/static/testapi-ui/app.js b/utils/test/testapi/3rd_party/static/testapi-ui/app.js
index 8c701c3..bb31ab0 100644 (file)
@@ -54,8 +54,8 @@
                 controller: 'ResultsController as ctrl'
             }).
             state('userResults', {
-                url: 'user_results',
-                templateUrl: '/testapi-ui/components/results/results.html',
+                url: '/user_results',
+                templateUrl: 'testapi-ui/components/results/results.html',
                 controller: 'ResultsController as ctrl'
             }).
             state('resultsDetail', {
@@ -66,7 +66,7 @@
             }).
             state('profile', {
                 url: '/profile',
-                templateUrl: '/testapi-ui/components/profile/profile.html',
+                templateUrl: 'testapi-ui/components/profile/profile.html',
                 controller: 'ProfileController as ctrl'
             }).
             state('authFailure', {
diff --git a/utils/test/testapi/opnfv_testapi/cmd/server.py b/utils/test/testapi/opnfv_testapi/cmd/server.py
index 545d5e3..a5ac5eb 100644 (file)
@@ -29,40 +29,18 @@ TODOs :
 
 """
 
-import argparse
-import sys
-
-import motor
 import tornado.ioloop
 
-from opnfv_testapi.common import config
+from opnfv_testapi.common.config import CONF
 from opnfv_testapi.router import url_mappings
 from opnfv_testapi.tornado_swagger import swagger
 
-CONF = None
-
-
-def parse_config(argv=[]):
-    global CONF
-    parser = argparse.ArgumentParser()
-    parser.add_argument("-c", "--config-file", dest='config_file',
-                        help="Config file location")
-    args = parser.parse_args(argv)
-    if args.config_file:
-        config.Config.CONFIG = args.config_file
-    CONF = config.Config()
-
-
-def get_db():
-    return motor.MotorClient(CONF.mongo_url)[CONF.mongo_dbname]
-
 
 def make_app():
     swagger.docs(base_url=CONF.swagger_base_url,
                  static_path=CONF.static_path)
     return swagger.Application(
         url_mappings.mappings,
-        db=get_db(),
         debug=CONF.api_debug,
         auth=CONF.api_authenticate,
         cookie_secret='opnfv-testapi',
@@ -70,7 +48,6 @@ def make_app():
 
 
 def main():
-    parse_config(sys.argv[1:])
     application = make_app()
     application.listen(CONF.api_port)
     tornado.ioloop.IOLoop.current().start()
diff --git a/utils/test/testapi/opnfv_testapi/common/check.py b/utils/test/testapi/opnfv_testapi/common/check.py
index 67e8fbd..24ba876 100644 (file)
@@ -13,6 +13,7 @@ from tornado import web
 
 from opnfv_testapi.common import message
 from opnfv_testapi.common import raises
+from opnfv_testapi.db import api as dbapi
 
 
 def authenticate(method):
@@ -26,7 +27,7 @@ def authenticate(method):
             except KeyError:
                 raises.Unauthorized(message.unauthorized())
             query = {'access_token': token}
-            check = yield self._eval_db_find_one(query, 'tokens')
+            check = yield dbapi.db_find_one('tokens', query)
             if not check:
                 raises.Forbidden(message.invalid_token())
         ret = yield gen.coroutine(method)(self, *args, **kwargs)
@@ -38,7 +39,7 @@ def not_exist(xstep):
     @functools.wraps(xstep)
     def wrap(self, *args, **kwargs):
         query = kwargs.get('query')
-        data = yield self._eval_db_find_one(query)
+        data = yield dbapi.db_find_one(self.table, query)
         if not data:
             raises.NotFound(message.not_found(self.table, query))
         ret = yield gen.coroutine(xstep)(self, data, *args, **kwargs)
@@ -78,7 +79,7 @@ def carriers_exist(xstep):
         carriers = kwargs.pop('carriers', {})
         if carriers:
             for table, query in carriers:
-                exist = yield self._eval_db_find_one(query(), table)
+                exist = yield dbapi.db_find_one(table, query())
                 if not exist:
                     raises.Forbidden(message.not_found(table, query()))
         ret = yield gen.coroutine(xstep)(self, *args, **kwargs)
@@ -91,7 +92,7 @@ def new_not_exists(xstep):
     def wrap(self, *args, **kwargs):
         query = kwargs.get('query')
         if query:
-            to_data = yield self._eval_db_find_one(query())
+            to_data = yield dbapi.db_find_one(self.table, query())
             if to_data:
                 raises.Forbidden(message.exist(self.table, query()))
         ret = yield gen.coroutine(xstep)(self, *args, **kwargs)
@@ -105,7 +106,7 @@ def updated_one_not_exist(xstep):
         db_keys = kwargs.pop('db_keys', [])
         query = self._update_query(db_keys, data)
         if query:
-            to_data = yield self._eval_db_find_one(query)
+            to_data = yield dbapi.db_find_one(self.table, query)
             if to_data:
                 raises.Forbidden(message.exist(self.table, query))
         ret = yield gen.coroutine(xstep)(self, data, *args, **kwargs)
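
With the checks now backed by `opnfv_testapi.db.api`, a handler only declares its guards; a sketch of typical usage (hypothetical handler, not code from this commit):

    from opnfv_testapi.common import check
    from opnfv_testapi.resources import handlers

    class ExampleHandler(handlers.GenericApiHandler):
        @check.authenticate      # 401 without a token; 403 if absent from 'tokens'
        @check.new_not_exists    # 403 if self.table already matches query()
        def _create(self, *args, **kwargs):
            pass                 # resource-specific creation logic
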
diff --git a/utils/test/testapi/opnfv_testapi/common/config.py b/utils/test/testapi/opnfv_testapi/common/config.py
index f73c0ab..4cd53c6 100644 (file)
@@ -8,14 +8,16 @@
 # feng.xiaowei@zte.com.cn remove prepare_put_request            5-30-2016
 ##############################################################################
 import ConfigParser
+import argparse
 import os
+import sys
 
 
 class Config(object):
-    CONFIG = None
 
     def __init__(self):
-        self.file = self.CONFIG if self.CONFIG else self._default_config()
+        self.config_file = None
+        self._set_config_file()
         self._parse()
         self._parse_per_page()
         self.static_path = os.path.join(
@@ -24,11 +26,11 @@ class Config(object):
             'static')
 
     def _parse(self):
-        if not os.path.exists(self.file):
-            raise Exception("%s not found" % self.file)
+        if not os.path.exists(self.config_file):
+            raise Exception("%s not found" % self.config_file)
 
         config = ConfigParser.RawConfigParser()
-        config.read(self.file)
+        config.read(self.config_file)
         self._parse_section(config)
 
     def _parse_section(self, config):
@@ -53,8 +55,24 @@ class Config(object):
                 value = False
         return value
 
-    @staticmethod
-    def _default_config():
+    def _set_config_file(self):
+        if not self._set_sys_config_file():
+            self._set_default_config_file()
+
+    def _set_sys_config_file(self):
+        parser = argparse.ArgumentParser()
+        parser.add_argument("-c", "--config-file", dest='config_file',
+                            help="Config file location", metavar="FILE")
+        args, _ = parser.parse_known_args(sys.argv)
+        try:
+            self.config_file = args.config_file
+        finally:
+            return self.config_file is not None
+
+    def _set_default_config_file(self):
         is_venv = os.getenv('VIRTUAL_ENV')
-        return os.path.join('/' if not is_venv else is_venv,
-                            'etc/opnfv_testapi/config.ini')
+        self.config_file = os.path.join('/' if not is_venv else is_venv,
+                                        'etc/opnfv_testapi/config.ini')
+
+
+CONF = Config()
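
A minimal sketch of why `_set_sys_config_file` relies on `parse_known_args` (standalone, illustrative values): it must pick `--config-file` out of whatever command line the hosting process was started with, ignoring every other token instead of erroring on it.

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("-c", "--config-file", dest='config_file', metavar="FILE")
    known, unknown = parser.parse_known_args(
        ['server.py', '--config-file', '/etc/opnfv_testapi/config.ini', '--debug'])
    print(known.config_file)   # /etc/opnfv_testapi/config.ini
    print(unknown)             # ['server.py', '--debug']
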
diff --git a/utils/test/testapi/opnfv_testapi/db/__init__.py b/utils/test/testapi/opnfv_testapi/db/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/utils/test/testapi/opnfv_testapi/db/api.py b/utils/test/testapi/opnfv_testapi/db/api.py
new file mode 100644 (file)
index 0000000..c057480
--- /dev/null
@@ -0,0 +1,38 @@
+import motor
+
+from opnfv_testapi.common.config import CONF
+
+DB = motor.MotorClient(CONF.mongo_url)[CONF.mongo_dbname]
+
+
+def db_update(collection, query, update_req):
+    return _eval_db(collection, 'update', query, update_req, check_keys=False)
+
+
+def db_delete(collection, query):
+    return _eval_db(collection, 'remove', query)
+
+
+def db_aggregate(collection, pipelines):
+    return _eval_db(collection, 'aggregate', pipelines, allowDiskUse=True)
+
+
+def db_list(collection, query):
+    return _eval_db(collection, 'find', query)
+
+
+def db_save(collection, data):
+    return _eval_db(collection, 'insert', data, check_keys=False)
+
+
+def db_find_one(collection, query):
+    return _eval_db(collection, 'find_one', query)
+
+
+def _eval_db(collection, method, *args, **kwargs):
+    exec_collection = DB.__getattr__(collection)
+    return exec_collection.__getattribute__(method)(*args, **kwargs)
+
+
+def _eval_db_find_one(query, table=None):
+    return _eval_db(table, 'find_one', query)
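
Illustrative usage of the new module (hypothetical caller, not part of the commit): any coroutine can now reach the shared MotorClient without holding a handler reference, mirroring how check.py queries the 'tokens' collection.

    from tornado import gen

    from opnfv_testapi.db import api as dbapi

    @gen.coroutine
    def token_is_known(token):
        record = yield dbapi.db_find_one('tokens', {'access_token': token})
        raise gen.Return(record is not None)
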
diff --git a/utils/test/testapi/opnfv_testapi/resources/handlers.py b/utils/test/testapi/opnfv_testapi/resources/handlers.py
index f23cc57..8a3a2db 100644 (file)
@@ -20,8 +20,8 @@
 # feng.xiaowei@zte.com.cn remove DashboardHandler            5-30-2016
 ##############################################################################
 
-from datetime import datetime
 import json
+from datetime import datetime
 
 from tornado import gen
 from tornado import web
@@ -29,6 +29,7 @@ from tornado import web
 from opnfv_testapi.common import check
 from opnfv_testapi.common import message
 from opnfv_testapi.common import raises
+from opnfv_testapi.db import api as dbapi
 from opnfv_testapi.resources import models
 from opnfv_testapi.tornado_swagger import swagger
 
@@ -38,7 +39,6 @@ DEFAULT_REPRESENTATION = "application/json"
 class GenericApiHandler(web.RequestHandler):
     def __init__(self, application, request, **kwargs):
         super(GenericApiHandler, self).__init__(application, request, **kwargs)
-        self.db = self.settings["db"]
         self.json_args = None
         self.table = None
         self.table_cls = None
@@ -90,8 +90,7 @@ class GenericApiHandler(web.RequestHandler):
 
         if self.table != 'results':
             data.creation_date = datetime.now()
-        _id = yield self._eval_db(self.table, 'insert', data.format(),
-                                  check_keys=False)
+        _id = yield dbapi.db_save(self.table, data.format())
         if 'name' in self.json_args:
             resource = data.name
         else:
@@ -110,17 +109,14 @@ class GenericApiHandler(web.RequestHandler):
 
         total_pages = 0
         if page > 0:
-            cursor = self._eval_db(self.table, 'find', query)
+            cursor = dbapi.db_list(self.table, query)
             records_count = yield cursor.count()
             total_pages = self._calc_total_pages(records_count,
                                                  last,
                                                  page,
                                                  per_page)
         pipelines = self._set_pipelines(query, sort, last, page, per_page)
-        cursor = self._eval_db(self.table,
-                               'aggregate',
-                               pipelines,
-                               allowDiskUse=True)
+        cursor = dbapi.db_aggregate(self.table, pipelines)
         data = list()
         while (yield cursor.fetch_next):
             data.append(self.format_data(cursor.next_object()))
@@ -176,7 +172,7 @@ class GenericApiHandler(web.RequestHandler):
     @check.authenticate
     @check.not_exist
     def _delete(self, data, query=None):
-        yield self._eval_db(self.table, 'remove', query)
+        yield dbapi.db_delete(self.table, query)
         self.finish_request()
 
     @check.authenticate
@@ -186,8 +182,7 @@ class GenericApiHandler(web.RequestHandler):
     def _update(self, data, query=None, **kwargs):
         data = self.table_cls.from_dict(data)
         update_req = self._update_requests(data)
-        yield self._eval_db(self.table, 'update', query, update_req,
-                            check_keys=False)
+        yield dbapi.db_update(self.table, query, update_req)
         update_req['_id'] = str(data._id)
         self.finish_request(update_req)
 
@@ -230,23 +225,6 @@ class GenericApiHandler(web.RequestHandler):
             query[key] = new
         return query if not equal else dict()
 
-    def _eval_db(self, table, method, *args, **kwargs):
-        exec_collection = self.db.__getattr__(table)
-        return exec_collection.__getattribute__(method)(*args, **kwargs)
-
-    def _eval_db_find_one(self, query, table=None):
-        if table is None:
-            table = self.table
-        return self._eval_db(table, 'find_one', query)
-
-    def db_save(self, collection, data):
-        self._eval_db(collection, 'insert', data, check_keys=False)
-
-    def db_find_one(self, query, collection=None):
-        if not collection:
-            collection = self.table
-        return self._eval_db(collection, 'find_one', query)
-
 
 class VersionHandler(GenericApiHandler):
     @swagger.operation(nickname='listAllVersions')
diff --git a/utils/test/testapi/opnfv_testapi/resources/result_handlers.py b/utils/test/testapi/opnfv_testapi/resources/result_handlers.py
index 5eb1b92..2bf1792 100644 (file)
@@ -13,7 +13,7 @@ import json
 
 from bson import objectid
 
-from opnfv_testapi.common import config
+from opnfv_testapi.common.config import CONF
 from opnfv_testapi.common import message
 from opnfv_testapi.common import raises
 from opnfv_testapi.resources import handlers
@@ -21,8 +21,6 @@ from opnfv_testapi.resources import result_models
 from opnfv_testapi.tornado_swagger import swagger
 from opnfv_testapi.ui.auth import constants as auth_const
 
-CONF = config.Config()
-
 
 class GenericResultHandler(handlers.GenericApiHandler):
     def __init__(self, application, request, **kwargs):
@@ -68,7 +66,7 @@ class GenericResultHandler(handlers.GenericApiHandler):
                     del query['public']
                     if role != "reviewer":
                         query['user'] = openid
-            elif k != 'last' and k != 'page':
+            elif k not in ['last', 'page', 'descend']:
                 query[k] = v
             if date_range:
                 query['start_date'] = date_range
@@ -169,18 +167,27 @@ class ResultsCLHandler(GenericResultHandler):
             @type signed: L{string}
             @in signed: query
             @required signed: False
+            @param descend: true, newest2oldest; false, oldest2newest
+            @type descend: L{string}
+            @in descend: query
+            @required descend: False
         """
-        limitations = {'sort': {'_id': -1}}
-        last = self.get_query_argument('last', 0)
-        if last is not None:
-            last = self.get_int('last', last)
-            limitations.update({'last': last})
+        def descend_limit():
+            descend = self.get_query_argument('descend', 'true')
+            return -1 if descend.lower() == 'true' else 1
+
+        def last_limit():
+            return self.get_int('last', self.get_query_argument('last', 0))
+
+        def page_limit():
+            return self.get_int('page', self.get_query_argument('page', 0))
 
-        page = self.get_query_argument('page', None)
-        if page is not None:
-            page = self.get_int('page', page)
-            limitations.update({'page': page,
-                                'per_page': CONF.api_results_per_page})
+        limitations = {
+            'sort': {'_id': descend_limit()},
+            'last': last_limit(),
+            'page': page_limit(),
+            'per_page': CONF.api_results_per_page
+        }
 
         self._list(query=self.set_query(), **limitations)
 
diff --git a/utils/test/testapi/opnfv_testapi/router/url_mappings.py b/utils/test/testapi/opnfv_testapi/router/url_mappings.py
index 37e719b..562fa5e 100644 (file)
@@ -8,7 +8,7 @@
 ##############################################################################
 import tornado.web
 
-from opnfv_testapi.common import config
+from opnfv_testapi.common.config import CONF
 from opnfv_testapi.resources import handlers
 from opnfv_testapi.resources import pod_handlers
 from opnfv_testapi.resources import project_handlers
@@ -58,7 +58,7 @@ mappings = [
     # static path
     (r'/(.*\.(css|png|gif|js|html|json|map|woff2|woff|ttf))',
      tornado.web.StaticFileHandler,
-     {'path': config.Config().static_path}),
+     {'path': CONF.static_path}),
 
     (r'/', root.RootHandler),
     (r'/api/v1/auth/signin', sign.SigninHandler),
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/common/test_config.py b/utils/test/testapi/opnfv_testapi/tests/unit/common/test_config.py
index 446b944..cc8743c 100644 (file)
@@ -1,16 +1,15 @@
-import os
+import argparse
 
-from opnfv_testapi.common import config
 
-
-def test_config_success():
-    config_file = os.path.join(os.path.dirname(__file__),
-                               '../../../../etc/config.ini')
-    config.Config.CONFIG = config_file
-    conf = config.Config()
-    assert conf.mongo_url == 'mongodb://127.0.0.1:27017/'
-    assert conf.mongo_dbname == 'test_results_collection'
-    assert conf.api_port == 8000
-    assert conf.api_debug is True
-    assert conf.api_authenticate is False
-    assert conf.swagger_base_url == 'http://localhost:8000'
+def test_config_normal(mocker, config_normal):
+    mocker.patch(
+        'argparse.ArgumentParser.parse_known_args',
+        return_value=(argparse.Namespace(config_file=config_normal), None))
+    from opnfv_testapi.common import config
+    CONF = config.Config()
+    assert CONF.mongo_url == 'mongodb://127.0.0.1:27017/'
+    assert CONF.mongo_dbname == 'test_results_collection'
+    assert CONF.api_port == 8000
+    assert CONF.api_debug is True
+    assert CONF.api_authenticate is False
+    assert CONF.swagger_base_url == 'http://localhost:8000'
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/conftest.py b/utils/test/testapi/opnfv_testapi/tests/unit/conftest.py
new file mode 100644 (file)
index 0000000..feff1da
--- /dev/null
@@ -0,0 +1,8 @@
+from os import path
+
+import pytest
+
+
+@pytest.fixture
+def config_normal():
+    return path.join(path.dirname(__file__), 'common/normal.ini')
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/fake_pymongo.py b/utils/test/testapi/opnfv_testapi/tests/unit/fake_pymongo.py
index d95ff37..0ca83df 100644 (file)
@@ -6,9 +6,10 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from operator import itemgetter
+
 from bson.objectid import ObjectId
 from concurrent.futures import ThreadPoolExecutor
-from operator import itemgetter
 
 
 def thread_execute(method, *args, **kwargs):
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/resources/test_base.py b/utils/test/testapi/opnfv_testapi/tests/unit/resources/test_base.py
index 6e4d454..dcec4e9 100644 (file)
@@ -12,13 +12,9 @@ from os import path
 import mock
 from tornado import testing
 
-from opnfv_testapi.common import config
 from opnfv_testapi.resources import models
 from opnfv_testapi.tests.unit import fake_pymongo
 
-config.Config.CONFIG = path.join(path.dirname(__file__),
-                                 '../../../../etc/config.ini')
-
 
 class TestBase(testing.AsyncHTTPTestCase):
     headers = {'Content-Type': 'application/json; charset=UTF-8'}
@@ -37,20 +33,21 @@ class TestBase(testing.AsyncHTTPTestCase):
 
     def tearDown(self):
         self.db_patcher.stop()
+        self.config_patcher.stop()
 
     def _patch_server(self):
-        from opnfv_testapi.cmd import server
-        server.parse_config([
-            '--config-file',
-            path.join(path.dirname(__file__), path.pardir, 'common/normal.ini')
-        ])
-        self.db_patcher = mock.patch('opnfv_testapi.cmd.server.get_db',
-                                     self._fake_pymongo)
+        import argparse
+        config = path.join(path.dirname(__file__), '../common/normal.ini')
+        self.config_patcher = mock.patch(
+            'argparse.ArgumentParser.parse_known_args',
+            return_value=(argparse.Namespace(config_file=config), None))
+        self.db_patcher = mock.patch('opnfv_testapi.db.api.DB',
+                                     fake_pymongo)
+        self.config_patcher.start()
         self.db_patcher.start()
 
-    @staticmethod
-    def _fake_pymongo():
-        return fake_pymongo
+    def set_config_file(self):
+        self.config_file = 'normal.ini'
 
     def get_app(self):
         from opnfv_testapi.cmd import server
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/resources/test_result.py b/utils/test/testapi/opnfv_testapi/tests/unit/resources/test_result.py
index f199bc7..1e83ed3 100644 (file)
@@ -61,9 +61,9 @@ class TestResultBase(base.TestBase):
         self.scenario = 'odl-l2'
         self.criteria = 'passed'
         self.trust_indicator = result_models.TI(0.7)
-        self.start_date = "2016-05-23 07:16:09.477097"
-        self.stop_date = "2016-05-23 07:16:19.477097"
-        self.update_date = "2016-05-24 07:16:19.477097"
+        self.start_date = str(datetime.now())
+        self.stop_date = str(datetime.now() + timedelta(minutes=1))
+        self.update_date = str(datetime.now() + timedelta(days=1))
         self.update_step = -0.05
         super(TestResultBase, self).setUp()
         self.details = Details(timestart='0', duration='9s', status='OK')
@@ -275,7 +275,7 @@ class TestResultGet(TestResultBase):
 
     @executor.query(httplib.OK, '_query_period_one', 1)
     def test_queryPeriodSuccess(self):
-        return self._set_query('period=11')
+        return self._set_query('period=5')
 
     @executor.query(httplib.BAD_REQUEST, message.must_int('last'))
     def test_queryLastNotInt(self):
@@ -306,7 +306,7 @@ class TestResultGet(TestResultBase):
                                'scenario',
                                'trust_indicator',
                                'criteria',
-                               'period=11')
+                               'period=5')
 
     @executor.query(httplib.OK, '_query_success', 0)
     def test_notFound(self):
@@ -324,10 +324,10 @@ class TestResultGet(TestResultBase):
     @executor.query(httplib.OK, '_query_success', 1)
     def test_filterErrorStartdate(self):
         self._create_error_start_date(None)
-        self._create_error_start_date('None')
+        self._create_error_start_date('None')
         self._create_error_start_date('null')
         self._create_error_start_date('')
-        return self._set_query('period=11')
+        return self._set_query('period=5')
 
     def _query_success(self, body, number):
         self.assertEqual(number, len(body.results))
@@ -338,7 +338,7 @@ class TestResultGet(TestResultBase):
 
     def _query_period_one(self, body, number):
         self.assertEqual(number, len(body.results))
-        self.assert_res(body.results[0], self.req_10d_before)
+        self.assert_res(body.results[0], self.req_d)
 
     def _create_error_start_date(self, start_date):
         req = copy.deepcopy(self.req_d)
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/resources/test_token.py b/utils/test/testapi/opnfv_testapi/tests/unit/resources/test_token.py
index b4ba887..940e256 100644 (file)
@@ -10,7 +10,6 @@ from tornado import web
 
 from opnfv_testapi.common import message
 from opnfv_testapi.resources import project_models
-from opnfv_testapi.router import url_mappings
 from opnfv_testapi.tests.unit import executor
 from opnfv_testapi.tests.unit import fake_pymongo
 from opnfv_testapi.tests.unit.resources import test_base as base
@@ -18,6 +17,7 @@ from opnfv_testapi.tests.unit.resources import test_base as base
 
 class TestToken(base.TestBase):
     def get_app(self):
+        from opnfv_testapi.router import url_mappings
         return web.Application(
             url_mappings.mappings,
             db=fake_pymongo,
diff --git a/utils/test/testapi/opnfv_testapi/ui/auth/sign.py b/utils/test/testapi/opnfv_testapi/ui/auth/sign.py
index 5b36225..4623952 100644 (file)
@@ -1,14 +1,12 @@
 from six.moves.urllib import parse
 from tornado import gen
 from tornado import web
-import logging
 
-from opnfv_testapi.common import config
+from opnfv_testapi.common.config import CONF
+from opnfv_testapi.db import api as dbapi
 from opnfv_testapi.ui.auth import base
 from opnfv_testapi.ui.auth import constants as const
 
-CONF = config.Config()
-
 
 class SigninHandler(base.BaseHandler):
     def get(self):
@@ -48,10 +46,9 @@ class SigninReturnHandler(base.BaseHandler):
             'fullname': self.get_query_argument(const.OPENID_NS_SREG_FULLNAME),
             const.ROLE: role
         }
-        user = yield self.db_find_one({'openid': openid})
+        user = yield dbapi.db_find_one(self.table, {'openid': openid})
         if not user:
-            self.db_save(self.table, new_user_info)
-            logging.info('save to db:%s', new_user_info)
+            dbapi.db_save(self.table, new_user_info)
         else:
             role = user.get(const.ROLE)
 
diff --git a/utils/test/testapi/opnfv_testapi/ui/auth/user.py b/utils/test/testapi/opnfv_testapi/ui/auth/user.py
index 2fca2a8..955cdee 100644 (file)
@@ -2,6 +2,7 @@ from tornado import gen
 from tornado import web
 
 from opnfv_testapi.common import raises
+from opnfv_testapi.db import api as dbapi
 from opnfv_testapi.ui.auth import base
 
 
@@ -12,7 +13,7 @@ class ProfileHandler(base.BaseHandler):
         openid = self.get_secure_cookie('openid')
         if openid:
             try:
-                user = yield self.db_find_one({'openid': openid})
+                user = yield dbapi.db_find_one(self.table, {'openid': openid})
                 self.finish_request({
                     "openid": user.get('openid'),
                     "email": user.get('email'),
diff --git a/utils/test/testapi/opnfv_testapi/ui/root.py b/utils/test/testapi/opnfv_testapi/ui/root.py
index bba7a86..5b2c922 100644 (file)
@@ -1,10 +1,10 @@
 from opnfv_testapi.resources.handlers import GenericApiHandler
-from opnfv_testapi.common import config
+from opnfv_testapi.common.config import CONF
 
 
 class RootHandler(GenericApiHandler):
     def get_template_path(self):
-        return config.Config().static_path
+        return CONF.static_path
 
     def get(self):
         self.render('testapi-ui/index.html')
diff --git a/utils/test/testapi/run_test.sh b/utils/test/testapi/run_test.sh
deleted file mode 100755 (executable)
index 1e05dd6..0000000
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/bin/bash
-
-set -o errexit
-
-# Get script directory
-SCRIPTDIR=`dirname $0`
-
-echo "Running unit tests..."
-
-# Creating virtual environment
-if [ ! -z $VIRTUAL_ENV ]; then
-    venv=$VIRTUAL_ENV
-else
-    venv=$SCRIPTDIR/.venv
-    virtualenv $venv
-fi
-source $venv/bin/activate
-
-# Install requirements
-pip install -r $SCRIPTDIR/requirements.txt
-pip install -r $SCRIPTDIR/test-requirements.txt
-
-find . -type f -name "*.pyc" -delete
-
-nosetests --with-xunit \
-    --with-coverage \
-    --cover-erase \
-    --cover-package=$SCRIPTDIR/opnfv_testapi/cmd \
-    --cover-package=$SCRIPTDIR/opnfv_testapi/common \
-    --cover-package=$SCRIPTDIR/opnfv_testapi/resources \
-    --cover-package=$SCRIPTDIR/opnfv_testapi/router \
-    --cover-xml \
-    --cover-html \
-    $SCRIPTDIR/opnfv_testapi/tests
-
-exit_code=$?
-
-deactivate
-
-exit $exit_code