Merge "[docker] Use image IDs instead of tags"
authorJose Lausuch <jose.lausuch@ericsson.com>
Wed, 22 Feb 2017 02:48:11 +0000 (02:48 +0000)
committerGerrit Code Review <gerrit@opnfv.org>
Wed, 22 Feb 2017 02:48:11 +0000 (02:48 +0000)
46 files changed:
jjb/apex/apex-deploy.sh
jjb/apex/apex.yml
jjb/bottlenecks/bottlenecks-ci-jobs.yml
jjb/bottlenecks/bottlenecks-cleanup.sh [new file with mode: 0644]
jjb/bottlenecks/bottlenecks-project-jobs.yml
jjb/bottlenecks/bottlenecks-run-suite.sh [new file with mode: 0644]
jjb/copper/copper.yml
jjb/daisy4nfv/daisy-deploy.sh [new file with mode: 0755]
jjb/daisy4nfv/daisy-project-jobs.yml
jjb/daisy4nfv/daisy4nfv-deploy.sh [deleted file]
jjb/daisy4nfv/daisy4nfv-merge-jobs.yml
jjb/daisy4nfv/daisy4nfv-virtual-deploy.sh [deleted file]
jjb/fuel/fuel-daily-jobs.yml
jjb/global/releng-macros.yml
jjb/global/slave-params.yml
jjb/joid/joid-daily-jobs.yml
jjb/models/models.yml [new file with mode: 0644]
jjb/opnfvdocs/docs-rtd.yaml
jjb/releng/testapi-backup-mongodb.sh
jjb/ves/ves.yml [new file with mode: 0644]
modules/opnfv/deployment/fuel/adapter.py
modules/opnfv/deployment/manager.py
modules/opnfv/utils/ovs_logger.py
prototypes/bifrost/scripts/test-bifrost-deployment.sh
utils/push-test-logs.sh
utils/test/testapi/etc/config.ini
utils/test/testapi/opnfv_testapi/cmd/server.py
utils/test/testapi/opnfv_testapi/common/config.py
utils/test/testapi/opnfv_testapi/common/constants.py
utils/test/testapi/opnfv_testapi/resources/handlers.py
utils/test/testapi/opnfv_testapi/resources/pod_handlers.py
utils/test/testapi/opnfv_testapi/resources/project_handlers.py
utils/test/testapi/opnfv_testapi/resources/result_handlers.py
utils/test/testapi/opnfv_testapi/resources/scenario_handlers.py
utils/test/testapi/opnfv_testapi/resources/testcase_handlers.py
utils/test/testapi/opnfv_testapi/router/url_mappings.py
utils/test/testapi/opnfv_testapi/tests/unit/fake_pymongo.py
utils/test/testapi/opnfv_testapi/tests/unit/test_base.py
utils/test/testapi/opnfv_testapi/tests/unit/test_fake_pymongo.py
utils/test/testapi/opnfv_testapi/tests/unit/test_pod.py
utils/test/testapi/opnfv_testapi/tests/unit/test_project.py
utils/test/testapi/opnfv_testapi/tests/unit/test_result.py
utils/test/testapi/opnfv_testapi/tests/unit/test_scenario.py
utils/test/testapi/opnfv_testapi/tests/unit/test_testcase.py
utils/test/testapi/opnfv_testapi/tests/unit/test_token.py [new file with mode: 0644]
utils/test/testapi/opnfv_testapi/tests/unit/test_version.py

index dc70488..63baa57 100755 (executable)
@@ -181,26 +181,16 @@ fi
 
 if [[ "$JOB_NAME" == *virtual* ]]; then
   # settings for virtual deployment
-  if [ "$IPV6_FLAG" == "True" ]; then
-    NETWORK_FILE="${NETWORK_SETTINGS_DIR}/network_settings_v6.yaml"
-  elif echo ${DEPLOY_SCENARIO} | grep fdio; then
-    NETWORK_FILE="${NETWORK_SETTINGS_DIR}/network_settings_vpp.yaml"
-  else
-    NETWORK_FILE="${NETWORK_SETTINGS_DIR}/network_settings.yaml"
-  fi
   DEPLOY_CMD="${DEPLOY_CMD} -v"
+  if [[ "${DEPLOY_SCENARIO}" =~ fdio|ovs ]]; then
+    DEPLOY_CMD="${DEPLOY_CMD} --virtual-ram 14"
+  fi
   if [[ "$JOB_NAME" == *csit* ]]; then
     DEPLOY_CMD="${DEPLOY_CMD} -e csit-environment.yaml --virtual-computes 2"
   fi
 else
   # settings for bare metal deployment
-  if [ "$IPV6_FLAG" == "True" ]; then
-    NETWORK_FILE="/root/network/network_settings_v6.yaml"
-  elif [[ "$JOB_NAME" == *master* ]]; then
-    NETWORK_FILE="/root/network/network_settings-master.yaml"
-  else
-    NETWORK_FILE="/root/network/network_settings.yaml"
-  fi
+  NETWORK_SETTINGS_DIR="/root/network"
   INVENTORY_FILE="/root/inventory/pod_settings.yaml"
 
   if ! sudo test -e "$INVENTORY_FILE"; then
@@ -211,6 +201,14 @@ else
   DEPLOY_CMD="${DEPLOY_CMD} -i ${INVENTORY_FILE}"
 fi
 
+if [ "$IPV6_FLAG" == "True" ]; then
+  NETWORK_FILE="${NETWORK_SETTINGS_DIR}/network_settings_v6.yaml"
+elif echo ${DEPLOY_SCENARIO} | grep fdio; then
+  NETWORK_FILE="${NETWORK_SETTINGS_DIR}/network_settings_vpp.yaml"
+else
+  NETWORK_FILE="${NETWORK_SETTINGS_DIR}/network_settings.yaml"
+fi
+
 # Check that network settings file exists
 if ! sudo test -e "$NETWORK_FILE"; then
   echo "ERROR: Required settings file missing: Network Settings file ${NETWORK_FILE}"
index ff9fbec..3938f15 100644 (file)
@@ -34,6 +34,7 @@
          - 'os-nosdn-ovs-noha'
          - 'os-nosdn-fdio-noha'
          - 'os-nosdn-fdio-ha'
+         - 'os-odl_l2-fdio-noha'
          - 'os-odl_l2-fdio-ha'
          - 'os-odl_l2-netvirt_gbp_fdio-noha'
          - 'os-odl_l2-sfc-noha'
     builders:
         - 'apex-unit-test'
         - 'apex-build'
-        - trigger-builds:
-          - project: 'apex-deploy-virtual-os-nosdn-nofeature-ha-{stream}'
-            predefined-parameters: |
-              BUILD_DIRECTORY=apex-verify-{stream}
-              OPNFV_CLEAN=yes
-            git-revision: false
-            block: true
-            same-node: true
-        - trigger-builds:
-          - project: 'functest-apex-{verify-slave}-suite-{stream}'
-            predefined-parameters: |
-              DEPLOY_SCENARIO=os-nosdn-nofeature-ha
-              FUNCTEST_SUITE_NAME=healthcheck
-            block: true
-            same-node: true
         - trigger-builds:
           - project: 'apex-deploy-virtual-os-odl_l3-nofeature-ha-{stream}'
             predefined-parameters: |
                 build-step-failure-threshold: 'never'
                 failure-threshold: 'never'
                 unstable-threshold: 'FAILURE'
+        - trigger-builds:
+          - project: 'apex-deploy-baremetal-os-odl_l2-fdio-noha-{stream}'
+            predefined-parameters: |
+              BUILD_DIRECTORY=apex-build-{stream}/.build
+              OPNFV_CLEAN=yes
+            git-revision: true
+            same-node: true
+            block-thresholds:
+                build-step-failure-threshold: 'never'
+            block: true
+        - trigger-builds:
+          - project: 'functest-apex-{daily-slave}-daily-{stream}'
+            predefined-parameters:
+              DEPLOY_SCENARIO=os-odl_l2-fdio-noha
+            block: true
+            same-node: true
+            block-thresholds:
+                build-step-failure-threshold: 'never'
+                failure-threshold: 'never'
+                unstable-threshold: 'FAILURE'
+        - trigger-builds:
+          - project: 'yardstick-apex-{slave}-daily-{stream}'
+            predefined-parameters:
+              DEPLOY_SCENARIO=os-odl_l2-fdio-noha
+            block: true
+            same-node: true
+            block-thresholds:
+                build-step-failure-threshold: 'never'
+                failure-threshold: 'never'
+                unstable-threshold: 'FAILURE'
+        - trigger-builds:
+          - project: 'apex-deploy-baremetal-os-odl_l2-fdio-ha-{stream}'
+            predefined-parameters: |
+              BUILD_DIRECTORY=apex-build-{stream}/.build
+              OPNFV_CLEAN=yes
+            git-revision: true
+            same-node: true
+            block-thresholds:
+                build-step-failure-threshold: 'never'
+            block: true
+        - trigger-builds:
+          - project: 'functest-apex-{daily-slave}-daily-{stream}'
+            predefined-parameters:
+              DEPLOY_SCENARIO=os-odl_l2-fdio-ha
+            block: true
+            same-node: true
+            block-thresholds:
+                build-step-failure-threshold: 'never'
+                failure-threshold: 'never'
+                unstable-threshold: 'FAILURE'
+        - trigger-builds:
+          - project: 'yardstick-apex-{slave}-daily-{stream}'
+            predefined-parameters:
+              DEPLOY_SCENARIO=os-odl_l2-fdio-ha
+            block: true
+            same-node: true
+            block-thresholds:
+                build-step-failure-threshold: 'never'
+                failure-threshold: 'never'
+                unstable-threshold: 'FAILURE'
+
 # Colorado Build
 - job-template:
     name: 'apex-build-colorado'
index a9ccd69..2779e31 100644 (file)
@@ -72,7 +72,8 @@
     suite:
         - 'rubbos'
         - 'vstf'
-        - 'posca'
+        - 'posca_stress_traffic'
+        - 'posca_stress_ping'
 
     jobs:
         - 'bottlenecks-{installer}-{suite}-{pod}-daily-{stream}'
 - builder:
     name: bottlenecks-env-cleanup
     builders:
-        - shell: |
-            #!/bin/bash
-            set -e
-            [[ $GERRIT_REFSPEC_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
-
-            echo "Bottlenecks: docker containers/images cleaning up"
-            if [[ ! -z $(docker ps -a | grep opnfv/bottlenecks) ]]; then
-                echo "removing existing opnfv/bottlenecks containers"
-                docker ps -a | grep opnfv/bottlenecks | awk '{print $1}' | xargs docker rm -f >$redirect
-            fi
-
-            if [[ ! -z $(docker images | grep opnfv/bottlenecks) ]]; then
-                echo "Bottlenecks: docker images to remove:"
-                docker images | head -1 && docker images | grep opnfv/bottlenecks
-                image_tags=($(docker images | grep opnfv/bottlenecks | awk '{print $2}'))
-                for tag in "${image_tags[@]}"; do
-                    echo "Removing docker image opnfv/bottlenecks:$tag..."
-                    docker rmi opnfv/bottlenecks:$tag >$redirect
-                done
-            fi
+        - shell:
+            !include-raw: ./bottlenecks-cleanup.sh
 
 - builder:
     name: bottlenecks-run-suite
     builders:
-        - shell: |
-            #!/bin/bash
-            set -e
-            [[ $GERRIT_REFSPEC_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
-
-            echo "Bottlenecks: to pull image opnfv/bottlenecks:${DOCKER_TAG}"
-            docker pull opnfv/bottlenecks:$DOCKER_TAG >${redirect}
-
-            echo "Bottlenecks: docker start running"
-            opts="--privileged=true -id"
-            envs="-e INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP} \
-                  -e NODE_NAME=${NODE_NAME} -e EXTERNAL_NET=${EXTERNAL_NETWORK} \
-                  -e BOTTLENECKS_BRANCH=${BOTTLENECKS_BRANCH} -e GERRIT_REFSPEC_DEBUG=${GERRIT_REFSPEC_DEBUG} \
-                  -e BOTTLENECKS_DB_TARGET=${BOTTLENECKS_DB_TARGET} -e PACKAGE_URL=${PACKAGE_URL}"
-            cmd="sudo docker run ${opts} ${envs} opnfv/bottlenecks:${DOCKER_TAG} /bin/bash"
-            echo "Bottlenecks: docker cmd running ${cmd}"
-            ${cmd} >${redirect}
-
-            echo "Bottlenecks: obtain docker id"
-            container_id=$(docker ps | grep "opnfv/bottlenecks:${DOCKER_TAG}" | awk '{print $1}' | head -1)
-            if [ -z ${container_id} ]; then
-                echo "Cannot find opnfv/bottlenecks container ID ${container_id}. Please check if it exists."
-                docker ps -a
-                exit 1
-            fi
-
-            echo "Bottlenecks: to prepare openstack environment"
-            prepare_env="${REPO_DIR}/ci/prepare_env.sh"
-            echo "Bottlenecks: docker cmd running: ${prepare_env}"
-            sudo docker exec ${container_id} ${prepare_env}
-
-            echo "Bottlenecks: to run testsuite ${SUITE_NAME}"
-            run_testsuite="${REPO_DIR}/run_tests.sh -s ${SUITE_NAME}"
-            echo "Bottlenecks: docker cmd running: ${run_testsuite}"
-            sudo docker exec ${container_id} ${run_testsuite}
+        - shell:
+            !include-raw: ./bottlenecks-run-suite.sh
 
 ####################
 # parameter macros
diff --git a/jjb/bottlenecks/bottlenecks-cleanup.sh b/jjb/bottlenecks/bottlenecks-cleanup.sh
new file mode 100644 (file)
index 0000000..0ba0423
--- /dev/null
@@ -0,0 +1,111 @@
+#!/bin/bash
+set -e
+[[ $GERRIT_REFSPEC_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
+
+BOTTLENECKS_IMAGE=opnfv/bottlenecks
+echo "Bottlenecks: docker containers/images cleaning up"
+
+dangling_images=($(docker images -f "dangling=true" | grep $BOTTLENECKS_IMAGE | awk '{print $3}'))
+if [[ -n $dangling_images ]]; then
+    echo "Removing $BOTTLENECKS_IMAGE:<none> dangling images and their containers"
+    docker images | head -1 && docker images | grep $dangling_images
+    for image_id in "${dangling_images[@]}"; do
+        echo "Bottlenecks: Removing dangling image $image_id"
+        docker rmi -f $image_id >${redirect}
+    done
+fi
+
+for image_id in "${dangling_images[@]}"; do
+    if [[ -n $(docker ps -a | grep $image_id) ]]; then
+        echo "Bottlenecks: Removing containers associated with dangling image: $image_id"
+        docker ps -a | head -1 && docker ps -a | grep $image_id
+        docker ps -a | grep $image_id | awk '{print $1}'| xargs docker rm -f >${redirect}
+    fi
+done
+
+if [[ -n $(docker ps -a | grep $BOTTLENECKS_IMAGE) ]]; then
+    echo "Removing existing $BOTTLENECKS_IMAGE containers"
+    docker ps -a | grep $BOTTLENECKS_IMAGE | awk '{print $1}' | xargs docker rm -f >$redirect
+fi
+
+if [[ -n $(docker images | grep $BOTTLENECKS_IMAGE) ]]; then
+    echo "Bottlenecks: docker images to remove:"
+    docker images | head -1 && docker images | grep $BOTTLENECKS_IMAGE
+    image_tags=($(docker images | grep $BOTTLENECKS_IMAGE | awk '{print $2}'))
+    for tag in "${image_tags[@]}"; do
+        echo "Removing docker image $BOTTLENECKS_IMAGE:$tag..."
+        docker rmi $BOTTLENECKS_IMAGE:$tag >$redirect
+    done
+fi
+
+echo "Yardstick: docker containers/images cleaning up"
+YARDSTICK_IMAGE=opnfv/yardstick
+
+dangling_images=($(docker images -f "dangling=true" | grep $YARDSTICK_IMAGE | awk '{print $3}'))
+if [[ -n $dangling_images ]]; then
+    echo "Removing $YARDSTICK_IMAGE:<none> dangling images and their containers"
+    docker images | head -1 && docker images | grep $dangling_images
+    for image_id in "${dangling_images[@]}"; do
+        echo "Yardstick: Removing dangling image $image_id"
+        docker rmi -f $image_id >${redirect}
+    done
+fi
+
+for image_id in "${dangling_images[@]}"; do
+    if [[ -n $(docker ps -a | grep $image_id) ]]; then
+        echo "Yardstick: Removing containers associated with dangling image: $image_id"
+        docker ps -a | head -1 && docker ps -a | grep $image_id
+        docker ps -a | grep $image_id | awk '{print $1}'| xargs docker rm -f >${redirect}
+    fi
+done
+
+if [[ -n $(docker ps -a | grep $YARDSTICK_IMAGE) ]]; then
+    echo "Removing existing $YARDSTICK_IMAGE containers"
+    docker ps -a | grep $YARDSTICK_IMAGE | awk '{print $1}' | xargs docker rm -f >$redirect
+fi
+
+if [[ -n $(docker images | grep $YARDSTICK_IMAGE) ]]; then
+    echo "Yardstick: docker images to remove:"
+    docker images | head -1 && docker images | grep $YARDSTICK_IMAGE
+    image_tags=($(docker images | grep $YARDSTICK_IMAGE | awk '{print $2}'))
+    for tag in "${image_tags[@]}"; do
+        echo "Removing docker image $YARDSTICK_IMAGE:$tag..."
+        docker rmi $YARDSTICK_IMAGE:$tag >$redirect
+    done
+fi
+
+echo "InfluxDB: docker containers/images cleaning up"
+INFLUXDB_IMAGE=tutum/influxdb
+
+dangling_images=($(docker images -f "dangling=true" | grep $INFLUXDB_IMAGE | awk '{print $3}'))
+if [[ -n $dangling_images ]]; then
+    echo "Removing $INFLUXDB_IMAGE:<none> dangling images and their containers"
+    docker images | head -1 && docker images | grep $dangling_images
+    for image_id in "${dangling_images[@]}"; do
+        echo "InfluxDB: Removing dangling image $image_id"
+        docker rmi -f $image_id >${redirect}
+    done
+fi
+
+for image_id in "${dangling_images[@]}"; do
+    if [[ -n $(docker ps -a | grep $image_id) ]]; then
+        echo "InfluxDB: Removing containers associated with dangling image: $image_id"
+        docker ps -a | head -1 && docker ps -a | grep $image_id
+        docker ps -a | grep $image_id | awk '{print $1}'| xargs docker rm -f >${redirect}
+    fi
+done
+
+if [[ -n $(docker ps -a | grep $INFLUXDB_IMAGE) ]]; then
+    echo "Removing existing $INFLUXDB_IMAGE containers"
+    docker ps -a | grep $INFLUXDB_IMAGE | awk '{print $1}' | xargs docker rm -f >$redirect
+fi
+
+if [[ -n $(docker images | grep $INFLUXDB_IMAGE) ]]; then
+    echo "InfluxDB: docker images to remove:"
+    docker images | head -1 && docker images | grep $INFLUXDB_IMAGE
+    image_tags=($(docker images | grep $INFLUXDB_IMAGE | awk '{print $2}'))
+    for tag in "${image_tags[@]}"; do
+        echo "Removing docker image $INFLUXDB_IMAGE:$tag..."
+        docker rmi $INFLUXDB_IMAGE:$tag >$redirect
+    done
+fi
\ No newline at end of file
index 12ea31b..a0abb93 100644 (file)
@@ -29,7 +29,8 @@
     suite:
         - 'rubbos'
         - 'vstf'
-        - 'posca'
+        - 'posca_stress_traffic'
+        - 'posca_stress_ping'
 
 ################################
 # job templates
diff --git a/jjb/bottlenecks/bottlenecks-run-suite.sh b/jjb/bottlenecks/bottlenecks-run-suite.sh
new file mode 100644 (file)
index 0000000..f69463f
--- /dev/null
@@ -0,0 +1,65 @@
+#!/bin/bash
+#set -e
+[[ $GERRIT_REFSPEC_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
+BOTTLENECKS_IMAGE=opnfv/bottlenecks
+
+if [[ $SUITE_NAME == rubbos || $SUITE_NAME == vstf ]]; then
+    echo "Bottlenecks: to pull image $BOTTLENECKS_IMAGE:${DOCKER_TAG}"
+    docker pull $BOTTLENECKS_IMAGE:$DOCKER_TAG >${redirect}
+
+    echo "Bottlenecks: docker start running"
+    opts="--privileged=true -id"
+    envs="-e INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP} \
+          -e NODE_NAME=${NODE_NAME} -e EXTERNAL_NET=${EXTERNAL_NETWORK} \
+          -e BOTTLENECKS_BRANCH=${BOTTLENECKS_BRANCH} -e GERRIT_REFSPEC_DEBUG=${GERRIT_REFSPEC_DEBUG} \
+          -e BOTTLENECKS_DB_TARGET=${BOTTLENECKS_DB_TARGET} -e PACKAGE_URL=${PACKAGE_URL}"
+    cmd="sudo docker run ${opts} ${envs} $BOTTLENECKS_IMAGE:${DOCKER_TAG} /bin/bash"
+    echo "Bottlenecks: docker cmd running ${cmd}"
+    ${cmd} >${redirect}
+
+    echo "Bottlenecks: obtain docker id"
+    container_id=$(docker ps | grep "$BOTTLENECKS_IMAGE:${DOCKER_TAG}" | awk '{print $1}' | head -1)
+    if [ -z ${container_id} ]; then
+        echo "Cannot find $BOTTLENECKS_IMAGE container ID ${container_id}. Please check if it exists."
+        docker ps -a
+        exit 1
+    fi
+
+    echo "Bottlenecks: to prepare openstack environment"
+    prepare_env="${REPO_DIR}/ci/prepare_env.sh"
+    echo "Bottlenecks: docker cmd running: ${prepare_env}"
+    sudo docker exec ${container_id} ${prepare_env}
+
+    echo "Bottlenecks: to run testsuite ${SUITE_NAME}"
+    run_testsuite="${REPO_DIR}/run_tests.sh -s ${SUITE_NAME}"
+    echo "Bottlenecks: docker cmd running: ${run_testsuite}"
+    sudo docker exec ${container_id} ${run_testsuite}
+else
+    echo "Bottlenecks: installing POSCA docker-compose"
+    if [ -d usr/local/bin/docker-compose ]; then
+        rm -rf usr/local/bin/docker-compose
+    fi
+    curl -L https://github.com/docker/compose/releases/download/1.11.0/docker-compose-`uname -s`-`uname -m` > /usr/local/bin/docker-compose
+    chmod +x /usr/local/bin/docker-compose
+
+    echo "Bottlenecks: composing up dockers"
+    cd $WORKSPACE
+    docker-compose -f $WORKSPACE/docker/bottleneck-compose/docker-compose.yml up -d
+
+    echo "Bottlenecks: running traffic stress/factor testing in posca testsuite "
+    POSCA_SCRIPT=/home/opnfv/bottlenecks/testsuites/posca
+    if [[ $SUITE_NAME == posca_stress_traffic ]]; then
+        TEST_CASE=posca_factor_system_bandwidth
+        echo "Bottlenecks: pulling tutum/influxdb for yardstick"
+        docker pull tutum/influxdb:0.13
+        sleep 5
+        docker exec bottleneckcompose_bottlenecks_1 python ${POSCA_SCRIPT}/run_posca.py testcase $TEST_CASE
+    elif [[ $SUITE_NAME == posca_stress_ping ]]; then
+        TEST_CASE=posca_stress_ping
+        sleep 5
+        docker exec bottleneckcompose_bottlenecks_1 python ${POSCA_SCRIPT}/run_posca.py testcase $TEST_CASE
+    fi
+
+    echo "Bottlenecks: cleaning up docker-compose images and dockers"
+    docker-compose -f $WORKSPACE/docker/bottleneck-compose/docker-compose.yml down --rmi all
+fi
\ No newline at end of file
index ea1af47..b65466e 100644 (file)
@@ -64,5 +64,4 @@
             set -o nounset
             set -o pipefail
 
-            cd $WORKSPACE/ci
             shellcheck -f tty tests/*.sh
diff --git a/jjb/daisy4nfv/daisy-deploy.sh b/jjb/daisy4nfv/daisy-deploy.sh
new file mode 100755 (executable)
index 0000000..b303c2c
--- /dev/null
@@ -0,0 +1,60 @@
+#!/bin/bash
+set -o nounset
+set -o pipefail
+
+echo "--------------------------------------------------------"
+echo "This is $INSTALLER_TYPE deploy job!"
+echo "--------------------------------------------------------"
+
+DEPLOY_SCENARIO=${DEPLOY_SCENARIO:-"os-nosdn-nofeature-ha"}
+BRIDGE=${BRIDGE:-pxebr}
+LAB_NAME=${NODE_NAME/-*}
+POD_NAME=${NODE_NAME/*-}
+deploy_ret=0
+
+if [[ ! "$NODE_NAME" =~ "-virtual" ]] && [[ ! "$LAB_NAME" =~ (zte) ]]; then
+    echo "Unsupported lab $LAB_NAME for now, Cannot continue!"
+    exit $deploy_ret
+fi
+
+# clone the securedlab repo
+cd $WORKSPACE
+BASE_DIR=$(cd ./;pwd)
+
+echo "Cloning securedlab repo $BRANCH"
+git clone ssh://jenkins-ericsson@gerrit.opnfv.org:29418/securedlab --quiet \
+    --branch $BRANCH
+
+DEPLOY_COMMAND="sudo ./ci/deploy/deploy.sh -b $BASE_DIR \
+                -l $LAB_NAME -p $POD_NAME -B $BRIDGE"
+
+# log info to console
+echo """
+Deployment parameters
+--------------------------------------------------------
+Scenario: $DEPLOY_SCENARIO
+LAB: $LAB_NAME
+POD: $POD_NAME
+BRIDGE: $BRIDGE
+BASE_DIR: $BASE_DIR
+
+Starting the deployment using $INSTALLER_TYPE. This could take some time...
+--------------------------------------------------------
+Issuing command
+$DEPLOY_COMMAND
+"""
+
+# start the deployment
+#$DEPLOY_COMMAND
+
+if [ $? -ne 0 ]; then
+    echo
+    echo "Deployment failed!"
+    deploy_ret=1
+else
+    echo
+    echo "--------------------------------------------------------"
+    echo "Deployment done!"
+fi
+
+exit $deploy_ret
index 1567409..0127ed0 100644 (file)
         - shell:
             !include-raw: ./daisy4nfv-download-artifact.sh
         - shell:
-            !include-raw: ./daisy4nfv-deploy.sh
+            !include-raw: ./daisy-deploy.sh
 
 - builder:
     name: 'daisy-test-daily-macro'
diff --git a/jjb/daisy4nfv/daisy4nfv-deploy.sh b/jjb/daisy4nfv/daisy4nfv-deploy.sh
deleted file mode 100755 (executable)
index cc2c103..0000000
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/bash
-
-echo "Daisy deployment WIP"
index a6659b2..95d851c 100644 (file)
         - shell:
             !include-raw: ./daisy4nfv-download-artifact.sh
         - shell:
-            !include-raw: ./daisy4nfv-virtual-deploy.sh
+            !include-raw: ./daisy-deploy.sh
         - shell:
             !include-raw: ./daisy4nfv-workspace-cleanup.sh
 
diff --git a/jjb/daisy4nfv/daisy4nfv-virtual-deploy.sh b/jjb/daisy4nfv/daisy4nfv-virtual-deploy.sh
deleted file mode 100755 (executable)
index ef4a07b..0000000
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/bin/bash
-
-echo "--------------------------------------------------------"
-echo "This is diasy4nfv virtual deploy job!"
-echo "--------------------------------------------------------"
-
-cd $WORKSPACE
-
-if [[ "$NODE_NAME" =~ "-virtual" ]]; then
-    export NETWORK_CONF=./deploy/config/vm_environment/$NODE_NAME/network.yml
-    export DHA_CONF=./deploy/config/vm_environment/$NODE_NAME/deploy.yml
-else
-    # TODO: For the time being, we need to pass this script to let contributors merge their work.
-    echo "No support for non-virtual node"
-    exit 0
-fi
-
-sudo ./ci/deploy/deploy.sh -d ${DHA_CONF} -n ${NETWORK_CONF} -p ${NODE_NAME:-"zte-virtual1"}
-
-if [ $? -ne 0 ]; then
-    echo "depolyment failed!"
-    deploy_ret=1
-fi
-
-echo
-echo "--------------------------------------------------------"
-echo "Done!"
-
-exit $deploy_ret
index f78c4a3..2378552 100644 (file)
             auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
         - 'os-nosdn-kvm_ovs_dpdk-noha':
             auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
+        - 'os-nosdn-kvm_ovs_dpdk_bar-noha':
+            auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
 
     jobs:
         - 'fuel-{scenario}-{pod}-daily-{stream}'
 - trigger:
     name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-baremetal-daily-master-trigger'
     triggers:
-        - timed: '30 16 * * *'
+        - timed: ''
+- trigger:
+    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-baremetal-daily-master-trigger'
+    triggers:
+        - timed: ''
 #-----------------------------------------------
 # Triggers for job running on fuel-baremetal against danube branch
 #-----------------------------------------------
     name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-baremetal-daily-danube-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-baremetal-daily-danube-trigger'
+    triggers:
+        - timed: ''
 #-----------------------------------------------
 # Triggers for job running on fuel-virtual against master branch
 #-----------------------------------------------
 - trigger:
     name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-virtual-daily-master-trigger'
     triggers:
-        - timed: ''
+        - timed: '30 16 * * *'
+- trigger:
+    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-virtual-daily-master-trigger'
+    triggers:
+        - timed: '30 20 * * *'
 #-----------------------------------------------
 # Triggers for job running on fuel-virtual against danube branch
 #-----------------------------------------------
     name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-virtual-daily-danube-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-virtual-daily-danube-trigger'
+    triggers:
+        - timed: ''
 #-----------------------------------------------
 # ZTE POD1 Triggers running against master branch
 #-----------------------------------------------
     name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-zte-pod1-daily-master-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-zte-pod1-daily-master-trigger'
+    triggers:
+        - timed: ''
 
 #-----------------------------------------------
 # ZTE POD2 Triggers running against master branch
     name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-zte-pod2-daily-master-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-zte-pod2-daily-master-trigger'
+    triggers:
+        - timed: ''
 #-----------------------------------------------
 # ZTE POD3 Triggers running against master branch
 #-----------------------------------------------
     name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-zte-pod3-daily-master-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-zte-pod3-daily-master-trigger'
+    triggers:
+        - timed: ''
 #-----------------------------------------------
 # ZTE POD1 Triggers running against danube branch
 #-----------------------------------------------
     name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-zte-pod1-daily-danube-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-zte-pod1-daily-danube-trigger'
+    triggers:
+        - timed: ''
 
 #-----------------------------------------------
 # ZTE POD2 Triggers running against danube branch
     name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-zte-pod2-daily-danube-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-zte-pod2-daily-danube-trigger'
+    triggers:
+        - timed: ''
 #-----------------------------------------------
 # ZTE POD3 Triggers running against danube branch
 #-----------------------------------------------
     name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-zte-pod3-daily-danube-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-zte-pod3-daily-danube-trigger'
+    triggers:
+        - timed: ''
index 298fb04..d5eb0c9 100644 (file)
@@ -74,6 +74,8 @@
             wipe-workspace: true
             submodule:
                 recursive: true
+                timeout: 20
+                shallow-clone: true
 - trigger:
     name: 'daily-trigger-disabled'
     triggers:
@@ -97,6 +99,8 @@
                 - draft-published-event
                 - comment-added-contains-event:
                     comment-contains-value: 'recheck'
+                - comment-added-contains-event:
+                    comment-contains-value: 'reverify'
             projects:
               - project-compare-type: 'ANT'
                 project-pattern: '{project}'
index 429828e..4b3eaaa 100644 (file)
             name: GIT_BASE
             default: https://gerrit.opnfv.org/gerrit/$PROJECT
             description: 'Git URL to use on this Jenkins Slave'
+- parameter:
+    name: 'cengn-pod1-defaults'
+    parameters:
+        - node:
+            name: SLAVE_NAME
+            description: 'Slave name on Jenkins'
+            allowed-slaves:
+                - cengn-pod1
+            default-slaves:
+                - cengn-pod1
+        - string:
+            name: GIT_BASE
+            default: https://gerrit.opnfv.org/gerrit/$PROJECT
+            description: 'Git URL to use on this Jenkins Slave'
 - parameter:
     name: 'intel-pod1-defaults'
     parameters:
index b28dd60..88269d3 100644 (file)
@@ -46,6 +46,9 @@
         - orange-pod1:
             slave-label: orange-pod1
             <<: *master
+        - cengn-pod1:
+            slave-label: cengn-pod1
+            <<: *master
 #--------------------------------
 # scenarios
 #--------------------------------
     name: 'joid-os-nosdn-nofeature-ha-orange-pod1-master-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'joid-os-nosdn-nofeature-ha-cengn-pod1-master-trigger'
+    triggers:
+        - timed: '5 2 * * *'
 # os-nosdn-nofeature-ha trigger - branch: danube
 - trigger:
     name: 'joid-os-nosdn-nofeature-ha-baremetal-danube-trigger'
     name: 'joid-os-nosdn-nofeature-ha-orange-pod1-danube-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'joid-os-nosdn-nofeature-ha-cengn-pod1-danube-trigger'
+    triggers:
+        - timed: ''
 # os-odl_l2-nofeature-ha trigger - branch: master
 - trigger:
     name: 'joid-os-odl_l2-nofeature-ha-baremetal-master-trigger'
     name: 'joid-os-odl_l2-nofeature-ha-orange-pod1-master-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'joid-os-odl_l2-nofeature-ha-cengn-pod1-master-trigger'
+    triggers:
+        - timed: '5 7 * * *'
 # os-odl_l2-nofeature-ha trigger - branch: danube
 - trigger:
     name: 'joid-os-odl_l2-nofeature-ha-baremetal-danube-trigger'
     name: 'joid-os-odl_l2-nofeature-ha-orange-pod1-danube-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'joid-os-odl_l2-nofeature-ha-cengn-pod1-danube-trigger'
+    triggers:
+        - timed: ''
 # os-onos-nofeature-ha trigger - branch: master
 - trigger:
     name: 'joid-os-onos-nofeature-ha-baremetal-master-trigger'
     name: 'joid-os-onos-nofeature-ha-orange-pod1-master-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'joid-os-onos-nofeature-ha-cengn-pod1-master-trigger'
+    triggers:
+        - timed: '5 12 * * *'
 # os-onos-nofeature-ha trigger - branch: danube
 - trigger:
     name: 'joid-os-onos-nofeature-ha-baremetal-danube-trigger'
     name: 'joid-os-onos-nofeature-ha-orange-pod1-danube-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'joid-os-onos-nofeature-ha-cengn-pod1-danube-trigger'
+    triggers:
+        - timed: ''
 # os-onos-sfc-ha trigger - branch: master
 - trigger:
     name: 'joid-os-onos-sfc-ha-baremetal-master-trigger'
     name: 'joid-os-onos-sfc-ha-orange-pod1-master-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'joid-os-onos-sfc-ha-cengn-pod1-master-trigger'
+    triggers:
+        - timed: '5 17 * * *'
 # os-onos-sfc-ha trigger - branch: danube
 - trigger:
     name: 'joid-os-onos-sfc-ha-baremetal-danube-trigger'
     name: 'joid-os-onos-sfc-ha-orange-pod1-danube-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'joid-os-onos-sfc-ha-cengn-pod1-danube-trigger'
+    triggers:
+        - timed: ''
 # os-nosdn-lxd-noha trigger - branch: master
 - trigger:
     name: 'joid-os-nosdn-lxd-noha-baremetal-master-trigger'
     name: 'joid-os-nosdn-lxd-noha-orange-pod1-master-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'joid-os-nosdn-lxd-noha-cengn-pod1-master-trigger'
+    triggers:
+        - timed: '5 22 * * *'
 # os-nosdn-lxd-noha trigger - branch: danube
 - trigger:
     name: 'joid-os-nosdn-lxd-noha-baremetal-danube-trigger'
     name: 'joid-os-nosdn-lxd-noha-orange-pod1-danube-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'joid-os-nosdn-lxd-noha-cengn-pod1-danube-trigger'
+    triggers:
+        - timed: ''
 # os-nosdn-lxd-ha trigger - branch: master
 - trigger:
     name: 'joid-os-nosdn-lxd-ha-baremetal-master-trigger'
     name: 'joid-os-nosdn-lxd-ha-orange-pod1-master-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'joid-os-nosdn-lxd-ha-cengn-pod1-master-trigger'
+    triggers:
+        - timed: '5 10 * * *'
 # os-nosdn-lxd-ha trigger - branch: danube
 - trigger:
     name: 'joid-os-nosdn-lxd-ha-baremetal-danube-trigger'
     name: 'joid-os-nosdn-lxd-ha-orange-pod1-danube-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'joid-os-nosdn-lxd-ha-cengn-pod1-danube-trigger'
+    triggers:
+        - timed: ''
 # os-nosdn-nofeature-noha trigger - branch: master
 - trigger:
     name: 'joid-os-nosdn-nofeature-noha-baremetal-master-trigger'
     name: 'joid-os-nosdn-nofeature-noha-orange-pod1-master-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'joid-os-nosdn-nofeature-noha-cengn-pod1-master-trigger'
+    triggers:
+        - timed: '5 4 * * *'
 # os-nosdn-nofeature-noha trigger - branch: danube
 - trigger:
     name: 'joid-os-nosdn-nofeature-noha-baremetal-danube-trigger'
     name: 'joid-os-nosdn-nofeature-noha-orange-pod1-danube-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'joid-os-nosdn-nofeature-noha-cengn-pod1-danube-trigger'
+    triggers:
+        - timed: ''
 # k8-nosdn-nofeature-noha trigger - branch: master
 - trigger:
     name: 'joid-k8-nosdn-nofeature-noha-baremetal-master-trigger'
     name: 'joid-k8-nosdn-nofeature-noha-orange-pod1-master-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'joid-k8-nosdn-nofeature-noha-cengn-pod1-master-trigger'
+    triggers:
+        - timed: '5 15 * * *'
 # k8-nosdn-nofeature-noha trigger - branch: danube
 - trigger:
     name: 'joid-k8-nosdn-nofeature-noha-baremetal-danube-trigger'
     name: 'joid-k8-nosdn-nofeature-noha-orange-pod1-danube-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'joid-k8-nosdn-nofeature-noha-cengn-pod1-danube-trigger'
+    triggers:
+        - timed: ''
 # k8-nosdn-lb-noha trigger - branch: master
 - trigger:
     name: 'joid-k8-nosdn-lb-noha-baremetal-master-trigger'
     name: 'joid-k8-nosdn-lb-noha-orange-pod1-master-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'joid-k8-nosdn-lb-noha-cengn-pod1-master-trigger'
+    triggers:
+        - timed: '5 20 * * *'
 # k8-nosdn-lb-noha trigger - branch: danube
 - trigger:
     name: 'joid-k8-nosdn-lb-noha-baremetal-danube-trigger'
     name: 'joid-k8-nosdn-lb-noha-orange-pod1-danube-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'joid-k8-nosdn-lb-noha-cengn-pod1-danube-trigger'
+    triggers:
+        - timed: ''
diff --git a/jjb/models/models.yml b/jjb/models/models.yml
new file mode 100644 (file)
index 0000000..f419c88
--- /dev/null
@@ -0,0 +1,67 @@
+###################################################
+# All the jobs except verify have been removed!
+# They will only be enabled on request by projects!
+###################################################
+- project:
+    name: models
+
+    project: '{name}'
+
+    jobs:
+        - 'models-verify-{stream}'
+
+    stream:
+        - master:
+            branch: '{stream}'
+            gs-pathname: ''
+            disabled: false
+        - danube:
+            branch: 'stable/{stream}'
+            gs-pathname: '/{stream}'
+            disabled: false
+
+- job-template:
+    name: 'models-verify-{stream}'
+
+    disabled: '{obj:disabled}'
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+            branch: '{branch}'
+        - 'opnfv-build-ubuntu-defaults'
+
+    scm:
+        - git-scm-gerrit
+
+    triggers:
+        - gerrit:
+            server-name: 'gerrit.opnfv.org'
+            trigger-on:
+                - patchset-created-event:
+                    exclude-drafts: 'false'
+                    exclude-trivial-rebase: 'false'
+                    exclude-no-code-change: 'false'
+                - draft-published-event
+                - comment-added-contains-event:
+                    comment-contains-value: 'recheck'
+                - comment-added-contains-event:
+                    comment-contains-value: 'reverify'
+            projects:
+              - project-compare-type: 'ANT'
+                project-pattern: '{project}'
+                branches:
+                  - branch-compare-type: 'ANT'
+                    branch-pattern: '**/{branch}'
+                forbidden-file-paths:
+                  - compare-type: ANT
+                    pattern: 'docs/**|.gitignore'
+
+    builders:
+        - shell: |
+            #!/bin/bash
+            set -o errexit
+            set -o nounset
+            set -o pipefail
+
+            shellcheck -f tty tests/*.sh
index 32ef732..151b535 100644 (file)
     builders:
         - shell: |
             if [ "$GERRIT_PROJECT" != "opnfvdocs" ]; then
-                cd opnfvdocs/docs/submodules/$GERRIT_PROJECT
+                cd docs/submodules/$GERRIT_PROJECT
                 git fetch origin $GERRIT_REFSPEC && git checkout FETCH_HEAD
             else
                 git fetch origin $GERRIT_REFSPEC && git checkout FETCH_HEAD
             fi
         - shell: |
-            sudo pip install virtualenv virtualenvwrapper
-            export WORKON_HOME=$HOME/.virtualenvs
-            source /usr/local/bin/virtualenvwrapper.sh
-            mkvirtualenv $WORKSPACE/venv
-            workon $WORKSPACE/venv
+            sudo pip install virtualenv 
+            virtualenv $WORKSPACE/venv
+            source $WORKSPACE/venv/bin/activate
             pip install --upgrade pip
             pip freeze
             pip install tox
index 8dba17b..795e479 100644 (file)
@@ -27,5 +27,5 @@ if [ $? != 0 ]; then
 else
     echo "Uploading mongodump to artifact $artifact_dir"
     /usr/local/bin/gsutil cp -r "$workspace"/"$file_name" gs://artifacts.opnfv.org/"$artifact_dir"/
-    echo "MongoDump can be found at http://artifacts.opnfv.org/$artifact_dir"
+    echo "MongoDump can be found at http://artifacts.opnfv.org/$artifact_dir.html"
 fi
diff --git a/jjb/ves/ves.yml b/jjb/ves/ves.yml
new file mode 100644 (file)
index 0000000..5f0da33
--- /dev/null
@@ -0,0 +1,68 @@
+###################################################
+# All the jobs except verify have been removed!
+# They will only be enabled on request by projects!
+###################################################
+- project:
+    name: ves
+
+    project: '{name}'
+
+    jobs:
+        - 'ves-verify-{stream}'
+
+    stream:
+        - master:
+            branch: '{stream}'
+            gs-pathname: ''
+            disabled: false
+        - danube:
+            branch: 'stable/{stream}'
+            gs-pathname: '/{stream}'
+            disabled: false
+
+- job-template:
+    name: 'ves-verify-{stream}'
+
+    disabled: '{obj:disabled}'
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+            branch: '{branch}'
+        - 'opnfv-build-ubuntu-defaults'
+
+    scm:
+        - git-scm-gerrit
+
+    triggers:
+        - gerrit:
+            server-name: 'gerrit.opnfv.org'
+            trigger-on:
+                - patchset-created-event:
+                    exclude-drafts: 'false'
+                    exclude-trivial-rebase: 'false'
+                    exclude-no-code-change: 'false'
+                - draft-published-event
+                - comment-added-contains-event:
+                    comment-contains-value: 'recheck'
+                - comment-added-contains-event:
+                    comment-contains-value: 'reverify'
+            projects:
+              - project-compare-type: 'ANT'
+                project-pattern: '{project}'
+                branches:
+                  - branch-compare-type: 'ANT'
+                    branch-pattern: '**/{branch}'
+                forbidden-file-paths:
+                  - compare-type: ANT
+                    pattern: 'docs/**|.gitignore'
+
+    builders:
+        - shell: |
+            #!/bin/bash
+            set -o errexit
+            set -o nounset
+            set -o pipefail
+
+            shellcheck -f tty tests/*.sh
+            shellcheck -f tty utils/*.sh
index aa5ad7a..a71d6cb 100644 (file)
@@ -114,7 +114,7 @@ class FuelAdapter(manager.DeploymentHandler):
                 index_ip = i
             elif "mac" in fields[i]:
                 index_mac = i
-            elif "roles " in fields[i]:
+            elif "roles " in fields[i] and "pending_roles" not in fields[i]:
                 index_roles = i
             elif "online" in fields[i]:
                 index_online = i
@@ -170,26 +170,30 @@ class FuelAdapter(manager.DeploymentHandler):
         cmd = 'source openrc;nova-manage version 2>/dev/null'
         version = None
         for node in self.nodes:
-            if node.is_controller():
+            if node.is_controller() and node.is_active():
                 version = node.run_cmd(cmd)
                 break
         return version
 
     def get_sdn_version(self):
-        cmd = "apt-cache show opendaylight|grep Version|sed 's/^.*\: //'"
+        cmd = "apt-cache show opendaylight|grep Version"
         version = None
         for node in self.nodes:
-            if node.is_controller():
+            if manager.Role.ODL in node.roles and node.is_active():
                 odl_version = node.run_cmd(cmd)
                 if odl_version:
-                    version = 'OpenDaylight ' + odl_version
-                break
+                    version = 'OpenDaylight ' + odl_version.split(' ')[-1]
+                    break
         return version
 
     def get_deployment_status(self):
-        cmd = 'fuel env|grep operational'
+        cmd = "fuel env|tail -1|awk '{print $3}'"
         result = self.installer_node.run_cmd(cmd)
         if result is None or len(result) == 0:
-            return 'failed'
+            return 'unknown'
+        elif 'operational' in result:
+            return 'active'
+        elif 'deploy' in result:
+            return 'deploying'
         else:
             return 'active'
index 43a7948..7047a4d 100644 (file)
@@ -27,7 +27,7 @@ class Deployment(object):
                  status,
                  openstack_version,
                  sdn_controller,
-                 nodes=[]):
+                 nodes=None):
 
         self.deployment_info = {
             'installer': installer,
@@ -116,9 +116,9 @@ class Node(object):
                  ip,
                  name,
                  status,
-                 roles=[],
+                 roles=None,
                  ssh_client=None,
-                 info={}):
+                 info=None):
         self.id = id
         self.ip = ip
         self.name = name
@@ -127,6 +127,16 @@ class Node(object):
         self.roles = roles
         self.info = info
 
+        self.cpu_info = 'unknown'
+        self.memory = 'unknown'
+        self.ovs = 'unknown'
+
+        if ssh_client:
+            sys_info = self.get_system_info()
+            self.cpu_info = sys_info['cpu_info']
+            self.memory = sys_info['memory']
+            self.ovs = self.get_ovs_info()
+
     def get_file(self, src, dest):
         '''
         SCP file from a node
@@ -164,13 +174,15 @@ class Node(object):
         Run command remotely on a node
         '''
         if self.status is not NodeStatus.STATUS_OK:
-            logger.error("The node %s is not active" % self.ip)
+            logger.error(
+                "Error running command %s. The node %s is not active"
+                % (cmd, self.ip))
             return None
         _, stdout, stderr = (self.ssh_client.exec_command(cmd))
         error = stderr.readlines()
         if len(error) > 0:
             logger.error("error %s" % ''.join(error))
-            return error
+            return None
         output = ''.join(stdout.readlines()).rstrip()
         return output
 
@@ -184,53 +196,91 @@ class Node(object):
             'name': self.name,
             'status': self.status,
             'roles': self.roles,
+            'cpu_info': self.cpu_info,
+            'memory': self.memory,
+            'ovs': self.ovs,
             'info': self.info
         }
 
-    def get_attribute(self, attribute):
+    def is_active(self):
-        return self.get_dict()[attribute]
+        if self.status == NodeStatus.STATUS_OK:
+            return True
+        return False
 
     def is_controller(self):
         '''
         Returns if the node is a controller
         '''
-        if 'controller' in self.roles:
-            return True
-        return False
+        return Role.CONTROLLER in self.roles
 
     def is_compute(self):
         '''
         Returns if the node is a compute
         '''
-        if 'compute' in self.roles:
-            return True
-        return False
+        return Role.COMPUTE in self.roles
+
+    def is_odl(self):
+        '''
+        Returns if the node is an opendaylight
+        '''
+        return Role.ODL in self.roles
 
     def get_ovs_info(self):
         '''
         Returns the ovs version installed
         '''
-        cmd = "ovs-vsctl --version|head -1| sed 's/^.*) //'"
-        return self.run_cmd(cmd)
+        if self.is_active():
+            cmd = "ovs-vsctl --version|head -1| sed 's/^.*) //'"
+            return self.run_cmd(cmd)
+        return None
+
+    def get_system_info(self):
+        '''
+        Returns a dict with the node's total memory and CPU details
+        (number of CPUs, threads per core, cores per socket, model, arch)
+        '''
+        '''
+        cmd = 'grep MemTotal /proc/meminfo'
+        memory = self.run_cmd(cmd).partition('MemTotal:')[-1].strip().encode()
+
+        cpu_info = {}
+        cmd = 'lscpu'
+        result = self.run_cmd(cmd)
+        for line in result.splitlines():
+            if line.startswith('CPU(s)'):
+                cpu_info['num_cpus'] = line.split(' ')[-1].encode()
+            elif line.startswith('Thread(s) per core'):
+                cpu_info['threads/core'] = line.split(' ')[-1].encode()
+            elif line.startswith('Core(s) per socket'):
+                cpu_info['cores/socket'] = line.split(' ')[-1].encode()
+            elif line.startswith('Model name'):
+                cpu_info['model'] = line.partition(
+                    'Model name:')[-1].strip().encode()
+            elif line.startswith('Architecture'):
+                cpu_info['arch'] = line.split(' ')[-1].encode()
+
+        return {'memory': memory, 'cpu_info': cpu_info}
 
     def __str__(self):
         return '''
-            name:   {name}
-            id:     {id}
-            ip:     {ip}
-            status: {status}
-            roles:  {roles}
-            ovs:    {ovs}
-            info:   {info}'''.format(name=self.name,
-                                     id=self.id,
-                                     ip=self.ip,
-                                     status=self.status,
-                                     roles=self.roles,
-                                     ovs=self.get_ovs_info(),
-                                     info=self.info)
+            name:    {name}
+            id:      {id}
+            ip:      {ip}
+            status:  {status}
+            roles:   {roles}
+            cpu:     {cpu_info}
+            memory:  {memory}
+            ovs:     {ovs}
+            info:    {info}'''.format(name=self.name,
+                                      id=self.id,
+                                      ip=self.ip,
+                                      status=self.status,
+                                      roles=self.roles,
+                                      cpu_info=self.cpu_info,
+                                      memory=self.memory,
+                                      ovs=self.ovs,
+                                      info=self.info)
 
 
 class DeploymentHandler(object):
@@ -309,6 +359,18 @@ class DeploymentHandler(object):
         '''
         return self.installer_node
 
+    def get_arch(self):
+        '''
+            Returns the architecture of the first compute node found
+        '''
+        arch = None
+        for node in self.nodes:
+            if node.is_compute():
+                arch = node.cpu_info.get('arch', None)
+                if arch:
+                    break
+        return arch
+
     def get_deployment_info(self):
         '''
             Returns an object of type Deployment
index d650eb9..7777a9a 100644 (file)
@@ -101,19 +101,13 @@ class OVSLogger(object):
         if timestamp is None:
             timestamp = time.strftime("%Y%m%d-%H%M%S")
 
-        for controller_client in controller_clients:
-            self.ofctl_dump_flows(controller_client,
-                                  timestamp=timestamp)
-            self.vsctl_show(controller_client,
-                            timestamp=timestamp)
-
-        for compute_client in compute_clients:
-            self.ofctl_dump_flows(compute_client,
-                                  timestamp=timestamp)
-            self.vsctl_show(compute_client,
-                            timestamp=timestamp)
+        clients = controller_clients + compute_clients
+        for client in clients:
+            self.ofctl_dump_flows(client, timestamp=timestamp)
+            self.vsctl_show(client, timestamp=timestamp)
 
         if related_error is not None:
             dumpdir = os.path.join(self.ovs_dir, timestamp)
+            self.__mkdir_p(dumpdir)
             with open(os.path.join(dumpdir, 'error'), 'w') as f:
                 f.write(related_error)
index 914a906..3e2381f 100755 (executable)
@@ -79,6 +79,11 @@ source ${ANSIBLE_INSTALL_ROOT}/ansible/hacking/env-setup
 ANSIBLE=$(which ansible-playbook)
 set -x -o nounset
 
+logs_on_exit() {
+    $SCRIPT_HOME/collect-test-info.sh
+}
+trap logs_on_exit EXIT
+
 # Change working directory
 cd $BIFROST_HOME/playbooks
 
@@ -129,6 +134,4 @@ if [ $EXITCODE != 0 ]; then
     echo "****************************"
 fi
 
-$SCRIPT_HOME/collect-test-info.sh
-
 exit $EXITCODE
index 5e428d0..9099657 100644 (file)
@@ -25,7 +25,7 @@ node_list=(\
 'intel-pod5' 'intel-pod6' 'intel-pod7' 'intel-pod8' \
 'ericsson-pod1' 'ericsson-pod2' \
 'ericsson-virtual1' 'ericsson-virtual2'  'ericsson-virtual3' \
-'ericsson-virtual4' 'ericsson-virtual5' \
+'ericsson-virtual4' 'ericsson-virtual5' 'ericsson-virtual12' \
 'arm-pod1' 'arm-pod3' \
 'huawei-pod1' 'huawei-pod2' 'huawei-pod3' 'huawei-pod4' 'huawei-pod5' \
 'huawei-pod6' 'huawei-pod7' 'huawei-pod12' \
index 0edb73a..77cc6c6 100644 (file)
@@ -11,6 +11,7 @@ dbname = test_results_collection
 port = 8000
 # With debug_on set to true, error traces will be shown in HTTP responses
 debug = True
+authenticate = False
 
 [swagger]
 base_url = http://localhost:8000
index c3d7346..013ee66 100644 (file)
@@ -31,19 +31,19 @@ TODOs :
 
 import argparse
 
-import tornado.ioloop
 import motor
+import tornado.ioloop
 
-from opnfv_testapi.common.config import APIConfig
-from opnfv_testapi.tornado_swagger import swagger
+from opnfv_testapi.common import config
 from opnfv_testapi.router import url_mappings
+from opnfv_testapi.tornado_swagger import swagger
 
 # optionally get config file from command line
 parser = argparse.ArgumentParser()
 parser.add_argument("-c", "--config-file", dest='config_file',
                     help="Config file location")
 args = parser.parse_args()
-CONF = APIConfig().parse(args.config_file)
+CONF = config.APIConfig().parse(args.config_file)
 
 # connecting to MongoDB server, and choosing database
 client = motor.MotorClient(CONF.mongo_url)
@@ -57,6 +57,7 @@ def make_app():
         url_mappings.mappings,
         db=db,
         debug=CONF.api_debug_on,
+        auth=CONF.api_authenticate_on
     )
 
 
index ecab88a..84a1273 100644 (file)
@@ -7,9 +7,7 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 # feng.xiaowei@zte.com.cn remove prepare_put_request            5-30-2016
 ##############################################################################
-
-
-from ConfigParser import SafeConfigParser, NoOptionError
+import ConfigParser
 
 
 class ParseError(Exception):
@@ -36,13 +34,14 @@ class APIConfig:
         self.mongo_dbname = None
         self.api_port = None
         self.api_debug_on = None
+        self.api_authenticate_on = None
         self._parser = None
         self.swagger_base_url = None
 
     def _get_parameter(self, section, param):
         try:
             return self._parser.get(section, param)
-        except NoOptionError:
+        except ConfigParser.NoOptionError:
             raise ParseError("[%s.%s] parameter not found" % (section, param))
 
     def _get_int_parameter(self, section, param):
@@ -68,7 +67,7 @@ class APIConfig:
         if config_location is None:
             config_location = obj._default_config_location
 
-        obj._parser = SafeConfigParser()
+        obj._parser = ConfigParser.SafeConfigParser()
         obj._parser.read(config_location)
         if not obj._parser:
             raise ParseError("%s not found" % config_location)
@@ -79,6 +78,9 @@ class APIConfig:
 
         obj.api_port = obj._get_int_parameter("api", "port")
         obj.api_debug_on = obj._get_bool_parameter("api", "debug")
+        obj.api_authenticate_on = obj._get_bool_parameter("api",
+                                                          "authenticate")
+
         obj.swagger_base_url = obj._get_parameter("swagger", "base_url")
 
         return obj
@@ -92,4 +94,5 @@ class APIConfig:
                                              self.mongo_dbname,
                                              self.api_port,
                                              self.api_debug_on,
+                                             self.api_authenticate_on,
                                              self.swagger_base_url)
index 4d39a14..71bd952 100644 (file)
@@ -10,6 +10,7 @@
 
 DEFAULT_REPRESENTATION = "application/json"
 HTTP_BAD_REQUEST = 400
+HTTP_UNAUTHORIZED = 401
 HTTP_FORBIDDEN = 403
 HTTP_NOT_FOUND = 404
 HTTP_OK = 200
index a2628e2..8255b52 100644 (file)
 # feng.xiaowei@zte.com.cn remove DashboardHandler            5-30-2016
 ##############################################################################
 
-import json
 from datetime import datetime
+import functools
+import json
 
 from tornado import gen
-from tornado.web import RequestHandler, asynchronous, HTTPError
+from tornado import web
 
-from models import CreateResponse
-from opnfv_testapi.common.constants import DEFAULT_REPRESENTATION, \
-    HTTP_BAD_REQUEST, HTTP_NOT_FOUND, HTTP_FORBIDDEN
+import models
+from opnfv_testapi.common import constants
 from opnfv_testapi.tornado_swagger import swagger
 
 
-class GenericApiHandler(RequestHandler):
+class GenericApiHandler(web.RequestHandler):
     def __init__(self, application, request, **kwargs):
         super(GenericApiHandler, self).__init__(application, request, **kwargs)
         self.db = self.settings["db"]
@@ -44,49 +44,71 @@ class GenericApiHandler(RequestHandler):
         self.db_testcases = 'testcases'
         self.db_results = 'results'
         self.db_scenarios = 'scenarios'
+        self.auth = self.settings["auth"]
 
     def prepare(self):
         if self.request.method != "GET" and self.request.method != "DELETE":
             if self.request.headers.get("Content-Type") is not None:
                 if self.request.headers["Content-Type"].startswith(
-                        DEFAULT_REPRESENTATION):
+                        constants.DEFAULT_REPRESENTATION):
                     try:
                         self.json_args = json.loads(self.request.body)
                     except (ValueError, KeyError, TypeError) as error:
-                        raise HTTPError(HTTP_BAD_REQUEST,
-                                        "Bad Json format [{}]".
-                                        format(error))
+                        raise web.HTTPError(constants.HTTP_BAD_REQUEST,
+                                            "Bad Json format [{}]".
+                                            format(error))
 
     def finish_request(self, json_object=None):
         if json_object:
             self.write(json.dumps(json_object))
-        self.set_header("Content-Type", DEFAULT_REPRESENTATION)
+        self.set_header("Content-Type", constants.DEFAULT_REPRESENTATION)
         self.finish()
 
     def _create_response(self, resource):
         href = self.request.full_url() + '/' + str(resource)
-        return CreateResponse(href=href).format()
+        return models.CreateResponse(href=href).format()
 
     def format_data(self, data):
         cls_data = self.table_cls.from_dict(data)
         return cls_data.format_http()
 
-    @asynchronous
+    def authenticate(method):
+        @web.asynchronous
+        @gen.coroutine
+        @functools.wraps(method)
+        def wrapper(self, *args, **kwargs):
+            if self.auth:
+                try:
+                    token = self.request.headers['X-Auth-Token']
+                except KeyError:
+                    raise web.HTTPError(constants.HTTP_UNAUTHORIZED,
+                                        "No Authentication Header.")
+                query = {'access_token': token}
+                check = yield self._eval_db_find_one(query, 'tokens')
+                if not check:
+                    raise web.HTTPError(constants.HTTP_FORBIDDEN,
+                                        "Invalid Token.")
+            ret = yield gen.coroutine(method)(self, *args, **kwargs)
+            raise gen.Return(ret)
+        return wrapper
+
+    @web.asynchronous
     @gen.coroutine
+    @authenticate
     def _create(self, miss_checks, db_checks, **kwargs):
         """
         :param miss_checks: [miss1, miss2]
         :param db_checks: [(table, exist, query, error)]
         """
         if self.json_args is None:
-            raise HTTPError(HTTP_BAD_REQUEST, "no body")
+            raise web.HTTPError(constants.HTTP_BAD_REQUEST, "no body")
 
         data = self.table_cls.from_dict(self.json_args)
         for miss in miss_checks:
             miss_data = data.__getattribute__(miss)
             if miss_data is None or miss_data == '':
-                raise HTTPError(HTTP_BAD_REQUEST,
-                                '{} missing'.format(miss))
+                raise web.HTTPError(constants.HTTP_BAD_REQUEST,
+                                    '{} missing'.format(miss))
 
         for k, v in kwargs.iteritems():
             data.__setattr__(k, v)
@@ -95,7 +117,7 @@ class GenericApiHandler(RequestHandler):
             check = yield self._eval_db_find_one(query(data), table)
             if (exist and not check) or (not exist and check):
                 code, message = error(data)
-                raise HTTPError(code, message)
+                raise web.HTTPError(code, message)
 
         if self.table != 'results':
             data.creation_date = datetime.now()
@@ -107,7 +129,7 @@ class GenericApiHandler(RequestHandler):
             resource = _id
         self.finish_request(self._create_response(resource))
 
-    @asynchronous
+    @web.asynchronous
     @gen.coroutine
     def _list(self, query=None, res_op=None, *args, **kwargs):
         if query is None:
@@ -126,40 +148,42 @@ class GenericApiHandler(RequestHandler):
             res = res_op(data, *args)
         self.finish_request(res)
 
-    @asynchronous
+    @web.asynchronous
     @gen.coroutine
     def _get_one(self, query):
         data = yield self._eval_db_find_one(query)
         if data is None:
-            raise HTTPError(HTTP_NOT_FOUND,
-                            "[{}] not exist in table [{}]"
-                            .format(query, self.table))
+            raise web.HTTPError(constants.HTTP_NOT_FOUND,
+                                "[{}] not exist in table [{}]"
+                                .format(query, self.table))
         self.finish_request(self.format_data(data))
 
-    @asynchronous
+    @web.asynchronous
     @gen.coroutine
+    @authenticate
     def _delete(self, query):
         data = yield self._eval_db_find_one(query)
         if data is None:
-            raise HTTPError(HTTP_NOT_FOUND,
-                            "[{}] not exit in table [{}]"
-                            .format(query, self.table))
+            raise web.HTTPError(constants.HTTP_NOT_FOUND,
+                                "[{}] not exit in table [{}]"
+                                .format(query, self.table))
 
         yield self._eval_db(self.table, 'remove', query)
         self.finish_request()
 
-    @asynchronous
+    @web.asynchronous
     @gen.coroutine
+    @authenticate
     def _update(self, query, db_keys):
         if self.json_args is None:
-            raise HTTPError(HTTP_BAD_REQUEST, "No payload")
+            raise web.HTTPError(constants.HTTP_BAD_REQUEST, "No payload")
 
         # check old data exist
         from_data = yield self._eval_db_find_one(query)
         if from_data is None:
-            raise HTTPError(HTTP_NOT_FOUND,
-                            "{} could not be found in table [{}]"
-                            .format(query, self.table))
+            raise web.HTTPError(constants.HTTP_NOT_FOUND,
+                                "{} could not be found in table [{}]"
+                                .format(query, self.table))
 
         data = self.table_cls.from_dict(from_data)
         # check new data exist
@@ -167,9 +191,9 @@ class GenericApiHandler(RequestHandler):
         if not equal:
             to_data = yield self._eval_db_find_one(new_query)
             if to_data is not None:
-                raise HTTPError(HTTP_FORBIDDEN,
-                                "{} already exists in table [{}]"
-                                .format(new_query, self.table))
+                raise web.HTTPError(constants.HTTP_FORBIDDEN,
+                                    "{} already exists in table [{}]"
+                                    .format(new_query, self.table))
 
         # we merge the whole document """
         edit_request = self._update_requests(data)
@@ -186,7 +210,7 @@ class GenericApiHandler(RequestHandler):
             request = self._update_request(request, k, v,
                                            data.__getattribute__(k))
         if not request:
-            raise HTTPError(HTTP_FORBIDDEN, "Nothing to update")
+            raise web.HTTPError(constants.HTTP_FORBIDDEN, "Nothing to update")
 
         edit_request = data.format()
         edit_request.update(request)
index e1bd9d3..65c27f6 100644 (file)
@@ -6,17 +6,17 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+import handlers
+from opnfv_testapi.common import constants
 from opnfv_testapi.tornado_swagger import swagger
-from handlers import GenericApiHandler
-from pod_models import Pod
-from opnfv_testapi.common.constants import HTTP_FORBIDDEN
+import pod_models
 
 
-class GenericPodHandler(GenericApiHandler):
+class GenericPodHandler(handlers.GenericApiHandler):
     def __init__(self, application, request, **kwargs):
         super(GenericPodHandler, self).__init__(application, request, **kwargs)
         self.table = 'pods'
-        self.table_cls = Pod
+        self.table_cls = pod_models.Pod
 
 
 class PodCLHandler(GenericPodHandler):
@@ -46,7 +46,7 @@ class PodCLHandler(GenericPodHandler):
 
         def error(data):
             message = '{} already exists as a pod'.format(data.name)
-            return HTTP_FORBIDDEN, message
+            return constants.HTTP_FORBIDDEN, message
 
         miss_checks = ['name']
         db_checks = [(self.table, False, query, error)]
index 94c65b7..f352196 100644 (file)
@@ -6,19 +6,19 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+import handlers
+from opnfv_testapi.common import constants
 from opnfv_testapi.tornado_swagger import swagger
-from handlers import GenericApiHandler
-from opnfv_testapi.common.constants import HTTP_FORBIDDEN
-from project_models import Project
+import project_models
 
 
-class GenericProjectHandler(GenericApiHandler):
+class GenericProjectHandler(handlers.GenericApiHandler):
     def __init__(self, application, request, **kwargs):
         super(GenericProjectHandler, self).__init__(application,
                                                     request,
                                                     **kwargs)
         self.table = 'projects'
-        self.table_cls = Project
+        self.table_cls = project_models.Project
 
 
 class ProjectCLHandler(GenericProjectHandler):
@@ -48,7 +48,7 @@ class ProjectCLHandler(GenericProjectHandler):
 
         def error(data):
             message = '{} already exists as a project'.format(data.name)
-            return HTTP_FORBIDDEN, message
+            return constants.HTTP_FORBIDDEN, message
 
         miss_checks = ['name']
         db_checks = [(self.table, False, query, error)]
index 2a1ed56..d41ba48 100644 (file)
@@ -6,30 +6,32 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
-from datetime import datetime, timedelta
+from datetime import datetime
+from datetime import timedelta
 
-from bson.objectid import ObjectId
-from tornado.web import HTTPError
+from bson import objectid
+from tornado import web
 
-from opnfv_testapi.common.constants import HTTP_BAD_REQUEST, HTTP_NOT_FOUND
-from opnfv_testapi.resources.handlers import GenericApiHandler
-from opnfv_testapi.resources.result_models import TestResult
+from opnfv_testapi.common import constants
+from opnfv_testapi.resources import handlers
+from opnfv_testapi.resources import result_models
 from opnfv_testapi.tornado_swagger import swagger
 
 
-class GenericResultHandler(GenericApiHandler):
+class GenericResultHandler(handlers.GenericApiHandler):
     def __init__(self, application, request, **kwargs):
         super(GenericResultHandler, self).__init__(application,
                                                    request,
                                                    **kwargs)
         self.table = self.db_results
-        self.table_cls = TestResult
+        self.table_cls = result_models.TestResult
 
     def get_int(self, key, value):
         try:
             value = int(value)
         except:
-            raise HTTPError(HTTP_BAD_REQUEST, '{} must be int'.format(key))
+            raise web.HTTPError(constants.HTTP_BAD_REQUEST,
+                                '{} must be int'.format(key))
         return value
 
     def set_query(self):
@@ -144,14 +146,14 @@ class ResultsCLHandler(GenericResultHandler):
 
         def pod_error(data):
             message = 'Could not find pod [{}]'.format(data.pod_name)
-            return HTTP_NOT_FOUND, message
+            return constants.HTTP_NOT_FOUND, message
 
         def project_query(data):
             return {'name': data.project_name}
 
         def project_error(data):
             message = 'Could not find project [{}]'.format(data.project_name)
-            return HTTP_NOT_FOUND, message
+            return constants.HTTP_NOT_FOUND, message
 
         def testcase_query(data):
             return {'project_name': data.project_name, 'name': data.case_name}
@@ -159,7 +161,7 @@ class ResultsCLHandler(GenericResultHandler):
         def testcase_error(data):
             message = 'Could not find testcase [{}] in project [{}]'\
                 .format(data.case_name, data.project_name)
-            return HTTP_NOT_FOUND, message
+            return constants.HTTP_NOT_FOUND, message
 
         miss_checks = ['pod_name', 'project_name', 'case_name']
         db_checks = [('pods', True, pod_query, pod_error),
@@ -178,7 +180,7 @@ class ResultsGURHandler(GenericResultHandler):
             @raise 404: test result not exist
         """
         query = dict()
-        query["_id"] = ObjectId(result_id)
+        query["_id"] = objectid.ObjectId(result_id)
         self._get_one(query)
 
     @swagger.operation(nickname="updateTestResultById")
@@ -193,6 +195,6 @@ class ResultsGURHandler(GenericResultHandler):
             @raise 404: result not exist
             @raise 403: nothing to update
         """
-        query = {'_id': ObjectId(result_id)}
+        query = {'_id': objectid.ObjectId(result_id)}
         db_keys = []
         self._update(query, db_keys)
index 7c8c333..083bf59 100644 (file)
@@ -1,17 +1,16 @@
-from opnfv_testapi.common.constants import HTTP_FORBIDDEN
-from opnfv_testapi.resources.handlers import GenericApiHandler
-from opnfv_testapi.resources.scenario_models import Scenario
+from opnfv_testapi.common import constants
+from opnfv_testapi.resources import handlers
 import opnfv_testapi.resources.scenario_models as models
 from opnfv_testapi.tornado_swagger import swagger
 
 
-class GenericScenarioHandler(GenericApiHandler):
+class GenericScenarioHandler(handlers.GenericApiHandler):
     def __init__(self, application, request, **kwargs):
         super(GenericScenarioHandler, self).__init__(application,
                                                      request,
                                                      **kwargs)
         self.table = self.db_scenarios
-        self.table_cls = Scenario
+        self.table_cls = models.Scenario
 
 
 class ScenariosCLHandler(GenericScenarioHandler):
@@ -81,7 +80,7 @@ class ScenariosCLHandler(GenericScenarioHandler):
 
         def error(data):
             message = '{} already exists as a scenario'.format(data.name)
-            return HTTP_FORBIDDEN, message
+            return constants.HTTP_FORBIDDEN, message
 
         miss_checks = ['name']
         db_checks = [(self.table, False, query, error)]
index 100a4fd..3debd69 100644 (file)
@@ -6,19 +6,19 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
-from opnfv_testapi.common.constants import HTTP_FORBIDDEN
-from opnfv_testapi.resources.handlers import GenericApiHandler
-from opnfv_testapi.resources.testcase_models import Testcase
+from opnfv_testapi.common import constants
+from opnfv_testapi.resources import handlers
+from opnfv_testapi.resources import testcase_models
 from opnfv_testapi.tornado_swagger import swagger
 
 
-class GenericTestcaseHandler(GenericApiHandler):
+class GenericTestcaseHandler(handlers.GenericApiHandler):
     def __init__(self, application, request, **kwargs):
         super(GenericTestcaseHandler, self).__init__(application,
                                                      request,
                                                      **kwargs)
         self.table = self.db_testcases
-        self.table_cls = Testcase
+        self.table_cls = testcase_models.Testcase
 
 
 class TestcaseCLHandler(GenericTestcaseHandler):
@@ -58,12 +58,12 @@ class TestcaseCLHandler(GenericTestcaseHandler):
 
         def p_error(data):
             message = 'Could not find project [{}]'.format(data.project_name)
-            return HTTP_FORBIDDEN, message
+            return constants.HTTP_FORBIDDEN, message
 
         def tc_error(data):
             message = '{} already exists as a testcase in project {}'\
                 .format(data.name, data.project_name)
-            return HTTP_FORBIDDEN, message
+            return constants.HTTP_FORBIDDEN, message
 
         miss_checks = ['name']
         db_checks = [(self.db_projects, True, p_query, p_error),
index 0ae3c31..39cf006 100644 (file)
@@ -6,37 +6,34 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
-from opnfv_testapi.resources.handlers import VersionHandler
-from opnfv_testapi.resources.testcase_handlers import TestcaseCLHandler, \
-    TestcaseGURHandler
-from opnfv_testapi.resources.pod_handlers import PodCLHandler, PodGURHandler
-from opnfv_testapi.resources.project_handlers import ProjectCLHandler, \
-    ProjectGURHandler
-from opnfv_testapi.resources.result_handlers import ResultsCLHandler, \
-    ResultsGURHandler
-from opnfv_testapi.resources.scenario_handlers import ScenariosCLHandler
-from opnfv_testapi.resources.scenario_handlers import ScenarioGURHandler
+from opnfv_testapi.resources import handlers
+from opnfv_testapi.resources import pod_handlers
+from opnfv_testapi.resources import project_handlers
+from opnfv_testapi.resources import result_handlers
+from opnfv_testapi.resources import scenario_handlers
+from opnfv_testapi.resources import testcase_handlers
 
 mappings = [
     # GET /versions => GET API version
-    (r"/versions", VersionHandler),
+    (r"/versions", handlers.VersionHandler),
 
     # few examples:
     # GET /api/v1/pods => Get all pods
     # GET /api/v1/pods/1 => Get details on POD 1
-    (r"/api/v1/pods", PodCLHandler),
-    (r"/api/v1/pods/([^/]+)", PodGURHandler),
+    (r"/api/v1/pods", pod_handlers.PodCLHandler),
+    (r"/api/v1/pods/([^/]+)", pod_handlers.PodGURHandler),
 
     # few examples:
     # GET /projects
     # GET /projects/yardstick
-    (r"/api/v1/projects", ProjectCLHandler),
-    (r"/api/v1/projects/([^/]+)", ProjectGURHandler),
+    (r"/api/v1/projects", project_handlers.ProjectCLHandler),
+    (r"/api/v1/projects/([^/]+)", project_handlers.ProjectGURHandler),
 
     # few examples
     # GET /projects/qtip/cases => Get cases for qtip
-    (r"/api/v1/projects/([^/]+)/cases", TestcaseCLHandler),
-    (r"/api/v1/projects/([^/]+)/cases/([^/]+)", TestcaseGURHandler),
+    (r"/api/v1/projects/([^/]+)/cases", testcase_handlers.TestcaseCLHandler),
+    (r"/api/v1/projects/([^/]+)/cases/([^/]+)",
+     testcase_handlers.TestcaseGURHandler),
 
     # new path to avoid a long depth
     # GET /results?project=functest&case=keystone.catalog&pod=1
@@ -44,10 +41,10 @@ mappings = [
     # POST /results =>
     # Push results with mandatory request payload parameters
     # (project, case, and pod)
-    (r"/api/v1/results", ResultsCLHandler),
-    (r"/api/v1/results/([^/]+)", ResultsGURHandler),
+    (r"/api/v1/results", result_handlers.ResultsCLHandler),
+    (r"/api/v1/results/([^/]+)", result_handlers.ResultsGURHandler),
 
     # scenarios
-    (r"/api/v1/scenarios", ScenariosCLHandler),
-    (r"/api/v1/scenarios/([^/]+)", ScenarioGURHandler),
+    (r"/api/v1/scenarios", scenario_handlers.ScenariosCLHandler),
+    (r"/api/v1/scenarios/([^/]+)", scenario_handlers.ScenarioGURHandler),
 ]
index 3c4fd01..ef74a08 100644 (file)
@@ -242,3 +242,4 @@ projects = MemDb('projects')
 testcases = MemDb('testcases')
 results = MemDb('results')
 scenarios = MemDb('scenarios')
+tokens = MemDb('tokens')
index fc780e4..b2be8d5 100644 (file)
@@ -8,20 +8,20 @@
 ##############################################################################
 import json
 
-from tornado.web import Application
-from tornado.testing import AsyncHTTPTestCase
+from tornado import testing
+from tornado import web
 
-from opnfv_testapi.router import url_mappings
-from opnfv_testapi.resources.models import CreateResponse
 import fake_pymongo
+from opnfv_testapi.resources import models
+from opnfv_testapi.router import url_mappings
 
 
-class TestBase(AsyncHTTPTestCase):
+class TestBase(testing.AsyncHTTPTestCase):
     headers = {'Content-Type': 'application/json; charset=UTF-8'}
 
     def setUp(self):
         self.basePath = ''
-        self.create_res = CreateResponse
+        self.create_res = models.CreateResponse
         self.get_res = None
         self.list_res = None
         self.update_res = None
@@ -31,10 +31,11 @@ class TestBase(AsyncHTTPTestCase):
         super(TestBase, self).setUp()
 
     def get_app(self):
-        return Application(
+        return web.Application(
             url_mappings.mappings,
             db=fake_pymongo,
             debug=True,
+            auth=False
         )
 
     def create_d(self, *args):
index 5f50ba8..7c43fca 100644 (file)
@@ -9,13 +9,13 @@
 import unittest
 
 from tornado import gen
-from tornado.testing import AsyncHTTPTestCase, gen_test
-from tornado.web import Application
+from tornado import testing
+from tornado import web
 
 import fake_pymongo
 
 
-class MyTest(AsyncHTTPTestCase):
+class MyTest(testing.AsyncHTTPTestCase):
     def setUp(self):
         super(MyTest, self).setUp()
         self.db = fake_pymongo
@@ -23,7 +23,7 @@ class MyTest(AsyncHTTPTestCase):
         self.io_loop.run_sync(self.fixture_setup)
 
     def get_app(self):
-        return Application()
+        return web.Application()
 
     @gen.coroutine
     def fixture_setup(self):
@@ -32,13 +32,13 @@ class MyTest(AsyncHTTPTestCase):
         yield self.db.pods.insert({'_id': '1', 'name': 'test1'})
         yield self.db.pods.insert({'name': 'test2'})
 
-    @gen_test
+    @testing.gen_test
     def test_find_one(self):
         user = yield self.db.pods.find_one({'name': 'test1'})
         self.assertEqual(user, self.test1)
         self.db.pods.remove()
 
-    @gen_test
+    @testing.gen_test
     def test_find(self):
         cursor = self.db.pods.find()
         names = []
@@ -47,7 +47,7 @@ class MyTest(AsyncHTTPTestCase):
             names.append(ob.get('name'))
         self.assertItemsEqual(names, ['test1', 'test2'])
 
-    @gen_test
+    @testing.gen_test
     def test_update(self):
         yield self.db.pods.update({'_id': '1'}, {'name': 'new_test1'})
         user = yield self.db.pods.find_one({'_id': '1'})
@@ -71,7 +71,7 @@ class MyTest(AsyncHTTPTestCase):
                             None,
                             check_keys=False)
 
-    @gen_test
+    @testing.gen_test
     def test_remove(self):
         yield self.db.pods.remove({'_id': '1'})
         user = yield self.db.pods.find_one({'_id': '1'})
@@ -104,7 +104,7 @@ class MyTest(AsyncHTTPTestCase):
     def _insert_assert(self, docs, error=None, **kwargs):
         self._db_assert('insert', error, docs, **kwargs)
 
-    @gen_test
+    @testing.gen_test
     def _db_assert(self, method, error, *args, **kwargs):
         name_error = None
         try:
index a1184d5..922bd46 100644 (file)
@@ -8,20 +8,19 @@
 ##############################################################################
 import unittest
 
-from test_base import TestBase
-from opnfv_testapi.resources.pod_models import PodCreateRequest, Pod, Pods
-from opnfv_testapi.common.constants import HTTP_OK, HTTP_BAD_REQUEST, \
-    HTTP_FORBIDDEN, HTTP_NOT_FOUND
+from opnfv_testapi.common import constants
+from opnfv_testapi.resources import pod_models
+import test_base as base
 
 
-class TestPodBase(TestBase):
+class TestPodBase(base.TestBase):
     def setUp(self):
         super(TestPodBase, self).setUp()
-        self.req_d = PodCreateRequest('zte-1', 'virtual',
-                                      'zte pod 1', 'ci-pod')
-        self.req_e = PodCreateRequest('zte-2', 'metal', 'zte pod 2')
-        self.get_res = Pod
-        self.list_res = Pods
+        self.req_d = pod_models.PodCreateRequest('zte-1', 'virtual',
+                                                 'zte pod 1', 'ci-pod')
+        self.req_e = pod_models.PodCreateRequest('zte-2', 'metal', 'zte pod 2')
+        self.get_res = pod_models.Pod
+        self.list_res = pod_models.Pods
         self.basePath = '/api/v1/pods'
 
     def assert_get_body(self, pod, req=None):
@@ -38,36 +37,36 @@ class TestPodBase(TestBase):
 class TestPodCreate(TestPodBase):
     def test_withoutBody(self):
         (code, body) = self.create()
-        self.assertEqual(code, HTTP_BAD_REQUEST)
+        self.assertEqual(code, constants.HTTP_BAD_REQUEST)
 
     def test_emptyName(self):
-        req_empty = PodCreateRequest('')
+        req_empty = pod_models.PodCreateRequest('')
         (code, body) = self.create(req_empty)
-        self.assertEqual(code, HTTP_BAD_REQUEST)
+        self.assertEqual(code, constants.HTTP_BAD_REQUEST)
         self.assertIn('name missing', body)
 
     def test_noneName(self):
-        req_none = PodCreateRequest(None)
+        req_none = pod_models.PodCreateRequest(None)
         (code, body) = self.create(req_none)
-        self.assertEqual(code, HTTP_BAD_REQUEST)
+        self.assertEqual(code, constants.HTTP_BAD_REQUEST)
         self.assertIn('name missing', body)
 
     def test_success(self):
         code, body = self.create_d()
-        self.assertEqual(code, HTTP_OK)
+        self.assertEqual(code, constants.HTTP_OK)
         self.assert_create_body(body)
 
     def test_alreadyExist(self):
         self.create_d()
         code, body = self.create_d()
-        self.assertEqual(code, HTTP_FORBIDDEN)
+        self.assertEqual(code, constants.HTTP_FORBIDDEN)
         self.assertIn('already exists', body)
 
 
 class TestPodGet(TestPodBase):
     def test_notExist(self):
         code, body = self.get('notExist')
-        self.assertEqual(code, HTTP_NOT_FOUND)
+        self.assertEqual(code, constants.HTTP_NOT_FOUND)
 
     def test_getOne(self):
         self.create_d()
index 327ddf7..afd4a66 100644 (file)
@@ -8,21 +8,21 @@
 ##############################################################################
 import unittest
 
-from test_base import TestBase
-from opnfv_testapi.resources.project_models import ProjectCreateRequest, \
-    Project, Projects, ProjectUpdateRequest
-from opnfv_testapi.common.constants import HTTP_OK, HTTP_BAD_REQUEST, \
-    HTTP_FORBIDDEN, HTTP_NOT_FOUND
+from opnfv_testapi.common import constants
+from opnfv_testapi.resources import project_models
+import test_base as base
 
 
-class TestProjectBase(TestBase):
+class TestProjectBase(base.TestBase):
     def setUp(self):
         super(TestProjectBase, self).setUp()
-        self.req_d = ProjectCreateRequest('vping', 'vping-ssh test')
-        self.req_e = ProjectCreateRequest('doctor', 'doctor test')
-        self.get_res = Project
-        self.list_res = Projects
-        self.update_res = Project
+        self.req_d = project_models.ProjectCreateRequest('vping',
+                                                         'vping-ssh test')
+        self.req_e = project_models.ProjectCreateRequest('doctor',
+                                                         'doctor test')
+        self.get_res = project_models.Project
+        self.list_res = project_models.Projects
+        self.update_res = project_models.Project
         self.basePath = '/api/v1/projects'
 
     def assert_body(self, project, req=None):
@@ -37,41 +37,41 @@ class TestProjectBase(TestBase):
 class TestProjectCreate(TestProjectBase):
     def test_withoutBody(self):
         (code, body) = self.create()
-        self.assertEqual(code, HTTP_BAD_REQUEST)
+        self.assertEqual(code, constants.HTTP_BAD_REQUEST)
 
     def test_emptyName(self):
-        req_empty = ProjectCreateRequest('')
+        req_empty = project_models.ProjectCreateRequest('')
         (code, body) = self.create(req_empty)
-        self.assertEqual(code, HTTP_BAD_REQUEST)
+        self.assertEqual(code, constants.HTTP_BAD_REQUEST)
         self.assertIn('name missing', body)
 
     def test_noneName(self):
-        req_none = ProjectCreateRequest(None)
+        req_none = project_models.ProjectCreateRequest(None)
         (code, body) = self.create(req_none)
-        self.assertEqual(code, HTTP_BAD_REQUEST)
+        self.assertEqual(code, constants.HTTP_BAD_REQUEST)
         self.assertIn('name missing', body)
 
     def test_success(self):
         (code, body) = self.create_d()
-        self.assertEqual(code, HTTP_OK)
+        self.assertEqual(code, constants.HTTP_OK)
         self.assert_create_body(body)
 
     def test_alreadyExist(self):
         self.create_d()
         (code, body) = self.create_d()
-        self.assertEqual(code, HTTP_FORBIDDEN)
+        self.assertEqual(code, constants.HTTP_FORBIDDEN)
         self.assertIn('already exists', body)
 
 
 class TestProjectGet(TestProjectBase):
     def test_notExist(self):
         code, body = self.get('notExist')
-        self.assertEqual(code, HTTP_NOT_FOUND)
+        self.assertEqual(code, constants.HTTP_NOT_FOUND)
 
     def test_getOne(self):
         self.create_d()
         code, body = self.get(self.req_d.name)
-        self.assertEqual(code, HTTP_OK)
+        self.assertEqual(code, constants.HTTP_OK)
         self.assert_body(body)
 
     def test_list(self):
@@ -88,23 +88,23 @@ class TestProjectGet(TestProjectBase):
 class TestProjectUpdate(TestProjectBase):
     def test_withoutBody(self):
         code, _ = self.update(None, 'noBody')
-        self.assertEqual(code, HTTP_BAD_REQUEST)
+        self.assertEqual(code, constants.HTTP_BAD_REQUEST)
 
     def test_notFound(self):
         code, _ = self.update(self.req_e, 'notFound')
-        self.assertEqual(code, HTTP_NOT_FOUND)
+        self.assertEqual(code, constants.HTTP_NOT_FOUND)
 
     def test_newNameExist(self):
         self.create_d()
         self.create_e()
         code, body = self.update(self.req_e, self.req_d.name)
-        self.assertEqual(code, HTTP_FORBIDDEN)
+        self.assertEqual(code, constants.HTTP_FORBIDDEN)
         self.assertIn("already exists", body)
 
     def test_noUpdate(self):
         self.create_d()
         code, body = self.update(self.req_d, self.req_d.name)
-        self.assertEqual(code, HTTP_FORBIDDEN)
+        self.assertEqual(code, constants.HTTP_FORBIDDEN)
         self.assertIn("Nothing to update", body)
 
     def test_success(self):
@@ -112,9 +112,9 @@ class TestProjectUpdate(TestProjectBase):
         code, body = self.get(self.req_d.name)
         _id = body._id
 
-        req = ProjectUpdateRequest('newName', 'new description')
+        req = project_models.ProjectUpdateRequest('newName', 'new description')
         code, body = self.update(req, self.req_d.name)
-        self.assertEqual(code, HTTP_OK)
+        self.assertEqual(code, constants.HTTP_OK)
         self.assertEqual(_id, body._id)
         self.assert_body(body, req)
 
@@ -126,16 +126,16 @@ class TestProjectUpdate(TestProjectBase):
 class TestProjectDelete(TestProjectBase):
     def test_notFound(self):
         code, body = self.delete('notFound')
-        self.assertEqual(code, HTTP_NOT_FOUND)
+        self.assertEqual(code, constants.HTTP_NOT_FOUND)
 
     def test_success(self):
         self.create_d()
         code, body = self.delete(self.req_d.name)
-        self.assertEqual(code, HTTP_OK)
+        self.assertEqual(code, constants.HTTP_OK)
         self.assertEqual(body, '')
 
         code, body = self.get(self.req_d.name)
-        self.assertEqual(code, HTTP_NOT_FOUND)
+        self.assertEqual(code, constants.HTTP_NOT_FOUND)
 
 if __name__ == '__main__':
     unittest.main()
index 10575a9..2c7268e 100644 (file)
@@ -7,17 +7,15 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 import copy
-import unittest
 from datetime import datetime, timedelta
+import unittest
 
-from opnfv_testapi.common.constants import HTTP_OK, HTTP_BAD_REQUEST, \
-    HTTP_NOT_FOUND
-from opnfv_testapi.resources.pod_models import PodCreateRequest
-from opnfv_testapi.resources.project_models import ProjectCreateRequest
-from opnfv_testapi.resources.result_models import ResultCreateRequest, \
-    TestResult, TestResults, ResultUpdateRequest, TI, TIHistory
-from opnfv_testapi.resources.testcase_models import TestcaseCreateRequest
-from test_base import TestBase
+from opnfv_testapi.common import constants
+from opnfv_testapi.resources import pod_models
+from opnfv_testapi.resources import project_models
+from opnfv_testapi.resources import result_models
+from opnfv_testapi.resources import testcase_models
+import test_base as base
 
 
 class Details(object):
@@ -49,7 +47,7 @@ class Details(object):
         return t
 
 
-class TestResultBase(TestBase):
+class TestResultBase(base.TestBase):
     def setUp(self):
         self.pod = 'zte-pod1'
         self.project = 'functest'
@@ -59,34 +57,41 @@ class TestResultBase(TestBase):
         self.build_tag = 'v3.0'
         self.scenario = 'odl-l2'
         self.criteria = 'passed'
-        self.trust_indicator = TI(0.7)
+        self.trust_indicator = result_models.TI(0.7)
         self.start_date = "2016-05-23 07:16:09.477097"
         self.stop_date = "2016-05-23 07:16:19.477097"
         self.update_date = "2016-05-24 07:16:19.477097"
         self.update_step = -0.05
         super(TestResultBase, self).setUp()
         self.details = Details(timestart='0', duration='9s', status='OK')
-        self.req_d = ResultCreateRequest(pod_name=self.pod,
-                                         project_name=self.project,
-                                         case_name=self.case,
-                                         installer=self.installer,
-                                         version=self.version,
-                                         start_date=self.start_date,
-                                         stop_date=self.stop_date,
-                                         details=self.details.format(),
-                                         build_tag=self.build_tag,
-                                         scenario=self.scenario,
-                                         criteria=self.criteria,
-                                         trust_indicator=self.trust_indicator)
-        self.get_res = TestResult
-        self.list_res = TestResults
-        self.update_res = TestResult
+        self.req_d = result_models.ResultCreateRequest(
+            pod_name=self.pod,
+            project_name=self.project,
+            case_name=self.case,
+            installer=self.installer,
+            version=self.version,
+            start_date=self.start_date,
+            stop_date=self.stop_date,
+            details=self.details.format(),
+            build_tag=self.build_tag,
+            scenario=self.scenario,
+            criteria=self.criteria,
+            trust_indicator=self.trust_indicator)
+        self.get_res = result_models.TestResult
+        self.list_res = result_models.TestResults
+        self.update_res = result_models.TestResult
         self.basePath = '/api/v1/results'
-        self.req_pod = PodCreateRequest(self.pod, 'metal', 'zte pod 1')
-        self.req_project = ProjectCreateRequest(self.project, 'vping test')
-        self.req_testcase = TestcaseCreateRequest(self.case,
-                                                  '/cases/vping',
-                                                  'vping-ssh test')
+        self.req_pod = pod_models.PodCreateRequest(
+            self.pod,
+            'metal',
+            'zte pod 1')
+        self.req_project = project_models.ProjectCreateRequest(
+            self.project,
+            'vping test')
+        self.req_testcase = testcase_models.TestcaseCreateRequest(
+            self.case,
+            '/cases/vping',
+            'vping-ssh test')
         self.create_help('/api/v1/pods', self.req_pod)
         self.create_help('/api/v1/projects', self.req_project)
         self.create_help('/api/v1/projects/%s/cases',
@@ -94,7 +99,7 @@ class TestResultBase(TestBase):
                          self.project)
 
     def assert_res(self, code, result, req=None):
-        self.assertEqual(code, HTTP_OK)
+        self.assertEqual(code, constants.HTTP_OK)
         if req is None:
             req = self.req_d
         self.assertEqual(result.pod_name, req.pod_name)
@@ -129,78 +134,78 @@ class TestResultBase(TestBase):
 class TestResultCreate(TestResultBase):
     def test_nobody(self):
         (code, body) = self.create(None)
-        self.assertEqual(code, HTTP_BAD_REQUEST)
+        self.assertEqual(code, constants.HTTP_BAD_REQUEST)
         self.assertIn('no body', body)
 
     def test_podNotProvided(self):
         req = self.req_d
         req.pod_name = None
         (code, body) = self.create(req)
-        self.assertEqual(code, HTTP_BAD_REQUEST)
+        self.assertEqual(code, constants.HTTP_BAD_REQUEST)
         self.assertIn('pod_name missing', body)
 
     def test_projectNotProvided(self):
         req = self.req_d
         req.project_name = None
         (code, body) = self.create(req)
-        self.assertEqual(code, HTTP_BAD_REQUEST)
+        self.assertEqual(code, constants.HTTP_BAD_REQUEST)
         self.assertIn('project_name missing', body)
 
     def test_testcaseNotProvided(self):
         req = self.req_d
         req.case_name = None
         (code, body) = self.create(req)
-        self.assertEqual(code, HTTP_BAD_REQUEST)
+        self.assertEqual(code, constants.HTTP_BAD_REQUEST)
         self.assertIn('case_name missing', body)
 
     def test_noPod(self):
         req = self.req_d
         req.pod_name = 'notExistPod'
         (code, body) = self.create(req)
-        self.assertEqual(code, HTTP_NOT_FOUND)
+        self.assertEqual(code, constants.HTTP_NOT_FOUND)
         self.assertIn('Could not find pod', body)
 
     def test_noProject(self):
         req = self.req_d
         req.project_name = 'notExistProject'
         (code, body) = self.create(req)
-        self.assertEqual(code, HTTP_NOT_FOUND)
+        self.assertEqual(code, constants.HTTP_NOT_FOUND)
         self.assertIn('Could not find project', body)
 
     def test_noTestcase(self):
         req = self.req_d
         req.case_name = 'notExistTestcase'
         (code, body) = self.create(req)
-        self.assertEqual(code, HTTP_NOT_FOUND)
+        self.assertEqual(code, constants.HTTP_NOT_FOUND)
         self.assertIn('Could not find testcase', body)
 
     def test_success(self):
         (code, body) = self.create_d()
-        self.assertEqual(code, HTTP_OK)
+        self.assertEqual(code, constants.HTTP_OK)
         self.assert_href(body)
 
     def test_key_with_doc(self):
         req = copy.deepcopy(self.req_d)
         req.details = {'1.name': 'dot_name'}
         (code, body) = self.create(req)
-        self.assertEqual(code, HTTP_OK)
+        self.assertEqual(code, constants.HTTP_OK)
         self.assert_href(body)
 
     def test_no_ti(self):
-        req = ResultCreateRequest(pod_name=self.pod,
-                                  project_name=self.project,
-                                  case_name=self.case,
-                                  installer=self.installer,
-                                  version=self.version,
-                                  start_date=self.start_date,
-                                  stop_date=self.stop_date,
-                                  details=self.details.format(),
-                                  build_tag=self.build_tag,
-                                  scenario=self.scenario,
-                                  criteria=self.criteria)
+        req = result_models.ResultCreateRequest(pod_name=self.pod,
+                                                project_name=self.project,
+                                                case_name=self.case,
+                                                installer=self.installer,
+                                                version=self.version,
+                                                start_date=self.start_date,
+                                                stop_date=self.stop_date,
+                                                details=self.details.format(),
+                                                build_tag=self.build_tag,
+                                                scenario=self.scenario,
+                                                criteria=self.criteria)
         (code, res) = self.create(req)
         _id = res.href.split('/')[-1]
-        self.assertEqual(code, HTTP_OK)
+        self.assertEqual(code, constants.HTTP_OK)
         code, body = self.get(_id)
         self.assert_res(code, body, req)
 
@@ -240,7 +245,7 @@ class TestResultGet(TestResultBase):
 
     def test_queryPeriodNotInt(self):
         code, body = self.query(self._set_query('period=a'))
-        self.assertEqual(code, HTTP_BAD_REQUEST)
+        self.assertEqual(code, constants.HTTP_BAD_REQUEST)
         self.assertIn('period must be int', body)
 
     def test_queryPeriodFail(self):
@@ -253,7 +258,7 @@ class TestResultGet(TestResultBase):
 
     def test_queryLastNotInt(self):
         code, body = self.query(self._set_query('last=a'))
-        self.assertEqual(code, HTTP_BAD_REQUEST)
+        self.assertEqual(code, constants.HTTP_BAD_REQUEST)
         self.assertIn('last must be int', body)
 
     def test_queryLast(self):
@@ -292,7 +297,7 @@ class TestResultGet(TestResultBase):
             req = self._create_changed_date(**kwargs)
         code, body = self.query(query)
         if not found:
-            self.assertEqual(code, HTTP_OK)
+            self.assertEqual(code, constants.HTTP_OK)
             self.assertEqual(0, len(body.results))
         else:
             self.assertEqual(1, len(body.results))
@@ -326,10 +331,11 @@ class TestResultUpdate(TestResultBase):
 
         new_ti = copy.deepcopy(self.trust_indicator)
         new_ti.current += self.update_step
-        new_ti.histories.append(TIHistory(self.update_date, self.update_step))
+        new_ti.histories.append(
+            result_models.TIHistory(self.update_date, self.update_step))
         new_data = copy.deepcopy(self.req_d)
         new_data.trust_indicator = new_ti
-        update = ResultUpdateRequest(trust_indicator=new_ti)
+        update = result_models.ResultUpdateRequest(trust_indicator=new_ti)
         code, body = self.update(update, _id)
         self.assertEqual(_id, body._id)
         self.assert_res(code, body, new_data)
index 3a0abf9..f604c57 100644 (file)
@@ -5,10 +5,10 @@ import os
 
 from opnfv_testapi.common import constants
 import opnfv_testapi.resources.scenario_models as models
-from test_testcase import TestBase
+import test_base as base
 
 
-class TestScenarioBase(TestBase):
+class TestScenarioBase(base.TestBase):
     def setUp(self):
         super(TestScenarioBase, self).setUp()
         self.get_res = models.Scenario
index cb76784..c0494db 100644 (file)
@@ -6,35 +6,33 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
-import unittest
 import copy
+import unittest
 
-from test_base import TestBase
-from opnfv_testapi.resources.testcase_models import TestcaseCreateRequest, \
-    Testcase, Testcases, TestcaseUpdateRequest
-from opnfv_testapi.resources.project_models import ProjectCreateRequest
-from opnfv_testapi.common.constants import HTTP_OK, HTTP_BAD_REQUEST, \
-    HTTP_FORBIDDEN, HTTP_NOT_FOUND
+from opnfv_testapi.common import constants
+from opnfv_testapi.resources import project_models
+from opnfv_testapi.resources import testcase_models
+import test_base as base
 
 
-class TestCaseBase(TestBase):
+class TestCaseBase(base.TestBase):
     def setUp(self):
         super(TestCaseBase, self).setUp()
-        self.req_d = TestcaseCreateRequest('vping_1',
-                                           '/cases/vping_1',
-                                           'vping-ssh test')
-        self.req_e = TestcaseCreateRequest('doctor_1',
-                                           '/cases/doctor_1',
-                                           'create doctor')
-        self.update_d = TestcaseUpdateRequest('vping_1',
-                                              'vping-ssh test',
-                                              'functest')
-        self.update_e = TestcaseUpdateRequest('doctor_1',
-                                              'create doctor',
-                                              'functest')
-        self.get_res = Testcase
-        self.list_res = Testcases
-        self.update_res = Testcase
+        self.req_d = testcase_models.TestcaseCreateRequest('vping_1',
+                                                           '/cases/vping_1',
+                                                           'vping-ssh test')
+        self.req_e = testcase_models.TestcaseCreateRequest('doctor_1',
+                                                           '/cases/doctor_1',
+                                                           'create doctor')
+        self.update_d = testcase_models.TestcaseUpdateRequest('vping_1',
+                                                              'vping-ssh test',
+                                                              'functest')
+        self.update_e = testcase_models.TestcaseUpdateRequest('doctor_1',
+                                                              'create doctor',
+                                                              'functest')
+        self.get_res = testcase_models.Testcase
+        self.list_res = testcase_models.Testcases
+        self.update_res = testcase_models.Testcase
         self.basePath = '/api/v1/projects/%s/cases'
         self.create_project()
 
@@ -57,7 +55,8 @@ class TestCaseBase(TestBase):
         self.assertIsNotNone(new.creation_date)
 
     def create_project(self):
-        req_p = ProjectCreateRequest('functest', 'vping-ssh test')
+        req_p = project_models.ProjectCreateRequest('functest',
+                                                    'vping-ssh test')
         self.create_help('/api/v1/projects', req_p)
         self.project = req_p.name
 
@@ -80,46 +79,46 @@ class TestCaseBase(TestBase):
 class TestCaseCreate(TestCaseBase):
     def test_noBody(self):
         (code, body) = self.create(None, 'vping')
-        self.assertEqual(code, HTTP_BAD_REQUEST)
+        self.assertEqual(code, constants.HTTP_BAD_REQUEST)
 
     def test_noProject(self):
         code, body = self.create(self.req_d, 'noProject')
-        self.assertEqual(code, HTTP_FORBIDDEN)
+        self.assertEqual(code, constants.HTTP_FORBIDDEN)
         self.assertIn('Could not find project', body)
 
     def test_emptyName(self):
-        req_empty = TestcaseCreateRequest('')
+        req_empty = testcase_models.TestcaseCreateRequest('')
         (code, body) = self.create(req_empty, self.project)
-        self.assertEqual(code, HTTP_BAD_REQUEST)
+        self.assertEqual(code, constants.HTTP_BAD_REQUEST)
         self.assertIn('name missing', body)
 
     def test_noneName(self):
-        req_none = TestcaseCreateRequest(None)
+        req_none = testcase_models.TestcaseCreateRequest(None)
         (code, body) = self.create(req_none, self.project)
-        self.assertEqual(code, HTTP_BAD_REQUEST)
+        self.assertEqual(code, constants.HTTP_BAD_REQUEST)
         self.assertIn('name missing', body)
 
     def test_success(self):
         code, body = self.create_d()
-        self.assertEqual(code, HTTP_OK)
+        self.assertEqual(code, constants.HTTP_OK)
         self.assert_create_body(body, None, self.project)
 
     def test_alreadyExist(self):
         self.create_d()
         code, body = self.create_d()
-        self.assertEqual(code, HTTP_FORBIDDEN)
+        self.assertEqual(code, constants.HTTP_FORBIDDEN)
         self.assertIn('already exists', body)
 
 
 class TestCaseGet(TestCaseBase):
     def test_notExist(self):
         code, body = self.get('notExist')
-        self.assertEqual(code, HTTP_NOT_FOUND)
+        self.assertEqual(code, constants.HTTP_NOT_FOUND)
 
     def test_getOne(self):
         self.create_d()
         code, body = self.get(self.req_d.name)
-        self.assertEqual(code, HTTP_OK)
+        self.assertEqual(code, constants.HTTP_OK)
         self.assert_body(body)
 
     def test_list(self):
@@ -136,23 +135,23 @@ class TestCaseGet(TestCaseBase):
 class TestCaseUpdate(TestCaseBase):
     def test_noBody(self):
         code, _ = self.update(case='noBody')
-        self.assertEqual(code, HTTP_BAD_REQUEST)
+        self.assertEqual(code, constants.HTTP_BAD_REQUEST)
 
     def test_notFound(self):
         code, _ = self.update(self.update_e, 'notFound')
-        self.assertEqual(code, HTTP_NOT_FOUND)
+        self.assertEqual(code, constants.HTTP_NOT_FOUND)
 
     def test_newNameExist(self):
         self.create_d()
         self.create_e()
         code, body = self.update(self.update_e, self.req_d.name)
-        self.assertEqual(code, HTTP_FORBIDDEN)
+        self.assertEqual(code, constants.HTTP_FORBIDDEN)
         self.assertIn("already exists", body)
 
     def test_noUpdate(self):
         self.create_d()
         code, body = self.update(self.update_d, self.req_d.name)
-        self.assertEqual(code, HTTP_FORBIDDEN)
+        self.assertEqual(code, constants.HTTP_FORBIDDEN)
         self.assertIn("Nothing to update", body)
 
     def test_success(self):
@@ -161,7 +160,7 @@ class TestCaseUpdate(TestCaseBase):
         _id = body._id
 
         code, body = self.update(self.update_e, self.req_d.name)
-        self.assertEqual(code, HTTP_OK)
+        self.assertEqual(code, constants.HTTP_OK)
         self.assertEqual(_id, body._id)
         self.assert_update_body(self.req_d, body, self.update_e)
 
@@ -174,22 +173,22 @@ class TestCaseUpdate(TestCaseBase):
         update = copy.deepcopy(self.update_d)
         update.description = {'2. change': 'dollar change'}
         code, body = self.update(update, self.req_d.name)
-        self.assertEqual(code, HTTP_OK)
+        self.assertEqual(code, constants.HTTP_OK)
 
 
 class TestCaseDelete(TestCaseBase):
     def test_notFound(self):
         code, body = self.delete('notFound')
-        self.assertEqual(code, HTTP_NOT_FOUND)
+        self.assertEqual(code, constants.HTTP_NOT_FOUND)
 
     def test_success(self):
         self.create_d()
         code, body = self.delete(self.req_d.name)
-        self.assertEqual(code, HTTP_OK)
+        self.assertEqual(code, constants.HTTP_OK)
         self.assertEqual(body, '')
 
         code, body = self.get(self.req_d.name)
-        self.assertEqual(code, HTTP_NOT_FOUND)
+        self.assertEqual(code, constants.HTTP_NOT_FOUND)
 
 
 if __name__ == '__main__':
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/test_token.py b/utils/test/testapi/opnfv_testapi/tests/unit/test_token.py
new file mode 100644 (file)
index 0000000..19b9e3e
--- /dev/null
@@ -0,0 +1,118 @@
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+
+import unittest
+
+from tornado import web
+
+import fake_pymongo
+from opnfv_testapi.common import constants
+from opnfv_testapi.resources import project_models
+from opnfv_testapi.router import url_mappings
+import test_base as base
+
+
+class TestToken(base.TestBase):
+    def get_app(self):
+        return web.Application(
+            url_mappings.mappings,
+            db=fake_pymongo,
+            debug=True,
+            auth=True
+        )
+
+
+class TestTokenCreateProject(TestToken):
+    def setUp(self):
+        super(TestTokenCreateProject, self).setUp()
+        self.req_d = project_models.ProjectCreateRequest('vping')
+        fake_pymongo.tokens.insert({"access_token": "12345"})
+        self.basePath = '/api/v1/projects'
+
+    def test_projectCreateTokenInvalid(self):
+        self.headers['X-Auth-Token'] = '1234'
+        code, body = self.create_d()
+        self.assertEqual(code, constants.HTTP_FORBIDDEN)
+        self.assertIn('Invalid Token.', body)
+
+    def test_projectCreateTokenUnauthorized(self):
+        self.headers.pop('X-Auth-Token')
+        code, body = self.create_d()
+        self.assertEqual(code, constants.HTTP_UNAUTHORIZED)
+        self.assertIn('No Authentication Header.', body)
+
+    def test_projectCreateTokenSuccess(self):
+        self.headers['X-Auth-Token'] = '12345'
+        code, body = self.create_d()
+        self.assertEqual(code, constants.HTTP_OK)
+
+
+class TestTokenDeleteProject(TestToken):
+    def setUp(self):
+        super(TestTokenDeleteProject, self).setUp()
+        self.req_d = project_models.ProjectCreateRequest('vping')
+        fake_pymongo.tokens.insert({"access_token": "12345"})
+        self.basePath = '/api/v1/projects'
+
+    def test_projectDeleteTokenInvalid(self):
+        self.headers['X-Auth-Token'] = '12345'
+        self.create_d()
+        self.headers['X-Auth-Token'] = '1234'
+        code, body = self.delete(self.req_d.name)
+        self.assertEqual(code, constants.HTTP_FORBIDDEN)
+        self.assertIn('Invalid Token.', body)
+
+    def test_projectDeleteTokenUnauthorized(self):
+        self.headers['X-Auth-Token'] = '12345'
+        self.create_d()
+        self.headers.pop('X-Auth-Token')
+        code, body = self.delete(self.req_d.name)
+        self.assertEqual(code, constants.HTTP_UNAUTHORIZED)
+        self.assertIn('No Authentication Header.', body)
+
+    def test_projectDeleteTokenSuccess(self):
+        self.headers['X-Auth-Token'] = '12345'
+        self.create_d()
+        code, body = self.delete(self.req_d.name)
+        self.assertEqual(code, constants.HTTP_OK)
+
+
+class TestTokenUpdateProject(TestToken):
+    def setUp(self):
+        super(TestTokenUpdateProject, self).setUp()
+        self.req_d = project_models.ProjectCreateRequest('vping')
+        fake_pymongo.tokens.insert({"access_token": "12345"})
+        self.basePath = '/api/v1/projects'
+
+    def test_projectUpdateTokenInvalid(self):
+        self.headers['X-Auth-Token'] = '12345'
+        self.create_d()
+        code, body = self.get(self.req_d.name)
+        self.headers['X-Auth-Token'] = '1234'
+        req = project_models.ProjectUpdateRequest('newName', 'new description')
+        code, body = self.update(req, self.req_d.name)
+        self.assertEqual(code, constants.HTTP_FORBIDDEN)
+        self.assertIn('Invalid Token.', body)
+
+    def test_projectUpdateTokenUnauthorized(self):
+        self.headers['X-Auth-Token'] = '12345'
+        self.create_d()
+        code, body = self.get(self.req_d.name)
+        self.headers.pop('X-Auth-Token')
+        req = project_models.ProjectUpdateRequest('newName', 'new description')
+        code, body = self.update(req, self.req_d.name)
+        self.assertEqual(code, constants.HTTP_UNAUTHORIZED)
+        self.assertIn('No Authentication Header.', body)
+
+    def test_projectUpdateTokenSuccess(self):
+        self.headers['X-Auth-Token'] = '12345'
+        self.create_d()
+        code, body = self.get(self.req_d.name)
+        req = project_models.ProjectUpdateRequest('newName', 'new description')
+        code, body = self.update(req, self.req_d.name)
+        self.assertEqual(code, constants.HTTP_OK)
+
+if __name__ == '__main__':
+    unittest.main()
index b6fbf45..c8f3f50 100644 (file)
@@ -8,14 +8,14 @@
 ##############################################################################
 import unittest
 
-from test_base import TestBase
-from opnfv_testapi.resources.models import Versions
+from opnfv_testapi.resources import models
+import test_base as base
 
 
-class TestVersionBase(TestBase):
+class TestVersionBase(base.TestBase):
     def setUp(self):
         super(TestVersionBase, self).setUp()
-        self.list_res = Versions
+        self.list_res = models.Versions
         self.basePath = '/versions'