Merge "Fix "Illegal option -o pipefail" problem and correct the parser path."
author: mei mei <meimei@huawei.com>
Fri, 27 May 2016 02:20:24 +0000 (02:20 +0000)
committer: Gerrit Code Review <gerrit@172.30.200.206>
Fri, 27 May 2016 02:20:24 +0000 (02:20 +0000)
30 files changed:
jjb/apex/apex-deploy-baremetal.sh [deleted file]
jjb/apex/apex-deploy-virtual.sh [deleted file]
jjb/apex/apex-deploy.sh [new file with mode: 0755]
jjb/apex/apex.yml
jjb/armband/armband-ci-jobs.yml
jjb/compass4nfv/compass-ci-jobs.yml
jjb/compass4nfv/compass-project-jobs.yml
jjb/functest/functest-ci-jobs.yml
jjb/opnfv/slave-params.yml
jjb/qtip/qtip-ci-jobs.yml
jjb/releng-macros.yaml
jjb/yardstick/yardstick-ci-jobs.yml
utils/push-test-logs.sh
utils/test/result_collection_api/resources/handlers.py
utils/test/result_collection_api/resources/pod_handlers.py [new file with mode: 0644]
utils/test/result_collection_api/resources/pod_models.py
utils/test/result_collection_api/resources/project_handlers.py [new file with mode: 0644]
utils/test/result_collection_api/resources/project_models.py
utils/test/result_collection_api/result_collection_api.py
utils/test/result_collection_api/tests/unit/test_base.py
utils/test/result_collection_api/tests/unit/test_pod.py
utils/test/result_collection_api/tests/unit/test_project.py
utils/test/result_collection_api/tornado_swagger_ui/__init__.py
utils/test/result_collection_api/tornado_swagger_ui/example/basic.py
utils/test/result_collection_api/tornado_swagger_ui/setup.py
utils/test/result_collection_api/tornado_swagger_ui/tornado_swagger/__init__.py
utils/test/result_collection_api/tornado_swagger_ui/tornado_swagger/handlers.py
utils/test/result_collection_api/tornado_swagger_ui/tornado_swagger/settings.py
utils/test/result_collection_api/tornado_swagger_ui/tornado_swagger/swagger.py
utils/test/result_collection_api/tornado_swagger_ui/tornado_swagger/views.py

diff --git a/jjb/apex/apex-deploy-baremetal.sh b/jjb/apex/apex-deploy-baremetal.sh
deleted file mode 100755 (executable)
index efb6561..0000000
+++ /dev/null
@@ -1,84 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-
-# log info to console
-echo "Starting the Apex baremetal deployment."
-echo "--------------------------------------------------------"
-echo
-
-if [[ ! "$ARTIFACT_NAME" == "latest" ]]; then
-    # if artifact name is passed the pull a
-    # specific artifact from artifacts.opnfv.org
-    RPM_INSTALL_PATH=$GS_URL/$ARTIFACT_NAME
-else
-    if [[ $BUILD_DIRECTORY == *apex-build* ]]; then
-      BUILD_DIRECTORY=$WORKSPACE/../$BUILD_DIRECTORY
-      echo "BUILD DIRECTORY modified to $BUILD_DIRECTORY"
-    fi
-    if [[ -f ${BUILD_DIRECTORY}/../opnfv.properties ]]; then
-        # if opnfv.properties exists then use the
-        # local build. Source the file so we get local OPNFV vars
-        source ${BUILD_DIRECTORY}/../opnfv.properties
-        RPM_INSTALL_PATH=${BUILD_DIRECTORY}/$(basename $OPNFV_RPM_URL)
-    else
-        # no opnfv.properties means use the latest from artifacts.opnfv.org
-        # get the latest.properties to get the link to the latest artifact
-        curl -s -o $WORKSPACE/opnfv.properties http://$GS_URL/latest.properties
-        [[ -f opnfv.properties ]] || exit 1
-        # source the file so we get OPNFV vars
-        source opnfv.properties
-        RPM_INSTALL_PATH=$OPNFV_RPM_URL
-    fi
-fi
-
-if [ ! -e "$RPM_INSTALL_PATH" ]; then
-   RPM_INSTALL_PATH=http://${OPNFV_RPM_URL}
-fi
-
-RPM_LIST=$RPM_INSTALL_PATH
-for pkg in common undercloud; do
-    RPM_LIST+=" ${RPM_INSTALL_PATH/opnfv-apex/opnfv-apex-${pkg}}"
-done
-
-# update / install the new rpm
-if rpm -q opnfv-apex > /dev/null; then
-   if [ $(basename $OPNFV_RPM_URL) == $(rpm -q opnfv-apex).rpm ]; then
-     echo "RPM is already installed"
-   elif sudo yum update -y $RPM_LIST | grep "does not update installed package"; then
-       if ! sudo yum downgrade -y $RPM_LIST; then
-         sudo yum remove -y opnfv-undercloud opnfv-common
-         sudo yum downgrade -y $RPM_INSTALL_PATH
-       fi
-   fi
-else
-   sudo yum install -y $RPM_LIST;
-fi
-
-# cleanup environment before we start
-sudo opnfv-clean
-# initiate baremetal deployment
-if [ -e /etc/opnfv-apex/network_settings.yaml ]; then
-  if [ -n "$DEPLOY_SCENARIO" ]; then
-    echo "Deploy Scenario set to ${DEPLOY_SCENARIO}"
-    if [ -e /etc/opnfv-apex/${DEPLOY_SCENARIO}.yaml ]; then
-      sudo opnfv-deploy -i  /root/inventory/pod_settings.yaml \
-      -d /etc/opnfv-apex/${DEPLOY_SCENARIO}.yaml \
-      -n /root/network/network_settings.yaml --debug
-    else
-      echo "File does not exist /etc/opnfv-apex/${DEPLOY_SCENARIO}.yaml"
-      exit 1
-    fi
-  else
-    echo "Deploy scenario not set!"
-    exit 1
-  fi
-else
-  echo "File /etc/opnfv-apex/network_settings.yaml does not exist!"
-  exit 1
-fi
-
-echo
-echo "--------------------------------------------------------"
-echo "Done!"
diff --git a/jjb/apex/apex-deploy-virtual.sh b/jjb/apex/apex-deploy-virtual.sh
deleted file mode 100755 (executable)
index 4d9b030..0000000
+++ /dev/null
@@ -1,151 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-
-# log info to console
-echo "Starting the Apex virtual deployment."
-echo "--------------------------------------------------------"
-echo
-
-if [[ $BUILD_DIRECTORY == *verify-master* ]]; then
-    cd $WORKSPACE/../${BUILD_DIRECTORY/build_output/}
-    WORKSPACE=$(pwd)
-    echo "WORKSPACE modified to $WORKSPACE"
-    cd $WORKSPACE/ci
-elif [[ ! "$ARTIFACT_NAME" == "latest" ]]; then
-    # if artifact name is passed the pull a
-    # specific artifact from artifacts.opnfv.org
-    RPM_INSTALL_PATH=$GS_URL
-    RPM_LIST=$RPM_INSTALL_PATH/$ARTIFACT_NAME
-else
-    if [[ $BUILD_DIRECTORY == *verify* ]]; then
-      BUILD_DIRECTORY=$WORKSPACE/../$BUILD_DIRECTORY
-      echo "BUILD DIRECTORY modified to $BUILD_DIRECTORY"
-    elif [[ $BUILD_DIRECTORY == *apex-build* ]]; then
-      BUILD_DIRECTORY=$WORKSPACE/../$BUILD_DIRECTORY
-      echo "BUILD DIRECTORY modified to $BUILD_DIRECTORY"
-    fi
-
-    if [[ -f ${BUILD_DIRECTORY}/../opnfv.properties ]]; then
-        # if opnfv.properties exists then use the
-        # local build. Source the file so we get local OPNFV vars
-        source ${BUILD_DIRECTORY}/../opnfv.properties
-        RPM_INSTALL_PATH=${BUILD_DIRECTORY}/noarch
-        RPM_LIST=$RPM_INSTALL_PATH/$(basename $OPNFV_RPM_URL)
-    else
-        if [[ $BUILD_DIRECTORY == *verify* ]]; then
-          echo "BUILD_DIRECTORY is from a verify job, so will not use latest from URL"
-          echo "Check that the slave has opnfv.properties in $BUILD_DIRECTORY"
-          exit 1
-        elif [[ $BUILD_DIRECTORY == *apex-build* ]]; then
-          echo "BUILD_DIRECTORY is from a daily job, so will not use latest from URL"
-          echo "Check that the slave has opnfv.properties in $BUILD_DIRECTORY"
-          exit 1
-        fi
-        # no opnfv.properties means use the latest from artifacts.opnfv.org
-        # get the latest.properties to get the link to the latest artifact
-        curl -s -o $WORKSPACE/opnfv.properties http://$GS_URL/latest.properties
-        [[ -f opnfv.properties ]] || exit 1
-        # source the file so we get OPNFV vars
-        source opnfv.properties
-        RPM_INSTALL_PATH=$(echo $OPNFV_RPM_URL | sed 's/'"$(basename $OPNFV_RPM_URL)"'//')
-        RPM_LIST=$RPM_INSTALL_PATH/$(basename $OPNFV_RPM_URL)
-    fi
-fi
-
-if [ -z "$DEPLOY_SCENARIO" ]; then
-  echo "Deploy scenario not set!"
-  exit 1
-fi
-
-# use local build for verify
-if [[ $BUILD_DIRECTORY == *verify-master* ]]; then
-    if [ ! -e "${WORKSPACE}/build/lib" ]; then ln -s ${WORKSPACE}/lib ${WORKSPACE}/build/lib; fi
-    DEPLOY_CMD="CONFIG=${WORKSPACE}/build RESOURCES=${WORKSPACE}/build/images/ ./deploy.sh -c ${WORKSPACE}/build -r ${WORKSPACE}/build/images/"
-    DEPLOY_FILE="${WORKSPACE}/config/deploy/${DEPLOY_SCENARIO}.yaml"
-    NETWORK_FILE="${WORKSPACE}/config/network/network_settings.yaml"
-    # Make sure python34 is installed
-    if ! rpm -q python34 > /dev/null; then
-        sudo yum install -y epel-release
-        if ! sudo yum install -y python34; then
-            echo "Failed to install python34"
-            exit 1
-        fi
-    fi
-    if ! rpm -q python34-PyYAML > /dev/null; then
-        sudo yum install -y epel-release
-        if ! sudo yum install -y python34-PyYAML; then
-            echo "Failed to install python34-PyYAML"
-            exit 1
-        fi
-    fi
-    if ! rpm -q python34-setuptools > /dev/null; then
-        if ! sudo yum install -y python34-setuptools; then
-            echo "Failed to install python34-setuptools"
-            exit 1
-        fi
-    fi
-    if [ -z ${PYTHONPATH:-} ]; then
-        export PYTHONPATH=${WORKSPACE}/lib/python
-    else
-        export PYTHONPATH=$PYTHONPATH:${WORKSPACE}/lib/python
-    fi
-else
-    VERSION_EXTENSION=$(echo $(basename $RPM_LIST) | grep -Eo '[0-9]+\.[0-9]+-[0-9]{8}')
-    for pkg in common undercloud opendaylight-sfc onos; do
-        RPM_LIST+=" ${RPM_INSTALL_PATH}/opnfv-apex-${pkg}-${VERSION_EXTENSION}.noarch.rpm"
-    done
-
-    # update / install the new rpm
-    if rpm -q opnfv-apex > /dev/null; then
-       INSTALLED_RPMS=$(rpm -qa | grep apex)
-       for x in $INSTALLED_RPMS; do
-         INSTALLED_RPM_VER=$(echo $x | grep -Eo '[0-9]+\.[0-9]+-[0-9]{8}')
-         # Does each RPM's version match the version required for deployment
-         if [ "$INSTALLED_RPM_VER" == "$VERSION_EXTENSION" ]; then
-           echo "RPM $x is already installed"
-         else
-           echo "RPM $x does not match version $VERSION_EXTENSION"
-           echo "Will upgrade/downgrade RPMs..."
-           # Try to upgrade/downgrade RPMS
-           if sudo yum update -y $RPM_LIST | grep "does not update installed package"; then
-             if ! sudo yum downgrade -y $RPM_LIST; then
-               sudo yum remove -y opnfv-apex-undercloud opnfv-apex-common opnfv-apex-opendaylight-sfc opnfv-apex-onos
-               if ! sudo yum downgrade -y $RPM_LIST; then
-                 echo "Unable to downgrade RPMs: $RPM_LIST"
-                 exit 1
-               fi
-             fi
-           fi
-           break
-         fi
-       done
-    else
-       sudo yum install -y $RPM_LIST;
-    fi
-    DEPLOY_CMD=opnfv-deploy
-    DEPLOY_FILE="/etc/opnfv-apex/${DEPLOY_SCENARIO}.yaml"
-    NETWORK_FILE="/etc/opnfv-apex/network_settings.yaml"
-    export RESOURCES="/var/opt/opnfv/images"
-    export CONFIG="/var/opt/opnfv"
-fi
-
-if [ "$OPNFV_CLEAN" == 'yes' ]; then
-    if [[ $BUILD_DIRECTORY == *verify-master* ]]; then
-        sudo CONFIG=${WORKSPACE}/build ./clean.sh
-    else
-        sudo opnfv-clean
-    fi
-fi
-# initiate virtual deployment
-echo "Deploy Scenario set to ${DEPLOY_SCENARIO}"
-if [ -e $DEPLOY_FILE ]; then
-  sudo $DEPLOY_CMD -v -d ${DEPLOY_FILE} -n $NETWORK_FILE --debug
-else
-  echo "File does not exist /etc/opnfv-apex/${DEPLOY_SCENARIO}.yaml"
-  exit 1
-fi
-echo
-echo "--------------------------------------------------------"
-echo "Done!"
diff --git a/jjb/apex/apex-deploy.sh b/jjb/apex/apex-deploy.sh
new file mode 100755 (executable)
index 0000000..4706d45
--- /dev/null
@@ -0,0 +1,168 @@
+#!/bin/bash
+set -o errexit
+set -o nounset
+set -o pipefail
+
+APEX_PKGS="common undercloud opendaylight-sfc onos"
+
+# log info to console
+echo "Starting the Apex virtual deployment."
+echo "--------------------------------------------------------"
+echo
+
+if ! rpm -q wget > /dev/null; then
+  sudo yum -y install wget
+fi
+
+if [[ $BUILD_DIRECTORY == *verify* ]]; then
+    # Build is from a verify, use local build artifacts (not RPMs)
+    cd $WORKSPACE/../${BUILD_DIRECTORY}
+    WORKSPACE=$(pwd)
+    echo "WORKSPACE modified to $WORKSPACE"
+    cd $WORKSPACE/ci
+elif [[ ! "$ARTIFACT_NAME" == "latest" ]]; then
+    # if artifact name is passed the pull a
+    # specific artifact from artifacts.opnfv.org
+    # artifact specified should be opnfv-apex-<version>.noarch.rpm
+    RPM_INSTALL_PATH=$GS_URL
+    RPM_LIST=$RPM_INSTALL_PATH/$ARTIFACT_NAME
+else
+    # Use latest RPMS
+    if [[ $BUILD_DIRECTORY == *apex-build* ]]; then
+      # Triggered from a daily so RPMS should be in local directory
+      BUILD_DIRECTORY=$WORKSPACE/../$BUILD_DIRECTORY
+      echo "BUILD DIRECTORY modified to $BUILD_DIRECTORY"
+
+      if [[ -f ${BUILD_DIRECTORY}/../opnfv.properties ]]; then
+        # if opnfv.properties exists then use the
+        # local build. Source the file so we get local OPNFV vars
+        source ${BUILD_DIRECTORY}/../opnfv.properties
+        RPM_INSTALL_PATH=${BUILD_DIRECTORY}/noarch
+        RPM_LIST=$RPM_INSTALL_PATH/$(basename $OPNFV_RPM_URL)
+      else
+        echo "BUILD_DIRECTORY is from a daily job, so will not use latest from URL"
+        echo "Check that the slave has opnfv.properties in $BUILD_DIRECTORY"
+        exit 1
+      fi
+    else
+      # use the latest from artifacts.opnfv.org
+      # get the latest.properties to get the link to the latest artifact
+      if ! wget -O $WORKSPACE/opnfv.properties http://$GS_URL/latest.properties; then
+        echo "ERROR: Unable to find latest.properties at ${GS_URL}...exiting"
+        exit 1
+      fi
+      # source the file so we get OPNFV vars
+      source opnfv.properties
+      RPM_INSTALL_PATH=$(echo $OPNFV_RPM_URL | sed 's/'"$(basename $OPNFV_RPM_URL)"'//')
+      RPM_LIST=$RPM_INSTALL_PATH/$(basename $OPNFV_RPM_URL)
+    fi
+fi
+
+if [ -z "$DEPLOY_SCENARIO" ]; then
+  echo "Deploy scenario not set!"
+  exit 1
+fi
+
+# use local build for verify
+if [[ "$BUILD_DIRECTORY" == *verify* ]]; then
+    if [ ! -e "${WORKSPACE}/build/lib" ]; then
+      ln -s ${WORKSPACE}/lib ${WORKSPACE}/build/lib
+    fi
+    DEPLOY_SETTINGS_DIR="${WORKSPACE}/config/deploy"
+    NETWORK_SETTINGS_DIR="${WORKSPACE}/config/network"
+    DEPLOY_CMD="$(pwd)/deploy.sh"
+    RESOURCES="${WORKSPACE}/build/images/"
+    CONFIG="${WORKSPACE}/build"
+    LIB="${WORKSPACE}/lib"
+    # Make sure python34 deps are installed
+    for dep_pkg in epel-release python34 python34-PyYAML python34-setuptools; do
+      if ! rpm -q ${dep_pkg} > /dev/null; then
+        if ! sudo yum install -y ${dep_pkg}; then
+          echo "Failed to install ${dep_pkg}"
+          exit 1
+        fi
+      fi
+    done
+
+    if [ -z ${PYTHONPATH:-} ]; then
+        export PYTHONPATH=${WORKSPACE}/lib/python
+    else
+        export PYTHONPATH=$PYTHONPATH:${WORKSPACE}/lib/python
+    fi
+# use RPMs
+else
+    # find version of RPM
+    VERSION_EXTENSION=$(echo $(basename $RPM_LIST) | grep -Eo '[0-9]+\.[0-9]+-[0-9]{8}')
+    # build RPM List which already includes base Apex RPM
+    for pkg in ${APEX_PKGS}; do
+        RPM_LIST+=" ${RPM_INSTALL_PATH}/opnfv-apex-${pkg}-${VERSION_EXTENSION}.noarch.rpm"
+    done
+
+    # remove old / install new RPMs
+    if rpm -q opnfv-apex > /dev/null; then
+      INSTALLED_RPMS=$(rpm -qa | grep apex)
+      if [ -n "$INSTALLED_RPMS" ]; then
+        sudo yum remove -y ${INSTALLED_RPMS}
+      fi
+    fi
+
+    if ! sudo yum install -y $RPM_LIST; then
+      echo "Unable to install new RPMs: $RPM_LIST"
+      exit 1
+    fi
+
+    DEPLOY_CMD=opnfv-deploy
+    DEPLOY_SETTINGS_DIR="/etc/opnfv-apex/"
+    NETWORK_SETTINGS_DIR="/etc/opnfv-apex/"
+    RESOURCES="/var/opt/opnfv/images"
+    CONFIG="/var/opt/opnfv"
+    LIB="/var/opt/opnfv/lib"
+fi
+
+# set env vars to deploy cmd
+DEPLOY_CMD="CONFIG=${CONFIG} RESOURCES=${RESOURCES} LIB=${LIB} ${DEPLOY_CMD}"
+
+if [ "$OPNFV_CLEAN" == 'yes' ]; then
+  if [[ "$BUILD_DIRECTORY" == *verify* ]]; then
+    sudo CONFIG=${CONFIG} LIB=${LIB} ./clean.sh
+  else
+    sudo CONFIG=${CONFIG} LIB=${LIB} opnfv-clean
+  fi
+fi
+
+echo "Deploy Scenario set to ${DEPLOY_SCENARIO}"
+DEPLOY_FILE="${DEPLOY_SETTINGS_DIR}/${DEPLOY_SCENARIO}.yaml"
+
+if [ ! -e "$DEPLOY_FILE" ]; then
+  echo "ERROR: Required settings file missing: Deploy settings file ${DEPLOY_FILE}"
+fi
+
+if [[ "$JOB_NAME" == *virtual* ]]; then
+  # settings for virtual deployment
+  NETWORK_FILE="${NETWORK_SETTINGS_DIR}/network_settings.yaml"
+  DEPLOY_CMD="${DEPLOY_CMD} -v"
+else
+  # settings for bare metal deployment
+  NETWORK_FILE="/root/network/network_settings.yaml"
+  INVENTORY_FILE="/root/inventory/pod_settings.yaml"
+
+  if [ ! -e "$INVENTORY_FILE" ]; then
+    echo "ERROR: Required settings file missing: Inventory settings file ${INVENTORY_FILE}"
+  fi
+  # include inventory file for bare metal deployment
+  DEPLOY_CMD="${DEPLOY_CMD} -i ${INVENTORY_FILE}"
+fi
+
+# Check that network settings file exists
+if [ ! -e "$NETWORK_FILE" ]; then
+  echo "ERROR: Required settings file missing for Network Settings"
+  echo "Network settings file: ${NETWORK_FILE}"
+  exit 1
+fi
+
+# start deployment
+sudo ${DEPLOY_CMD} -d ${DEPLOY_FILE} -n ${NETWORK_FILE} --debug
+
+echo
+echo "--------------------------------------------------------"
+echo "Done!"
index ed06113..c121d63 100644 (file)
             max-total: 10
 
     builders:
-        - 'apex-deploy-virtual'
+        - 'apex-deploy'
         - 'apex-workspace-cleanup'
 
 - job-template:
 
 
     builders:
-        - 'apex-deploy-baremetal'
+        - 'apex-deploy'
         - 'apex-workspace-cleanup'
 
 # Brahmaputra Daily
             !include-raw: ./apex-gs-cleanup.sh
 
 - builder:
-    name: 'apex-deploy-virtual'
+    name: 'apex-deploy'
     builders:
         - shell:
-            !include-raw: ./apex-deploy-virtual.sh
+            !include-raw: ./apex-deploy.sh
 
-- builder:
-    name: 'apex-deploy-baremetal'
-    builders:
-        - shell:
-            !include-raw: ./apex-deploy-baremetal.sh
 
 #######################
 # trigger macros
index 031c972..7d39241 100644 (file)
@@ -39,7 +39,7 @@
 
         # NOHA scenarios
         - 'os-odl_l2-nofeature-noha':
-            auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
+            auto-trigger-name: 'daily-trigger-disabled'
 
     jobs:
         - 'armband-{scenario}-{pod}-daily-{stream}'
index 40f5445..7ce9268 100644 (file)
         stream: brahmaputra
         branch: 'stable/{stream}'
         gs-pathname: '/{stream}'
-
 #--------------------------------
 # POD, INSTALLER, AND BRANCH MAPPING
 #--------------------------------
-    pod:
-#--------------------------------
-#        brahmaputra
+#        CI PODs
 #--------------------------------
-        - huawei-pod1:
+    pod:
+        - baremetal:
+            slave-label: compass-baremetal
+            os-version: 'trusty'
+            <<: *master
+        - virtual:
+            slave-label: compass-virtual
+            os-version: 'trusty'
+            <<: *master
+        - baremetal:
+            slave-label: compass-baremetal
             os-version: 'trusty'
             <<: *brahmaputra
-        - huawei-virtual:
+        - virtual:
+            slave-label: compass-virtual
             os-version: 'trusty'
             <<: *brahmaputra
 #--------------------------------
 #        master
 #--------------------------------
         - huawei-pod2:
+            slave-label: '{pod}'
             os-version: 'centos7'
             <<: *master
-#        - intel-pod8:
-        - huawei-pod1:
-            os-version: 'trusty'
-            <<: *master
-        - huawei-virtual:
-            os-version: 'trusty'
-            <<: *master
 
     scenario:
         - 'os-nosdn-nofeature-ha':
@@ -95,7 +97,7 @@
         - string:
             name: DEPLOY_SCENARIO
             default: '{scenario}'
-        - '{pod}-defaults'
+        - '{slave-label}-defaults'
         - '{installer}-defaults'
 
     triggers:
         - compass-ci-parameter:
             installer: '{installer}'
             gs-pathname: '{gs-pathname}'
-        - '{pod}-defaults'
+        - '{slave-label}-defaults'
         - '{installer}-defaults'
 
     scm:
         - timed: ''
 
 - trigger:
-    name: 'compass-os-nosdn-nofeature-ha-huawei-pod1-master-trigger'
+    name: 'compass-os-nosdn-nofeature-ha-baremetal-master-trigger'
     triggers:
         - timed: '0 3 * * *'
 - trigger:
-    name: 'compass-os-odl_l2-nofeature-ha-huawei-pod1-master-trigger'
+    name: 'compass-os-odl_l2-nofeature-ha-baremetal-master-trigger'
     triggers:
         - timed: '0 21 * * *'
 - trigger:
-    name: 'compass-os-odl_l3-nofeature-ha-huawei-pod1-master-trigger'
+    name: 'compass-os-odl_l3-nofeature-ha-baremetal-master-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'compass-os-onos-nofeature-ha-huawei-pod1-master-trigger'
+    name: 'compass-os-onos-nofeature-ha-baremetal-master-trigger'
     triggers:
         - timed: '0 15 * * *'
 - trigger:
-    name: 'compass-os-ocl-nofeature-ha-huawei-pod1-master-trigger'
+    name: 'compass-os-ocl-nofeature-ha-baremetal-master-trigger'
     triggers:
         - timed: '0 9 * * *'
 
 - trigger:
-    name: 'compass-os-nosdn-nofeature-ha-huawei-pod1-brahmaputra-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'compass-os-odl_l2-nofeature-ha-huawei-pod1-brahmaputra-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'compass-os-odl_l3-nofeature-ha-huawei-pod1-brahmaputra-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'compass-os-onos-nofeature-ha-huawei-pod1-brahmaputra-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'compass-os-ocl-nofeature-ha-huawei-pod1-brahmaputra-trigger'
-    triggers:
-        - timed: ''
-
-- trigger:
-    name: 'compass-os-nosdn-nofeature-ha-intel-pod8-master-trigger'
+    name: 'compass-os-nosdn-nofeature-ha-baremetal-brahmaputra-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'compass-os-odl_l2-nofeature-ha-intel-pod8-master-trigger'
+    name: 'compass-os-odl_l2-nofeature-ha-baremetal-brahmaputra-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'compass-os-odl_l3-nofeature-ha-intel-pod8-master-trigger'
+    name: 'compass-os-odl_l3-nofeature-ha-baremetal-brahmaputra-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'compass-os-onos-nofeature-ha-intel-pod8-master-trigger'
+    name: 'compass-os-onos-nofeature-ha-baremetal-brahmaputra-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'compass-os-ocl-nofeature-ha-intel-pod8-master-trigger'
+    name: 'compass-os-ocl-nofeature-ha-baremetal-brahmaputra-trigger'
     triggers:
         - timed: ''
 
 - trigger:
-    name: 'compass-os-nosdn-nofeature-ha-huawei-virtual-master-trigger'
+    name: 'compass-os-nosdn-nofeature-ha-virtual-master-trigger'
     triggers:
         - timed: '0 3 * * *'
 - trigger:
-    name: 'compass-os-odl_l2-nofeature-ha-huawei-virtual-master-trigger'
+    name: 'compass-os-odl_l2-nofeature-ha-virtual-master-trigger'
     triggers:
         - timed: '0 21 * * *'
 - trigger:
-    name: 'compass-os-odl_l3-nofeature-ha-huawei-virtual-master-trigger'
+    name: 'compass-os-odl_l3-nofeature-ha-virtual-master-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'compass-os-onos-nofeature-ha-huawei-virtual-master-trigger'
+    name: 'compass-os-onos-nofeature-ha-virtual-master-trigger'
     triggers:
         - timed: '0 15 * * *'
 - trigger:
-    name: 'compass-os-ocl-nofeature-ha-huawei-virtual-master-trigger'
+    name: 'compass-os-ocl-nofeature-ha-virtual-master-trigger'
     triggers:
         - timed: '0 9 * * *'
 - trigger:
-    name: 'compass-os-nosdn-nofeature-ha-huawei-virtual-brahmaputra-trigger'
+    name: 'compass-os-nosdn-nofeature-ha-virtual-brahmaputra-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'compass-os-odl_l2-nofeature-ha-huawei-virtual-brahmaputra-trigger'
+    name: 'compass-os-odl_l2-nofeature-ha-virtual-brahmaputra-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'compass-os-odl_l3-nofeature-ha-huawei-virtual-brahmaputra-trigger'
+    name: 'compass-os-odl_l3-nofeature-ha-virtual-brahmaputra-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'compass-os-onos-nofeature-ha-huawei-virtual-brahmaputra-trigger'
+    name: 'compass-os-onos-nofeature-ha-virtual-brahmaputra-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'compass-os-ocl-nofeature-ha-huawei-virtual-brahmaputra-trigger'
+    name: 'compass-os-ocl-nofeature-ha-virtual-brahmaputra-trigger'
     triggers:
         - timed: ''
index 920923a..9db709d 100644 (file)
@@ -6,7 +6,7 @@
 
     project: 'compass4nfv'
 
-    pod: 'huawei-virtual'
+    pod: 'compass-virtual'
 
     stream:
         - master:
         - build-blocker:
             use-build-blocker: true
             blocking-jobs:
-                - 'compass-deploy-huawei-virtual-daily-.*?'
+                - 'compass-deploy-virtual-daily-.*?'
             block-level: 'NODE'
 
     scm:
index 2467361..d7e3729 100644 (file)
             slave-label: joid-virtual
             installer: joid
             <<: *brahmaputra
+# compass CI PODs
+        - baremetal:
+            slave-label: compass-baremetal
+            installer: compass
+            <<: *master
+        - virtual:
+            slave-label: compass-virtual
+            installer: compass
+            <<: *master
+        - baremetal:
+            slave-label: compass-baremetal
+            installer: compass
+            <<: *brahmaputra
+        - virtual:
+            slave-label: compass-virtual
+            installer: compass
+            <<: *brahmaputra
 #--------------------------------
 #    Installers not using labels
 #            CI PODs
             slave-label: '{pod}'
             installer: apex
             <<: *brahmaputra
-        - intel-pod8:
-            slave-label: '{pod}'
-            installer: compass
-            <<: *master
-        - huawei-pod1:
-            slave-label: '{pod}'
-            installer: compass
-            <<: *brahmaputra
 #--------------------------------
 #        None-CI PODs
 #--------------------------------
             slave-label: '{pod}'
             installer: joid
             <<: *master
-        - huawei-virtual:
-            slave-label: '{pod}'
-            installer: compass
-            <<: *master
         - huawei-pod2:
             slave-label: '{pod}'
             installer: compass
             <<: *master
-        - huawei-pod1:
-            slave-label: '{pod}'
-            installer: compass
-            <<: *master
         - nokia-pod1:
             slave-label: '{pod}'
             installer: apex
             slave-label: '{pod}'
             installer: fuel
             <<: *brahmaputra
-        - huawei-virtual:
-            slave-label: '{pod}'
-            installer: compass
-            <<: *brahmaputra
 #--------------------------------
 
     testsuite:
         - 'set-functest-env'
         - 'functest-suite'
 
-
 - builder:
     name: functest-suite
     builders:
index d89af41..02d74ab 100644 (file)
             name: GIT_BASE
             default: https://gerrit.opnfv.org/gerrit/$PROJECT
             description: 'Git URL to use on this Jenkins Slave'
+- parameter:
+    name: 'compass-baremetal-defaults'
+    parameters:
+        - label:
+            name: SLAVE_LABEL
+            default: 'compass-baremetal'
+        - string:
+            name: GIT_BASE
+            default: https://gerrit.opnfv.org/gerrit/$PROJECT
+            description: 'Git URL to use on this Jenkins Slave'
+- parameter:
+    name: 'compass-virtual-defaults'
+    parameters:
+        - label:
+            name: SLAVE_LABEL
+            default: 'compass-virtual'
+        - string:
+            name: GIT_BASE
+            default: https://gerrit.opnfv.org/gerrit/$PROJECT
+            description: 'Git URL to use on this Jenkins Slave'
 - parameter:
     name: 'lf-pod1-defaults'
     parameters:
@@ -68,7 +88,6 @@
             name: SSH_KEY
             default: /root/.ssh/id_rsa
             description: 'SSH key to use for Apex'
-
 - parameter:
     name: 'lf-pod2-defaults'
     parameters:
             default: https://gerrit.opnfv.org/gerrit/$PROJECT
             description: 'Git URL to use on this Jenkins Slave'
 
-- parameter:
-    name: 'huawei-virtual-defaults'
-    parameters:
-        - label:
-            name: SLAVE_LABEL
-            default: 'huawei-deploy-vm'
-        - string:
-            name: INSTALLER_VERSION
-            default: stable
-            description: 'Version of the installer to deploy'
-        - string:
-            name: GIT_BASE
-            default: https://gerrit.opnfv.org/gerrit/$PROJECT
-            description: 'Git URL to use on this Jenkins Slave'
-
 - parameter:
     name: 'huawei-pod3-defaults'
     parameters:
             default: https://gerrit.opnfv.org/gerrit/$PROJECT
             description: 'Git URL to use on this Jenkins Slave'
 
-- parameter:
-    name: 'virtual-defaults'
-    parameters:
-        - label:
-            name: SLAVE_LABEL
-            default: '{installer}-deploy-virtual'
-        - string:
-            name: GIT_BASE
-            default: https://gerrit.opnfv.org/gerrit/$PROJECT
-            description: 'Git URL to use on these Jenkins Slaves'
-
 - parameter:
     name: 'ericsson-build-defaults'
     parameters:
index 39d856e..ee321eb 100644 (file)
@@ -29,7 +29,7 @@
             <<: *brahmaputra
         - orange-pod2:
             installer: joid
-            auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
+            auto-trigger-name: 'daily-trigger-disabled'
             <<: *brahmaputra
 #--------------------------------
 #        master
@@ -37,7 +37,7 @@
         - juniper-pod1:
             installer: joid
             <<: *master
-            auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
+            auto-trigger-name: 'daily-trigger-disabled'
 #      - zte-pod1:   #would be confirmed with the ZTE lab by tomorrow
 #            installer: fuel
 #            <<: *master
index cdbb0ba..d8866bd 100644 (file)
     triggers:
         - timed: ''
 
-- trigger:
-    name: 'brahmaputra-trigger-daily-disabled'
-    triggers:
-        - timed: ''
-
 - trigger:
     name: 'brahmaputra-trigger-daily-enabled'
     triggers:
index 3d59e61..48c335a 100644 (file)
         - baremetal:
             slave-label: fuel-baremetal
             installer: fuel
-            auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
+            auto-trigger-name: 'daily-trigger-disabled'
             <<: *master
         - virtual:
             slave-label: fuel-virtual
             installer: fuel
-            auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
+            auto-trigger-name: 'daily-trigger-disabled'
             <<: *master
         - baremetal:
             slave-label: fuel-baremetal
             installer: fuel
-            auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
+            auto-trigger-name: 'daily-trigger-disabled'
             <<: *brahmaputra
         - virtual:
             slave-label: fuel-virtual
             installer: fuel
-            auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
+            auto-trigger-name: 'daily-trigger-disabled'
             <<: *brahmaputra
 
 # just in case if things go wrong
         - lf-pod2:
             slave-label: '{pod}'
             installer: fuel
-            auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
+            auto-trigger-name: 'daily-trigger-disabled'
             <<: *master
 
 # joid CI PODs
         - baremetal:
             slave-label: joid-baremetal
             installer: joid
-            auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
+            auto-trigger-name: 'daily-trigger-disabled'
             <<: *master
         - virtual:
             slave-label: joid-virtual
             installer: joid
-            auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
+            auto-trigger-name: 'daily-trigger-disabled'
             <<: *master
         - baremetal:
             slave-label: joid-baremetal
             installer: joid
-            auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
+            auto-trigger-name: 'daily-trigger-disabled'
             <<: *brahmaputra
         - virtual:
             slave-label: joid-virtual
             installer: joid
-            auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
+            auto-trigger-name: 'daily-trigger-disabled'
+            <<: *brahmaputra
+
+# compass CI PODs
+        - baremetal:
+            slave-label: compass-baremetal
+            installer: compass
+            auto-trigger-name: 'daily-trigger-disabled'
+            <<: *master
+        - virtual:
+            slave-label: compass-virtual
+            installer: compass
+            auto-trigger-name: 'daily-trigger-disabled'
+            <<: *master
+        - baremetal:
+            slave-label: compass-baremetal
+            installer: compass
+            auto-trigger-name: 'daily-trigger-disabled'
+            <<: *brahmaputra
+        - virtual:
+            slave-label: compass-virtual
+            installer: compass
+            auto-trigger-name: 'daily-trigger-disabled'
             <<: *brahmaputra
 #--------------------------------
 #    Installers not using labels
         - lf-pod1:
             slave-label: '{pod}'
             installer: apex
-            auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
+            auto-trigger-name: 'daily-trigger-disabled'
             <<: *master
         - lf-pod1:
             slave-label: '{pod}'
             installer: apex
-            auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
-            <<: *brahmaputra
-        - intel-pod8:
-            slave-label: '{pod}'
-            installer: compass
-            auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
-            <<: *master
-        - huawei-pod1:
-            slave-label: '{pod}'
-            installer: compass
-            auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
+            auto-trigger-name: 'daily-trigger-disabled'
             <<: *brahmaputra
 #--------------------------------
 #        None-CI PODs
         - orange-pod2:
             slave-label: '{pod}'
             installer: joid
-            auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
+            auto-trigger-name: 'daily-trigger-disabled'
             <<: *brahmaputra
         - zte-pod1:
             slave-label: '{pod}'
             installer: fuel
-            auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
+            auto-trigger-name: 'daily-trigger-disabled'
             <<: *master
         - orange-pod2:
             slave-label: '{pod}'
             installer: joid
-            auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
-            <<: *master
-        - huawei-pod1:
-            slave-label: '{pod}'
-            installer: compass
-            suite: daily
-            auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
+            auto-trigger-name: 'daily-trigger-disabled'
             <<: *master
         - huawei-pod2:
             slave-label: '{pod}'
             installer: compass
-            auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
-            <<: *master
-            auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
-            <<: *master
-        - huawei-virtual:
-            slave-label: '{pod}'
-            installer: compass
-            auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
+            auto-trigger-name: 'daily-trigger-disabled'
             <<: *master
         - huawei-pod3:
             slave-label: '{pod}'
             installer: compass
             auto-trigger-name: 'yardstick-daily-huawei-pod4-trigger'
             <<: *master
-        - huawei-virtual:
-            slave-label: '{pod}'
-            installer: compass
-            auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
-            <<: *brahmaputra
 #--------------------------------
     testsuite:
         - 'daily'
             description: 'Arguments to use in order to choose the backend DB'
 
 - parameter:
-    name: 'yardstick-params-huawei-pod1'
+    name: 'yardstick-params-compass-baremetal'
     parameters:
         - string:
             name: YARDSTICK_DB_BACKEND
             description: 'Arguments to use in order to choose the backend DB'
 
 - parameter:
-    name: 'yardstick-params-huawei-virtual'
+    name: 'yardstick-params-compass-virtual'
     parameters:
         - string:
             name: YARDSTICK_DB_BACKEND
index 972853b..7456450 100644 (file)
@@ -17,11 +17,13 @@ res_build_date=${1:-$(date -u +"%Y-%m-%d_%H-%M-%S")}
 project=$PROJECT
 branch=${GIT_BRANCH##*/}
 testbed=$NODE_NAME
-dir_result="${HOME}/opnfv/$project/results/$branch"
+dir_result="${HOME}/opnfv/$project/results"
+# src: https://wiki.opnfv.org/display/INF/Hardware+Infrastructure
+# + intel-pod3 (vsperf)
 node_list=(\
-'opnfv-jump-1' 'opnfv-jump-2' 'ericsson-pod1' 'ericsson-pod2' \
-'intelpod2-jumphost' 'intel-pod3' 'intel-pod5' 'intel-pod6' \
-'intel-pod8' 'huawei-us-deploy-bare-1' 'orange-fr-pod2')
+'lf-pod1' 'lf-pod2' 'intel-pod2' 'intel-pod3' \
+'intel-pod5' 'intel-pod6' 'intel-pod7' 'intel-pod8' \
+'ericsson-pod2' 'huawei-pod1')
 
 if [[ ! " ${node_list[@]} " =~ " ${testbed} " ]]; then
     echo "This is not a CI POD. Aborting pushing the logs to artifacts."
index 4353343..3f9d842 100644 (file)
@@ -12,6 +12,8 @@
 # feng.xiaowei@zte.com.cn refactor testcase related handler  5-20-2016
 # feng.xiaowei@zte.com.cn refactor result related handler    5-23-2016
 # feng.xiaowei@zte.com.cn refactor dashboard related handler 5-24-2016
+# feng.xiaowei@zte.com.cn add methods to GenericApiHandler   5-26-2016
+# feng.xiaowei@zte.com.cn remove PodHandler                  5-26-2016
 ##############################################################################
 
 import json
@@ -23,8 +25,6 @@ from tornado import gen
 from models import CreateResponse
 from resources.result_models import TestResult
 from resources.testcase_models import Testcase
-from resources.project_models import Project
-from resources.pod_models import Pod
 from common.constants import DEFAULT_REPRESENTATION, HTTP_BAD_REQUEST, \
     HTTP_NOT_FOUND, HTTP_FORBIDDEN
 from common.config import prepare_put_request
@@ -38,15 +38,12 @@ def format_data(data, cls):
 
 
 class GenericApiHandler(RequestHandler):
-    """
-    The purpose of this class is to take benefit of inheritance and prepare
-    a set of common functions for
-    the handlers
-    """
-
-    def initialize(self):
-        """ Prepares the database for the entire class """
+    def __init__(self, application, request, **kwargs):
+        super(GenericApiHandler, self).__init__(application, request, **kwargs)
         self.db = self.settings["db"]
+        self.json_args = None
+        self.table = None
+        self.table_cls = None
 
     def prepare(self):
         if self.request.method != "GET" and self.request.method != "DELETE":
@@ -59,8 +56,6 @@ class GenericApiHandler(RequestHandler):
                         raise HTTPError(HTTP_BAD_REQUEST,
                                         "Bad Json format [{}]".
                                         format(error))
-                else:
-                    self.json_args = None
 
     def finish_request(self, json_object=None):
         if json_object:
@@ -72,232 +67,68 @@ class GenericApiHandler(RequestHandler):
         href = self.request.full_url() + '/' + resource
         return CreateResponse(href=href).format()
 
-
-class VersionHandler(GenericApiHandler):
-    """ Display a message for the API version """
-    def get(self):
-        self.finish_request([{'v1': 'basics'}])
-
-
-class PodHandler(GenericApiHandler):
-    """ Handle the requests about the POD Platforms
-    HTTP Methdods :
-        - GET : Get PODS
-        - POST : Create a pod
-        - DELETE : DELETE POD
-    """
-
-    def initialize(self):
-        """ Prepares the database for the entire class """
-        super(PodHandler, self).initialize()
-
     @asynchronous
     @gen.coroutine
-    def get(self, pod_name=None):
-        """
-        Get all pods or a single pod
-        :param pod_id:
-        """
-        query = dict()
-
-        if pod_name is not None:
-            query["name"] = pod_name
-            answer = yield self.db.pods.find_one(query)
-            if answer is None:
-                raise HTTPError(HTTP_NOT_FOUND,
-                                "{} Not Exist".format(pod_name))
-            else:
-                answer = format_data(answer, Pod)
-        else:
-            res = []
-            cursor = self.db.pods.find(query)
-            while (yield cursor.fetch_next):
-                res.append(format_data(cursor.next_object(), Pod))
-            answer = {'pods': res}
-
-        self.finish_request(answer)
-
-    @asynchronous
-    @gen.coroutine
-    def post(self):
-        """ Create a POD"""
-
+    def _create(self, error):
         if self.json_args is None:
-            raise HTTPError(HTTP_BAD_REQUEST)
+            raise HTTPError(HTTP_BAD_REQUEST, 'no body')
 
-        query = {"name": self.json_args.get("name")}
+        data = self.table_cls.from_dict(self.json_args)
+        name = data.name
+        if name is None or name == '':
+            raise HTTPError(HTTP_BAD_REQUEST,
+                            '{} name missing'.format(self.table[:-1]))
 
-        # check for existing name in db
-        the_pod = yield self.db.pods.find_one(query)
-        if the_pod is not None:
+        exist_data = yield self._eval_db(self.table, 'find_one',
+                                         {"name": name})
+        if exist_data is not None:
             raise HTTPError(HTTP_FORBIDDEN,
-                            "{} already exists as a pod".format(
-                                self.json_args.get("name")))
-
-        pod = Pod.from_dict(self.json_args)
-        pod.creation_date = datetime.now()
-
-        yield self.db.pods.insert(pod.format())
-        self.finish_request(self._create_response(pod.name))
+                            error.format(name, self.table[:-1]))
+        data.creation_date = datetime.now()
+        yield self._eval_db(self.table, 'insert', data.format())
+        self.finish_request(self._create_response(name))
 
     @asynchronous
     @gen.coroutine
-    def delete(self, pod_name):
-        """ Remove a POD
-
-        # check for an existing pod to be deleted
-        mongo_dict = yield self.db.pods.find_one(
-            {'name': pod_name})
-        pod = TestProject.pod(mongo_dict)
-        if pod is None:
-            raise HTTPError(HTTP_NOT_FOUND,
-                            "{} could not be found as a pod to be deleted"
-                            .format(pod_name))
-
-        # just delete it, or maybe save it elsewhere in a future
-        res = yield self.db.projects.remove(
-            {'name': pod_name})
-
-        self.finish_request(answer)
-        """
-        pass
-
-
-class ProjectHandler(GenericApiHandler):
-    """
-    TestProjectHandler Class
-    Handle the requests about the Test projects
-    HTTP Methdods :
-        - GET : Get all test projects and details about a specific one
-        - POST : Add a test project
-        - PUT : Edit test projects information (name and/or description)
-        - DELETE : Remove a test project
-    """
-
-    def initialize(self):
-        """ Prepares the database for the entire class """
-        super(ProjectHandler, self).initialize()
-
-    @asynchronous
-    @gen.coroutine
-    def get(self, project_name=None):
-        """
-        Get Project(s) info
-        :param project_name:
-        """
-
-        query = dict()
-
-        if project_name is not None:
-            query["name"] = project_name
-            answer = yield self.db.projects.find_one(query)
-            if answer is None:
-                raise HTTPError(HTTP_NOT_FOUND,
-                                "{} Not Exist".format(project_name))
-            else:
-                answer = format_data(answer, Project)
-        else:
-            res = []
-            cursor = self.db.projects.find(query)
-            while (yield cursor.fetch_next):
-                res.append(format_data(cursor.next_object(), Project))
-            answer = {'projects': res}
-
-        self.finish_request(answer)
+    def _list(self, query=None):
+        if query is None:
+            query = {}
+        res = []
+        cursor = self._eval_db(self.table, 'find', query)
+        while (yield cursor.fetch_next):
+            res.append(format_data(cursor.next_object(), self.table_cls))
+        self.finish_request({self.table: res})
 
     @asynchronous
     @gen.coroutine
-    def post(self):
-        """ Create a test project"""
-
-        if self.json_args is None:
-            raise HTTPError(HTTP_BAD_REQUEST)
-
-        query = {"name": self.json_args.get("name")}
-
-        # check for name in db
-        the_project = yield self.db.projects.find_one(query)
-        if the_project is not None:
-            raise HTTPError(HTTP_FORBIDDEN,
-                            "{} already exists as a project".format(
-                                self.json_args.get("name")))
-
-        project = Project.from_dict(self.json_args)
-        project.creation_date = datetime.now()
-
-        yield self.db.projects.insert(project.format())
-        self.finish_request(self._create_response(project.name))
+    def _get_one(self, query):
+        data = yield self._eval_db(self.table, 'find_one', query)
+        if data is None:
+            raise HTTPError(HTTP_NOT_FOUND,
+                            "[{}] not exist in table [{}]"
+                            .format(query, self.table))
+        self.finish_request(format_data(data, self.table_cls))
 
     @asynchronous
     @gen.coroutine
-    def put(self, project_name):
-        """ Updates the name and description of a test project"""
-
-        if self.json_args is None:
-            raise HTTPError(HTTP_BAD_REQUEST)
-
-        query = {'name': project_name}
-        from_project = yield self.db.projects.find_one(query)
-        if from_project is None:
+    def _delete(self, query):
+        data = yield self._eval_db(self.table, 'find_one', query)
+        if data is None:
             raise HTTPError(HTTP_NOT_FOUND,
-                            "{} could not be found".format(project_name))
-
-        project = Project.from_dict(from_project)
-        new_name = self.json_args.get("name")
-        new_description = self.json_args.get("description")
-
-        # check for payload name parameter in db
-        # avoid a request if the project name has not changed in the payload
-        if new_name != project.name:
-            to_project = yield self.db.projects.find_one(
-                {"name": new_name})
-            if to_project is not None:
-                raise HTTPError(HTTP_FORBIDDEN,
-                                "{} already exists as a project"
-                                .format(new_name))
+                            "[{}] not exit in table [{}]"
+                            .format(query, self.table))
 
-        # new dict for changes
-        request = dict()
-        request = prepare_put_request(request,
-                                      "name",
-                                      new_name,
-                                      project.name)
-        request = prepare_put_request(request,
-                                      "description",
-                                      new_description,
-                                      project.description)
-
-        """ raise exception if there isn't a change """
-        if not request:
-            raise HTTPError(HTTP_FORBIDDEN, "Nothing to update")
-
-        """ we merge the whole document """
-        edit_request = project.format()
-        edit_request.update(request)
-
-        """ Updating the DB """
-        yield self.db.projects.update({'name': project_name}, edit_request)
-        new_project = yield self.db.projects.find_one({"_id": project._id})
-
-        self.finish_request(format_data(new_project, Project))
-
-    @asynchronous
-    @gen.coroutine
-    def delete(self, project_name):
-        """ Remove a test project"""
-        query = {'name': project_name}
+        yield self._eval_db(self.table, 'remove', query)
+        self.finish_request()
 
-        # check for an existing project to be deleted
-        project = yield self.db.projects.find_one(query)
-        if project is None:
-            raise HTTPError(HTTP_NOT_FOUND,
-                            "{} could not be found as a project to be deleted"
-                            .format(project_name))
+    def _eval_db(self, table, method, param):
+        return eval('self.db.%s.%s(param)' % (table, method))
 
-        # just delete it, or maybe save it elsewhere in a future
-        yield self.db.projects.remove(query)
 
-        self.finish_request()
+class VersionHandler(GenericApiHandler):
+    """ Display a message for the API version """
+    def get(self):
+        self.finish_request([{'v1': 'basics'}])
 
 
 class TestcaseHandler(GenericApiHandler):
diff --git a/utils/test/result_collection_api/resources/pod_handlers.py b/utils/test/result_collection_api/resources/pod_handlers.py
new file mode 100644 (file)
index 0000000..590ae5b
--- /dev/null
@@ -0,0 +1,75 @@
+from tornado import gen
+from tornado.web import asynchronous
+
+from tornado_swagger_ui.tornado_swagger import swagger
+from handlers import GenericApiHandler
+from pod_models import Pod
+
+
+class GenericPodHandler(GenericApiHandler):
+    def __init__(self, application, request, **kwargs):
+        super(GenericPodHandler, self).__init__(application, request, **kwargs)
+        self.table = 'pods'
+        self.table_cls = Pod
+
+
+class PodCLHandler(GenericPodHandler):
+    @swagger.operation(nickname='list-all')
+    def get(self):
+        """
+            @description: list all pods
+            @return 200: list all pods, empty list if no pods exist
+            @rtype: L{Pods}
+        """
+        self._list()
+
+    @gen.coroutine
+    @swagger.operation(nickname='create')
+    def post(self):
+        """
+            @description: create a pod
+            @param body: pod to be created
+            @type body: L{PodCreateRequest}
+            @in body: body
+            @rtype: L{Pod}
+            @return 200: pod is created.
+            @raise 403: pod already exists
+            @raise 400: post without body
+        """
+        self._create('{} already exists as a {}')
+
+
+class PodGURHandler(GenericPodHandler):
+    @swagger.operation(nickname='get-one')
+    def get(self, pod_name):
+        """
+            @description: get a single pod by pod_name
+            @rtype: L{Pod}
+            @return 200: pod exist
+            @raise 404: pod not exist
+        """
+        query = dict()
+        query['name'] = pod_name
+        self._get_one(query)
+
+    @asynchronous
+    @gen.coroutine
+    def delete(self, pod_name):
+        """ Remove a POD
+
+        # check for an existing pod to be deleted
+        mongo_dict = yield self.db.pods.find_one(
+            {'name': pod_name})
+        pod = TestProject.pod(mongo_dict)
+        if pod is None:
+            raise HTTPError(HTTP_NOT_FOUND,
+                            "{} could not be found as a pod to be deleted"
+                            .format(pod_name))
+
+        # just delete it, or maybe save it elsewhere in a future
+        res = yield self.db.projects.remove(
+            {'name': pod_name})
+
+        self.finish_request(answer)
+        """
+        pass
index b02e3c2..cc98c90 100644 (file)
@@ -1,3 +1,5 @@
+from tornado_swagger_ui.tornado_swagger import swagger
+
 __author__ = '__serena__'
 
 # name: name of the POD e.g. zte-1
@@ -6,8 +8,9 @@ __author__ = '__serena__'
 # role: ci-pod or community-pod or single-node
 
 
+@swagger.model()
 class PodCreateRequest(object):
-    def __init__(self, name='', mode='', details='', role=""):
+    def __init__(self, name, mode='', details='', role=""):
         self.name = name
         self.mode = mode
         self.details = details
@@ -22,10 +25,11 @@ class PodCreateRequest(object):
         }
 
 
+@swagger.model()
 class Pod(PodCreateRequest):
-    """ describes a POD platform """
-    def __init__(self, name='', mode='', details='', role="",
-                 _id='', create_date=''):
+    def __init__(self,
+                 name='', mode='', details='',
+                 role="", _id='', create_date=''):
         super(Pod, self).__init__(name, mode, details, role)
         self._id = _id
         self.creation_date = create_date
@@ -55,7 +59,11 @@ class Pod(PodCreateRequest):
         return f
 
 
+@swagger.model()
 class Pods(object):
+    """
+        @ptype pods: C{list} of L{Pod}
+    """
     def __init__(self, pods=list()):
         self.pods = pods
 
diff --git a/utils/test/result_collection_api/resources/project_handlers.py b/utils/test/result_collection_api/resources/project_handlers.py
new file mode 100644 (file)
index 0000000..69ce3b5
--- /dev/null
@@ -0,0 +1,124 @@
+from tornado import gen
+from tornado.web import HTTPError, asynchronous
+
+from tornado_swagger_ui.tornado_swagger import swagger
+from handlers import GenericApiHandler, prepare_put_request, format_data
+from common.constants import HTTP_BAD_REQUEST, HTTP_FORBIDDEN, HTTP_NOT_FOUND
+from project_models import Project
+
+
+class GenericProjectHandler(GenericApiHandler):
+    def __init__(self, application, request, **kwargs):
+        super(GenericProjectHandler, self).__init__(application,
+                                                    request,
+                                                    **kwargs)
+        self.table = 'projects'
+        self.table_cls = Project
+
+
+class ProjectCLHandler(GenericProjectHandler):
+    @swagger.operation(nickname="list-all")
+    def get(self):
+        """
+            @description: list all projects
+            @return 200: return all projects, empty list if no projects exist
+            @rtype: L{Projects}
+        """
+        self._list()
+
+    @swagger.operation(nickname="create")
+    def post(self):
+        """
+            @description: create a project
+            @param body: project to be created
+            @type body: L{ProjectCreateRequest}
+            @in body: body
+            @rtype: L{Project}
+            @return 200: project is created.
+            @raise 403: project already exists
+            @raise 400: post without body
+        """
+        self._create('{} already exists as a {}')
+
+
+class ProjectGURHandler(GenericProjectHandler):
+    @swagger.operation(nickname='get-one')
+    def get(self, project_name):
+        """
+            @description: get a single project by project_name
+            @rtype: L{Project}
+            @return 200: project exist
+            @raise 404: project not exist
+        """
+        self._get_one({'name': project_name})
+
+    @asynchronous
+    @gen.coroutine
+    @swagger.operation(nickname="update")
+    def put(self, project_name):
+        """
+            @description: update a single project by project_name
+            @param body: project to be updated
+            @type body: L{ProjectUpdateRequest}
+            @in body: body
+            @rtype: L{Project}
+            @return 200: update success
+            @raise 404: project not exist
+            @raise 403: new project name already exist or nothing to update
+        """
+        if self.json_args is None:
+            raise HTTPError(HTTP_BAD_REQUEST)
+
+        query = {'name': project_name}
+        from_project = yield self.db.projects.find_one(query)
+        if from_project is None:
+            raise HTTPError(HTTP_NOT_FOUND,
+                            "{} could not be found".format(project_name))
+
+        project = Project.from_dict(from_project)
+        new_name = self.json_args.get("name")
+        new_description = self.json_args.get("description")
+
+        # check for payload name parameter in db
+        # avoid a request if the project name has not changed in the payload
+        if new_name != project.name:
+            to_project = yield self.db.projects.find_one(
+                {"name": new_name})
+            if to_project is not None:
+                raise HTTPError(HTTP_FORBIDDEN,
+                                "{} already exists as a project"
+                                .format(new_name))
+
+        # new dict for changes
+        request = dict()
+        request = prepare_put_request(request,
+                                      "name",
+                                      new_name,
+                                      project.name)
+        request = prepare_put_request(request,
+                                      "description",
+                                      new_description,
+                                      project.description)
+
+        """ raise exception if there isn't a change """
+        if not request:
+            raise HTTPError(HTTP_FORBIDDEN, "Nothing to update")
+
+        """ we merge the whole document """
+        edit_request = project.format()
+        edit_request.update(request)
+
+        """ Updating the DB """
+        yield self.db.projects.update(query, edit_request)
+        new_project = yield self.db.projects.find_one({"_id": project._id})
+
+        self.finish_request(format_data(new_project, Project))
+
+    @swagger.operation(nickname='delete')
+    def delete(self, project_name):
+        """
+            @description: delete a project by project_name
+            @return 200: delete success
+            @raise 404: project not exist
+        """
+        self._delete({'name': project_name})
index 895fc3e..a8f8309 100644 (file)
@@ -1,7 +1,23 @@
+from tornado_swagger_ui.tornado_swagger import swagger
+
 __author__ = '__serena__'
 
 
+@swagger.model()
 class ProjectCreateRequest(object):
+    def __init__(self, name, description=''):
+        self.name = name
+        self.description = description
+
+    def format(self):
+        return {
+            "name": self.name,
+            "description": self.description,
+        }
+
+
+@swagger.model()
+class ProjectUpdateRequest(object):
     def __init__(self, name='', description=''):
         self.name = name
         self.description = description
@@ -13,14 +29,14 @@ class ProjectCreateRequest(object):
         }
 
 
+@swagger.model()
 class Project:
-    """ Describes a test project"""
-
-    def __init__(self):
-        self._id = None
-        self.name = None
-        self.description = None
-        self.creation_date = None
+    def __init__(self,
+                 name=None, _id=None, description=None, create_date=None):
+        self._id = _id
+        self.name = name
+        self.description = description
+        self.creation_date = create_date
 
     @staticmethod
     def from_dict(res_dict):
@@ -52,7 +68,11 @@ class Project:
         }
 
 
+@swagger.model()
 class Projects(object):
+    """
+        @ptype projects: C{list} of L{Project}
+    """
     def __init__(self, projects=list()):
         self.projects = projects
 
index 97aa58c..344e0d7 100644 (file)
@@ -29,14 +29,17 @@ TODOs :
 
 """
 
+import argparse
+
 import tornado.ioloop
 import motor
-import argparse
 
-from resources.handlers import VersionHandler, PodHandler, \
-    ProjectHandler, TestcaseHandler, TestResultsHandler, DashboardHandler
+from resources.handlers import VersionHandler, \
+    TestcaseHandler, TestResultsHandler, DashboardHandler
+from resources.pod_handlers import PodCLHandler, PodGURHandler
+from resources.project_handlers import ProjectCLHandler, ProjectGURHandler
 from common.config import APIConfig
-
+from tornado_swagger_ui.tornado_swagger import swagger
 
 # optionally get config file from command line
 parser = argparse.ArgumentParser()
@@ -51,29 +54,27 @@ db = client[CONF.mongo_dbname]
 
 
 def make_app():
-    return tornado.web.Application(
+    return swagger.Application(
         [
             # GET /version => GET API version
             (r"/versions", VersionHandler),
 
             # few examples:
-            # GET /pods => Get all pods
-            # GET /pods/1 => Get details on POD 1
-            (r"/api/v1/pods", PodHandler),
-            (r"/api/v1/pods/([^/]+)", PodHandler),
+            # GET /api/v1/pods => Get all pods
+            # GET /api/v1/pods/1 => Get details on POD 1
+            (r"/api/v1/pods", PodCLHandler),
+            (r"/api/v1/pods/([^/]+)", PodGURHandler),
 
             # few examples:
             # GET /projects
             # GET /projects/yardstick
-            (r"/api/v1/projects", ProjectHandler),
-            (r"/api/v1/projects/([^/]+)", ProjectHandler),
+            (r"/api/v1/projects", ProjectCLHandler),
+            (r"/api/v1/projects/([^/]+)", ProjectGURHandler),
 
             # few examples
             # GET /projects/qtip/cases => Get cases for qtip
-            #
             (r"/api/v1/projects/([^/]+)/cases", TestcaseHandler),
             (r"/api/v1/projects/([^/]+)/cases/([^/]+)", TestcaseHandler),
-            # (r"/test_cases/([^/]+)", TestCasesHandler),
 
             # new path to avoid a long depth
             # GET /results?project=functest&case=keystone.catalog&pod=1
index 99b1de2..44e42b7 100644 (file)
@@ -2,8 +2,10 @@ import json
 from tornado.web import Application
 from tornado.testing import AsyncHTTPTestCase
 
-from resources.handlers import VersionHandler, PodHandler, \
-    ProjectHandler, TestcaseHandler, TestResultsHandler, DashboardHandler
+from resources.pod_handlers import PodCLHandler, PodGURHandler
+from resources.project_handlers import ProjectCLHandler, ProjectGURHandler
+from resources.handlers import VersionHandler, \
+    TestcaseHandler, TestResultsHandler, DashboardHandler
 from resources.models import CreateResponse
 import fake_pymongo
 
@@ -26,10 +28,10 @@ class TestBase(AsyncHTTPTestCase):
         return Application(
             [
                 (r"/versions", VersionHandler),
-                (r"/api/v1/pods", PodHandler),
-                (r"/api/v1/pods/([^/]+)", PodHandler),
-                (r"/api/v1/projects", ProjectHandler),
-                (r"/api/v1/projects/([^/]+)", ProjectHandler),
+                (r"/api/v1/pods", PodCLHandler),
+                (r"/api/v1/pods/([^/]+)", PodGURHandler),
+                (r"/api/v1/projects", ProjectCLHandler),
+                (r"/api/v1/projects/([^/]+)", ProjectGURHandler),
                 (r"/api/v1/projects/([^/]+)/cases", TestcaseHandler),
                 (r"/api/v1/projects/([^/]+)/cases/([^/]+)", TestcaseHandler),
                 (r"/api/v1/results", TestResultsHandler),
index d7f4c3a..8a93027 100644 (file)
@@ -32,6 +32,18 @@ class TestPodCreate(TestPodBase):
         (code, body) = self.create()
         self.assertEqual(code, HTTP_BAD_REQUEST)
 
+    def test_emptyName(self):
+        req_empty = PodCreateRequest('')
+        (code, body) = self.create(req_empty)
+        self.assertEqual(code, HTTP_BAD_REQUEST)
+        self.assertIn('pod name missing', body)
+
+    def test_noneName(self):
+        req_none = PodCreateRequest(None)
+        (code, body) = self.create(req_none)
+        self.assertEqual(code, HTTP_BAD_REQUEST)
+        self.assertIn('pod name missing', body)
+
     def test_success(self):
         code, body = self.create_d()
         self.assertEqual(code, HTTP_OK)
index c380780..b07cb7a 100644 (file)
@@ -30,6 +30,18 @@ class TestProjectCreate(TestProjectBase):
         (code, body) = self.create()
         self.assertEqual(code, HTTP_BAD_REQUEST)
 
+    def test_emptyName(self):
+        req_empty = ProjectCreateRequest('')
+        (code, body) = self.create(req_empty)
+        self.assertEqual(code, HTTP_BAD_REQUEST)
+        self.assertIn('project name missing', body)
+
+    def test_noneName(self):
+        req_none = ProjectCreateRequest(None)
+        (code, body) = self.create(req_none)
+        self.assertEqual(code, HTTP_BAD_REQUEST)
+        self.assertIn('project name missing', body)
+
     def test_success(self):
         (code, body) = self.create_d()
         self.assertEqual(code, HTTP_OK)
index 1731bfd..93ff00b 100644 (file)
@@ -22,15 +22,21 @@ class PropertySubclass:
 class Item:
     """
         @description:
-            This is an example of a model class that has parameters in its constructor
-            and the fields in the swagger spec are derived from the parameters to __init__.
+            This is an example of a model class that has parameters in its
+            constructor and the fields in the swagger spec are derived from
+            the parameters to __init__.
         @notes:
-            In this case we would have property1, name as required parameters and property3 as optional parameter.
+            In this case we would have property1, name as required parameters
+            and property3 as optional parameter.
         @property property3: Item description
         @ptype property3: L{PropertySubclass}
         @ptype property4: C{list} of L{PropertySubclass}
     """
-    def __init__(self, property1, property2=None, property3=None, property4=None):
+    def __init__(self,
+                 property1,
+                 property2=None,
+                 property3=None,
+                 property4=None):
         self.property1 = property1
         self.property2 = property2
         self.property3 = property3
@@ -78,17 +84,17 @@ class GenericApiHandler(RequestHandler):
         pass
 
     def prepare(self):
-        if not (self.request.method == "GET" or self.request.method == "DELETE"):
-            if self.request.headers.get("Content-Type") is not None:
-                if self.request.headers["Content-Type"].startswith(DEFAULT_REPRESENTATION):
+        if self.request.method != "GET" and self.request.method != "DELETE":
+            self.json_args = None
+            content_type = self.request.headers.get("Content-Type")
+            if content_type is not None:
+                if content_type.startswith(DEFAULT_REPRESENTATION):
                     try:
                         self.json_args = json.loads(self.request.body)
                     except (ValueError, KeyError, TypeError) as error:
                         raise HTTPError(HTTP_BAD_REQUEST,
                                         "Bad Json format [{}]".
                                         format(error))
-                else:
-                    self.json_args = None
 
     def finish_request(self, json_object):
         self.write(json.dumps(json_object))
@@ -138,7 +144,8 @@ class ItemHandler(GenericApiHandler):
             @notes:
                 get a item,
 
-                This will be added to the Implementation Notes.It lets you put very long text in your api.
+                This will be added to the Implementation Notes.
+                It lets you put very long text in your api.
         """
         self.finish_request(items[arg].format_http())
 
@@ -148,8 +155,6 @@ class ItemHandler(GenericApiHandler):
             @description: delete a item
             @notes:
                 delete a item in items
-
-                This will be added to the Implementation Notes.It lets you put very long text in your api.
         """
         del items[arg]
         self.finish_request("success")
@@ -161,8 +166,7 @@ class ItemOptionParamHandler(GenericApiHandler):
         """
         @return 200: case is created
         """
-        print("ProjectHandler.post: %s -- %s -- %s" % (arg1, arg2, self.request.full_url()))
-        fs = open("/home/swagger/tornado-rest-swagger/%s/%s" % (arg1, arg2), "wb")
+        fs = open("/home/%s/%s" % (arg1, arg2), "wb")
         fs.write(self.request.body)
         self.write("success")
 
@@ -193,7 +197,7 @@ class ItemQueryHandler(GenericApiHandler):
                     res.append(value.format_http())
                 elif value.property2 == property2:
                     res.append(value.format_http())
-        elif items.has_key(property1):
+        elif property1 in items:
             if items.get(property1).property2 == property2:
                 res.append(items.get(property1).format_http())
 
index 9765591..57dc48a 100644 (file)
@@ -3,8 +3,8 @@ try:
 except ImportError:
     from distutils.core import setup
 
-with open('README') as file:
-    long_description = file.read()
+with open('README') as f:
+    long_description = f.read()
 
 setup(name='tornado-swagger',
       version='1.0',
@@ -12,20 +12,19 @@ setup(name='tornado-swagger',
       zip_safe=False,
       packages=['tornado_swagger'],
       package_data={
-        'tornado_swagger': [
-          'static/*.*',
-          'static/css/*.*',
-          'static/images/*.*',
-          'static/lib/*.*',
-          'static/lib/shred/*.*',
-        ]
+          'tornado_swagger': [
+              'static/*.*',
+              'static/css/*.*',
+              'static/images/*.*',
+              'static/lib/*.*',
+              'static/lib/shred/*.*'
+          ]
       },
       description='Extract swagger specs from your tornado project',
       author='Serena Feng',
       license='MIT',
       long_description=long_description,
       install_requires=[
-        'tornado>=3.1',
-        'epydoc>=0.3.1'
-      ],
-)
+          'tornado>=3.1',
+          'epydoc>=0.3.1'
+      ])
index 33c4b53..8bcb966 100644 (file)
@@ -2,8 +2,9 @@
 # -*- coding: utf-8 -*-
 from tornado.web import URLSpec, StaticFileHandler
 
-from settings import *
-from views import *
+from settings import default_settings, \
+    SWAGGER_API_DOCS, SWAGGER_API_LIST, SWAGGER_API_SPEC
+from views import SwaggerUIHandler, SwaggerResourcesHandler, SwaggerApiHandler
 
 __author__ = 'serena'
 
@@ -12,9 +13,27 @@ def swagger_handlers():
     prefix = default_settings.get('swagger_prefix', '/swagger')
     if prefix[-1] != '/':
         prefix += '/'
+
+    def _path(suffix):
+        return prefix + suffix
     return [
-        URLSpec(prefix + r'spec.html$',         SwaggerUIHandler,        default_settings, name=URL_SWAGGER_API_DOCS),
-        URLSpec(prefix + r'spec.json$',         SwaggerResourcesHandler, default_settings, name=URL_SWAGGER_API_LIST),
-        URLSpec(prefix + r'spec$',              SwaggerApiHandler,       default_settings, name=URL_SWAGGER_API_SPEC),
-        (prefix + r'(.*\.(css|png|gif|js))',    StaticFileHandler,       {'path': default_settings.get('static_path')}),
+        URLSpec(
+            _path(r'spec.html$'),
+            SwaggerUIHandler,
+            default_settings,
+            name=SWAGGER_API_DOCS),
+        URLSpec(
+            _path(r'spec.json$'),
+            SwaggerResourcesHandler,
+            default_settings,
+            name=SWAGGER_API_LIST),
+        URLSpec(
+            _path(r'spec$'),
+            SwaggerApiHandler,
+            default_settings,
+            name=SWAGGER_API_SPEC),
+        (
+            _path(r'(.*\.(css|png|gif|js))'),
+            StaticFileHandler,
+            {'path': default_settings.get('static_path')}),
     ]
index bd70c17..8f43c4a 100644 (file)
@@ -6,11 +6,12 @@ __author__ = 'serena'
 
 SWAGGER_VERSION = '1.2'
 
-URL_SWAGGER_API_DOCS = 'swagger-api-docs'
-URL_SWAGGER_API_LIST = 'swagger-api-list'
-URL_SWAGGER_API_SPEC = 'swagger-api-spec'
+SWAGGER_API_DOCS = 'swagger-api-docs'
+SWAGGER_API_LIST = 'swagger-api-list'
+SWAGGER_API_SPEC = 'swagger-api-spec'
 
-STATIC_PATH = os.path.join(os.path.dirname(os.path.normpath(__file__)), 'static')
+STATIC_PATH = os.path.join(os.path.dirname(os.path.normpath(__file__)),
+                           'static')
 
 default_settings = {
     'base_url': '/',
index 50b2cfe..b290e05 100644 (file)
@@ -128,7 +128,7 @@ class DocParser(object):
         if code is None:
             self.properties.setdefault(arg, {}).update({
                 'type': link
-           })
+            })
         elif code == 'list':
             self.properties.setdefault(arg, {}).update({
                 'type': 'array',
@@ -184,7 +184,7 @@ class model(DocParser):
         self.required = []
         self.cls = None
 
-    def __call__(self, *args, **kwargs):
+    def __call__(self, *args):
         if self.cls:
             return self.cls
 
@@ -206,17 +206,21 @@ class model(DocParser):
         argspec.args.remove("self")
         defaults = {}
         if argspec.defaults:
-            defaults = list(zip(argspec.args[-len(argspec.defaults):], argspec.defaults))
+            defaults = list(zip(argspec.args[-len(argspec.defaults):],
+                                argspec.defaults))
         required_args_count = len(argspec.args) - len(defaults)
         for arg in argspec.args[:required_args_count]:
             self.required.append(arg)
             self.properties.setdefault(arg, {'type': 'string'})
         for arg, default in defaults:
-            self.properties.setdefault(arg, {'type': 'string', "default": default})
+            self.properties.setdefault(arg, {
+                'type': 'string',
+                "default": default
+            })
 
 
 class operation(DocParser):
-    def __init__(self, nickname=None, **kwds):
+    def __init__(self, nickname='apis', **kwds):
         super(operation, self).__init__()
         self.nickname = nickname
         self.func = None
@@ -271,5 +275,11 @@ def docs(**opts):
 
 
 class Application(tornado.web.Application):
-    def __init__(self, handlers=None, default_host="", transforms=None, **settings):
-        super(Application, self).__init__(swagger_handlers() + handlers, default_host, transforms, **settings)
+    def __init__(self, handlers=None,
+                 default_host="",
+                 transforms=None,
+                 **settings):
+        super(Application, self).__init__(swagger_handlers() + handlers,
+                                          default_host,
+                                          transforms,
+                                          **settings)
index 1882f00..7624023 100644 (file)
@@ -5,13 +5,17 @@ import json
 import inspect
 import tornado.web
 import tornado.template
-from settings import SWAGGER_VERSION, URL_SWAGGER_API_LIST, URL_SWAGGER_API_SPEC, models
+from settings import SWAGGER_VERSION, \
+    SWAGGER_API_LIST, \
+    SWAGGER_API_SPEC
+from settings import models
 
 __author__ = 'serena'
 
 
 def json_dumps(obj, pretty=False):
-    return json.dumps(obj, sort_keys=True, indent=4, separators=(',', ': ')) if pretty else json.dumps(obj)
+    return json.dumps(obj, sort_keys=True, indent=4, separators=(',', ': ')) \
+        if pretty else json.dumps(obj)
 
 
 class SwaggerUIHandler(tornado.web.RequestHandler):
@@ -22,7 +26,9 @@ class SwaggerUIHandler(tornado.web.RequestHandler):
         return self.static_path
 
     def get(self):
-        discovery_url = urlparse.urljoin(self.request.full_url(), self.reverse_url(URL_SWAGGER_API_LIST))
+        discovery_url = \
+            urlparse.urljoin(self.request.full_url(),
+                             self.reverse_url(SWAGGER_API_LIST))
         self.render('index.html', discovery_url=discovery_url)
 
 
@@ -41,7 +47,7 @@ class SwaggerResourcesHandler(tornado.web.RequestHandler):
             'produces': ["application/json"],
             'description': 'Test Api Spec',
             'apis': [{
-                'path': self.reverse_url(URL_SWAGGER_API_SPEC),
+                'path': self.reverse_url(SWAGGER_API_SPEC),
                 'description': 'Test Api Spec'
             }]
         }
@@ -60,11 +66,14 @@ class SwaggerApiHandler(tornado.web.RequestHandler):
         if apis is None:
             raise tornado.web.HTTPError(404)
 
+        base_path = urlparse.urljoin(self.request.full_url(),
+                                     self.base_url)[:-1]
         specs = {
             'apiVersion': self.api_version,
             'swaggerVersion': SWAGGER_VERSION,
-            'basePath': urlparse.urljoin(self.request.full_url(), self.base_url)[:-1],
-            'apis': [self.__get_api_spec__(path, spec, operations) for path, spec, operations in apis],
+            'basePath': base_path,
+            'apis': [self.__get_api_spec__(path, spec, operations)
+                     for path, spec, operations in apis],
             'models': self.__get_models_spec(models)
         }
         self.finish(json_dumps(specs, self.get_arguments('pretty')))
@@ -103,14 +112,19 @@ class SwaggerApiHandler(tornado.web.RequestHandler):
 
     @staticmethod
     def find_api(host_handlers):
+        def get_path(url, args):
+            return url % tuple(['{%s}' % arg for arg in args])
+
+        def get_operations(cls):
+            return [member.rest_api
+                    for (_, member) in inspect.getmembers(cls)
+                    if hasattr(member, 'rest_api')]
+
         for host, handlers in host_handlers:
             for spec in handlers:
-                for (name, member) in inspect.getmembers(spec.handler_class):
-                    if inspect.ismethod(member) and hasattr(member, 'rest_api'):
-                        spec_path = spec._path % tuple(['{%s}' % arg for arg in member.rest_api.func_args])
-                        operations = [member.rest_api for (name, member) in inspect.getmembers(spec.handler_class)
-                                      if hasattr(member, 'rest_api')]
-                        yield spec_path, spec, operations
+                for (_, mbr) in inspect.getmembers(spec.handler_class):
+                    if inspect.ismethod(mbr) and hasattr(mbr, 'rest_api'):
+                        path = get_path(spec._path, mbr.rest_api.func_args)
+                        operations = get_operations(spec.handler_class)
+                        yield path, spec, operations
                         break
-
-