Merge "Modify YARDSTICK_DB_BACKEND value on zte-pod1."
authormei mei <meimei@huawei.com>
Thu, 19 May 2016 03:27:55 +0000 (03:27 +0000)
committerGerrit Code Review <gerrit@172.30.200.206>
Thu, 19 May 2016 03:27:55 +0000 (03:27 +0000)
24 files changed:
jjb/apex/apex.yml
jjb/armband/armband-ci-jobs.yml
jjb/armband/armband-deploy.sh
jjb/armband/armband-download-artifact.sh
jjb/fuel/fuel-ci-jobs.yml
jjb/fuel/fuel-deploy.sh
jjb/functest/functest-ci-jobs.yml
jjb/joid/joid-deploy.sh
jjb/netready/netready.yml [new file with mode: 0644]
jjb/opnfv/opnfv-docker.sh
jjb/opnfv/opnfv-docker.yml
utils/test/result_collection_api/README.md [new file with mode: 0644]
utils/test/result_collection_api/common/constants.py
utils/test/result_collection_api/resources/handlers.py
utils/test/result_collection_api/resources/models.py
utils/test/result_collection_api/resources/pod_models.py [new file with mode: 0644]
utils/test/result_collection_api/run_test.sh [new file with mode: 0755]
utils/test/result_collection_api/tests/__init__.py [new file with mode: 0644]
utils/test/result_collection_api/tests/unit/__init__.py [new file with mode: 0644]
utils/test/result_collection_api/tests/unit/fake_pymongo.py [new file with mode: 0644]
utils/test/result_collection_api/tests/unit/test_base.py [new file with mode: 0644]
utils/test/result_collection_api/tests/unit/test_fake_pymongo.py [new file with mode: 0644]
utils/test/result_collection_api/tests/unit/test_pod.py [new file with mode: 0644]
utils/test/result_collection_api/tests/unit/test_version.py [new file with mode: 0644]

index 282ae7b..ce673b2 100644 (file)
     properties:
         - build-blocker:
             use-build-blocker: true
+            block-level: 'NODE'
             blocking-jobs:
                 - 'apex-daily.*{stream1}'
                 - 'apex-deploy.*{stream1}'
                 - 'apex-build.*{stream1}'
                 - 'apex-runner.*{stream1}'
                 - 'apex-verify-{stream1}'
+        - throttle:
+            max-per-node: 1
 
     builders:
         - 'apex-build'
             block: true
             same-node: true
 #        - trigger-builds:
-#          - project: 'functest-apex-{slave}-suite-{stream1}'
+#          - project: 'functest-apex-{verify-slave}-suite-{stream1}'
 #            predefined-parameters: |
 #              DEPLOY_SCENARIO=os-nosdn-nofeature-ha
 #              FUNCTEST_SUITE_NAME=vping_userdata
 #            block: true
+#            same-node: true
         - trigger-builds:
           - project: 'apex-deploy-virtual-os-odl_l2-nofeature-ha-{stream1}'
             predefined-parameters: |
     # Required Variables:
     #     stream:    branch with - in place of / (eg. stable)
     #     branch:    branch (eg. stable)
-    node: '{slave}'
+    node: '{daily-slave}'
 
     disabled: false
 
     properties:
         - build-blocker:
             use-build-blocker: true
+            block-level: 'NODE'
             blocking-jobs:
                 - 'apex-deploy.*{stream}'
+        - throttle:
+            max-per-node: 1
 
     builders:
         - 'apex-build'
               BUILD_DIRECTORY=apex-build-{stream}/build
               OPNFV_CLEAN=yes
             git-revision: false
+            same-node: true
             block: true
         - 'apex-upload-artifact'
 
     properties:
         - build-blocker:
             use-build-blocker: true
+            block-level: 'NODE'
             blocking-jobs:
                 - 'apex-deploy.*{stream}'
+        - throttle:
+            max-per-node: 1
 
     builders:
         - 'apex-deploy-virtual'
     properties:
         - build-blocker:
             use-build-blocker: true
+            block-level: 'NODE'
             blocking-jobs:
                 - 'apex-verify.*{stream1}'
                 - 'apex-deploy.*{stream1}'
           - project: 'apex-build-{stream1}'
             git-revision: true
             current-parameters: true
+            same-node: true
             block: true
         - trigger-builds:
           - project: 'apex-deploy-virtual-nosdn-nofeature-ha-{stream1}'
index 5ae8a04..bbf7c40 100644 (file)
 - trigger:
     name: 'armband-os-odl_l2-nofeature-ha-arm-pod1-brahmaputra-trigger'
     triggers:
-        - timed: '0 4 * * *'
+        - timed: '0 20 * * *'
index 97430c1..8b0af31 100755 (executable)
@@ -39,10 +39,10 @@ POD_NAME=${NODE_NAME/*-}
 if [[ ! $LAB_NAME =~ (arm|enea) ]]; then
     echo "Unsupported/unidentified lab $LAB_NAME. Cannot continue!"
     exit 1
-else
-    echo "Using configuration for $LAB_NAME"
 fi
 
+echo "Using configuration for $LAB_NAME"
+
 # create TMPDIR if it doesn't exist
 mkdir -p $TMPDIR
 
@@ -52,8 +52,11 @@ if [[ $LAB_CONFIG_URL =~ ^git:// ]]; then
     LAB_CONFIG_URL=file://${WORKSPACE}/lab-config
 fi
 
+# releng wants us to use nothing else but opnfv.iso for now. We comply.
+ISO_FILE=$WORKSPACE/opnfv.iso
+
 # construct the command
-DEPLOY_COMMAND="$WORKSPACE/ci/deploy.sh -b ${LAB_CONFIG_URL} -l $LAB_NAME -p $POD_NAME -s $DEPLOY_SCENARIO -i file://$WORKSPACE/opnfv.iso -H -B $BRIDGE -S $TMPDIR"
+DEPLOY_COMMAND="$WORKSPACE/ci/deploy.sh -l $LAB_NAME -p $POD_NAME -b ${LAB_CONFIG_URL} -s $DEPLOY_SCENARIO -i file://${ISO_FILE} -H -B $BRIDGE -S $TMPDIR"
 
 # log info to console
 echo "Deployment parameters"
@@ -72,9 +75,6 @@ echo "Issuing command"
 echo "$DEPLOY_COMMAND"
 echo
 
-# FIXME
-export TARGET_LAB=${LAB_NAME}
-export TARGET_POD=${POD_NAME}
 $DEPLOY_COMMAND
 
 echo
index 15ad67d..18b55d7 100755 (executable)
@@ -13,35 +13,33 @@ set -o pipefail
 if [[ "$JOB_NAME" =~ "merge" ]]; then
     echo "Downloading http://$GS_URL/opnfv-gerrit-$GERRIT_CHANGE_NUMBER.properties"
     # get the properties file for the Armband Fuel ISO built for a merged change
-    curl -s -o $WORKSPACE/latest.properties http://$GS_URL/opnfv-gerrit-$GERRIT_CHANGE_NUMBER.properties
+    curl -f -s -o $WORKSPACE/latest.properties http://$GS_URL/opnfv-gerrit-$GERRIT_CHANGE_NUMBER.properties
 else
     # get the latest.properties file in order to get info regarding latest artifact
     echo "Downloading http://$GS_URL/latest.properties"
-    curl -s -o $WORKSPACE/latest.properties http://$GS_URL/latest.properties
+    curl -f -s -o $WORKSPACE/latest.properties http://$GS_URL/latest.properties
 fi
 
-# check if we got the file
-# FIXME: the file is created even if it didn't exist on the host
-#        We should check that the contents are sane
-[[ -f latest.properties ]] || exit 1
-
-# source the file so we get artifact metadata
+# source the file so we get artifact metadata, it will exit if it doesn't exist
 source latest.properties
 
 # echo the info about artifact that is used during the deployment
 OPNFV_ARTIFACT=${OPNFV_ARTIFACT_URL/*\/}
 echo "Using $OPNFV_ARTIFACT for deployment"
 
+# Releng doesn't want us to use anything but opnfv.iso for now. We comply.
+ISO_FILE=${WORKSPACE}/opnfv.iso
+
 # using ISOs for verify & merge jobs from local storage will be enabled later
 if [[ ! "$JOB_NAME" =~ (verify|merge) ]]; then
     # check if we already have the ISO to avoid redownload
     ISOSTORE="/iso_mount/opnfv_ci/${GIT_BRANCH##*/}"
     if [[ -f "$ISOSTORE/$OPNFV_ARTIFACT" ]]; then
         echo "ISO exists locally. Skipping the download and using the file from ISO store"
-        ln -s $ISOSTORE/$OPNFV_ARTIFACT $WORKSPACE/opnfv.iso
+        ln -s $ISOSTORE/$OPNFV_ARTIFACT ${ISO_FILE}
         echo "--------------------------------------------------------"
         echo
-        ls -al $WORKSPACE/opnfv.iso
+        ls -al ${ISO_FILE}
         echo
         echo "--------------------------------------------------------"
         echo "Done!"
@@ -49,18 +47,22 @@ if [[ ! "$JOB_NAME" =~ (verify|merge) ]]; then
     fi
 fi
 
+# Use gsutils if available
+if which gsutil &>/dev/null; then
+    DOWNLOAD_URL="gs://$OPNFV_ARTIFACT_URL"
+    CMD="gsutil cp ${DOWNLOAD_URL} ${ISO_FILE}"
+else
+    # download image
+    # -f returns error if the file was not found or on server error
+    DOWNLOAD_URL="http://$OPNFV_ARTIFACT_URL"
+    CMD="curl -f -s -o ${ISO_FILE} ${DOWNLOAD_URL}"
+fi
+
 # log info to console
-echo "Downloading the $INSTALLER_TYPE artifact using URL http://$OPNFV_ARTIFACT_URL"
+echo "Downloading the $INSTALLER_TYPE artifact using URL $DOWNLOAD_URL"
 echo "This could take some time..."
 echo "--------------------------------------------------------"
-echo
-
-# download the file
-curl -s -o $WORKSPACE/opnfv.iso http://$OPNFV_ARTIFACT_URL
-
-# The file is always created, check that it is in fact an ISO image
-[[ $(file $WORKSPACE/opnfv.iso) =~ ISO ]]
-
-echo
+echo "$CMD"
+$CMD
 echo "--------------------------------------------------------"
 echo "Done!"
index 77b711e..acfceda 100644 (file)
@@ -43,6 +43,8 @@
             <<: *master
         - virtual:
             <<: *master
+        - zte-pod1:
+            <<: *master
 #--------------------------------
 #       scenarios
 #--------------------------------
     name: 'fuel-os-nosdn-kvm-noha-ericsson-pod2-brahmaputra-trigger'
     triggers:
         - timed: ''
+
+#-----------------------------------------------
+# ZTE POD1 Triggers running against master branch
+#-----------------------------------------------
+- trigger:
+    name: 'fuel-os-nosdn-nofeature-ha-zte-pod1-master-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'fuel-os-odl_l2-nofeature-ha-zte-pod1-master-trigger'
+    triggers:
+        - timed: '0 12 * * *'
+- trigger:
+    name: 'fuel-os-odl_l3-nofeature-ha-zte-pod1-master-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'fuel-os-onos-nofeature-ha-zte-pod1-master-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'fuel-os-odl_l2-bgpvpn-ha-zte-pod1-master-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'fuel-os-odl_l2-sfc-ha-zte-pod1-master-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'fuel-os-nosdn-kvm-ha-zte-pod1-master-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'fuel-os-nosdn-ovs-ha-zte-pod1-master-trigger'
+    triggers:
+        - timed: ''
 #-----------------------------------------------
 # Triggers for other PODs
 #-----------------------------------------------
index bab6151..ceccc54 100755 (executable)
@@ -43,8 +43,8 @@ if [[ "$NODE_NAME" =~ "virtual" ]]; then
     POD_NAME="virtual_kvm"
 fi
 
-# we currently support ericsson, intel, and lf labs
-if [[ ! "$LAB_NAME" =~ (ericsson|intel|lf) ]]; then
+# we currently support ericsson, intel, lf and zte labs
+if [[ ! "$LAB_NAME" =~ (ericsson|intel|lf|zte) ]]; then
     echo "Unsupported/unidentified lab $LAB_NAME. Cannot continue!"
     exit 1
 else
index 6cfcfdc..cd3f11e 100644 (file)
     builders:
         - 'functest-cleanup'
         - 'set-functest-env'
-        - 'functest-all'
+        - 'functest-daily'
         - 'functest-store-results'
 
 - builder:
     name: functest-suite-builder
+    builders:
+        - 'functest-cleanup'
+        - 'set-functest-env'
+        - 'functest-suite'
+
+
+- builder:
+    name: functest-suite
     builders:
         - shell: |
             #!/bin/bash
             container_id=$(docker ps -a | grep opnfv/functest | awk '{print $1}' | head -1)
             docker exec $container_id $cmd
 
+
 - builder:
-    name: functest-all
+    name: functest-daily
     builders:
         - shell: |
             #!/bin/bash
index 907db4f..bcc4b64 100644 (file)
@@ -181,10 +181,17 @@ cat << EOF > $JOID_ADMIN_OPENRC
 export OS_USERNAME=admin
 export OS_PASSWORD=$OS_ADMIN_PASSWORD
 export OS_TENANT_NAME=admin
-export OS_AUTH_URL=http://$KEYSTONE:5000/v2.0
+export OS_AUTH_URL=http://$KEYSTONE:35357/v2.0
 export OS_REGION_NAME=Canonical
+export OS_ENDPOINT_TYPE='adminURL'
+export CINDER_ENDPOINT_TYPE='adminURL'
+export GLANCE_ENDPOINT_TYPE='adminURL'
+export KEYSTONE_ENDPOINT_TYPE='adminURL'
+export NEUTRON_ENDPOINT_TYPE='adminURL'
+export NOVA_ENDPOINT_TYPE='adminURL'
 export SDN_CONTROLLER=$SDN_CONTROLLER_IP
 export SDN_PASSWORD=$SDN_PASSWORD
+export OS_INTERFACE=admin
 EOF
 
 ##
diff --git a/jjb/netready/netready.yml b/jjb/netready/netready.yml
new file mode 100644 (file)
index 0000000..bc8f666
--- /dev/null
@@ -0,0 +1,55 @@
+- project:
+    name: netready
+
+    project: '{name}'
+
+    jobs:
+        - 'netready-verify-{stream}'
+
+    stream:
+        - master:
+            branch: '{stream}'
+            gs-pathname: ''
+        - brahmaputra:
+            branch: 'stable/{stream}'
+            gs-pathname: '/{stream}'
+
+- job-template:
+    name: 'netready-verify-{stream}'
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+        - gerrit-parameter:
+            branch: '{branch}'
+    scm:
+        - gerrit-trigger-scm:
+            credentials-id: '{ssh-credentials}'
+            refspec: '$GERRIT_REFSPEC'
+            choosing-strategy: 'gerrit'
+
+    triggers:
+        - gerrit:
+            trigger-on:
+                - patchset-created-event:
+                    exclude-drafts: 'false'
+                    exclude-trivial-rebase: 'false'
+                    exclude-no-code-change: 'false'
+                - draft-published-event
+                - comment-added-contains-event:
+                    comment-contains-value: 'recheck'
+                - comment-added-contains-event:
+                    comment-contains-value: 'reverify'
+            projects:
+              - project-compare-type: 'ANT'
+                project-pattern: '{project}'
+                branches:
+                  - branch-compare-type: 'ANT'
+                    branch-pattern: '**/{branch}'
+                forbidden-file-paths:
+                  - compare-type: ANT
+                    pattern: 'docs/**'
+
+    builders:
+        - shell: |
+            echo "Nothing to verify!"
index 702c0ce..1093d5f 100644 (file)
@@ -67,16 +67,18 @@ fi
 
 
 # cd to directory where Dockerfile is located
-if [[ "$DOCKER_REPO_NAME" == "opnfv/functest" ]]; then
+if [[ "$DOCKER_REPO_NAME" == "opnfv/bottlenecks" ]]; then
+    cd $WORKSPACE/ci/docker
+elif [[ "$DOCKER_REPO_NAME" == "opnfv/cperf" ]]; then
     cd $WORKSPACE/docker
-elif [[ "$DOCKER_REPO_NAME" == "opnfv/yardstick" ]]; then
-    cd $WORKSPACE/ci/docker/yardstick-ci
-elif [[ "$DOCKER_REPO_NAME" == "opnfv/storperf" ]]; then
+elif [[ "$DOCKER_REPO_NAME" == "opnfv/functest" ]]; then
     cd $WORKSPACE/docker
 elif [[ "$DOCKER_REPO_NAME" == "opnfv/qtip" ]]; then
     cd $WORKSPACE/docker
-elif [[ "$DOCKER_REPO_NAME" == "opnfv/bottlenecks" ]]; then
-    cd $WORKSPACE/ci/docker
+elif [[ "$DOCKER_REPO_NAME" == "opnfv/storperf" ]]; then
+    cd $WORKSPACE/docker
+elif [[ "$DOCKER_REPO_NAME" == "opnfv/yardstick" ]]; then
+    cd $WORKSPACE/ci/docker/yardstick-ci
 else
     echo "ERROR: DOCKER_REPO_NAME parameter not valid: $DOCKER_REPO_NAME"
     exit 1
index 936f220..6b49242 100644 (file)
@@ -8,6 +8,7 @@
 
     project:
         - 'bottlenecks'
+        - 'cperf'
         - 'functest'
         - 'storperf'
         - 'qtip'
diff --git a/utils/test/result_collection_api/README.md b/utils/test/result_collection_api/README.md
new file mode 100644 (file)
index 0000000..d73274c
--- /dev/null
@@ -0,0 +1,16 @@
+# result_collection_api
+
+## prepare:
+Install:
+
+```
+pip install testtools
+pip install discover
+```
+
+## How to:
+run_test.sh:
+
+```
+bash ./run_test.sh
+```
index 2c825c1..4d39a14 100644 (file)
@@ -12,3 +12,4 @@ DEFAULT_REPRESENTATION = "application/json"
 HTTP_BAD_REQUEST = 400
 HTTP_FORBIDDEN = 403
 HTTP_NOT_FOUND = 404
+HTTP_OK = 200
index c1e8eb1..fff1662 100644 (file)
@@ -13,7 +13,8 @@ from tornado.web import RequestHandler, asynchronous, HTTPError
 from tornado import gen
 from datetime import datetime, timedelta
 
-from models import Pod, TestProject, TestCase, TestResult
+from models import TestProject, TestCase, TestResult
+from resources.pod_models import Pod
 from common.constants import DEFAULT_REPRESENTATION, HTTP_BAD_REQUEST, \
     HTTP_NOT_FOUND, HTTP_FORBIDDEN
 from common.config import prepare_put_request
index 06e95f9..adf6842 100644 (file)
@@ -5,47 +5,41 @@
 # are made available under the terms of the Apache License, Version 2.0\r
 # which accompanies this distribution, and is available at\r
 # http://www.apache.org/licenses/LICENSE-2.0\r
+# feng.xiaowei@zte.com.cn  mv Pod to pod_models.py                 6-18-2016\r
+# feng.xiaowei@zte.com.cn  add MetaCreateResponse/MetaGetResponse  6-18-2016\r
 ##############################################################################\r
 \r
 \r
-class Pod:\r
-    """ describes a POD platform """\r
-    def __init__(self):\r
-        self._id = ""\r
-        self.name = ""\r
-        self.creation_date = ""\r
-        self.mode = ""\r
-        self.details = ""\r
+class MetaCreateResponse(object):\r
+    def __init__(self, success=True, uri=''):\r
+        self.success = success\r
+        self.uri = uri\r
 \r
     @staticmethod\r
-    def pod_from_dict(pod_dict):\r
-        if pod_dict is None:\r
+    def from_dict(meta_dict):\r
+        if meta_dict is None:\r
             return None\r
 \r
-        p = Pod()\r
-        p._id = pod_dict.get('_id')\r
-        p.creation_date = str(pod_dict.get('creation_date'))\r
-        p.name = pod_dict.get('name')\r
-        p.mode = pod_dict.get('mode')\r
-        p.details = pod_dict.get('details')\r
-        return p\r
+        meta = MetaCreateResponse()\r
+        meta.success = meta_dict.get('success')\r
+        meta.uri = meta_dict.get('uri')\r
+        return meta\r
 \r
-    def format(self):\r
-        return {\r
-            "name": self.name,\r
-            "mode": self.mode,\r
-            "details": self.details,\r
-            "creation_date": str(self.creation_date),\r
-        }\r
 \r
-    def format_http(self):\r
-        return {\r
-            "_id": str(self._id),\r
-            "name": self.name,\r
-            "mode": self.mode,\r
-            "details": self.details,\r
-            "creation_date": str(self.creation_date),\r
-        }\r
+class MetaGetResponse(object):\r
+    def __init__(self, success=True, total=0):\r
+        self.success = success\r
+        self.total = total\r
+\r
+    @staticmethod\r
+    def from_dict(meta_dict):\r
+        if meta_dict is None:\r
+            return None\r
+\r
+        meta = MetaGetResponse()\r
+        meta.success = meta_dict.get('success')\r
+        meta.total = meta_dict.get('total')\r
+        return meta\r
 \r
 \r
 class TestProject:\r
diff --git a/utils/test/result_collection_api/resources/pod_models.py b/utils/test/result_collection_api/resources/pod_models.py
new file mode 100644 (file)
index 0000000..5c4ef72
--- /dev/null
@@ -0,0 +1,108 @@
+from models import MetaCreateResponse, MetaGetResponse
+
+
+class PodCreateRequest(object):
+    def __init__(self, name='', mode='', details=''):
+        self.name = name
+        self.mode = mode
+        self.details = details
+
+    def format(self):
+        return {
+            "name": self.name,
+            "mode": self.mode,
+            "details": self.details,
+        }
+
+    @staticmethod
+    def from_dict(req_dict):
+        if req_dict is None:
+            return None
+
+        req = PodCreateRequest()
+        req.name = req_dict.get('name')
+        req.mode = req_dict.get('mode')
+        req.details = req_dict.get('details')
+        return req
+
+
+class Pod(PodCreateRequest):
+    """ describes a POD platform """
+    def __init__(self, name='', mode='', details='', _id='', create_date=''):
+        super(Pod, self).__init__(name, mode, details)
+        self._id = _id
+        self.creation_date = create_date
+
+    @staticmethod
+    def pod_from_dict(pod_dict):
+        if pod_dict is None:
+            return None
+
+        p = Pod()
+        p._id = pod_dict.get('_id')
+        p.creation_date = str(pod_dict.get('creation_date'))
+        p.name = pod_dict.get('name')
+        p.mode = pod_dict.get('mode')
+        p.details = pod_dict.get('details')
+        return p
+
+    def format(self):
+        f = super(Pod, self).format()
+        f['creation_date'] = str(self.creation_date)
+        return f
+
+    def format_http(self):
+        f = self.format()
+        f['_id'] = str(self._id)
+        return f
+
+
+class PodCreateResponse(object):
+    def __init__(self, pod=None, meta=None):
+        self.pod = pod
+        self.meta = meta
+
+    @staticmethod
+    def from_dict(res_dict):
+        if res_dict is None:
+            return None
+
+        res = PodCreateResponse()
+        res.pod = Pod.pod_from_dict(res_dict.get('pod'))
+        res.meta = MetaCreateResponse.from_dict(res_dict.get('meta'))
+        return res
+
+
+class PodGetResponse(PodCreateRequest):
+    def __init__(self, name='', mode='', details='', create_date=''):
+        self.creation_date = create_date
+        super(PodGetResponse, self).__init__(name, mode, details)
+
+    @staticmethod
+    def from_dict(req_dict):
+        if req_dict is None:
+            return None
+
+        res = PodGetResponse()
+        res.creation_date = str(req_dict.get('creation_date'))
+        res.name = req_dict.get('name')
+        res.mode = req_dict.get('mode')
+        res.details = req_dict.get('details')
+        return res
+
+
+class PodsGetResponse(object):
+    def __init__(self, pods=None, meta=None):
+        self.pods = [] if pods is None else pods
+        self.meta = meta
+
+    @staticmethod
+    def from_dict(res_dict):
+        if res_dict is None:
+            return None
+
+        res = PodsGetResponse()
+        for pod in res_dict.get('pods'):
+            res.pods.append(PodGetResponse.from_dict(pod))
+        res.meta = MetaGetResponse.from_dict(res_dict.get('meta'))
+        return res
diff --git a/utils/test/result_collection_api/run_test.sh b/utils/test/result_collection_api/run_test.sh
new file mode 100755 (executable)
index 0000000..6006fcf
--- /dev/null
@@ -0,0 +1,10 @@
+#! /bin/bash
+
+# Before run this script, make sure that testtools and discover
+# had been installed in your env
+# or else using pip to install them as follows:
+# pip install testtools discover
+
+find . -type f -name "*.pyc" -delete
+testrargs="discover ./tests/unit"
+python -m testtools.run $testrargs
\ No newline at end of file
diff --git a/utils/test/result_collection_api/tests/__init__.py b/utils/test/result_collection_api/tests/__init__.py
new file mode 100644 (file)
index 0000000..3ed9fd0
--- /dev/null
@@ -0,0 +1 @@
+__author__ = 'root'
diff --git a/utils/test/result_collection_api/tests/unit/__init__.py b/utils/test/result_collection_api/tests/unit/__init__.py
new file mode 100644 (file)
index 0000000..3ed9fd0
--- /dev/null
@@ -0,0 +1 @@
+__author__ = 'root'
diff --git a/utils/test/result_collection_api/tests/unit/fake_pymongo.py b/utils/test/result_collection_api/tests/unit/fake_pymongo.py
new file mode 100644 (file)
index 0000000..e5ded37
--- /dev/null
@@ -0,0 +1,132 @@
+from bson.objectid import ObjectId
+from concurrent.futures import ThreadPoolExecutor
+
+__author__ = 'serena'
+
+
+class MemCursor(object):
+    def __init__(self, collection):
+        self.collection = collection
+        self.count = len(self.collection)
+
+    def _is_next_exist(self):
+        return self.count != 0
+
+    @property
+    def fetch_next(self):
+        with ThreadPoolExecutor(max_workers=2) as executor:
+            result = executor.submit(self._is_next_exist)
+        return result
+
+    def next_object(self):
+        self.count -= 1
+        return self.collection.pop()
+
+
+class MemDb(object):
+
+    def __init__(self):
+        self.contents = []
+        pass
+
+    def _find_one(self, spec_or_id=None, *args):
+        if spec_or_id is not None and not isinstance(spec_or_id, dict):
+            spec_or_id = {"_id": spec_or_id}
+        cursor = self._find(spec_or_id, *args)
+        for result in cursor:
+            return result
+        return None
+
+    def find_one(self, spec_or_id=None, *args):
+        with ThreadPoolExecutor(max_workers=2) as executor:
+            result = executor.submit(self._find_one, spec_or_id, *args)
+        return result
+
+    def _insert(self, doc_or_docs):
+
+        docs = doc_or_docs
+        return_one = False
+        if isinstance(docs, dict):
+            return_one = True
+            docs = [docs]
+
+        ids = []
+        for doc in docs:
+            if '_id' not in doc:
+                doc['_id'] = ObjectId()
+            if not self._find_one(doc['_id']):
+                ids.append(doc['_id'])
+                self.contents.append(doc)
+
+        if len(ids) == 0:
+            return None
+        if return_one:
+            return ids[0]
+        else:
+            return ids
+
+    def insert(self, doc_or_docs):
+        with ThreadPoolExecutor(max_workers=2) as executor:
+            result = executor.submit(self._insert, doc_or_docs)
+        return result
+
+    @staticmethod
+    def _in(content, *args):
+        for arg in args:
+            for k, v in arg.iteritems():
+                if content.get(k, None) != v:
+                    return False
+
+        return True
+
+    def _find(self, *args):
+        res = []
+        for content in self.contents:
+            if self._in(content, *args):
+                res.append(content)
+
+        return res
+
+    def find(self, *args):
+        return MemCursor(self._find(*args))
+
+    def _update(self, spec, document):
+        updated = False
+        for index in range(len(self.contents)):
+            content = self.contents[index]
+            if self._in(content, spec):
+                for k, v in document.iteritems():
+                    updated = True
+                    content[k] = v
+            self.contents[index] = content
+        return updated
+
+    def update(self, spec, document):
+        with ThreadPoolExecutor(max_workers=2) as executor:
+            result = executor.submit(self._update, spec, document)
+        return result
+
+    def _remove(self, spec_or_id=None):
+        if spec_or_id is None:
+            self.contents = []
+        if not isinstance(spec_or_id, dict):
+            spec_or_id = {'_id': spec_or_id}
+        for index in range(len(self.contents)):
+            content = self.contents[index]
+            if self._in(content, spec_or_id):
+                del self.contents[index]
+                return True
+        return False
+
+    def remove(self, spec_or_id=None):
+        with ThreadPoolExecutor(max_workers=2) as executor:
+            result = executor.submit(self._remove, spec_or_id)
+        return result
+
+    def clear(self):
+        self._remove()
+
+pod = MemDb()
+test_projects = MemDb()
+test_cases = MemDb()
+test_results = MemDb()
diff --git a/utils/test/result_collection_api/tests/unit/test_base.py b/utils/test/result_collection_api/tests/unit/test_base.py
new file mode 100644 (file)
index 0000000..98190fb
--- /dev/null
@@ -0,0 +1,54 @@
+import json
+from tornado.web import Application
+from tornado.testing import AsyncHTTPTestCase
+
+from resources.handlers import VersionHandler, PodHandler, \
+    TestProjectHandler, TestCasesHandler, TestResultsHandler, DashboardHandler
+import fake_pymongo
+
+
+class TestBase(AsyncHTTPTestCase):
+    headers = {'Content-Type': 'application/json; charset=UTF-8'}
+
+    def setUp(self):
+        self.addCleanup(self._clear)
+        super(TestBase, self).setUp()
+
+    def get_app(self):
+        return Application(
+            [
+                (r"/version", VersionHandler),
+                (r"/pods", PodHandler),
+                (r"/pods/([^/]+)", PodHandler),
+                (r"/test_projects", TestProjectHandler),
+                (r"/test_projects/([^/]+)", TestProjectHandler),
+                (r"/test_projects/([^/]+)/cases", TestCasesHandler),
+                (r"/test_projects/([^/]+)/cases/([^/]+)", TestCasesHandler),
+                (r"/results", TestResultsHandler),
+                (r"/results([^/]*)", TestResultsHandler),
+                (r"/results/([^/]*)", TestResultsHandler),
+                (r"/dashboard", DashboardHandler),
+                (r"/dashboard([^/]*)", DashboardHandler),
+                (r"/dashboard/([^/]*)", DashboardHandler),
+            ],
+            db=fake_pymongo,
+            debug=True,
+        )
+
+    def create(self, uri, body=None):
+        return self.fetch(uri,
+                          method='POST',
+                          body=json.dumps(body),
+                          headers=self.headers)
+
+    def get(self, uri):
+        return self.fetch(uri,
+                          method='GET',
+                          headers=self.headers)
+
+    @staticmethod
+    def _clear():
+        fake_pymongo.pod.clear()
+        fake_pymongo.test_projects.clear()
+        fake_pymongo.test_cases.clear()
+        fake_pymongo.test_results.clear()
diff --git a/utils/test/result_collection_api/tests/unit/test_fake_pymongo.py b/utils/test/result_collection_api/tests/unit/test_fake_pymongo.py
new file mode 100644 (file)
index 0000000..5ddbf28
--- /dev/null
@@ -0,0 +1,52 @@
+import unittest
+from tornado.web import Application
+from tornado import gen
+from tornado.testing import AsyncHTTPTestCase, gen_test
+
+import fake_pymongo
+
+
+class MyTest(AsyncHTTPTestCase):
+    def setUp(self):
+        super(MyTest, self).setUp()
+        self.db = fake_pymongo
+        self.io_loop.run_sync(self.fixture_setup)
+
+    def get_app(self):
+        return Application()
+
+    @gen.coroutine
+    def fixture_setup(self):
+        self.test1 = {'_id': '1', 'name': 'test1'}
+        self.test2 = {'name': 'test2'}
+        yield self.db.pod.insert({'_id': '1', 'name': 'test1'})
+        yield self.db.pod.insert({'name': 'test2'})
+
+    @gen_test
+    def test_find_one(self):
+        user = yield self.db.pod.find_one({'name': 'test1'})
+        self.assertEqual(user, self.test1)
+
+    @gen_test
+    def test_find(self):
+        cursor = self.db.pod.find()
+        names = []
+        while (yield cursor.fetch_next):
+            ob = cursor.next_object()
+            names.append(ob.get('name'))
+        self.assertItemsEqual(names, ['test1', 'test2'])
+
+    @gen_test
+    def test_update(self):
+        yield self.db.pod.update({'_id': '1'}, {'name': 'new_test1'})
+        user = yield self.db.pod.find_one({'_id': '1'})
+        self.assertEqual(user.get('name', None), 'new_test1')
+
+    @gen_test
+    def test_remove(self):
+        yield self.db.pod.remove({'_id': '1'})
+        user = yield self.db.pod.find_one({'_id': '1'})
+        self.assertIsNone(user)
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/utils/test/result_collection_api/tests/unit/test_pod.py b/utils/test/result_collection_api/tests/unit/test_pod.py
new file mode 100644 (file)
index 0000000..5a3d485
--- /dev/null
@@ -0,0 +1,86 @@
+import unittest
+import json
+
+from test_base import TestBase
+from resources.pod_models import PodCreateRequest, \
+    PodCreateResponse, PodsGetResponse
+from common.constants import HTTP_OK, HTTP_BAD_REQUEST, HTTP_FORBIDDEN
+
+
+class TestPodCreate(TestBase):
+    req = PodCreateRequest(name='zte-1', mode='alive', details='zte pod 1')
+
+    def test_withoutBody(self):
+        res = self.create('/pods', body=None)
+        self.assertEqual(res.code, HTTP_BAD_REQUEST)
+
+    def test_success(self):
+        res = self.create('/pods', body=self.req.format())
+        self.assertEqual(res.code, HTTP_OK)
+        res_body = PodCreateResponse.from_dict(json.loads(res.body))
+        self._assertMeta(res_body.meta, True)
+        self._assertBody(res_body.pod)
+
+    def test_alreadyExist(self):
+        self.create('/pods', body=self.req.format())
+        res = self.create('/pods', body=self.req.format())
+        self.assertEqual(res.code, HTTP_FORBIDDEN)
+        self.assertIn('already exists', res.body)
+
+    def _assertMeta(self, meta, success):
+        self.assertEqual(meta.success, success)
+        if success:
+            self.assertEqual(meta.uri, '/pods/{}'.format(self.req.name))
+
+    def _assertBody(self, res):
+        self.assertEqual(res.name, self.req.name)
+        self.assertEqual(res.mode, self.req.mode)
+        self.assertEqual(res.details, self.req.details)
+        self.assertIsNotNone(res.creation_date)
+        self.assertIsNotNone(res._id)
+
+
+class TestPodGet(TestBase):
+    def test_notExist(self):
+        res = self.get('/pods/notExist')
+        body = PodsGetResponse.from_dict(json.loads(res.body))
+        self._assertMeta(body.meta, 0)
+
+    def test_getOne(self):
+        self.create('/pods', body=TestPodCreate.req.format())
+        res = self.get('/pods/{}'.format(TestPodCreate.req.name))
+        body = PodsGetResponse.from_dict(json.loads(res.body))
+        self._assertMeta(body.meta, 1)
+        self._assertBody(TestPodCreate.req, body.pods[0])
+
+    def test_list(self):
+        req = PodCreateRequest(name='zte-2', mode='alive', details='zte pod 2')
+        self.create('/pods', body=TestPodCreate.req.format())
+        self.create('/pods', body=req.format())
+        res = self.get('/pods')
+        body = PodsGetResponse.from_dict(json.loads(res.body))
+        self._assertMeta(body.meta, 2)
+        for pod in body.pods:
+            if req.name == pod.name:
+                self._assertBody(req, pod)
+            else:
+                self._assertBody(TestPodCreate.req, pod)
+
+    def _assertMeta(self, meta, total):
+        def check_success():
+            if total == 0:
+                return False
+            else:
+                return True
+        self.assertEqual(meta.total, total)
+        self.assertEqual(meta.success, check_success())
+
+    def _assertBody(self, req, res):
+        self.assertEqual(res.name, req.name)
+        self.assertEqual(res.mode, req.mode)
+        self.assertEqual(res.details, req.details)
+        self.assertIsNotNone(res.creation_date)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/utils/test/result_collection_api/tests/unit/test_version.py b/utils/test/result_collection_api/tests/unit/test_version.py
new file mode 100644 (file)
index 0000000..918f2f0
--- /dev/null
@@ -0,0 +1,14 @@
+import unittest
+
+from test_base import TestBase
+
+__author__ = 'serena'
+
+
+class TestVersion(TestBase):
+    def test_get_version(self):
+        response = self.fetch('/version')
+        self.assertEqual(response.code, 200)
+
+if __name__ == '__main__':
+    unittest.main()