Merge "Add fuel deployment daily jenkins job on zte-pod1."
author    Fatih Degirmenci <fatih.degirmenci@ericsson.com>
          Wed, 18 May 2016 10:43:44 +0000 (10:43 +0000)
committer Gerrit Code Review <gerrit@172.30.200.206>
          Wed, 18 May 2016 10:43:44 +0000 (10:43 +0000)
16 files changed:
jjb/apex/apex.yml
jjb/armband/armband-ci-jobs.yml
jjb/armband/armband-deploy.sh
jjb/armband/armband-download-artifact.sh
jjb/joid/joid-deploy.sh
jjb/netready/netready.yml [new file with mode: 0644]
jjb/opnfv/opnfv-docker.sh
jjb/opnfv/opnfv-docker.yml
utils/test/result_collection_api/README.md [new file with mode: 0644]
utils/test/result_collection_api/run_test.sh [new file with mode: 0755]
utils/test/result_collection_api/tests/__init__.py [new file with mode: 0644]
utils/test/result_collection_api/tests/unit/__init__.py [new file with mode: 0644]
utils/test/result_collection_api/tests/unit/fake_pymongo.py [new file with mode: 0644]
utils/test/result_collection_api/tests/unit/test_base.py [new file with mode: 0644]
utils/test/result_collection_api/tests/unit/test_fake_pymongo.py [new file with mode: 0644]
utils/test/result_collection_api/tests/unit/test_version.py [new file with mode: 0644]

index 282ae7b..ba0095f 100644 (file)
             block: true
             same-node: true
 #        - trigger-builds:
-#          - project: 'functest-apex-{slave}-suite-{stream1}'
+#          - project: 'functest-apex-{verify-slave}-suite-{stream1}'
 #            predefined-parameters: |
 #              DEPLOY_SCENARIO=os-nosdn-nofeature-ha
 #              FUNCTEST_SUITE_NAME=vping_userdata
 #            block: true
+#            same-node: true
         - trigger-builds:
           - project: 'apex-deploy-virtual-os-odl_l2-nofeature-ha-{stream1}'
             predefined-parameters: |
     # Required Variables:
     #     stream:    branch with - in place of / (eg. stable)
     #     branch:    branch (eg. stable)
-    node: '{slave}'
+    node: '{daily-slave}'
 
     disabled: false
 
               BUILD_DIRECTORY=apex-build-{stream}/build
               OPNFV_CLEAN=yes
             git-revision: false
+            same-node: true
             block: true
         - 'apex-upload-artifact'
 
           - project: 'apex-build-{stream1}'
             git-revision: true
             current-parameters: true
+            same-node: true
             block: true
         - trigger-builds:
           - project: 'apex-deploy-virtual-nosdn-nofeature-ha-{stream1}'
index 5ae8a04..bbf7c40 100644 (file)
 - trigger:
     name: 'armband-os-odl_l2-nofeature-ha-arm-pod1-brahmaputra-trigger'
     triggers:
-        - timed: '0 4 * * *'
+        - timed: '0 20 * * *'
index 97430c1..8b0af31 100755 (executable)
@@ -39,10 +39,10 @@ POD_NAME=${NODE_NAME/*-}
 if [[ ! $LAB_NAME =~ (arm|enea) ]]; then
     echo "Unsupported/unidentified lab $LAB_NAME. Cannot continue!"
     exit 1
-else
-    echo "Using configuration for $LAB_NAME"
 fi
 
+echo "Using configuration for $LAB_NAME"
+
 # create TMPDIR if it doesn't exist
 mkdir -p $TMPDIR
 
@@ -52,8 +52,11 @@ if [[ $LAB_CONFIG_URL =~ ^git:// ]]; then
     LAB_CONFIG_URL=file://${WORKSPACE}/lab-config
 fi
 
+# releng wants us to use nothing else but opnfv.iso for now. We comply.
+ISO_FILE=$WORKSPACE/opnfv.iso
+
 # construct the command
-DEPLOY_COMMAND="$WORKSPACE/ci/deploy.sh -b ${LAB_CONFIG_URL} -l $LAB_NAME -p $POD_NAME -s $DEPLOY_SCENARIO -i file://$WORKSPACE/opnfv.iso -H -B $BRIDGE -S $TMPDIR"
+DEPLOY_COMMAND="$WORKSPACE/ci/deploy.sh -l $LAB_NAME -p $POD_NAME -b ${LAB_CONFIG_URL} -s $DEPLOY_SCENARIO -i file://${ISO_FILE} -H -B $BRIDGE -S $TMPDIR"
 
 # log info to console
 echo "Deployment parameters"
@@ -72,9 +75,6 @@ echo "Issuing command"
 echo "$DEPLOY_COMMAND"
 echo
 
-# FIXME
-export TARGET_LAB=${LAB_NAME}
-export TARGET_POD=${POD_NAME}
 $DEPLOY_COMMAND
 
 echo
index 15ad67d..18b55d7 100755 (executable)
@@ -13,35 +13,33 @@ set -o pipefail
 if [[ "$JOB_NAME" =~ "merge" ]]; then
     echo "Downloading http://$GS_URL/opnfv-gerrit-$GERRIT_CHANGE_NUMBER.properties"
     # get the properties file for the Armband Fuel ISO built for a merged change
-    curl -s -o $WORKSPACE/latest.properties http://$GS_URL/opnfv-gerrit-$GERRIT_CHANGE_NUMBER.properties
+    curl -f -s -o $WORKSPACE/latest.properties http://$GS_URL/opnfv-gerrit-$GERRIT_CHANGE_NUMBER.properties
 else
     # get the latest.properties file in order to get info regarding latest artifact
     echo "Downloading http://$GS_URL/latest.properties"
-    curl -s -o $WORKSPACE/latest.properties http://$GS_URL/latest.properties
+    curl -f -s -o $WORKSPACE/latest.properties http://$GS_URL/latest.properties
 fi
 
-# check if we got the file
-# FIXME: the file is created even if it didn't exist on the host
-#        We should check that the contents are sane
-[[ -f latest.properties ]] || exit 1
-
-# source the file so we get artifact metadata
+# source the file to get artifact metadata; the script will exit if the file doesn't exist
 source latest.properties
 
 # echo the info about artifact that is used during the deployment
 OPNFV_ARTIFACT=${OPNFV_ARTIFACT_URL/*\/}
 echo "Using $OPNFV_ARTIFACT for deployment"
 
+# Releng doesn't want us to use anything but opnfv.iso for now. We comply.
+ISO_FILE=${WORKSPACE}/opnfv.iso
+
 # using ISOs for verify & merge jobs from local storage will be enabled later
 if [[ ! "$JOB_NAME" =~ (verify|merge) ]]; then
     # check if we already have the ISO to avoid redownload
     ISOSTORE="/iso_mount/opnfv_ci/${GIT_BRANCH##*/}"
     if [[ -f "$ISOSTORE/$OPNFV_ARTIFACT" ]]; then
         echo "ISO exists locally. Skipping the download and using the file from ISO store"
-        ln -s $ISOSTORE/$OPNFV_ARTIFACT $WORKSPACE/opnfv.iso
+        ln -s $ISOSTORE/$OPNFV_ARTIFACT ${ISO_FILE}
         echo "--------------------------------------------------------"
         echo
-        ls -al $WORKSPACE/opnfv.iso
+        ls -al ${ISO_FILE}
         echo
         echo "--------------------------------------------------------"
         echo "Done!"
@@ -49,18 +47,22 @@ if [[ ! "$JOB_NAME" =~ (verify|merge) ]]; then
     fi
 fi
 
+# Use gsutil if available
+if which gsutil &>/dev/null; then
+    DOWNLOAD_URL="gs://$OPNFV_ARTIFACT_URL"
+    CMD="gsutil cp ${DOWNLOAD_URL} ${ISO_FILE}"
+else
+    # download the image
+    # -f makes curl fail without writing an output file on HTTP errors
+    DOWNLOAD_URL="http://$OPNFV_ARTIFACT_URL"
+    CMD="curl -f -s -o ${ISO_FILE} ${DOWNLOAD_URL}"
+fi
+
 # log info to console
-echo "Downloading the $INSTALLER_TYPE artifact using URL http://$OPNFV_ARTIFACT_URL"
+echo "Downloading the $INSTALLER_TYPE artifact using URL $DOWNLOAD_URL"
 echo "This could take some time..."
 echo "--------------------------------------------------------"
-echo
-
-# download the file
-curl -s -o $WORKSPACE/opnfv.iso http://$OPNFV_ARTIFACT_URL
-
-# The file is always created, check that it is in fact an ISO image
-[[ $(file $WORKSPACE/opnfv.iso) =~ ISO ]]
-
-echo
+echo "$CMD"
+$CMD
 echo "--------------------------------------------------------"
 echo "Done!"
index 907db4f..bcc4b64 100644 (file)
@@ -181,10 +181,17 @@ cat << EOF > $JOID_ADMIN_OPENRC
 export OS_USERNAME=admin
 export OS_PASSWORD=$OS_ADMIN_PASSWORD
 export OS_TENANT_NAME=admin
-export OS_AUTH_URL=http://$KEYSTONE:5000/v2.0
+export OS_AUTH_URL=http://$KEYSTONE:35357/v2.0
 export OS_REGION_NAME=Canonical
+export OS_ENDPOINT_TYPE='adminURL'
+export CINDER_ENDPOINT_TYPE='adminURL'
+export GLANCE_ENDPOINT_TYPE='adminURL'
+export KEYSTONE_ENDPOINT_TYPE='adminURL'
+export NEUTRON_ENDPOINT_TYPE='adminURL'
+export NOVA_ENDPOINT_TYPE='adminURL'
 export SDN_CONTROLLER=$SDN_CONTROLLER_IP
 export SDN_PASSWORD=$SDN_PASSWORD
+export OS_INTERFACE=admin
 EOF
 
 ##
diff --git a/jjb/netready/netready.yml b/jjb/netready/netready.yml
new file mode 100644 (file)
index 0000000..bc8f666
--- /dev/null
@@ -0,0 +1,55 @@
+- project:
+    name: netready
+
+    project: '{name}'
+
+    jobs:
+        - 'netready-verify-{stream}'
+
+    stream:
+        - master:
+            branch: '{stream}'
+            gs-pathname: ''
+        - brahmaputra:
+            branch: 'stable/{stream}'
+            gs-pathname: '/{stream}'
+
+- job-template:
+    name: 'netready-verify-{stream}'
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+        - gerrit-parameter:
+            branch: '{branch}'
+    scm:
+        - gerrit-trigger-scm:
+            credentials-id: '{ssh-credentials}'
+            refspec: '$GERRIT_REFSPEC'
+            choosing-strategy: 'gerrit'
+
+    triggers:
+        - gerrit:
+            trigger-on:
+                - patchset-created-event:
+                    exclude-drafts: 'false'
+                    exclude-trivial-rebase: 'false'
+                    exclude-no-code-change: 'false'
+                - draft-published-event
+                - comment-added-contains-event:
+                    comment-contains-value: 'recheck'
+                - comment-added-contains-event:
+                    comment-contains-value: 'reverify'
+            projects:
+              - project-compare-type: 'ANT'
+                project-pattern: '{project}'
+                branches:
+                  - branch-compare-type: 'ANT'
+                    branch-pattern: '**/{branch}'
+                forbidden-file-paths:
+                  - compare-type: ANT
+                    pattern: 'docs/**'
+
+    builders:
+        - shell: |
+            echo "Nothing to verify!"
index 702c0ce..1093d5f 100644 (file)
@@ -67,16 +67,18 @@ fi
 
 
 # cd to directory where Dockerfile is located
-if [[ "$DOCKER_REPO_NAME" == "opnfv/functest" ]]; then
+if [[ "$DOCKER_REPO_NAME" == "opnfv/bottlenecks" ]]; then
+    cd $WORKSPACE/ci/docker
+elif [[ "$DOCKER_REPO_NAME" == "opnfv/cperf" ]]; then
     cd $WORKSPACE/docker
-elif [[ "$DOCKER_REPO_NAME" == "opnfv/yardstick" ]]; then
-    cd $WORKSPACE/ci/docker/yardstick-ci
-elif [[ "$DOCKER_REPO_NAME" == "opnfv/storperf" ]]; then
+elif [[ "$DOCKER_REPO_NAME" == "opnfv/functest" ]]; then
     cd $WORKSPACE/docker
 elif [[ "$DOCKER_REPO_NAME" == "opnfv/qtip" ]]; then
     cd $WORKSPACE/docker
-elif [[ "$DOCKER_REPO_NAME" == "opnfv/bottlenecks" ]]; then
-    cd $WORKSPACE/ci/docker
+elif [[ "$DOCKER_REPO_NAME" == "opnfv/storeperf" ]]; then
+    cd $WORKSPACE/docker
+elif [[ "$DOCKER_REPO_NAME" == "opnfv/yardstick" ]]; then
+    cd $WORKSPACE/ci/docker/yardstick-ci
 else
     echo "ERROR: DOCKER_REPO_NAME parameter not valid: $DOCKER_REPO_NAME"
     exit 1
index 936f220..6b49242 100644 (file)
@@ -8,6 +8,7 @@
 
     project:
         - 'bottlenecks'
+        - 'cperf'
         - 'functest'
         - 'storperf'
         - 'qtip'
diff --git a/utils/test/result_collection_api/README.md b/utils/test/result_collection_api/README.md
new file mode 100644 (file)
index 0000000..d73274c
--- /dev/null
@@ -0,0 +1,16 @@
+# result_collection_api
+
+## Prepare
+Install the test dependencies:
+
+```
+pip install testtools
+pip install discover
+```
+
+## How to run
+Execute the unit tests via run_test.sh:
+
+```
+bash ./run_test.sh
+```
diff --git a/utils/test/result_collection_api/run_test.sh b/utils/test/result_collection_api/run_test.sh
new file mode 100755 (executable)
index 0000000..6006fcf
--- /dev/null
@@ -0,0 +1,10 @@
+#! /bin/bash
+
+# Before running this script, make sure that testtools and discover
+# are installed in your environment;
+# otherwise install them with pip as follows:
+# pip install testtools discover
+
+find . -type f -name "*.pyc" -delete
+testrargs="discover ./tests/unit"
+python -m testtools.run $testrargs
\ No newline at end of file
diff --git a/utils/test/result_collection_api/tests/__init__.py b/utils/test/result_collection_api/tests/__init__.py
new file mode 100644 (file)
index 0000000..3ed9fd0
--- /dev/null
@@ -0,0 +1 @@
+__author__ = 'root'
diff --git a/utils/test/result_collection_api/tests/unit/__init__.py b/utils/test/result_collection_api/tests/unit/__init__.py
new file mode 100644 (file)
index 0000000..3ed9fd0
--- /dev/null
@@ -0,0 +1 @@
+__author__ = 'root'
diff --git a/utils/test/result_collection_api/tests/unit/fake_pymongo.py b/utils/test/result_collection_api/tests/unit/fake_pymongo.py
new file mode 100644 (file)
index 0000000..e2db460
--- /dev/null
@@ -0,0 +1,129 @@
+from bson.objectid import ObjectId
+from concurrent.futures import ThreadPoolExecutor
+
+__author__ = 'serena'
+
+
+class MemCursor(object):
+    def __init__(self, collection):
+        self.collection = collection
+        self.count = len(self.collection)
+
+    def _is_next_exist(self):
+        return self.count != 0
+
+    @property
+    def fetch_next(self):
+        with ThreadPoolExecutor(max_workers=2) as executor:
+            result = executor.submit(self._is_next_exist)
+        return result
+
+    def next_object(self):
+        self.count -= 1
+        return self.collection.pop()
+
+
+class MemDb(object):
+
+    def __init__(self):
+        self.contents = []
+
+    def _find_one(self, spec_or_id=None, *args):
+        if spec_or_id is not None and not isinstance(spec_or_id, dict):
+            spec_or_id = {"_id": spec_or_id}
+        cursor = self._find(spec_or_id, *args)
+        for result in cursor:
+            return result
+        return None
+
+    def find_one(self, spec_or_id=None, *args):
+        with ThreadPoolExecutor(max_workers=2) as executor:
+            result = executor.submit(self._find_one, spec_or_id, *args)
+        return result
+
+    def _insert(self, doc_or_docs):
+
+        docs = doc_or_docs
+        return_one = False
+        if isinstance(docs, dict):
+            return_one = True
+            docs = [docs]
+
+        ids = []
+        for doc in docs:
+            if '_id' not in doc:
+                doc['_id'] = ObjectId()
+            if not self._find_one(doc['_id']):
+                ids.append(doc['_id'])
+                self.contents.append(doc)
+
+        if len(ids) == 0:
+            return None
+        if return_one:
+            return ids[0]
+        else:
+            return ids
+
+    def insert(self, doc_or_docs):
+        with ThreadPoolExecutor(max_workers=2) as executor:
+            result = executor.submit(self._insert, doc_or_docs)
+        return result
+
+    @staticmethod
+    def _in(content, *args):
+        for arg in args:
+            for k, v in arg.iteritems():
+                if content.get(k, None) != v:
+                    return False
+
+        return True
+
+    def _find(self, *args):
+        res = []
+        for content in self.contents:
+            if self._in(content, *args):
+                res.append(content)
+
+        return res
+
+    def find(self, *args):
+        return MemCursor(self._find(*args))
+
+    def _update(self, spec, document):
+        updated = False
+        for index in range(len(self.contents)):
+            content = self.contents[index]
+            if self._in(content, spec):
+                for k, v in document.iteritems():
+                    updated = True
+                    content[k] = v
+            self.contents[index] = content
+        return updated
+
+    def update(self, spec, document):
+        with ThreadPoolExecutor(max_workers=2) as executor:
+            result = executor.submit(self._update, spec, document)
+        return result
+
+    def _remove(self, spec_or_id=None):
+        if spec_or_id is None:
+            self.contents = []
+            return True
+        if not isinstance(spec_or_id, dict):
+            spec_or_id = {'_id': spec_or_id}
+        for index in range(len(self.contents)):
+            content = self.contents[index]
+            if self._in(content, spec_or_id):
+                del self.contents[index]
+                return True
+        return False
+
+    def remove(self, spec_or_id=None):
+        with ThreadPoolExecutor(max_workers=2) as executor:
+            result = executor.submit(self._remove, spec_or_id)
+        return result
+
+pod = MemDb()
+test_projects = MemDb()
+test_cases = MemDb()
+test_results = MemDb()
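
Review note: every public method on `MemDb` submits its synchronous `_`-prefixed twin to a `ThreadPoolExecutor` and returns the resulting `concurrent.futures.Future`, mimicking motor's async surface. A minimal usage sketch (assuming Tornado 4.x, which can `yield` a `concurrent.futures.Future` inside a coroutine):

```
from tornado import gen, ioloop

import fake_pymongo


@gen.coroutine
def demo():
    # insert() returns a Future that resolves to the inserted _id
    doc_id = yield fake_pymongo.pod.insert({'_id': '1', 'name': 'pod1'})
    doc = yield fake_pymongo.pod.find_one({'_id': doc_id})
    print(doc)  # {'_id': '1', 'name': 'pod1'}

ioloop.IOLoop.current().run_sync(demo)
```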
diff --git a/utils/test/result_collection_api/tests/unit/test_base.py b/utils/test/result_collection_api/tests/unit/test_base.py
new file mode 100644 (file)
index 0000000..b72436e
--- /dev/null
@@ -0,0 +1,38 @@
+from tornado.web import Application
+from tornado.testing import AsyncHTTPTestCase
+
+from resources.handlers import VersionHandler, PodHandler, \
+    TestProjectHandler, TestCasesHandler, TestResultsHandler, DashboardHandler
+import fake_pymongo
+
+
+class TestBase(AsyncHTTPTestCase):
+    def get_app(self):
+        return Application(
+            [
+                (r"/version", VersionHandler),
+                (r"/pods", PodHandler),
+                (r"/pods/([^/]+)", PodHandler),
+                (r"/test_projects", TestProjectHandler),
+                (r"/test_projects/([^/]+)", TestProjectHandler),
+                (r"/test_projects/([^/]+)/cases", TestCasesHandler),
+                (r"/test_projects/([^/]+)/cases/([^/]+)", TestCasesHandler),
+                (r"/results", TestResultsHandler),
+                (r"/results([^/]*)", TestResultsHandler),
+                (r"/results/([^/]*)", TestResultsHandler),
+                (r"/dashboard", DashboardHandler),
+                (r"/dashboard([^/]*)", DashboardHandler),
+                (r"/dashboard/([^/]*)", DashboardHandler),
+            ],
+            db=fake_pymongo,
+            debug=True,
+        )
+
+    def tearDown(self):
+        # a bare `yield` here would make tearDown a generator that never
+        # runs; resolve the fake driver's futures explicitly instead
+        fake_pymongo.pod.remove().result()
+        fake_pymongo.test_projects.remove().result()
+        fake_pymongo.test_cases.remove().result()
+        fake_pymongo.test_results.remove().result()
+        super(TestBase, self).tearDown()
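
Review note: the `db=fake_pymongo` keyword passed to `Application` lands in `Application.settings`, which is presumably how the real handlers in `resources.handlers` reach the database as well. A hypothetical handler sketch (names illustrative only, not the project's actual code):

```
from tornado.web import RequestHandler


class ExamplePodHandler(RequestHandler):
    def get(self):
        # fake_pymongo in the tests, the real driver in production
        db = self.settings['db']
        self.write('pods are served from %r' % db)
```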
diff --git a/utils/test/result_collection_api/tests/unit/test_fake_pymongo.py b/utils/test/result_collection_api/tests/unit/test_fake_pymongo.py
new file mode 100644 (file)
index 0000000..5ddbf28
--- /dev/null
@@ -0,0 +1,52 @@
+import unittest
+from tornado.web import Application
+from tornado import gen
+from tornado.testing import AsyncHTTPTestCase, gen_test
+
+import fake_pymongo
+
+
+class MyTest(AsyncHTTPTestCase):
+    def setUp(self):
+        super(MyTest, self).setUp()
+        self.db = fake_pymongo
+        self.io_loop.run_sync(self.fixture_setup)
+
+    def get_app(self):
+        return Application()
+
+    @gen.coroutine
+    def fixture_setup(self):
+        self.test1 = {'_id': '1', 'name': 'test1'}
+        self.test2 = {'name': 'test2'}
+        yield self.db.pod.insert({'_id': '1', 'name': 'test1'})
+        yield self.db.pod.insert({'name': 'test2'})
+
+    @gen_test
+    def test_find_one(self):
+        user = yield self.db.pod.find_one({'name': 'test1'})
+        self.assertEqual(user, self.test1)
+
+    @gen_test
+    def test_find(self):
+        cursor = self.db.pod.find()
+        names = []
+        while (yield cursor.fetch_next):
+            ob = cursor.next_object()
+            names.append(ob.get('name'))
+        self.assertItemsEqual(names, ['test1', 'test2'])
+
+    @gen_test
+    def test_update(self):
+        yield self.db.pod.update({'_id': '1'}, {'name': 'new_test1'})
+        user = yield self.db.pod.find_one({'_id': '1'})
+        self.assertEqual(user.get('name', None), 'new_test1')
+
+    @gen_test
+    def test_remove(self):
+        yield self.db.pod.remove({'_id': '1'})
+        user = yield self.db.pod.find_one({'_id': '1'})
+        self.assertIsNone(user)
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/utils/test/result_collection_api/tests/unit/test_version.py b/utils/test/result_collection_api/tests/unit/test_version.py
new file mode 100644 (file)
index 0000000..918f2f0
--- /dev/null
@@ -0,0 +1,14 @@
+import unittest
+
+from test_base import TestBase
+
+__author__ = 'serena'
+
+
+class TestVersion(TestBase):
+    def test_get_version(self):
+        response = self.fetch('/version')
+        self.assertEqual(response.code, 200)
+
+if __name__ == '__main__':
+    unittest.main()
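
Review note: run_test.sh drives the suite through `testtools.run discover`; a stdlib-only equivalent (a sketch, run from `utils/test/result_collection_api`) for environments without testtools:

```
import unittest

# mirrors `python -m testtools.run discover ./tests/unit`
suite = unittest.defaultTestLoader.discover('tests/unit', pattern='test_*.py')
unittest.TextTestRunner(verbosity=2).run(suite)
```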