Merge "bug fix: typo in bottlenecks naming (reporting page)"
author Aric Gardner <agardner@linuxfoundation.org>
Thu, 29 Jun 2017 18:44:21 +0000 (18:44 +0000)
committer Gerrit Code Review <gerrit@opnfv.org>
Thu, 29 Jun 2017 18:44:21 +0000 (18:44 +0000)
jjb/barometer/barometer.yml
jjb/ci_gate_security/anteater-security-audit.sh
jjb/compass4nfv/compass-ci-jobs.yml
jjb/dovetail/dovetail-ci-jobs.yml
jjb/xci/bifrost-periodic-jobs.yml
jjb/xci/osa-periodic-jobs.yml
prototypes/xci/scripts/update-osa-version-files.sh [new file with mode: 0644]
utils/test/testapi/opnfv_testapi/resources/handlers.py
utils/test/testapi/opnfv_testapi/resources/result_handlers.py
utils/test/testapi/opnfv_testapi/tests/unit/fake_pymongo.py

diff --git a/jjb/barometer/barometer.yml b/jjb/barometer/barometer.yml
index c8fb9e2..e94639f 100644
         - project-parameter:
             project: '{project}'
             branch: '{branch}'
+        - barometer-project-parameter:
+            gs-pathname: '{gs-pathname}'
         - 'opnfv-build-centos-defaults'
 
     scm:
diff --git a/jjb/ci_gate_security/anteater-security-audit.sh b/jjb/ci_gate_security/anteater-security-audit.sh
index 2b5c26a..9bd3cc3 100644
@@ -16,7 +16,7 @@ docker pull opnfv/releng-anteater
 echo "--------------------------------------------------------"
 
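+# call anteater via its full virtualenv path inside the container image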
 cmd="docker run -i $envs $vols --rm opnfv/releng-anteater \
-anteater --project $PROJECT --patchset /home/opnfv/anteater/$PROJECT/patchset"
+/home/opnfv/venv/bin/anteater --project $PROJECT --patchset /home/opnfv/anteater/$PROJECT/patchset"
 echo "Running docker container"
 echo "$cmd"
 $cmd > $WORKSPACE/securityaudit.log 2>&1
diff --git a/jjb/compass4nfv/compass-ci-jobs.yml b/jjb/compass4nfv/compass-ci-jobs.yml
index 6793460..2472491 100644
                 unstable-threshold: 'FAILURE'
        # dovetail has only a master branch for now, not synced with the A/B/C branches
        # here the stream means the SUT stream; the dovetail stream is defined in its own job
-        # only run on os-(nosdn|odl_l2)-(nofeature|bgpvpn)-ha scenario
+        # only run on os-(nosdn|odl_l2|onos|odl_l3)-nofeature-ha scenario
+        # run against SUT master branch, dovetail docker image with the latest tag
+        # run against SUT danube branch, dovetail docker image with the latest tag (Mon/Wed/Fri/Sun) and the cvp.X.X.X tag (Tue/Thu/Sat)
         - conditional-step:
-            condition-kind: regex-match
-            regex: os-(nosdn|odl_l2)-(nofeature|bgpvpn)-ha
-            label: '{scenario}'
+            condition-kind: and
+            condition-operands:
+                - condition-kind: regex-match
+                  regex: danube
+                  label: '{stream}'
+                - condition-kind: regex-match
+                  regex: os-(nosdn|odl_l2|onos|odl_l3)-nofeature-ha
+                  label: '{scenario}'
+                - condition-kind: day-of-week
+                  day-selector: select-days
+                  days:
+                      MON: true
+                      WED: true
+                      FRI: true
+                      SUN: true
             steps:
                 - trigger-builds:
-                    - project: 'dovetail-compass-{pod}-proposed_tests-{stream}'
+                    - project: 'dovetail-compass-{pod}-proposed_tests-master'
                       current-parameters: false
                       predefined-parameters:
                         DEPLOY_SCENARIO={scenario}
                         build-step-failure-threshold: 'never'
                         failure-threshold: 'never'
                         unstable-threshold: 'FAILURE'
+        - conditional-step:
+            condition-kind: and
+            condition-operands:
+                - condition-kind: regex-match
+                  regex: danube
+                  label: '{stream}'
+                - condition-kind: regex-match
+                  regex: os-(nosdn|odl_l2|onos|odl_l3)-nofeature-ha
+                  label: '{scenario}'
+                - condition-kind: day-of-week
+                  day-selector: select-days
+                  days:
+                      TUE: true
+                      THU: true
+                      SAT: true
+            steps:
+                - trigger-builds:
+                    - project: 'dovetail-compass-{pod}-proposed_tests-danube'
+                      current-parameters: false
+                      predefined-parameters:
+                        DEPLOY_SCENARIO={scenario}
+                      block: true
+                      same-node: true
+                      block-thresholds:
+                        build-step-failure-threshold: 'never'
+                        failure-threshold: 'never'
+                        unstable-threshold: 'FAILURE'
+        - conditional-step:
+            condition-kind: and
+            condition-operands:
+                - condition-kind: regex-match
+                  regex: os-(nosdn|odl_l2)-(nofeature|bgpvpn)-ha
+                  label: '{scenario}'
+                - condition-kind: regex-match
+                  regex: master
+                  label: '{stream}'
+            steps:
+                - trigger-builds:
+                    - project: 'dovetail-compass-{pod}-proposed_tests-master'
 
 - job-template:
     name: 'compass-deploy-{pod}-daily-{stream}'
diff --git a/jjb/dovetail/dovetail-ci-jobs.yml b/jjb/dovetail/dovetail-ci-jobs.yml
index 682948d..9fdce31 100644
@@ -25,7 +25,7 @@
         branch: 'stable/{stream}'
         dovetail-branch: master
         gs-pathname: '/{stream}'
-        docker-tag: 'latest'
+        docker-tag: 'cvp.0.1.0'
 
 #-----------------------------------
 # POD, PLATFORM, AND BRANCH MAPPING
diff --git a/jjb/xci/bifrost-periodic-jobs.yml b/jjb/xci/bifrost-periodic-jobs.yml
index 3e9ff67..9773cfd 100644
     # trigger is disabled until we know which jobs we will have
     # and adjust stuff accordingly
     triggers:
-        - timed: '#@midnight'
+        - timed: ''  # '@midnight'
 
     builders:
         - description-setter:
diff --git a/jjb/xci/osa-periodic-jobs.yml b/jjb/xci/osa-periodic-jobs.yml
index 56a4b18..722b077 100644
     # trigger is disabled until we know which jobs we will have
     # and adjust stuff accordingly
     triggers:
-        - timed: '#@midnight'
+        - timed: ''  # '@midnight'
 
     builders:
         - description-setter:
diff --git a/prototypes/xci/scripts/update-osa-version-files.sh b/prototypes/xci/scripts/update-osa-version-files.sh
new file mode 100644
index 0000000..92df978
--- /dev/null
+++ b/prototypes/xci/scripts/update-osa-version-files.sh
@@ -0,0 +1,85 @@
+#!/bin/bash
+# SPDX-license-identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2017 SUSE LINUX GmbH and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# This script is used to pin the SHAs for the various roles in the
+# ansible-role-requirements file
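+#
+# Example invocation (hypothetical SHA):
+#   ./update-osa-version-files.sh 0f7e418bbd14d2d3a45b2cbbd6e4151c8e4a60dc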
+
+set -e
+
+# NOTE(hwoarang) This could break if files are re-arranged in the future
+releng_xci_base="$(dirname $(readlink -f $0))/.."
+
+usage() {
+    echo """
+    ${0} <openstack-ansible commit SHA>
+    """
+    exit 0
+}
+
+cleanup() {
+    [[ -d $tempdir ]] && rm -rf $tempdir
+}
+
+printme() {
+    echo "===> $1"
+}
+
+# Only need a single argument
+[[ $# -ne 1 ]] && echo "Invalid number of arguments!" && usage
+
+tempdir="$(mktemp -d)"
+
+trap cleanup EXIT
+
+pushd $tempdir &> /dev/null
+
+printme "Downloading the sources-branch-updater-lib.sh library"
+
+printme "Cloning the openstack-ansible repository"
+(
+    git clone -q git://git.openstack.org/openstack/openstack-ansible && cd openstack-ansible && git checkout -q $1
+)
+
+popd &> /dev/null
+
+pushd $tempdir/openstack-ansible &> /dev/null
+source scripts/sources-branch-updater-lib.sh
+printme "Synchronize roles and packages"
+update_ansible_role_requirements "master" "false" "true"
+
+# Construct the ansible-role-requirements-file
+echo """---
+# SPDX-license-identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2017 Ericsson AB and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+# these versions are extracted based on the osa commit ${1} on $(git --no-pager log -1 --format=%cI $1)
+# https://review.openstack.org/gitweb?p=openstack/openstack-ansible.git;a=commit;h=$1
+""" > $releng_xci_base/file/ansible-role-requirements.yml
+cat $tempdir/openstack-ansible/ansible-role-requirements.yml >> $releng_xci_base/file/ansible-role-requirements.yml
+
+# Update the pinned OSA version
+sed -i "/^export OPENSTACK_OSA_VERSION/s@:-\"[a-z0-9]*@:-\"${1}@" $releng_xci_base/config/pinned-versions
+
+popd &> /dev/null
+
+printme ""
+printme "======================= Report ============================"
+printme ""
+printme "The $releng_xci_base/file/ansible-role-requirements.yml and"
+printme "$releng_xci_base/config/pinned-versions files have been"
+printme "updated. Please make sure you test the end result before"
+printme "committing it!"
+printme ""
+printme "==========================================================="
diff --git a/utils/test/testapi/opnfv_testapi/resources/handlers.py b/utils/test/testapi/opnfv_testapi/resources/handlers.py
index 42372e8..c7fed8f 100644
@@ -101,40 +101,71 @@ class GenericApiHandler(web.RequestHandler):
     @web.asynchronous
     @gen.coroutine
     def _list(self, query=None, res_op=None, *args, **kwargs):
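+        # page/last/per_page default to 0, which disables the corresponding
+        # stage in the aggregation pipeline built by _set_pipelines()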
+        sort = kwargs.get('sort')
+        page = kwargs.get('page', 0)
+        last = kwargs.get('last', 0)
+        per_page = kwargs.get('per_page', 0)
         if query is None:
             query = {}
-        data = []
-        sort = kwargs.get('sort')
-        page = kwargs.get('page')
-        last = kwargs.get('last')
-        per_page = kwargs.get('per_page')
-
         cursor = self._eval_db(self.table, 'find', query)
-        if sort:
-            cursor = cursor.sort(sort)
-        if last and last != 0:
-            cursor = cursor.limit(last)
-        if page:
-            records_count = yield cursor.count()
-            total_pages, remainder = divmod(records_count, per_page)
-            if remainder > 0:
-                total_pages += 1
-            cursor = cursor.skip((page - 1) * per_page).limit(per_page)
+        records_count = yield cursor.count()
+        total_pages = self._calc_total_pages(records_count,
+                                             last,
+                                             page,
+                                             per_page)
+        pipelines = self._set_pipelines(query, sort, last, page, per_page)
+        cursor = self._eval_db(self.table,
+                               'aggregate',
+                               pipelines,
+                               allowDiskUse=True)
+        data = list()
         while (yield cursor.fetch_next):
             data.append(self.format_data(cursor.next_object()))
         if res_op is None:
             res = {self.table: data}
         else:
             res = res_op(data, *args)
-        if page:
+        if total_pages > 0:
             res.update({
                 'pagination': {
-                    'current_page': page,
+                    'current_page': page,
                     'total_pages': total_pages
                 }
             })
         self.finish_request(res)
 
+    @staticmethod
+    def _calc_total_pages(records_count, last, page, per_page):
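+        # worked example: records_count=25, last=0, page=1, per_page=10
+        # -> divmod(25, 10) = (2, 5), remainder > 0, so total_pages = 3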
+        records_nr = records_count
+        if (records_count > last) and (last > 0):
+            records_nr = last
+
+        total_pages = 0
+        if page > 0:
+            total_pages, remainder = divmod(records_nr, per_page)
+            if remainder > 0:
+                total_pages += 1
+        if page > total_pages:
+            raises.BadRequest(
+                'Request page > total_pages [{}]'.format(total_pages))
+        return total_pages
+
+    @staticmethod
+    def _set_pipelines(query, sort, last, page, per_page):
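+        # builds a MongoDB aggregation pipeline; e.g. with page=2, per_page=10
+        # it yields [{'$match': ...}, {'$sort': ...}, {'$skip': 10}, {'$limit': 10}]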
+        pipelines = list()
+        if query:
+            pipelines.append({'$match': query})
+        if sort:
+            pipelines.append({'$sort': sort})
+
+        if page > 0:
+            pipelines.append({'$skip': (page - 1) * per_page})
+            pipelines.append({'$limit': per_page})
+        elif last > 0:
+            pipelines.append({'$limit': last})
+
+        return pipelines
+
     @web.asynchronous
     @gen.coroutine
     @check.not_exist
diff --git a/utils/test/testapi/opnfv_testapi/resources/result_handlers.py b/utils/test/testapi/opnfv_testapi/resources/result_handlers.py
index 208af6d..1773216 100644
@@ -147,13 +147,13 @@ class ResultsCLHandler(GenericResultHandler):
             @in trust_indicator: query
             @required trust_indicator: False
         """
-        limitations = {'sort': [('start_date', -1)]}
+        limitations = {'sort': {'start_date': -1}}
         last = self.get_query_argument('last', 0)
         if last is not None:
             last = self.get_int('last', last)
             limitations.update({'last': last})
 
-        page = self.get_query_argument('page', 1)
+        page = self.get_query_argument('page', None)
         if page is not None:
             page = self.get_int('page', page)
             limitations.update({'page': page,
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/fake_pymongo.py b/utils/test/testapi/opnfv_testapi/tests/unit/fake_pymongo.py
index b2564a6..adaf6f7 100644
@@ -35,15 +35,14 @@ class MemCursor(object):
         return self.collection.pop()
 
     def sort(self, key_or_list):
-        key = key_or_list[0][0]
-        if key_or_list[0][1] == -1:
-            reverse = True
-        else:
-            reverse = False
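+        # NOTE: plain dicts iterate in arbitrary order under Python 2, so this
+        # fake cursor is only reliable for a single sort key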
+        for k, v in key_or_list.iteritems():
+            if v == -1:
+                reverse = True
+            else:
+                reverse = False
 
-        if key_or_list is not None:
             self.collection = sorted(self.collection,
-                                     key=itemgetter(key), reverse=reverse)
+                                     key=itemgetter(k), reverse=reverse)
         return self
 
     def limit(self, limit):
@@ -202,6 +201,27 @@ class MemDb(object):
     def find(self, *args):
         return MemCursor(self._find(*args))
 
+    def _aggregate(self, *args, **kwargs):
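+        # evaluate the pipeline stages in two passes: $match first to build
+        # the result set, then $sort/$skip/$limit on the resulting cursor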
+        res = self.contents
+        for arg in args[0]:
+            for k, v in arg.iteritems():
+                if k == '$match':
+                    res = self._find(v)
+        cursor = MemCursor(res)
+        for arg in args[0]:
+            for k, v in arg.iteritems():
+                if k == '$sort':
+                    cursor = cursor.sort(v)
+                elif k == '$skip':
+                    cursor = cursor.skip(v)
+                elif k == '$limit':
+                    cursor = cursor.limit(v)
+        return cursor
+
+    def aggregate(self, *args, **kwargs):
+        return self._aggregate(*args, **kwargs)
+
     def _update(self, spec, document, check_keys=True):
         updated = False