Merge "apex, cperf: Adds cperf job to Apex"
author     Tim Rozet <trozet@redhat.com>
Tue, 19 Jul 2016 15:17:45 +0000 (15:17 +0000)
committer  Gerrit Code Review <gerrit@172.30.200.206>
Tue, 19 Jul 2016 15:17:45 +0000 (15:17 +0000)
64 files changed:
docs/etc/conf.py [deleted file]
docs/etc/opnfv-logo.png [deleted file]
docs/etc/requirements.txt [deleted file]
jjb/apex/apex-upload-artifact.sh
jjb/armband/armband-ci-jobs.yml
jjb/armband/armband-download-artifact.sh
jjb/armband/armband-project-jobs.yml
jjb/armband/upload-artifacts.sh
jjb/bottlenecks/bottlenecks-project-jobs.yml
jjb/compass4nfv/compass-ci-jobs.yml
jjb/compass4nfv/compass-deploy.sh
jjb/compass4nfv/compass-project-jobs.yml
jjb/compass4nfv/compass-upload-artifact.sh
jjb/fastpathmetrics/fastpathmetrics.yml
jjb/fuel/fuel-ci-jobs.yml
jjb/fuel/fuel-deploy.sh
jjb/fuel/fuel-project-jobs.yml
jjb/fuel/fuel-verify-jobs.yml [new file with mode: 0644]
jjb/functest/functest-ci-jobs.yml
jjb/kvmfornfv/kvmfornfv.yml
jjb/multisite/multisite.yml [new file with mode: 0644]
jjb/opnfv/artifact-cleanup.yml [new file with mode: 0644]
jjb/opnfv/installer-params.yml
jjb/opnfv/opnfv-docker.yml
jjb/opnfv/opnfv-lint.yml
jjb/opnfv/slave-params.yml
jjb/parser/parser.yml
jjb/qtip/qtip-ci-jobs.yml
jjb/releng-macros.yaml
jjb/sandbox/basic.sh [new file with mode: 0755]
jjb/sandbox/build.sh [new file with mode: 0755]
jjb/sandbox/deploy.sh [new file with mode: 0755]
jjb/sandbox/functest.sh [new file with mode: 0755]
jjb/sandbox/merge.sh [new file with mode: 0755]
jjb/sandbox/promote.sh [new file with mode: 0755]
jjb/sandbox/sandbox-daily-jobs.yml [new file with mode: 0644]
jjb/sandbox/sandbox-merge-jobs.yml [new file with mode: 0644]
jjb/sandbox/sandbox-verify-jobs.yml [new file with mode: 0644]
jjb/sandbox/sandbox-weekly-jobs.yml [new file with mode: 0644]
jjb/sandbox/test.sh [new file with mode: 0755]
jjb/sandbox/yardstick.sh [new file with mode: 0755]
jjb/yardstick/yardstick-ci-jobs.yml
jjb/yardstick/yardstick-daily.sh
jjb/yardstick/yardstick-project-jobs.yml
utils/docs-build.sh [deleted file]
utils/gpg_import_key.sh [changed mode: 0644->0755]
utils/jenkins-jnlp-connect.sh
utils/retention_script.sh [new file with mode: 0755]
utils/test/reporting/functest/reporting-status.py
utils/test/reporting/functest/reporting-tempest.py
utils/test/reporting/functest/reporting-vims.py
utils/test/reporting/functest/reportingConf.py
utils/test/reporting/functest/reportingUtils.py
utils/test/reporting/functest/template/index-status-tmpl.html
utils/test/reporting/functest/testCase.py
utils/test/result_collection_api/update/README.md
utils/test/result_collection_api/update/templates/__init__.py [new file with mode: 0644]
utils/test/result_collection_api/update/templates/backup_mongodb.py [moved from utils/test/result_collection_api/update/backup.py with 100% similarity]
utils/test/result_collection_api/update/templates/changes_in_mongodb.py [moved from utils/test/result_collection_api/update/changes.py with 100% similarity]
utils/test/result_collection_api/update/templates/restore_mongodb.py [moved from utils/test/result_collection_api/update/restore.py with 100% similarity]
utils/test/result_collection_api/update/templates/rm_olds.sh [new file with mode: 0644]
utils/test/result_collection_api/update/templates/update_mongodb.py [moved from utils/test/result_collection_api/update/update.py with 97% similarity]
utils/test/result_collection_api/update/templates/utils.py [moved from utils/test/result_collection_api/update/utils.py with 100% similarity]
utils/test/result_collection_api/update/update.yml [new file with mode: 0644]

diff --git a/docs/etc/conf.py b/docs/etc/conf.py
deleted file mode 100644 (file)
index c4cbae7..0000000
+++ /dev/null
@@ -1,40 +0,0 @@
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2016 Linux Foundation and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-'''
-Base configuration file for building OPNFV docs
-
-You can override this configuration by putting 'conf.py' in the document
-directory (e.g. docs/how-to-use-docs/conf.py). If there is no 'conf.py'
-in the document directory, this file will be copied to that directory
-before the document builder jobs ('opnfv-docs-verify' and 'opnfv-docs-merge').
-
-You may need python package installation for new sphinx extension.
-Install python package with 'pip' in your machine and add the extension to
-the 'extensions' list below to test the documentation build locally.
-If you feel that your extensions would be useful for other projects too,
-we encourage you to propose a change in the releng repository.
-
-For further guidance see the https://wiki.opnfv.org/documentation/tools page.
-'''
-
-extensions = ['sphinxcontrib.httpdomain',
-              'sphinx.ext.autodoc',
-              'sphinx.ext.viewcode',
-              'sphinx.ext.napoleon']
-
-needs_sphinx = '1.3'
-master_doc = 'index'
-pygments_style = 'sphinx'
-
-html_use_index = False
-numfig = True
-html_logo = 'opnfv-logo.png'
-
-latex_domain_indices = False
-latex_logo = 'opnfv-logo.png'
diff --git a/docs/etc/opnfv-logo.png b/docs/etc/opnfv-logo.png
deleted file mode 100644 (file)
index 1519503..0000000
Binary files a/docs/etc/opnfv-logo.png and /dev/null differ
diff --git a/docs/etc/requirements.txt b/docs/etc/requirements.txt
deleted file mode 100644 (file)
index 4b18507..0000000
+++ /dev/null
@@ -1,6 +0,0 @@
-Sphinx==1.3.1
-doc8
-docutils
-setuptools
-six
-sphinxcontrib-httpdomain
diff --git a/jjb/apex/apex-upload-artifact.sh b/jjb/apex/apex-upload-artifact.sh
index d45c7c0..0dd112b 100755 (executable)
@@ -35,7 +35,7 @@ done
 signiso () {
 time gpg2 -vvv --batch --yes --no-tty \
   --default-key opnfv-helpdesk@rt.linuxfoundation.org  \
-  --passphrase notreallysecure \
+  --passphrase besteffort \
   --detach-sig $BUILD_DIRECTORY/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso
 
 gsutil cp $BUILD_DIRECTORY/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso.sig gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso.sig 
diff --git a/jjb/armband/armband-ci-jobs.yml b/jjb/armband/armband-ci-jobs.yml
index 53c652e..9d7c198 100644 (file)
 #--------------------------------
 #        master
 #--------------------------------
-# No master deploys for now
-#        - arm-pod1:
-#            <<: *master
+    pod:
+        - arm-pod1:
+            <<: *master
+        - arm-pod2:
+            <<: *master
 #--------------------------------
 #       scenarios
 #--------------------------------
     scenario:
         # HA scenarios
+        - 'os-nosdn-nofeature-ha':
+            auto-trigger-name: 'daily-trigger-disabled'
         - 'os-odl_l2-nofeature-ha':
             auto-trigger-name: 'armband-{scenario}-{pod}-{stream}-trigger'
+        - 'os-odl_l3-nofeature-ha':
+            auto-trigger-name: 'daily-trigger-disabled'
+        - 'os-odl_l2-bgpvpn-ha':
+            auto-trigger-name: 'daily-trigger-disabled'
 
         # NOHA scenarios
         - 'os-odl_l2-nofeature-noha':
     name: 'armband-os-odl_l2-nofeature-ha-arm-pod1-master-trigger'
     triggers:
         - timed: ''
-
 #---------------------------------------------------------------
 # Enea Armband POD 1 Triggers running against brahmaputra branch
 #---------------------------------------------------------------
 - trigger:
     name: 'armband-os-odl_l2-nofeature-ha-arm-pod1-brahmaputra-trigger'
     triggers:
-        - timed: '0 18 * * *'
+        - timed: ''
+#----------------------------------------------------------
+# Enea Armband POD 2 Triggers running against master branch
+#----------------------------------------------------------
+# No triggers for master for now
+- trigger:
+    name: 'armband-os-odl_l2-nofeature-ha-arm-pod2-master-trigger'
+    triggers:
+        - timed: ''
 #---------------------------------------------------------------
 # Enea Armband POD 2 Triggers running against brahmaputra branch
 #---------------------------------------------------------------
diff --git a/jjb/armband/armband-download-artifact.sh b/jjb/armband/armband-download-artifact.sh
index 18b55d7..7d01c09 100755 (executable)
@@ -10,6 +10,9 @@
 set -o errexit
 set -o pipefail
 
+# Configurable environment variables:
+# ISOSTORE (/iso_mount/opnfv_ci)
+
 if [[ "$JOB_NAME" =~ "merge" ]]; then
     echo "Downloading http://$GS_URL/opnfv-gerrit-$GERRIT_CHANGE_NUMBER.properties"
     # get the properties file for the Armband Fuel ISO built for a merged change
@@ -33,7 +36,7 @@ ISO_FILE=${WORKSPACE}/opnfv.iso
 # using ISOs for verify & merge jobs from local storage will be enabled later
 if [[ ! "$JOB_NAME" =~ (verify|merge) ]]; then
     # check if we already have the ISO to avoid redownload
-    ISOSTORE="/iso_mount/opnfv_ci/${GIT_BRANCH##*/}"
+    ISOSTORE=${ISOSTORE:-/iso_mount/opnfv_ci}/${GIT_BRANCH##*/}
     if [[ -f "$ISOSTORE/$OPNFV_ARTIFACT" ]]; then
         echo "ISO exists locally. Skipping the download and using the file from ISO store"
         ln -s $ISOSTORE/$OPNFV_ARTIFACT ${ISO_FILE}
diff --git a/jjb/armband/armband-project-jobs.yml b/jjb/armband/armband-project-jobs.yml
index 732a9ea..764a5d4 100644 (file)
@@ -76,7 +76,7 @@
     parameters:
         - project-parameter:
             project: '{project}'
-        - 'arm-build1-defaults'
+        - 'opnfv-build-arm-defaults'
         - armband-project-parameter:
             gs-pathname: '{gs-pathname}'
 
diff --git a/jjb/armband/upload-artifacts.sh b/jjb/armband/upload-artifacts.sh
index f4e84e9..7059ac3 100755 (executable)
@@ -9,6 +9,9 @@
 ##############################################################################
 set -o pipefail
 
+# configurable environment variables:
+# ISOSTORE (/iso_mount/opnfv_ci)
+
 # check if we built something
 if [ -f $WORKSPACE/.noupload ]; then
     echo "Nothing new to upload. Exiting."
@@ -19,11 +22,15 @@ fi
 # source the opnfv.properties to get ARTIFACT_VERSION
 source $WORKSPACE/opnfv.properties
 
+
 # storing ISOs for verify & merge jobs will be done once we get the disk array
 if [[ ! "$JOB_NAME" =~ (verify|merge) ]]; then
     # store ISO locally on NFS first
-    ISOSTORE="/home/jenkins/opnfv/iso_store"
+    ISOSTORE=${ISOSTORE:-/iso_mount/opnfv_ci}
     if [[ -d "$ISOSTORE" ]]; then
+        ISOSTORE=${ISOSTORE}/${GIT_BRANCH##*/}
+        mkdir -p $ISOSTORE
+
         # remove all but most recent 3 ISOs first to keep iso_mount clean & tidy
         cd $ISOSTORE
         ls -tp | grep -v '/' | tail -n +4 | xargs -d '\n' /bin/rm -f --
diff --git a/jjb/bottlenecks/bottlenecks-project-jobs.yml b/jjb/bottlenecks/bottlenecks-project-jobs.yml
index ea000d8..28b49bc 100644 (file)
     parameters:
         - project-parameter:
             project: '{project}'
-        - 'ericsson-build-defaults'
+        - 'opnfv-build-ubuntu-defaults'
         - bottlenecks-parameter:
             gs-packagepath: '{gs-packagepath}'
 
diff --git a/jjb/compass4nfv/compass-ci-jobs.yml b/jjb/compass4nfv/compass-ci-jobs.yml
index 52d6785..6bfc737 100644 (file)
@@ -60,6 +60,9 @@
         - 'os-ocl-nofeature-ha':
             disabled: false
             auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
+        - 'os-onos-sfc-ha':
+            disabled: false
+            auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
 
     jobs:
         - 'compass-{scenario}-{pod}-daily-{stream}'
             choices:
                 - 'mitaka'
                 - 'liberty'
+        - choice:
+            name: COMPASS_OS_VERSION_OPTION
+            choices:
+                - ''
+                - 'xenial'
 
 ########################
 # trigger macros
     name: 'compass-os-ocl-nofeature-ha-huawei-pod2-master-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'compass-os-onos-sfc-ha-huawei-pod2-master-trigger'
+    triggers:
+        - timed: ''
 
 - trigger:
     name: 'compass-os-nosdn-nofeature-ha-baremetal-master-trigger'
     name: 'compass-os-ocl-nofeature-ha-baremetal-master-trigger'
     triggers:
         - timed: '0 9 * * *'
+- trigger:
+    name: 'compass-os-onos-sfc-ha-baremetal-master-trigger'
+    triggers:
+        - timed: ''
 
 - trigger:
     name: 'compass-os-nosdn-nofeature-ha-baremetal-brahmaputra-trigger'
     name: 'compass-os-ocl-nofeature-ha-baremetal-brahmaputra-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'compass-os-onos-sfc-ha-baremetal-brahmaputra-trigger'
+    triggers:
+        - timed: ''
 
 - trigger:
     name: 'compass-os-nosdn-nofeature-ha-virtual-master-trigger'
     name: 'compass-os-ocl-nofeature-ha-virtual-master-trigger'
     triggers:
         - timed: '0 9 * * *'
+- trigger:
+    name: 'compass-os-onos-sfc-ha-virtual-master-trigger'
+    triggers:
+        - timed: ''
+
 - trigger:
     name: 'compass-os-nosdn-nofeature-ha-virtual-brahmaputra-trigger'
     triggers:
     name: 'compass-os-ocl-nofeature-ha-virtual-brahmaputra-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'compass-os-onos-sfc-ha-virtual-brahmaputra-trigger'
+    triggers:
+        - timed: ''
diff --git a/jjb/compass4nfv/compass-deploy.sh b/jjb/compass4nfv/compass-deploy.sh
index 7d09d53..0259849 100644 (file)
@@ -27,6 +27,8 @@ export ISO_URL=file://$BUILD_DIRECTORY/compass.iso
 
 if [[ "${DEPLOY_SCENARIO}" =~ "-ocl" ]]; then
     export NETWORK_CONF_FILE=network_ocl.yml
+elif [[ "${DEPLOY_SCENARIO}" =~ "-onos" ]]; then
+    export NETWORK_CONF_FILE=network_onos.yml
 else
     export NETWORK_CONF_FILE=network.yml
 fi
@@ -44,6 +46,9 @@ cd $WORKSPACE
 
 export OS_VERSION=${COMPASS_OS_VERSION}
 export OPENSTACK_VERSION=${COMPASS_OPENSTACK_VERSION}
+if [[ "${COMPASS_OS_VERSION_OPTION}" = "xenial" ]] && [[ "${OPENSTACK_VERSION}" = "mitaka" ]]; then
+    export OPENSTACK_VERSION=${OPENSTACK_VERSION}_${OS_VERSION}
+fi
 ./deploy.sh --dha ${DHA_CONF} --network ${NETWORK_CONF}
 if [ $? -ne 0 ]; then
     echo "depolyment failed!"
diff --git a/jjb/compass4nfv/compass-project-jobs.yml b/jjb/compass4nfv/compass-project-jobs.yml
index da28687..6e10e2f 100644 (file)
         - compass-project-parameter:
             installer: '{installer}'
             gs-pathname: '{gs-pathname}'
-        - 'ericsson-build-defaults'
+        - 'opnfv-build-ubuntu-defaults'
         - '{installer}-defaults'
 
     scm:
diff --git a/jjb/compass4nfv/compass-upload-artifact.sh b/jjb/compass4nfv/compass-upload-artifact.sh
index 34b1db9..73b7f07 100644 (file)
@@ -10,6 +10,27 @@ echo
 # source the opnfv.properties to get ARTIFACT_VERSION
 source $BUILD_DIRECTORY/opnfv.properties
 
+# clone releng repository
+echo "Cloning releng repository..."
+[ -d releng ] && rm -rf releng
+git clone https://gerrit.opnfv.org/gerrit/releng $WORKSPACE/releng/ &> /dev/null
+# this is where we import the signing key
+if [ -f $WORKSPACE/releng/utils/gpg_import_key.sh ]; then
+  source $WORKSPACE/releng/utils/gpg_import_key.sh
+fi
+
+signiso () {
+time gpg2 -vvv --batch --yes --no-tty \
+  --default-key opnfv-helpdesk@rt.linuxfoundation.org  \
+  --passphrase besteffort \
+  --detach-sig $BUILD_DIRECTORY/compass.iso
+
+gsutil cp $BUILD_DIRECTORY/compass.iso.sig gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso.sig
+echo "ISO signature Upload Complete!"
+}
+
+signiso
+
 # upload artifact and additional files to google storage
 gsutil cp $BUILD_DIRECTORY/compass.iso gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso > gsutil.iso.log 2>&1
 gsutil cp $BUILD_DIRECTORY/opnfv.properties gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties > gsutil.properties.log 2>&1
diff --git a/jjb/fastpathmetrics/fastpathmetrics.yml b/jjb/fastpathmetrics/fastpathmetrics.yml
index ad1b601..40549e3 100644 (file)
             choosing-strategy: 'default'
 
     triggers:
-         - pollscm:
-             cron: '@midnight'
+         - timed: '@midnight'
 
     builders:
         - shell: |
diff --git a/jjb/fuel/fuel-ci-jobs.yml b/jjb/fuel/fuel-ci-jobs.yml
index de7ca6a..e328345 100644 (file)
 - trigger:
     name: 'fuel-os-odl_l2-nofeature-ha-baremetal-daily-master-trigger'
     triggers:
-        - timed: '0 0 * * *'
+        - timed: '0 23 * * *'
 - trigger:
     name: 'fuel-os-odl_l3-nofeature-ha-baremetal-daily-master-trigger'
     triggers:
-        - timed: '0 4 * * *'
+        - timed: '0 2 * * *'
 - trigger:
     name: 'fuel-os-onos-sfc-ha-baremetal-daily-master-trigger'
     triggers:
-        - timed: '0 8 * * *'
+        - timed: '0 5 * * *'
 - trigger:
     name: 'fuel-os-onos-nofeature-ha-baremetal-daily-master-trigger'
     triggers:
-        - timed: '0 12 * * *'
+        - timed: '0 8 * * *'
 - trigger:
     name: 'fuel-os-odl_l2-sfc-ha-baremetal-daily-master-trigger'
     triggers:
-        - timed: '0 16 * * *'
+        - timed: '0 11 * * *'
 - trigger:
     name: 'fuel-os-odl_l2-bgpvpn-ha-baremetal-daily-master-trigger'
     triggers:
-        - timed: ''
+        - timed: '0 14 * * *'
 - trigger:
     name: 'fuel-os-nosdn-kvm-ha-baremetal-daily-master-trigger'
     triggers:
 - trigger:
     name: 'fuel-os-odl_l2-nofeature-ha-zte-pod1-daily-master-trigger'
     triggers:
-        - timed: '15 9 * * *'
+        - timed: '0 10 * * *'
 - trigger:
     name: 'fuel-os-odl_l3-nofeature-ha-zte-pod1-daily-master-trigger'
     triggers:
diff --git a/jjb/fuel/fuel-deploy.sh b/jjb/fuel/fuel-deploy.sh
index 14d48e7..c300417 100755 (executable)
@@ -7,7 +7,6 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
-set -o errexit
 set -o nounset
 set -o pipefail
 
@@ -57,10 +56,16 @@ chmod a+x $TMPDIR
 # clone the securedlab repo
 cd $WORKSPACE
 echo "Cloning securedlab repo ${GIT_BRANCH##origin/}"
-git clone ssh://jenkins-ericsson@gerrit.opnfv.org:29418/securedlab --quiet --branch ${GIT_BRANCH##origin/}
+git clone ssh://jenkins-ericsson@gerrit.opnfv.org:29418/securedlab --quiet \
+    --branch ${GIT_BRANCH##origin/}
+
+# log file name
+FUEL_LOG_FILENAME="${JOB_NAME}_${BUILD_NUMBER}.log.tar.gz"
 
 # construct the command
-DEPLOY_COMMAND="sudo $WORKSPACE/ci/deploy.sh -b file://$WORKSPACE/securedlab -l $LAB_NAME -p $POD_NAME -s $DEPLOY_SCENARIO -i file://$WORKSPACE/opnfv.iso -H -B $BRIDGE -S $TMPDIR"
+DEPLOY_COMMAND="sudo $WORKSPACE/ci/deploy.sh -b file://$WORKSPACE/securedlab \
+    -l $LAB_NAME -p $POD_NAME -s $DEPLOY_SCENARIO -i file://$WORKSPACE/opnfv.iso \
+    -H -B $BRIDGE -S $TMPDIR -L $WORKSPACE/$FUEL_LOG_FILENAME"
 
 # log info to console
 echo "Deployment parameters"
@@ -80,10 +85,26 @@ echo "$DEPLOY_COMMAND"
 echo
 
 $DEPLOY_COMMAND
+exit_code=$?
 
 echo
 echo "--------------------------------------------------------"
-echo "Deployment is done successfully!"
+echo "Deployment is done!"
+
+# upload logs for baremetal deployments
+# work on virtual deployments is still ongoing, so we skip that for the time being
+if [[ "$JOB_NAME" =~ "baremetal-daily" ]]; then
+    echo "Uploading deployment logs"
+    gsutil cp $WORKSPACE/$FUEL_LOG_FILENAME gs://$GS_URL/logs/$FUEL_LOG_FILENAME > /dev/null 2>&1
+    echo "Logs are available as http://$GS_URL/logs/$FUEL_LOG_FILENAME"
+fi
+
+if [[ $exit_code -ne 0 ]]; then
+    echo "Deployment failed!"
+    exit $exit_code
+else
+    echo "Deployment is successful!"
+fi
 
 # Quick and dirty fix for SFC scenario - will be fixed properly post-release
 if [[ ! "$DEPLOY_SCENARIO" =~ "os-odl_l2-sfc" ]]; then
diff --git a/jjb/fuel/fuel-project-jobs.yml b/jjb/fuel/fuel-project-jobs.yml
index 67343fb..c160fb8 100644 (file)
@@ -20,7 +20,6 @@
 
     jobs:
         - 'fuel-build-daily-{stream}'
-        - 'fuel-verify-build-{stream}'
         - 'fuel-merge-build-{stream}'
         - 'fuel-merge-deploy-virtual-{stream}'
         - 'fuel-deploy-generic-daily-{stream}'
@@ -43,7 +42,7 @@
     parameters:
         - project-parameter:
             project: '{project}'
-        - 'ericsson-build-defaults'
+        - 'opnfv-build-ubuntu-defaults'
         - '{installer}-defaults'
         - choice:
             name: FORCE_BUILD
         - email:
             recipients: jonas.bjurel@ericsson.com stefan.k.berg@ericsson.com
 
-- job-template:
-    name: 'fuel-verify-build-{stream}'
-
-    disabled: '{obj:disabled}'
-
-    concurrent: true
-
-    parameters:
-        - project-parameter:
-            project: '{project}'
-        - gerrit-parameter:
-            branch: '{branch}'
-        - 'ericsson-build-defaults'
-        - '{installer}-defaults'
-        - fuel-project-parameter:
-            gs-pathname: '{gs-pathname}'
-
-    scm:
-        - gerrit-trigger-scm:
-            credentials-id: '{ssh-credentials}'
-            refspec: '$GERRIT_REFSPEC'
-            choosing-strategy: 'gerrit'
-
-    wrappers:
-        - ssh-agent-credentials:
-            users:
-                - '{ssh-credentials}'
-        - timeout:
-            timeout: 360
-            fail: true
-
-    triggers:
-        - gerrit:
-            trigger-on:
-                - patchset-created-event:
-                    exclude-drafts: 'false'
-                    exclude-trivial-rebase: 'false'
-                    exclude-no-code-change: 'false'
-                - draft-published-event
-                - comment-added-contains-event:
-                    comment-contains-value: 'recheck'
-                - comment-added-contains-event:
-                    comment-contains-value: 'reverify'
-            projects:
-              - project-compare-type: 'ANT'
-                project-pattern: '{project}'
-                branches:
-                  - branch-compare-type: 'ANT'
-                    branch-pattern: '**/{branch}'
-                file-paths:
-                  - compare-type: ANT
-                    pattern: 'ci/**'
-                  - compare-type: ANT
-                    pattern: 'build/**'
-                  - compare-type: ANT
-                    pattern: 'deploy/**'
-                forbidden-file-paths:
-                  - compare-type: ANT
-                    pattern: 'docs/**'
-            readable-message: true
-
-    builders:
-        - shell:
-            !include-raw-escape: ./fuel-build.sh
-        - shell:
-            !include-raw-escape: ./fuel-workspace-cleanup.sh
-
 - job-template:
     name: 'fuel-merge-build-{stream}'
 
             project: '{project}'
         - gerrit-parameter:
             branch: '{branch}'
-        - 'ericsson-build-defaults'
+        - 'opnfv-build-ubuntu-defaults'
         - '{installer}-defaults'
         - fuel-project-parameter:
             gs-pathname: '{gs-pathname}'
diff --git a/jjb/fuel/fuel-verify-jobs.yml b/jjb/fuel/fuel-verify-jobs.yml
new file mode 100644 (file)
index 0000000..6f88981
--- /dev/null
@@ -0,0 +1,273 @@
+- project:
+    name: 'fuel-verify-jobs'
+
+    project: 'fuel'
+
+    installer: 'fuel'
+#####################################
+# branch definitions
+#####################################
+    stream:
+        - master:
+            branch: '{stream}'
+            gs-pathname: ''
+            disabled: false
+        - colorado:
+            branch: 'stable/{stream}'
+            gs-pathname: '/{stream}'
+            disabled: true
+#####################################
+# patch verification phases
+#####################################
+    phase:
+        - 'basic':
+            slave-label: 'opnfv-build-ubuntu'
+        - 'build':
+            slave-label: 'opnfv-build-ubuntu'
+        - 'deploy-virtual':
+            slave-label: 'fuel-virtual'
+        - 'smoke-test':
+            slave-label: 'fuel-virtual'
+#####################################
+# jobs
+#####################################
+    jobs:
+        - 'fuel-verify-{stream}'
+        - 'fuel-verify-{phase}-{stream}'
+#####################################
+# job templates
+#####################################
+- job-template:
+    name: 'fuel-verify-{stream}'
+
+    project-type: multijob
+
+    disabled: '{obj:disabled}'
+
+    concurrent: true
+
+    properties:
+        - throttle:
+            enabled: true
+            max-total: 4
+            max-per-node: 1
+            option: 'project'
+        - build-blocker:
+            use-build-blocker: true
+            blocking-jobs:
+                - 'fuel-verify-master'
+                - 'fuel-verify-colorado'
+            block-level: 'NODE'
+
+    scm:
+        - gerrit-trigger-scm:
+            credentials-id: '{ssh-credentials}'
+            refspec: '$GERRIT_REFSPEC'
+            choosing-strategy: 'gerrit'
+
+    wrappers:
+        - ssh-agent-credentials:
+            users:
+                - '{ssh-credentials}'
+        - timeout:
+            timeout: 360
+            fail: true
+
+    triggers:
+        - gerrit:
+            trigger-on:
+                - patchset-created-event:
+                    exclude-drafts: 'false'
+                    exclude-trivial-rebase: 'false'
+                    exclude-no-code-change: 'false'
+                - draft-published-event
+                - comment-added-contains-event:
+                    comment-contains-value: 'recheck'
+                - comment-added-contains-event:
+                    comment-contains-value: 'reverify'
+            projects:
+              - project-compare-type: 'ANT'
+                project-pattern: '{project}'
+                branches:
+                  - branch-compare-type: 'ANT'
+                    branch-pattern: '**/{branch}'
+                file-paths:
+                  - compare-type: ANT
+                    pattern: 'ci/**'
+                  - compare-type: ANT
+                    pattern: 'build/**'
+                  - compare-type: ANT
+                    pattern: 'deploy/**'
+                forbidden-file-paths:
+                  - compare-type: ANT
+                    pattern: 'docs/**'
+            readable-message: true
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+        - gerrit-parameter:
+            branch: '{branch}'
+        - 'fuel-virtual-defaults'
+        - 'fuel-verify-defaults':
+            gs-pathname: '{gs-pathname}'
+
+    builders:
+        - description-setter:
+            description: "Built on $NODE_NAME"
+        - multijob:
+            name: basic
+            condition: SUCCESSFUL
+            projects:
+                - name: 'fuel-verify-basic-{stream}'
+                  current-parameters: false
+                  predefined-parameters: |
+                    GERRIT_BRANCH=$GERRIT_BRANCH
+                    GERRIT_REFSPEC=$GERRIT_REFSPEC
+                    GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+                    GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+                  node-parameters: false
+                  kill-phase-on: FAILURE
+                  abort-all-job: true
+        - multijob:
+            name: build
+            condition: SUCCESSFUL
+            projects:
+                - name: 'fuel-verify-build-{stream}'
+                  current-parameters: false
+                  predefined-parameters: |
+                    GERRIT_BRANCH=$GERRIT_BRANCH
+                    GERRIT_REFSPEC=$GERRIT_REFSPEC
+                    GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+                    GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+                  node-parameters: false
+                  kill-phase-on: FAILURE
+                  abort-all-job: true
+        - multijob:
+            name: deploy-virtual
+            condition: SUCCESSFUL
+            projects:
+                - name: 'fuel-verify-deploy-virtual-{stream}'
+                  current-parameters: false
+                  predefined-parameters: |
+                    GERRIT_BRANCH=$GERRIT_BRANCH
+                    GERRIT_REFSPEC=$GERRIT_REFSPEC
+                    GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+                    GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+                  node-parameters: true
+                  kill-phase-on: FAILURE
+                  abort-all-job: true
+        - multijob:
+            name: smoke-test
+            condition: SUCCESSFUL
+            projects:
+                - name: 'fuel-verify-smoke-test-{stream}'
+                  current-parameters: false
+                  predefined-parameters: |
+                    GERRIT_BRANCH=$GERRIT_BRANCH
+                    GERRIT_REFSPEC=$GERRIT_REFSPEC
+                    GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+                    GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+                  node-parameters: true
+                  kill-phase-on: FAILURE
+                  abort-all-job: true
+
+- job-template:
+    name: 'fuel-verify-{phase}-{stream}'
+
+    disabled: '{obj:disabled}'
+
+    concurrent: true
+
+    properties:
+        - throttle:
+            enabled: true
+            max-total: 4
+            max-per-node: 1
+            option: 'project'
+        - build-blocker:
+            use-build-blocker: true
+            blocking-jobs:
+                - 'fuel-verify-deploy-.*'
+                - 'fuel-verify-test-.*'
+            block-level: 'NODE'
+
+    scm:
+        - gerrit-trigger-scm:
+            credentials-id: '{ssh-credentials}'
+            refspec: '$GERRIT_REFSPEC'
+            choosing-strategy: 'gerrit'
+
+    wrappers:
+        - ssh-agent-credentials:
+            users:
+                - '{ssh-credentials}'
+        - timeout:
+            timeout: 360
+            fail: true
+    parameters:
+        - project-parameter:
+            project: '{project}'
+        - gerrit-parameter:
+            branch: '{branch}'
+        - '{slave-label}-defaults'
+        - '{installer}-defaults'
+        - 'fuel-verify-defaults':
+            gs-pathname: '{gs-pathname}'
+
+    builders:
+        - description-setter:
+            description: "Built on $NODE_NAME"
+        - '{project}-verify-{phase}-macro'
+#####################################
+# builder macros
+#####################################
+- builder:
+    name: 'fuel-verify-basic-macro'
+    builders:
+        - shell: |
+            #!/bin/bash
+
+            echo "Not activated!"
+
+- builder:
+    name: 'fuel-verify-build-macro'
+    builders:
+        - shell:
+            !include-raw-escape: ./fuel-build.sh
+        - shell:
+            !include-raw-escape: ./fuel-workspace-cleanup.sh
+
+- builder:
+    name: 'fuel-verify-deploy-virtual-macro'
+    builders:
+        - shell: |
+            #!/bin/bash
+
+            echo "Not activated!"
+
+- builder:
+    name: 'fuel-verify-smoke-test-macro'
+    builders:
+        - shell: |
+            #!/bin/bash
+
+            echo "Not activated!"
+#####################################
+# parameter macros
+#####################################
+- parameter:
+    name: 'fuel-verify-defaults'
+    parameters:
+        - string:
+            name: BUILD_DIRECTORY
+            default: $WORKSPACE/build_output
+            description: "Directory where the build artifact will be located upon the completion of the build."
+        - string:
+            name: CACHE_DIRECTORY
+            default: $HOME/opnfv/cache/$INSTALLER_TYPE
+            description: "Directory where the cache to be used during the build is located."
+        - string:
+            name: GS_URL
+            default: artifacts.opnfv.org/$PROJECT{gs-pathname}
+            description: "URL to Google Storage."
diff --git a/jjb/functest/functest-ci-jobs.yml b/jjb/functest/functest-ci-jobs.yml
index f9cf011..727419d 100644 (file)
             slave-label: fuel-virtual
             installer: fuel
             <<: *brahmaputra
-
-# just in case if things go wrong
-        - lf-pod2:
-            slave-label: fuel-baremetal
-            installer: fuel
-            <<: *master
-
 # joid CI PODs
         - baremetal:
             slave-label: joid-baremetal
diff --git a/jjb/kvmfornfv/kvmfornfv.yml b/jjb/kvmfornfv/kvmfornfv.yml
index b042c56..aa8b645 100644 (file)
@@ -23,7 +23,7 @@
             project: '{project}'
         - gerrit-parameter:
             branch: '{branch}'
-        - 'ericsson-build-defaults'
+        - 'opnfv-build-ubuntu-defaults'
 
     scm:
         - gerrit-trigger-scm:
@@ -62,7 +62,7 @@
             project: '{project}'
         - gerrit-parameter:
             branch: '{branch}'
-        - 'ericsson-build-defaults'
+        - 'opnfv-build-ubuntu-defaults'
 
     scm:
         - gerrit-trigger-scm:
diff --git a/jjb/multisite/multisite.yml b/jjb/multisite/multisite.yml
new file mode 100644 (file)
index 0000000..f92a4c1
--- /dev/null
@@ -0,0 +1,122 @@
+###################################################
+# All the jobs except verify have been removed!
+# They will only be enabled on request by projects!
+###################################################
+- project:
+    name: multisite
+
+    project: '{name}'
+
+    jobs:
+        - 'multisite-verify-{stream}'
+        - 'multisite-kingbird-daily-{stream}'
+
+    stream:
+        - master:
+            branch: '{stream}'
+            gs-pathname: ''
+            disabled: false
+
+- job-template:
+    name: 'multisite-verify-{stream}'
+
+    disabled: '{obj:disabled}'
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+        - gerrit-parameter:
+            branch: '{branch}'
+        - 'opnfv-build-ubuntu-defaults'
+
+    scm:
+        - gerrit-trigger-scm:
+            credentials-id: '{ssh-credentials}'
+            refspec: '$GERRIT_REFSPEC'
+            choosing-strategy: 'gerrit'
+
+    triggers:
+        - gerrit:
+            trigger-on:
+                - patchset-created-event:
+                    exclude-drafts: 'false'
+                    exclude-trivial-rebase: 'false'
+                    exclude-no-code-change: 'false'
+                - draft-published-event
+                - comment-added-contains-event:
+                    comment-contains-value: 'recheck'
+                - comment-added-contains-event:
+                    comment-contains-value: 'reverify'
+            projects:
+              - project-compare-type: 'ANT'
+                project-pattern: '{project}'
+                branches:
+                  - branch-compare-type: 'ANT'
+                    branch-pattern: '**/{branch}'
+                forbidden-file-paths:
+                  - compare-type: ANT
+                    pattern: 'docs/**|.gitignore'
+
+    builders:
+        - shell: |
+            #!/bin/bash
+
+            echo "Hello World"
+
+- job-template:
+    name: 'multisite-kingbird-daily-{stream}'
+
+    project-type: freestyle
+
+    disabled: '{obj:disabled}'
+
+    concurrent: true
+
+    properties:
+        - throttle:
+            enabled: true
+            max-total: 3
+            max-per-node: 2
+            option: 'project'
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+        - gerrit-parameter:
+            branch: '{branch}'
+        - string:
+            name: KINGBIRD_LOG_FILE
+            default: $WORKSPACE/kingbird.log
+        - 'intel-virtual6-defaults'
+
+    scm:
+        - gerrit-trigger-scm:
+            credentials-id: '{ssh-credentials}'
+            refspec: ''
+            choosing-strategy: 'default'
+
+    triggers:
+         - timed: '@midnight'
+
+    builders:
+        - 'multisite-kingbird-deploy'
+        - 'multisite-kingbird-log-upload'
+########################
+# builder macros
+########################
+- builder:
+    name: 'multisite-kingbird-deploy'
+    builders:
+        - shell: |
+            #!/bin/bash
+
+            $WORKSPACE/tools/kingbird/deploy.sh
+- builder:
+    name: 'multisite-kingbird-log-upload'
+    builders:
+        - shell: |
+            #!/bin/bash
+
+            echo "Here is where we upload kingbird logs to artifact repo"
+            echo "We just check the existence of log file"
+            ls -al $KINGBIRD_LOG_FILE
diff --git a/jjb/opnfv/artifact-cleanup.yml b/jjb/opnfv/artifact-cleanup.yml
new file mode 100644 (file)
index 0000000..b0f8191
--- /dev/null
@@ -0,0 +1,42 @@
+- project:
+    name: artifact-cleanup
+
+    project: 'releng'
+
+    jobs:
+        - 'artifact-cleanup-daily-{stream}'
+
+    stream:
+        - master:
+            branch: '{stream}'
+            gs-pathname: ''
+
+
+- job-template:
+    name: 'artifact-cleanup-daily-{stream}'
+
+    # Job template for daily builders
+    #
+    # Required Variables:
+    #     stream:    branch with - in place of / (eg. stable)
+    #     branch:    branch (eg. stable)
+    node: master
+
+    disabled: false
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+
+    scm:
+        - git-scm:
+            credentials-id: '{ssh-credentials}'
+            refspec: ''
+            branch: '{branch}'
+
+    triggers:
+        - timed: 'H H * * *'
+
+    builders:
+        - shell: |
+            $WORKSPACE/utils/retention_script.sh
diff --git a/jjb/opnfv/installer-params.yml b/jjb/opnfv/installer-params.yml
index f95d79f..60fee92 100644 (file)
             name: CPU_ARCHITECTURE
             default: 'amd64'
             description: "CPU Architecture to use for Ubuntu distro "
+
+- parameter:
+    name: 'sandbox-defaults'
+    parameters:
+        - string:
+            name: INSTALLER_IP
+            default: '10.20.0.2'
+            description: 'IP of the installer'
+        - string:
+            name: INSTALLER_TYPE
+            default: sandbox
+            description: 'Installer used for deploying OPNFV on this POD'
+        - string:
+            name: EXTERNAL_NETWORK
+            default: 'admin_floating_net'
+            description: 'external network for test'
diff --git a/jjb/opnfv/opnfv-docker.yml b/jjb/opnfv/opnfv-docker.yml
index 6b49242..6b4861c 100644 (file)
@@ -34,7 +34,7 @@
     parameters:
         - project-parameter:
             project: '{project}'
-        - 'ericsson-build-defaults'
+        - 'opnfv-build-ubuntu-defaults'
         - string:
             name: PUSH_IMAGE
             default: "true"
@@ -77,7 +77,7 @@
     parameters:
         - project-parameter:
             project: 'yardstick'
-        - 'ericsson-build-defaults'
+        - 'opnfv-build-ubuntu-defaults'
         - string:
             name: PUSH_IMAGE
             default: "true"
diff --git a/jjb/opnfv/opnfv-lint.yml b/jjb/opnfv/opnfv-lint.yml
index aeea34e..4f3f7ac 100644 (file)
@@ -51,7 +51,7 @@
                     comment-contains-value: 'reverify'
             projects:
               - project-compare-type: 'REG_EXP'
-                project-pattern: 'functest'
+                project-pattern: 'functest|sdnvpn'
                 branches:
                   - branch-compare-type: 'ANT'
                     branch-pattern: '**/{branch}'
diff --git a/jjb/opnfv/slave-params.yml b/jjb/opnfv/slave-params.yml
index e5313c8..7b99830 100644 (file)
@@ -1,13 +1,14 @@
+#####################################################
+# Parameters for slaves using old labels
+# This will be cleaned up once the new job structure and
+# use of the new labels are in place
+#####################################################
 - parameter:
     name: 'apex-daily-master-defaults'
     parameters:
         - label:
             name: SLAVE_LABEL
             default: 'apex-daily-master'
-        - string:
-            name: INSTALLER_VERSION
-            default: latest
-            description: 'Version of the installer to deploy'
         - string:
             name: GIT_BASE
             default: https://gerrit.opnfv.org/gerrit/$PROJECT
             name: SLAVE_LABEL
             default: 'apex-verify-master'
         - string:
-            name: INSTALLER_VERSION
-            default: latest
-            description: 'Version of the installer to deploy'
+            name: GIT_BASE
+            default: https://gerrit.opnfv.org/gerrit/$PROJECT
+            description: 'Git URL to use on this Jenkins Slave'
+        - string:
+            name: SSH_KEY
+            default: /root/.ssh/id_rsa
+            description: 'SSH key to use for Apex'
+- parameter:
+    name: 'lf-pod1-defaults'
+    parameters:
+        - node:
+            name: SLAVE_NAME
+            description: 'Slave name on Jenkins'
+            allowed-slaves:
+                - lf-pod1
+            default-slaves:
+                - lf-pod1
         - string:
             name: GIT_BASE
             default: https://gerrit.opnfv.org/gerrit/$PROJECT
             name: SSH_KEY
             default: /root/.ssh/id_rsa
             description: 'SSH key to use for Apex'
+#####################################################
+# Parameters for CI baremetal PODs
+#####################################################
 - parameter:
-    name: 'fuel-baremetal-defaults'
+    name: 'apex-baremetal-defaults'
     parameters:
         - label:
             name: SLAVE_LABEL
-            default: 'fuel-baremetal'
+            default: 'apex-baremetal'
         - string:
             name: GIT_BASE
             default: https://gerrit.opnfv.org/gerrit/$PROJECT
             description: 'Git URL to use on this Jenkins Slave'
+        - string:
+            name: SSH_KEY
+            default: /root/.ssh/id_rsa
+            description: 'SSH key to use for Apex'
 - parameter:
-    name: 'fuel-virtual-defaults'
+    name: 'compass-baremetal-defaults'
     parameters:
         - label:
             name: SLAVE_LABEL
-            default: 'fuel-virtual'
+            default: 'compass-baremetal'
+        - string:
+            name: GIT_BASE
+            default: https://gerrit.opnfv.org/gerrit/$PROJECT
+            description: 'Git URL to use on this Jenkins Slave'
+- parameter:
+    name: 'fuel-baremetal-defaults'
+    parameters:
+        - label:
+            name: SLAVE_LABEL
+            default: 'fuel-baremetal'
         - string:
             name: GIT_BASE
             default: https://gerrit.opnfv.org/gerrit/$PROJECT
             name: EXTERNAL_NETWORK
             default: ext-net;flat;10.5.15.5;10.5.15.250;10.5.15.254;10.5.15.0/24
             description: "External network to create for pod5 (name;type;first ip;last ip; gateway;network)"
+#####################################################
+# Parameters for CI virtual PODs
+#####################################################
 - parameter:
-    name: 'joid-virtual-defaults'
+    name: 'apex-virtual-defaults'
     parameters:
         - label:
             name: SLAVE_LABEL
-            default: 'joid-virtual'
+            default: 'apex-virtual'
         - string:
             name: GIT_BASE
             default: https://gerrit.opnfv.org/gerrit/$PROJECT
             description: 'Git URL to use on this Jenkins Slave'
-- parameter:
-    name: 'compass-baremetal-defaults'
-    parameters:
-        - label:
-            name: SLAVE_LABEL
-            default: 'compass-baremetal'
         - string:
-            name: GIT_BASE
-            default: https://gerrit.opnfv.org/gerrit/$PROJECT
-            description: 'Git URL to use on this Jenkins Slave'
+            name: SSH_KEY
+            default: /root/.ssh/id_rsa
+            description: 'SSH key to use for Apex'
 - parameter:
     name: 'compass-virtual-defaults'
     parameters:
             default: https://gerrit.opnfv.org/gerrit/$PROJECT
             description: 'Git URL to use on this Jenkins Slave'
 - parameter:
-    name: 'lf-pod1-defaults'
+    name: 'fuel-virtual-defaults'
     parameters:
-        - node:
-            name: SLAVE_NAME
-            description: 'Slave name on Jenkins'
-            allowed-slaves:
-                - lf-pod1
-            default-slaves:
-                - lf-pod1
-        - string:
-            name: INSTALLER_VERSION
-            default: latest
-            description: 'Version of the installer to deploy'
+        - label:
+            name: SLAVE_LABEL
+            default: 'fuel-virtual'
         - string:
             name: GIT_BASE
             default: https://gerrit.opnfv.org/gerrit/$PROJECT
             description: 'Git URL to use on this Jenkins Slave'
-        - string:
-            name: SSH_KEY
-            default: /root/.ssh/id_rsa
-            description: 'SSH key to use for Apex'
-- parameter:
-    name: 'lf-pod2-defaults'
-    parameters:
-        - node:
-            name: SLAVE_NAME
-            description: 'Slave name on Jenkins'
-            allowed-slaves:
-                - lf-pod2
-            default-slaves:
-                - lf-pod2
-        - string:
-            name: GIT_BASE
-            default: ssh://gerrit.opnfv.org:29418/$PROJECT
-            description: 'Git URL to use on this Jenkins Slave'
 - parameter:
-    name: 'ericsson-pod1-defaults'
+    name: 'joid-virtual-defaults'
     parameters:
-        - node:
-            name: SLAVE_NAME
-            description: 'Slave name on Jenkins'
-            allowed-slaves:
-                - ericsson-pod1
-            default-slaves:
-                - ericsson-pod1
+        - label:
+            name: SLAVE_LABEL
+            default: 'joid-virtual'
         - string:
             name: GIT_BASE
             default: https://gerrit.opnfv.org/gerrit/$PROJECT
             description: 'Git URL to use on this Jenkins Slave'
+#####################################################
+# Parameters for build slaves
+#####################################################
 - parameter:
-    name: 'ericsson-pod2-defaults'
+    name: 'opnfv-build-arm-defaults'
     parameters:
-        - node:
-            name: SLAVE_NAME
-            description: 'Slave name on Jenkins'
-            allowed-slaves:
-                - ericsson-pod2
-            default-slaves:
-                - ericsson-pod2
+        - label:
+            name: SLAVE_LABEL
+            default: 'opnfv-build-arm'
         - string:
             name: GIT_BASE
             default: https://gerrit.opnfv.org/gerrit/$PROJECT
             description: 'Git URL to use on this Jenkins Slave'
-
+        - string:
+            name: BUILD_DIRECTORY
+            default: $WORKSPACE/build_output
+            description: "Directory where the build artifact will be located upon the completion of the build."
 - parameter:
-    name: 'intel-pod2-defaults'
+    name: 'opnfv-build-centos-defaults'
     parameters:
-        - node:
-            name: SLAVE_NAME
-            description: 'Slave name on Jenkins'
-            allowed-slaves:
-                - intel-pod2
-            default-slaves:
-                - intel-pod2
-        - string:
-            name: INSTALLER_VERSION
-            default: stable
-            description: 'Version of the installer to deploy'
+        - label:
+            name: SLAVE_LABEL
+            default: 'opnfv-build-centos'
         - string:
             name: GIT_BASE
             default: https://gerrit.opnfv.org/gerrit/$PROJECT
             description: 'Git URL to use on this Jenkins Slave'
         - string:
-            name: SSH_KEY
-            default: /root/.ssh/id_rsa
-            description: 'SSH key to use for Apex'
-
+            name: BUILD_DIRECTORY
+            default: $WORKSPACE/build_output
+            description: "Directory where the build artifact will be located upon the completion of the build."
 - parameter:
-    name: 'intel-pod3-defaults'
+    name: 'opnfv-build-ubuntu-defaults'
     parameters:
-        - node:
-            name: SLAVE_NAME
-            description: 'Slave name on Jenkins'
-            allowed-slaves:
-                - intel-pod3
-            default-slaves:
-                - intel-pod3
+        - label:
+            name: SLAVE_LABEL
+            default: 'opnfv-build-ubuntu'
+            description: 'Slave label on Jenkins'
         - string:
             name: GIT_BASE
             default: https://gerrit.opnfv.org/gerrit/$PROJECT
             description: 'Git URL to use on this Jenkins Slave'
-
+        - string:
+            name: BUILD_DIRECTORY
+            default: $WORKSPACE/build_output
+            description: "Directory where the build artifact will be located upon the completion of the build."
 - parameter:
-    name: 'intel-pod5-defaults'
+    name: 'huawei-build-defaults'
     parameters:
         - node:
             name: SLAVE_NAME
             description: 'Slave name on Jenkins'
             allowed-slaves:
-                - intel-pod5
+                - huawei-build
             default-slaves:
-                - intel-pod5
-        - string:
-            name: INSTALLER_VERSION
-            default: stable
-            description: 'Version of the installer to deploy'
+                - huawei-build
         - string:
             name: GIT_BASE
             default: https://gerrit.opnfv.org/gerrit/$PROJECT
             description: 'Git URL to use on this Jenkins Slave'
-        - string:
-            name: CEPH_DISKS
-            default: /srv
-            description: "Disks to use by ceph (comma separated list)"
-        - string:
-            name: EXTERNAL_NETWORK
-            default: ext-net;flat;10.5.15.5;10.5.15.250;10.5.15.254;10.5.15.0/24
-            description: "External network to create for pod5 (name;type;first ip;last ip; gateway;network)"
-
+#####################################################
+# Parameters for non-CI PODs
+#####################################################
 - parameter:
-    name: 'intel-pod6-defaults'
+    name: 'ericsson-pod1-defaults'
     parameters:
         - node:
             name: SLAVE_NAME
             description: 'Slave name on Jenkins'
             allowed-slaves:
-                - intel-pod6
+                - ericsson-pod1
             default-slaves:
-                - intel-pod6
-        - string:
-            name: INSTALLER_VERSION
-            default: latest
-            description: 'Version of the installer to deploy'
+                - ericsson-pod1
         - string:
             name: GIT_BASE
             default: https://gerrit.opnfv.org/gerrit/$PROJECT
             description: 'Git URL to use on this Jenkins Slave'
-        - string:
-            name: CEPH_DISKS
-            default: /srv
-            description: "Disks to use by ceph (comma separated list)"
-        - string:
-            name: EXTERNAL_NETWORK
-            default: ext-net;flat;10.6.15.5;10.6.15.250;10.6.15.254;10.6.15.0/24
-            description: "External network to create for pod6 (name;type;first ip;last ip; gateway;network)"
-
 - parameter:
-    name: 'intel-pod7-defaults'
+    name: 'intel-pod2-defaults'
     parameters:
         - node:
             name: SLAVE_NAME
             description: 'Slave name on Jenkins'
             allowed-slaves:
-                - intel-pod7
+                - intel-pod2
             default-slaves:
-                - intel-pod7
-        - string:
-            name: INSTALLER_VERSION
-            default: latest
-            description: 'Version of the installer to deploy'
+                - intel-pod2
         - string:
             name: GIT_BASE
             default: https://gerrit.opnfv.org/gerrit/$PROJECT
             name: SSH_KEY
             default: /root/.ssh/id_rsa
             description: 'SSH key to use for Apex'
-
 - parameter:
-    name: 'intel-pod8-defaults'
-    parameters:
-        - node:
-            name: SLAVE_NAME
-            description: 'Slave name on Jenkins'
-            allowed-slaves:
-                - intel-pod8
-            default-slaves:
-                - intel-pod8
-        - string:
-            name: INSTALLER_VERSION
-            default: latest
-            description: 'Version of the installer to deploy'
-        - string:
-            name: GIT_BASE
-            default: https://gerrit.opnfv.org/gerrit/$PROJECT
-            description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
-    name: 'huawei-build-defaults'
-    parameters:
-        - node:
-            name: SLAVE_NAME
-            description: 'Slave name on Jenkins'
-            allowed-slaves:
-                - huawei-build
-            default-slaves:
-                - huawei-build
-        - string:
-            name: INSTALLER_VERSION
-            default: stable
-            description: 'Version of the installer to deploy'
-        - string:
-            name: GIT_BASE
-            default: https://gerrit.opnfv.org/gerrit/$PROJECT
-            description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
-    name: 'huawei-pod1-defaults'
+    name: 'intel-pod3-defaults'
     parameters:
         - node:
             name: SLAVE_NAME
             description: 'Slave name on Jenkins'
             allowed-slaves:
-                - huawei-pod1
+                - intel-pod3
             default-slaves:
-                - huawei-pod1
-        - string:
-            name: INSTALLER_VERSION
-            default: stable
-            description: 'Version of the installer to deploy'
+                - intel-pod3
         - string:
             name: GIT_BASE
             default: https://gerrit.opnfv.org/gerrit/$PROJECT
             description: 'Git URL to use on this Jenkins Slave'
-
 - parameter:
     name: 'huawei-pod2-defaults'
     parameters:
                 - huawei-pod2
             default-slaves:
                 - huawei-pod2
-        - string:
-            name: INSTALLER_VERSION
-            default: stable
-            description: 'Version of the installer to deploy'
         - string:
             name: GIT_BASE
             default: https://gerrit.opnfv.org/gerrit/$PROJECT
             description: 'Git URL to use on this Jenkins Slave'
-
 - parameter:
     name: 'huawei-pod3-defaults'
     parameters:
         - label:
             name: SLAVE_LABEL
             default: 'huawei-test'
-        - string:
-            name: INSTALLER_VERSION
-            default: stable
-            description: 'Version of the installer to deploy'
         - string:
             name: GIT_BASE
             default: https://gerrit.opnfv.org/gerrit/$PROJECT
             description: 'Git URL to use on this Jenkins Slave'
-
 - parameter:
     name: 'huawei-pod4-defaults'
     parameters:
         - label:
             name: SLAVE_LABEL
             default: 'huawei-test'
-        - string:
-            name: INSTALLER_VERSION
-            default: stable
-            description: 'Version of the installer to deploy'
         - string:
             name: GIT_BASE
             default: https://gerrit.opnfv.org/gerrit/$PROJECT
             name: GIT_BASE
             default: https://gerrit.opnfv.org/gerrit/$PROJECT
             description: 'Git URL to use on this Jenkins Slave'
-
 - parameter:
     name: 'juniper-pod1-defaults'
     parameters:
                 - juniper-pod1
             default-slaves:
                 - juniper-pod1
-        - string:
-            name: INSTALLER_VERSION
-            default: latest
-            description: 'Version of the installer to deploy'
         - string:
             name: GIT_BASE
             default: https://gerrit.opnfv.org/gerrit/$PROJECT
             name: CEPH_DISKS
             default: /srv
             description: "Disks to use by ceph (comma separated list)"
-
 - parameter:
     name: 'orange-pod2-defaults'
     parameters:
                 - orange-pod2
             default-slaves:
                 - orange-pod2
-        - string:
-            name: INSTALLER_VERSION
-            default: latest
-            description: 'Version of the installer to deploy'
         - string:
             name: GIT_BASE
             default: https://gerrit.opnfv.org/gerrit/$PROJECT
             name: EXTERNAL_NETWORK
             default: ext-net;flat;161.105.231.2;161.105.231.62;161.105.231.1;161.105.231.0/26
             description: "External network to create (name;type;first ip;last ip; gateway;network)"
-
 - parameter:
     name: 'orange-pod5-defaults'
     parameters:
                 - orange-pod5
             default-slaves:
                 - orange-pod5
-        - string:
-            name: INSTALLER_VERSION
-            default: latest
-            description: 'Version of the installer to deploy'
         - string:
             name: GIT_BASE
             default: https://gerrit.opnfv.org/gerrit/$PROJECT
             description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
-    name: 'ericsson-build-defaults'
-    parameters:
-        - label:
-            name: SLAVE_LABEL
-            default: 'ericsson-build'
-        - string:
-            name: GIT_BASE
-            default: https://gerrit.opnfv.org/gerrit/$PROJECT
-            description: 'Git URL to use on these Jenkins Slaves'
-
-- parameter:
-    name: 'intel-virtual2-defaults'
-    parameters:
-        - node:
-            name: SLAVE_NAME
-            description: 'Slave name on Jenkins'
-            allowed-slaves:
-                - intel-virtual2
-            default-slaves:
-                - intel-virtual2
-        - string:
-            name: INSTALLER_VERSION
-            default: latest
-            description: 'Version of the installer to deploy'
-        - string:
-            name: GIT_BASE
-            default: https://gerrit.opnfv.org/gerrit/$PROJECT
-            description: 'Git URL to use on this Jenkins Slave'
-        - string:
-            name: SSH_KEY
-            default: /root/.ssh/id_rsa
-            description: 'SSH key to use for Apex'
-
 - parameter:
     name: 'dell-pod1-defaults'
     parameters:
                 - dell-pod1
             default-slaves:
                 - dell-pod1
-        - string:
-            name: INSTALLER_VERSION
-            default: latest
-            description: 'Version of the installer to deploy'
         - string:
             name: GIT_BASE
             default: https://gerrit.opnfv.org/gerrit/$PROJECT
             description: 'Git URL to use on this Jenkins Slave'
-
 - parameter:
     name: 'dell-pod2-defaults'
     parameters:
                 - dell-pod2
             default-slaves:
                 - dell-pod2
-        - string:
-            name: INSTALLER_VERSION
-            default: latest
-            description: 'Version of the installer to deploy'
         - string:
             name: GIT_BASE
             default: https://gerrit.opnfv.org/gerrit/$PROJECT
             description: 'Git URL to use on this Jenkins Slave'
-
 - parameter:
     name: 'nokia-pod1-defaults'
     parameters:
                 - nokia-pod1
             default-slaves:
                 - nokia-pod1
-        - string:
-            name: INSTALLER_VERSION
-            default: stable
-            description: 'Version of the installer to deploy'
         - string:
             name: GIT_BASE
             default: https://gerrit.opnfv.org/gerrit/$PROJECT
             name: SSH_KEY
             default: /root/.ssh/id_rsa
             description: 'SSH key to use for Apex'
-
-- parameter:
-    name: 'arm-build1-defaults'
-    parameters:
-        - node:
-            name: SLAVE_NAME
-            description: 'Slave name on Jenkins'
-            allowed-slaves:
-                - arm-build1
-            default-slaves:
-                - arm-build1
-        - string:
-            name: GIT_BASE
-            default: https://gerrit.opnfv.org/gerrit/$PROJECT
-            description: 'Git URL to use on this Jenkins Slave'
-
 - parameter:
     name: 'arm-pod1-defaults'
     parameters:
             name: LAB_CONFIG_URL
             default: ssh://git@git.enea.com/pharos/lab-config
             description: 'Base URI to the configuration directory'
-
 - parameter:
     name: 'arm-pod2-defaults'
     parameters:
             name: LAB_CONFIG_URL
             default: ssh://git@git.enea.com/pharos/lab-config
             description: 'Base URI to the configuration directory'
-
 - parameter:
-    name: 'opnfv-build-centos-defaults'
+    name: 'intel-virtual6-defaults'
+    parameters:
+        - node:
+            name: SLAVE_NAME
+            description: 'Slave name on Jenkins'
+            allowed-slaves:
+                - intel-virtual6
+            default-slaves:
+                - intel-virtual6
+        - string:
+            name: GIT_BASE
+            default: https://gerrit.opnfv.org/gerrit/$PROJECT
+            description: 'Git URL to use on this Jenkins Slave'
+#####################################################
+# These are just dummy slaves used by the sandbox jobs
+#####################################################
+- parameter:
+    name: 'sandbox-baremetal-defaults'
     parameters:
         - label:
             name: SLAVE_LABEL
-            default: 'opnfv-build-centos'
+            default: 'sandbox-baremetal'
+            description: 'Slave label on Jenkins'
         - string:
             name: GIT_BASE
             default: https://gerrit.opnfv.org/gerrit/$PROJECT
             default: $WORKSPACE/build_output
             description: "Directory where the build artifact will be located upon the completion of the build."
 - parameter:
-    name: 'opnfv-build-ubuntu-defaults'
+    name: 'sandbox-virtual-defaults'
     parameters:
         - label:
             name: SLAVE_LABEL
-            default: 'opnfv-build-ubuntu'
+            default: 'sandbox-virtual'
+            description: 'Slave label on Jenkins'
+        - string:
+            name: GIT_BASE
+            default: https://gerrit.opnfv.org/gerrit/$PROJECT
+            description: 'Git URL to use on this Jenkins Slave'
+        - string:
+            name: BUILD_DIRECTORY
+            default: $WORKSPACE/build_output
+            description: "Directory where the build artifact will be located upon the completion of the build."
+- parameter:
+    name: 'dummy-pod1-defaults'
+    parameters:
+        - label:
+            name: SLAVE_LABEL
+            default: 'dummy-pod1'
             description: 'Slave label on Jenkins'
         - string:
             name: GIT_BASE
index b8a40cc..8c72838 100644 (file)
@@ -64,7 +64,8 @@
             set -o xtrace
             export PATH=$PATH:/usr/local/bin/
 
-            # pep8 check parser/tosca2heat/tosca-parser
-            echo "Running tox -e pep8 on tosca2heat ..."
-            cd $WORKSPACE/tosca2heat/tosca-parser && tox -e pep8
-            cd $WORKSPACE/tosca2heat/heat-translator && tox -e pep8
+            # ut and pep8 check parser/tosca2heat
+            echo "Running tox on tosca2heat/tosca-parser ..."
+            cd $WORKSPACE/tosca2heat/tosca-parser && tox
+            echo "Running tox on tosca2heat/heat-translator ..."
+            cd $WORKSPACE/tosca2heat/heat-translator && tox
index 6e9a20d..36f026d 100644 (file)
@@ -81,7 +81,7 @@
 
     publishers:
         - email:
-            recipients: nauman.ahad@xflowresearch.com, mofassir.arif@xflowresearch.com, vikram@nvirters.com
+            recipients: nauman.ahad@xflowresearch.com, mofassir.arif@xflowresearch.com, vikram@nvirters.com, zhang.yujunz@zte.com.cn
 
 ###########################
 #builder macros
index d8866bd..7733aba 100644 (file)
     triggers:
         - timed: ''
 
+- trigger:
+    name: 'weekly-trigger-disabled'
+    triggers:
+        - timed: ''
+
 - trigger:
     name: 'brahmaputra-trigger-daily-enabled'
     triggers:
             set -o errexit
             set -o xtrace
             export PATH=$PATH:/usr/local/bin/
-            git clone ssh://gerrit.opnfv.org:29418/releng
-            GERRIT_COMMENT=gerrit_comment.txt ./releng/utils/docs-build.sh
+            git clone ssh://gerrit.opnfv.org:29418/opnfvdocs docs_build/_opnfvdocs
+            GERRIT_COMMENT=gerrit_comment.txt ./docs_build/_opnfvdocs/scripts/docs-build.sh
 
 - builder:
     name: upload-under-review-docs-to-opnfv-artifacts
                 -type f -name "*.py" -print | \
                 xargs flake8 --exit-zero -qq --count 2>&1)"
 
+            # Ensure we start with a clean environment
+            rm -f lint.log
+
             if [ ! -z $FLAKE_COUNT ]; then
-              echo "Flake8 Violations: $FLAKE_COUNT" >> lint.log
+              echo "Flake8 Violations: $FLAKE_COUNT" > lint.log
               find . \
                   -path './releng_flake8' -prune -o \
                   -type f -name "*.py" -print | \
               cat violation.log >> lint.log
               sed -r -i '4,$s/^/ /g' lint.log
               rm violation.log
-            else
-              echo -e "Flake8 Violations: 0" > lint.log
             fi
 
             deactivate
             set -o pipefail
             set -o xtrace
             export PATH=$PATH:/usr/local/bin/
+
+            # If no violations were found, no lint log will exist.
             if [[ -e lint.log ]] ; then
                 echo -e "\nposting linting report to gerrit...\n"
+
                 cat lint.log
                 echo
+
                 ssh -p 29418 gerrit.opnfv.org \
                     "gerrit review -p $GERRIT_PROJECT \
                      -m \"$(cat lint.log)\" \
                      $GERRIT_PATCHSET_REVISION \
                      --notify NONE"
+
+                exit 1
             fi
 
 - builder:
diff --git a/jjb/sandbox/basic.sh b/jjb/sandbox/basic.sh
new file mode 100755 (executable)
index 0000000..3326855
--- /dev/null
@@ -0,0 +1,63 @@
+#!/bin/bash
+#set -o errexit
+#set -o nounset
+#set -o pipefail
+
+# get the job type
+# we only support verify, merge, daily and weekly jobs
+if [[ "$JOB_NAME" =~ (verify|merge|daily|weekly) ]]; then
+    JOB_TYPE=${BASH_REMATCH[0]}
+else
+    echo "Unable to determine job type!"
+    exit 1
+fi
+
+# do stuff differently based on the job type
+case "$JOB_TYPE" in
+    verify)
+        echo "Running as part of verify job"
+        ;;
+    merge)
+        echo "Running as part of merge job"
+        ;;
+    daily)
+        echo "Running as part of daily job"
+        ;;
+    weekly)
+        echo "Running as part of weekly job"
+        ;;
+    *)
+        echo "Job type $JOB_TYPE is not supported!"
+        exit 1
+esac
+
+# this just shows we can get the patch/commit information
+# no matter what job we are executed by
+cd $WORKSPACE
+echo
+echo "Commit Message is"
+echo "-------------------------------------"
+git log --format=%B -n 1 $(git rev-parse HEAD)
+echo "-------------------------------------"
+echo
+echo "Repo contents"
+echo "-------------------------------------"
+ls -al
+echo "-------------------------------------"
+echo
+echo "Changed files are"
+echo "-------------------------------------"
+git diff origin/master --name-only
+echo "-------------------------------------"
+echo
+echo "Change introduced"
+echo "-------------------------------------"
+git diff origin/master
+echo "-------------------------------------"
+echo
+echo "git show"
+echo "-------------------------------------"
+git show
+echo "-------------------------------------"
+
+sleep 60
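
As a minimal, hedged sketch of the job-type detection used by all of the sandbox scripts (the loop and job names below are illustrative examples, not part of the change itself), the JOB_NAME regex resolves like this:

    #!/bin/bash
    # Illustrative only: exercise the same regex the sandbox scripts use.
    for JOB_NAME in sandbox-verify-basic-master \
                    sandbox-merge-build-master \
                    sandbox-deploy-baremetal-daily-master \
                    some-unrelated-job; do
        if [[ "$JOB_NAME" =~ (verify|merge|daily|weekly) ]]; then
            echo "$JOB_NAME -> ${BASH_REMATCH[0]}"
        else
            echo "$JOB_NAME -> unable to determine job type"
        fi
    done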
diff --git a/jjb/sandbox/build.sh b/jjb/sandbox/build.sh
new file mode 100755 (executable)
index 0000000..3326855
--- /dev/null
@@ -0,0 +1,63 @@
+#!/bin/bash
+#set -o errexit
+#set -o nounset
+#set -o pipefail
+
+# get the job type
+# we only support verify, merge, daily and weekly jobs
+if [[ "$JOB_NAME" =~ (verify|merge|daily|weekly) ]]; then
+    JOB_TYPE=${BASH_REMATCH[0]}
+else
+    echo "Unable to determine job type!"
+    exit 1
+fi
+
+# do stuff differently based on the job type
+case "$JOB_TYPE" in
+    verify)
+        echo "Running as part of verify job"
+        ;;
+    merge)
+        echo "Running as part of merge job"
+        ;;
+    daily)
+        echo "Running as part of daily job"
+        ;;
+    weekly)
+        echo "Running as part of weekly job"
+        ;;
+    *)
+        echo "Job type $JOB_TYPE is not supported!"
+        exit 1
+esac
+
+# this just shows we can get the patch/commit information
+# no matter what job we are executed by
+cd $WORKSPACE
+echo
+echo "Commit Message is"
+echo "-------------------------------------"
+git log --format=%B -n 1 $(git rev-parse HEAD)
+echo "-------------------------------------"
+echo
+echo "Repo contents"
+echo "-------------------------------------"
+ls -al
+echo "-------------------------------------"
+echo
+echo "Changed files are"
+echo "-------------------------------------"
+git diff origin/master --name-only
+echo "-------------------------------------"
+echo
+echo "Change introduced"
+echo "-------------------------------------"
+git diff origin/master
+echo "-------------------------------------"
+echo
+echo "git show"
+echo "-------------------------------------"
+git show
+echo "-------------------------------------"
+
+sleep 60
diff --git a/jjb/sandbox/deploy.sh b/jjb/sandbox/deploy.sh
new file mode 100755 (executable)
index 0000000..3326855
--- /dev/null
@@ -0,0 +1,63 @@
+#!/bin/bash
+#set -o errexit
+#set -o nounset
+#set -o pipefail
+
+# get the job type
+# we only support verify, merge, daily and weekly jobs
+if [[ "$JOB_NAME" =~ (verify|merge|daily|weekly) ]]; then
+    JOB_TYPE=${BASH_REMATCH[0]}
+else
+    echo "Unable to determine job type!"
+    exit 1
+fi
+
+# do stuff differently based on the job type
+case "$JOB_TYPE" in
+    verify)
+        echo "Running as part of verify job"
+        ;;
+    merge)
+        echo "Running as part of merge job"
+        ;;
+    daily)
+        echo "Running as part of daily job"
+        ;;
+    weekly)
+        echo "Running as part of weekly job"
+        ;;
+    *)
+        echo "Job type $JOB_TYPE is not supported!"
+        exit 1
+esac
+
+# this just shows we can get the patch/commit information
+# no matter what job we are executed by
+cd $WORKSPACE
+echo
+echo "Commit Message is"
+echo "-------------------------------------"
+git log --format=%B -n 1 $(git rev-parse HEAD)
+echo "-------------------------------------"
+echo
+echo "Repo contents"
+echo "-------------------------------------"
+ls -al
+echo "-------------------------------------"
+echo
+echo "Changed files are"
+echo "-------------------------------------"
+git diff origin/master --name-only
+echo "-------------------------------------"
+echo
+echo "Change introduced"
+echo "-------------------------------------"
+git diff origin/master
+echo "-------------------------------------"
+echo
+echo "git show"
+echo "-------------------------------------"
+git show
+echo "-------------------------------------"
+
+sleep 60
diff --git a/jjb/sandbox/functest.sh b/jjb/sandbox/functest.sh
new file mode 100755 (executable)
index 0000000..2f9be27
--- /dev/null
@@ -0,0 +1,61 @@
+#!/bin/bash
+#set -o errexit
+#set -o nounset
+#set -o pipefail
+
+# get the job type
+# we only support verify, merge, daily and weekly jobs
+if [[ "$JOB_NAME" =~ (verify|merge|daily|weekly) ]]; then
+    JOB_TYPE=${BASH_REMATCH[0]}
+else
+    echo "Unable to determine job type!"
+    exit 1
+fi
+
+# do stuff differently based on the job type
+case "$JOB_TYPE" in
+    verify)
+        echo "Running as part of verify job"
+        ;;
+    merge)
+        echo "Running as part of merge job"
+        ;;
+    daily)
+        echo "Running as part of daily job"
+        ;;
+    weekly)
+        echo "Running as part of weekly job"
+        ;;
+    *)
+        echo "Job type $JOB_TYPE is not supported!"
+        exit 1
+esac
+
+# this just shows we can get the patch/commit information
+# no matter what job we are executed by
+cd $WORKSPACE
+echo
+echo "Commit Message is"
+echo "-------------------------------------"
+git log --format=%B -n 1 $(git rev-parse HEAD)
+echo "-------------------------------------"
+echo
+echo "Repo contents"
+echo "-------------------------------------"
+ls -al
+echo "-------------------------------------"
+echo
+echo "Changed files are"
+echo "-------------------------------------"
+git diff origin/master --name-only
+echo "-------------------------------------"
+echo
+echo "Change introduced"
+echo "-------------------------------------"
+git diff origin/master
+echo "-------------------------------------"
+echo
+echo "git show"
+echo "-------------------------------------"
+git show
+echo "-------------------------------------"
diff --git a/jjb/sandbox/merge.sh b/jjb/sandbox/merge.sh
new file mode 100755 (executable)
index 0000000..2f9be27
--- /dev/null
@@ -0,0 +1,61 @@
+#!/bin/bash
+#set -o errexit
+#set -o nounset
+#set -o pipefail
+
+# get the job type
+# we only support verify, merge, daily and weekly jobs
+if [[ "$JOB_NAME" =~ (verify|merge|daily|weekly) ]]; then
+    JOB_TYPE=${BASH_REMATCH[0]}
+else
+    echo "Unable to determine job type!"
+    exit 1
+fi
+
+# do stuff differently based on the job type
+case "$JOB_TYPE" in
+    verify)
+        echo "Running as part of verify job"
+        ;;
+    merge)
+        echo "Running as part of merge job"
+        ;;
+    daily)
+        echo "Running as part of daily job"
+        ;;
+    weekly)
+        echo "Running as part of weekly job"
+        ;;
+    *)
+        echo "Job type $JOB_TYPE is not supported!"
+        exit 1
+esac
+
+# this just shows we can get the patch/commit information
+# no matter what job we are executed by
+cd $WORKSPACE
+echo
+echo "Commit Message is"
+echo "-------------------------------------"
+git log --format=%B -n 1 $(git rev-parse HEAD)
+echo "-------------------------------------"
+echo
+echo "Repo contents"
+echo "-------------------------------------"
+ls -al
+echo "-------------------------------------"
+echo
+echo "Changed files are"
+echo "-------------------------------------"
+git diff origin/master --name-only
+echo "-------------------------------------"
+echo
+echo "Change introduced"
+echo "-------------------------------------"
+git diff origin/master
+echo "-------------------------------------"
+echo
+echo "git show"
+echo "-------------------------------------"
+git show
+echo "-------------------------------------"
diff --git a/jjb/sandbox/promote.sh b/jjb/sandbox/promote.sh
new file mode 100755 (executable)
index 0000000..2f9be27
--- /dev/null
@@ -0,0 +1,61 @@
+#!/bin/bash
+#set -o errexit
+#set -o nounset
+#set -o pipefail
+
+# get the job type
+# we only support verify, merge, daily and weekly jobs
+if [[ "$JOB_NAME" =~ (verify|merge|daily|weekly) ]]; then
+    JOB_TYPE=${BASH_REMATCH[0]}
+else
+    echo "Unable to determine job type!"
+    exit 1
+fi
+
+# do stuff differently based on the job type
+case "$JOB_TYPE" in
+    verify)
+        echo "Running as part of verify job"
+        ;;
+    merge)
+        echo "Running as part of merge job"
+        ;;
+    daily)
+        echo "Running as part of daily job"
+        ;;
+    weekly)
+        echo "Running as part of weekly job"
+        ;;
+    *)
+        echo "Job type $JOB_TYPE is not supported!"
+        exit 1
+esac
+
+# this just shows we can get the patch/commit information
+# no matter what job we are executed by
+cd $WORKSPACE
+echo
+echo "Commit Message is"
+echo "-------------------------------------"
+git log --format=%B -n 1 $(git rev-parse HEAD)
+echo "-------------------------------------"
+echo
+echo "Repo contents"
+echo "-------------------------------------"
+ls -al
+echo "-------------------------------------"
+echo
+echo "Changed files are"
+echo "-------------------------------------"
+git diff origin/master --name-only
+echo "-------------------------------------"
+echo
+echo "Change introduced"
+echo "-------------------------------------"
+git diff origin/master
+echo "-------------------------------------"
+echo
+echo "git show"
+echo "-------------------------------------"
+git show
+echo "-------------------------------------"
diff --git a/jjb/sandbox/sandbox-daily-jobs.yml b/jjb/sandbox/sandbox-daily-jobs.yml
new file mode 100644 (file)
index 0000000..fc7244e
--- /dev/null
@@ -0,0 +1,264 @@
+- project:
+    name: 'sandbox-daily-jobs'
+
+    project: 'sandbox'
+
+    installer: 'sandbox'
+
+#--------------------------------
+# BRANCH ANCHORS
+#--------------------------------
+    master: &master
+        stream: master
+        branch: '{stream}'
+        gs-pathname: ''
+#--------------------------------
+# POD, INSTALLER, AND BRANCH MAPPING
+#--------------------------------
+#        CI PODs
+#--------------------------------
+    pod:
+        - baremetal:
+            slave-label: sandbox-baremetal
+            <<: *master
+        - virtual:
+            slave-label: fuel-virtual
+            <<: *master
+#--------------------------------
+#        Non-CI PODs
+#--------------------------------
+        - dummy-pod1:
+            slave-label: dummy-pod1
+            <<: *master
+#--------------------------------
+#       scenarios
+#--------------------------------
+    scenario:
+        # HA scenarios
+        - 'os-nosdn-nofeature-ha':
+            auto-trigger-name: 'daily-trigger-disabled'
+        - 'os-odl_l2-nofeature-ha':
+            auto-trigger-name: 'daily-trigger-disabled'
+
+    jobs:
+        - 'sandbox-{scenario}-{pod}-daily-{stream}'
+        - 'sandbox-deploy-{pod}-daily-{stream}'
+        - 'yardstick-sandbox-{pod}-daily-{stream}'
+        - 'functest-sandbox-{pod}-daily-{stream}'
+
+########################
+# job templates
+########################
+- job-template:
+    name: 'sandbox-{scenario}-{pod}-daily-{stream}'
+
+    concurrent: false
+
+    properties:
+        - throttle:
+            enabled: true
+            max-total: 4
+            max-per-node: 1
+            option: 'project'
+        - build-blocker:
+            use-build-blocker: true
+            blocking-jobs:
+                - 'sandbox-os-.*?-{pod}-daily-{stream}'
+            block-level: 'NODE'
+
+    wrappers:
+        - build-name:
+            name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
+
+    triggers:
+        - '{auto-trigger-name}'
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+        - '{installer}-defaults'
+        - '{slave-label}-defaults'
+        - string:
+            name: DEPLOY_SCENARIO
+            default: '{scenario}'
+
+    builders:
+        - description-setter:
+            description: "Built on $NODE_NAME"
+        - trigger-builds:
+            - project: 'sandbox-deploy-{pod}-daily-{stream}'
+              current-parameters: false
+              predefined-parameters:
+                DEPLOY_SCENARIO={scenario}
+              same-node: true
+              block: true
+        - trigger-builds:
+            - project: 'yardstick-sandbox-{pod}-daily-{stream}'
+              current-parameters: false
+              predefined-parameters:
+                DEPLOY_SCENARIO={scenario}
+              same-node: true
+              block: true
+              block-thresholds:
+                build-step-failure-threshold: 'never'
+                failure-threshold: 'never'
+                unstable-threshold: 'FAILURE'
+        - trigger-builds:
+            - project: 'functest-sandbox-{pod}-daily-{stream}'
+              current-parameters: false
+              predefined-parameters:
+                DEPLOY_SCENARIO={scenario}
+              block: true
+              same-node: true
+              block-thresholds:
+                build-step-failure-threshold: 'never'
+                failure-threshold: 'never'
+                unstable-threshold: 'FAILURE'
+
+- job-template:
+    name: 'sandbox-deploy-{pod}-daily-{stream}'
+
+    concurrent: true
+
+    properties:
+        - throttle:
+            enabled: true
+            max-total: 4
+            max-per-node: 1
+            option: 'project'
+        - build-blocker:
+            use-build-blocker: true
+            blocking-jobs:
+                - 'fuel-deploy-{pod}-daily-{stream}'
+                - 'fuel-deploy-generic-daily-.*'
+            block-level: 'NODE'
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+        - '{installer}-defaults'
+        - '{slave-label}-defaults'
+        - string:
+            name: DEPLOY_SCENARIO
+            default: 'os-odl_l2-nofeature-ha'
+
+    scm:
+        - git-scm:
+            credentials-id: '{ssh-credentials}'
+            refspec: ''
+            branch: '{branch}'
+
+    wrappers:
+        - build-name:
+            name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
+
+    builders:
+        - description-setter:
+            description: "Built on $NODE_NAME"
+        - 'sandbox-deploy-daily-builder'
+
+- job-template:
+    name: 'yardstick-sandbox-{pod}-daily-{stream}'
+
+    concurrent: true
+
+    properties:
+        - throttle:
+            enabled: true
+            max-per-node: 1
+            option: 'project'
+
+    wrappers:
+        - build-name:
+            name: '$BUILD_NUMBER Suite: $YARDSTICK_SUITE_NAME Scenario: $DEPLOY_SCENARIO'
+        - timeout:
+            timeout: 400
+            abort: true
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+        - '{installer}-defaults'
+        - '{slave-label}-defaults':
+            installer: '{installer}'
+        - string:
+            name: DEPLOY_SCENARIO
+            default: 'os-odl_l2-nofeature-ha'
+
+    scm:
+        - git-scm:
+            credentials-id: '{ssh-credentials}'
+            refspec: ''
+            branch: '{branch}'
+
+    wrappers:
+        - build-name:
+            name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
+
+    builders:
+        - description-setter:
+            description: "Built on $NODE_NAME"
+        - 'yardstick-sandbox-daily-builder'
+
+- job-template:
+    name: 'functest-sandbox-{pod}-daily-{stream}'
+
+    concurrent: true
+
+    properties:
+        - throttle:
+            enabled: true
+            max-per-node: 1
+            option: 'project'
+
+    wrappers:
+        - build-name:
+            name: '$BUILD_NUMBER Suite: $FUNCTEST_SUITE_NAME Scenario: $DEPLOY_SCENARIO'
+        - timeout:
+            timeout: 400
+            abort: true
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+        - '{slave-label}-defaults'
+        - '{installer}-defaults'
+        - string:
+            name: DEPLOY_SCENARIO
+            default: 'os-odl_l2-nofeature-ha'
+
+    scm:
+        - git-scm:
+            credentials-id: '{ssh-credentials}'
+            refspec: ''
+            branch: '{branch}'
+
+    wrappers:
+        - build-name:
+            name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
+
+    builders:
+        - description-setter:
+            description: "Built on $NODE_NAME"
+        - 'functest-sandbox-daily-builder'
+
+#####################################
+# builder macros
+#####################################
+- builder:
+    name: 'sandbox-deploy-daily-builder'
+    builders:
+        - shell:
+            !include-raw: ./deploy.sh
+
+- builder:
+    name: 'functest-sandbox-daily-builder'
+    builders:
+        - shell:
+            !include-raw: ./functest.sh
+
+- builder:
+    name: 'yardstick-sandbox-daily-builder'
+    builders:
+        - shell:
+            !include-raw: ./yardstick.sh
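
For orientation, a rough sketch of how JJB expands one combination of the templates above (stream=master, pod=baremetal, scenario=os-nosdn-nofeature-ha); the names follow directly from the 'name:' fields and the trigger-builds chain:

    sandbox-os-nosdn-nofeature-ha-baremetal-daily-master
      -> sandbox-deploy-baremetal-daily-master      (blocking)
      -> yardstick-sandbox-baremetal-daily-master   (failures only mark the parent unstable)
      -> functest-sandbox-baremetal-daily-master    (failures only mark the parent unstable)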
diff --git a/jjb/sandbox/sandbox-merge-jobs.yml b/jjb/sandbox/sandbox-merge-jobs.yml
new file mode 100644 (file)
index 0000000..69fcb43
--- /dev/null
@@ -0,0 +1,159 @@
+- project:
+    name: 'sandbox-merge-jobs'
+
+    project: 'sandbox'
+
+    installer: 'sandbox'
+
+    stream:
+        - master:
+            branch: '{stream}'
+            gs-pathname: ''
+
+# what are the merge activities we do for this installer
+    activity:
+        - 'basic'
+        - 'build'
+        - 'promote'
+
+    jobs:
+        - 'sandbox-merge-{stream}'
+        - 'sandbox-merge-{activity}-{stream}'
+
+- job-template:
+    name: 'sandbox-merge-{stream}'
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+        - gerrit-parameter:
+            branch: '{branch}'
+        - 'opnfv-build-ubuntu-defaults'
+
+    wrappers:
+        - ssh-agent-credentials:
+            users:
+                - '{ssh-credentials}'
+        - timeout:
+            timeout: 360
+            fail: true
+
+    triggers:
+        - gerrit:
+            trigger-on:
+                - change-merged-event
+                - comment-added-contains-event:
+                    comment-contains-value: 'remerge'
+            projects:
+              - project-compare-type: 'ANT'
+                project-pattern: '{project}'
+                branches:
+                    - branch-compare-type: 'ANT'
+                      branch-pattern: '**/{branch}'
+                forbidden-file-paths:
+                  - compare-type: ANT
+                    pattern: 'docs/**'
+            readable-message: true
+
+    builders:
+        - description-setter:
+            description: "Built on $NODE_NAME"
+        - '{project}-merge-builder'
+        - trigger-builds:
+            - project: 'sandbox-merge-basic-{stream}'
+              current-parameters: false
+              predefined-parameters: |
+                GERRIT_BRANCH=$GERRIT_BRANCH
+                GERRIT_REFSPEC=$GERRIT_REFSPEC
+                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+              block: true
+        - trigger-builds:
+            - project: 'sandbox-merge-build-{stream}'
+              current-parameters: false
+              predefined-parameters: |
+                GERRIT_BRANCH=$GERRIT_BRANCH
+                GERRIT_REFSPEC=$GERRIT_REFSPEC
+                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+              block: true
+        - trigger-builds:
+            - project: 'sandbox-merge-promote-{stream}'
+              current-parameters: false
+              predefined-parameters: |
+                GERRIT_BRANCH=$GERRIT_BRANCH
+                GERRIT_REFSPEC=$GERRIT_REFSPEC
+                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+              block: true
+
+- job-template:
+    name: 'sandbox-merge-{activity}-{stream}'
+
+    scm:
+        - gerrit-trigger-scm:
+            credentials-id: '{ssh-credentials}'
+            refspec: ''
+            choosing-strategy: 'default'
+
+    wrappers:
+        - ssh-agent-credentials:
+            users:
+                - '{ssh-credentials}'
+        - timeout:
+            timeout: 360
+            fail: true
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+        - gerrit-parameter:
+            branch: '{branch}'
+        - '{installer}-defaults'
+        - '{project}-merge-{activity}-parameter'
+
+    builders:
+        - description-setter:
+            description: "Built on $NODE_NAME"
+        - '{project}-merge-{activity}-builder'
+
+#####################################
+# parameter macros
+#####################################
+- parameter:
+    name: 'sandbox-merge-basic-parameter'
+    parameters:
+        - 'opnfv-build-ubuntu-defaults'
+
+- parameter:
+    name: 'sandbox-merge-build-parameter'
+    parameters:
+        - 'opnfv-build-ubuntu-defaults'
+
+- parameter:
+    name: 'sandbox-merge-promote-parameter'
+    parameters:
+        - 'opnfv-build-centos-defaults'
+#####################################
+# builder macros
+#####################################
+- builder:
+    name: 'sandbox-merge-builder'
+    builders:
+        - shell:
+            !include-raw: ./merge.sh
+
+- builder:
+    name: 'sandbox-merge-basic-builder'
+    builders:
+        - shell:
+            !include-raw: ./basic.sh
+
+- builder:
+    name: 'sandbox-merge-build-builder'
+    builders:
+        - shell:
+            !include-raw: ./build.sh
+
+- builder:
+    name: 'sandbox-merge-promote-builder'
+    builders:
+        - shell:
+            !include-raw: ./promote.sh
diff --git a/jjb/sandbox/sandbox-verify-jobs.yml b/jjb/sandbox/sandbox-verify-jobs.yml
new file mode 100644 (file)
index 0000000..e0bc218
--- /dev/null
@@ -0,0 +1,234 @@
+- project:
+    name: 'sandbox-verify-jobs'
+
+    project: 'sandbox'
+
+    installer: 'sandbox'
+#####################################
+# branch definitions
+#####################################
+    stream:
+        - master:
+            branch: '{stream}'
+            gs-pathname: ''
+            disabled: false
+        - colorado:
+            branch: 'stable/{stream}'
+            gs-pathname: '/{stream}'
+            disabled: true
+#####################################
+# patch verification phases
+#####################################
+    phase:
+        - 'basic':
+            slave-label: 'opnfv-build-ubuntu'
+        - 'build':
+            slave-label: 'opnfv-build-ubuntu'
+        - 'deploy-virtual':
+            slave-label: 'sandbox-virtual'
+        - 'smoke-test':
+            slave-label: 'sandbox-virtual'
+#####################################
+# jobs
+#####################################
+    jobs:
+        - 'sandbox-verify-{stream}'
+        - 'sandbox-verify-{phase}-{stream}'
+#####################################
+# job templates
+#####################################
+- job-template:
+    name: 'sandbox-verify-{stream}'
+
+    project-type: multijob
+
+    disabled: '{obj:disabled}'
+
+    concurrent: true
+
+    properties:
+        - throttle:
+            enabled: true
+            max-total: 2
+            max-per-node: 1
+            option: 'project'
+        - build-blocker:
+            use-build-blocker: true
+            blocking-jobs:
+                - 'sandbox-verify-master'
+                - 'sandbox-verify-colorado'
+            block-level: 'NODE'
+
+    scm:
+        - gerrit-trigger-scm:
+            credentials-id: '{ssh-credentials}'
+            refspec: '$GERRIT_REFSPEC'
+            choosing-strategy: 'gerrit'
+
+    wrappers:
+        - ssh-agent-credentials:
+            users:
+                - '{ssh-credentials}'
+        - timeout:
+            timeout: 360
+            fail: true
+
+    triggers:
+        - gerrit:
+            trigger-on:
+                - patchset-created-event:
+                    exclude-drafts: 'false'
+                    exclude-trivial-rebase: 'false'
+                    exclude-no-code-change: 'false'
+                - draft-published-event
+                - comment-added-contains-event:
+                    comment-contains-value: 'recheck'
+                - comment-added-contains-event:
+                    comment-contains-value: 'reverify'
+            projects:
+              - project-compare-type: 'ANT'
+                project-pattern: '{project}'
+                branches:
+                  - branch-compare-type: 'ANT'
+                    branch-pattern: '**/{branch}'
+                forbidden-file-paths:
+                  - compare-type: ANT
+                    pattern: 'docs/**|.gitignore'
+            readable-message: true
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+        - gerrit-parameter:
+            branch: '{branch}'
+        - 'sandbox-virtual-defaults'
+
+    builders:
+        - description-setter:
+            description: "Built on $NODE_NAME"
+        - multijob:
+            name: basic
+            condition: SUCCESSFUL
+            projects:
+                - name: 'sandbox-verify-basic-{stream}'
+                  current-parameters: false
+                  predefined-parameters: |
+                    GERRIT_BRANCH=$GERRIT_BRANCH
+                    GERRIT_REFSPEC=$GERRIT_REFSPEC
+                    GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+                    GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+                  kill-phase-on: FAILURE
+                  abort-all-job: true
+        - multijob:
+            name: build
+            condition: SUCCESSFUL
+            projects:
+                - name: 'sandbox-verify-build-{stream}'
+                  current-parameters: false
+                  predefined-parameters: |
+                    GERRIT_BRANCH=$GERRIT_BRANCH
+                    GERRIT_REFSPEC=$GERRIT_REFSPEC
+                    GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+                    GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+                  kill-phase-on: FAILURE
+                  abort-all-job: true
+        - multijob:
+            name: deploy-virtual
+            condition: SUCCESSFUL
+            projects:
+                - name: 'sandbox-verify-deploy-virtual-{stream}'
+                  current-parameters: false
+                  predefined-parameters: |
+                    GERRIT_BRANCH=$GERRIT_BRANCH
+                    GERRIT_REFSPEC=$GERRIT_REFSPEC
+                    GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+                    GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+                  node-parameters: true
+                  kill-phase-on: FAILURE
+                  abort-all-job: true
+        - multijob:
+            name: smoke-test
+            condition: SUCCESSFUL
+            projects:
+                - name: 'sandbox-verify-smoke-test-{stream}'
+                  current-parameters: false
+                  predefined-parameters: |
+                    GERRIT_BRANCH=$GERRIT_BRANCH
+                    GERRIT_REFSPEC=$GERRIT_REFSPEC
+                    GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+                    GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+                  node-parameters: true
+                  kill-phase-on: FAILURE
+                  abort-all-job: true
+
+- job-template:
+    name: 'sandbox-verify-{phase}-{stream}'
+
+    disabled: '{obj:disabled}'
+
+    concurrent: true
+
+    properties:
+        - throttle:
+            enabled: true
+            max-total: 2
+            max-per-node: 1
+            option: 'project'
+        - build-blocker:
+            use-build-blocker: true
+            blocking-jobs:
+                - 'sandbox-verify-deploy-.*'
+                - 'sandbox-verify-test-.*'
+            block-level: 'NODE'
+
+    scm:
+        - gerrit-trigger-scm:
+            credentials-id: '{ssh-credentials}'
+            refspec: '$GERRIT_REFSPEC'
+            choosing-strategy: 'gerrit'
+
+    wrappers:
+        - ssh-agent-credentials:
+            users:
+                - '{ssh-credentials}'
+        - timeout:
+            timeout: 360
+            fail: true
+    parameters:
+        - project-parameter:
+            project: '{project}'
+        - gerrit-parameter:
+            branch: '{branch}'
+        - '{installer}-defaults'
+        - '{slave-label}-defaults'
+
+    builders:
+        - description-setter:
+            description: "Built on $NODE_NAME"
+        - '{project}-verify-{phase}-macro'
+#####################################
+# builder macros
+#####################################
+- builder:
+    name: 'sandbox-verify-basic-macro'
+    builders:
+        - shell:
+            !include-raw: ./basic.sh
+
+- builder:
+    name: 'sandbox-verify-build-macro'
+    builders:
+        - shell:
+            !include-raw: ./build.sh
+
+- builder:
+    name: 'sandbox-verify-deploy-virtual-macro'
+    builders:
+        - shell:
+            !include-raw: ./deploy.sh
+
+- builder:
+    name: 'sandbox-verify-smoke-test-macro'
+    builders:
+        - shell:
+            !include-raw: ./test.sh
diff --git a/jjb/sandbox/sandbox-weekly-jobs.yml b/jjb/sandbox/sandbox-weekly-jobs.yml
new file mode 100644 (file)
index 0000000..52f8529
--- /dev/null
@@ -0,0 +1,264 @@
+- project:
+    name: 'sandbox-weekly-jobs'
+
+    project: 'sandbox'
+
+    installer: 'sandbox'
+
+#--------------------------------
+# BRANCH ANCHORS
+#--------------------------------
+    master: &master
+        stream: master
+        branch: '{stream}'
+        gs-pathname: ''
+#--------------------------------
+# POD, INSTALLER, AND BRANCH MAPPING
+#--------------------------------
+#        CI PODs
+#--------------------------------
+    pod:
+        - baremetal:
+            slave-label: sandbox-baremetal
+            <<: *master
+        - virtual:
+            slave-label: fuel-virtual
+            <<: *master
+#--------------------------------
+#        Non-CI PODs
+#--------------------------------
+        - dummy-pod1:
+            slave-label: dummy-pod1
+            <<: *master
+#--------------------------------
+#       scenarios
+#--------------------------------
+    scenario:
+        # HA scenarios
+        - 'os-nosdn-nofeature-ha':
+            auto-trigger-name: 'weekly-trigger-disabled'
+        - 'os-odl_l2-nofeature-ha':
+            auto-trigger-name: 'weekly-trigger-disabled'
+
+    jobs:
+        - 'sandbox-{scenario}-{pod}-weekly-{stream}'
+        - 'sandbox-deploy-{pod}-weekly-{stream}'
+        - 'yardstick-sandbox-{pod}-weekly-{stream}'
+        - 'functest-sandbox-{pod}-weekly-{stream}'
+
+########################
+# job templates
+########################
+- job-template:
+    name: 'sandbox-{scenario}-{pod}-weekly-{stream}'
+
+    concurrent: false
+
+    properties:
+        - throttle:
+            enabled: true
+            max-total: 4
+            max-per-node: 1
+            option: 'project'
+        - build-blocker:
+            use-build-blocker: true
+            blocking-jobs:
+                - 'sandbox-os-.*?-{pod}-weekly-{stream}'
+            block-level: 'NODE'
+
+    wrappers:
+        - build-name:
+            name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
+
+    triggers:
+        - '{auto-trigger-name}'
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+        - '{installer}-defaults'
+        - '{slave-label}-defaults'
+        - string:
+            name: DEPLOY_SCENARIO
+            default: '{scenario}'
+
+    builders:
+        - description-setter:
+            description: "Built on $NODE_NAME"
+        - trigger-builds:
+            - project: 'sandbox-deploy-{pod}-weekly-{stream}'
+              current-parameters: false
+              predefined-parameters:
+                DEPLOY_SCENARIO={scenario}
+              same-node: true
+              block: true
+        - trigger-builds:
+            - project: 'yardstick-sandbox-{pod}-weekly-{stream}'
+              current-parameters: false
+              predefined-parameters:
+                DEPLOY_SCENARIO={scenario}
+              same-node: true
+              block: true
+              block-thresholds:
+                build-step-failure-threshold: 'never'
+                failure-threshold: 'never'
+                unstable-threshold: 'FAILURE'
+        - trigger-builds:
+            - project: 'functest-sandbox-{pod}-weekly-{stream}'
+              current-parameters: false
+              predefined-parameters:
+                DEPLOY_SCENARIO={scenario}
+              block: true
+              same-node: true
+              block-thresholds:
+                build-step-failure-threshold: 'never'
+                failure-threshold: 'never'
+                unstable-threshold: 'FAILURE'
+
+- job-template:
+    name: 'sandbox-deploy-{pod}-weekly-{stream}'
+
+    concurrent: true
+
+    properties:
+        - throttle:
+            enabled: true
+            max-total: 4
+            max-per-node: 1
+            option: 'project'
+        - build-blocker:
+            use-build-blocker: true
+            blocking-jobs:
+                - 'fuel-deploy-{pod}-weekly-{stream}'
+                - 'fuel-deploy-generic-weekly-.*'
+            block-level: 'NODE'
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+        - '{installer}-defaults'
+        - '{slave-label}-defaults'
+        - string:
+            name: DEPLOY_SCENARIO
+            default: 'os-odl_l2-nofeature-ha'
+
+    scm:
+        - git-scm:
+            credentials-id: '{ssh-credentials}'
+            refspec: ''
+            branch: '{branch}'
+
+    wrappers:
+        - build-name:
+            name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
+
+    builders:
+        - description-setter:
+            description: "Built on $NODE_NAME"
+        - 'sandbox-deploy-weekly-builder'
+
+- job-template:
+    name: 'yardstick-sandbox-{pod}-weekly-{stream}'
+
+    concurrent: true
+
+    properties:
+        - throttle:
+            enabled: true
+            max-per-node: 1
+            option: 'project'
+
+    wrappers:
+        - build-name:
+            name: '$BUILD_NUMBER Suite: $YARDSTICK_SUITE_NAME Scenario: $DEPLOY_SCENARIO'
+        - timeout:
+            timeout: 400
+            abort: true
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+        - '{installer}-defaults'
+        - '{slave-label}-defaults':
+            installer: '{installer}'
+        - string:
+            name: DEPLOY_SCENARIO
+            default: 'os-odl_l2-nofeature-ha'
+
+    scm:
+        - git-scm:
+            credentials-id: '{ssh-credentials}'
+            refspec: ''
+            branch: '{branch}'
+
+    wrappers:
+        - build-name:
+            name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
+
+    builders:
+        - description-setter:
+            description: "Built on $NODE_NAME"
+        - 'yardstick-sandbox-weekly-builder'
+
+- job-template:
+    name: 'functest-sandbox-{pod}-weekly-{stream}'
+
+    concurrent: true
+
+    properties:
+        - throttle:
+            enabled: true
+            max-per-node: 1
+            option: 'project'
+
+    wrappers:
+        - build-name:
+            name: '$BUILD_NUMBER Suite: $FUNCTEST_SUITE_NAME Scenario: $DEPLOY_SCENARIO'
+        - timeout:
+            timeout: 400
+            abort: true
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+        - '{slave-label}-defaults'
+        - '{installer}-defaults'
+        - string:
+            name: DEPLOY_SCENARIO
+            default: 'os-odl_l2-nofeature-ha'
+
+    scm:
+        - git-scm:
+            credentials-id: '{ssh-credentials}'
+            refspec: ''
+            branch: '{branch}'
+
+    wrappers:
+        - build-name:
+            name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
+
+    builders:
+        - description-setter:
+            description: "Built on $NODE_NAME"
+        - 'functest-sandbox-weekly-builder'
+
+#####################################
+# builder macros
+#####################################
+- builder:
+    name: 'sandbox-deploy-weekly-builder'
+    builders:
+        - shell:
+            !include-raw: ./deploy.sh
+
+- builder:
+    name: 'functest-sandbox-weekly-builder'
+    builders:
+        - shell:
+            !include-raw: ./functest.sh
+
+- builder:
+    name: 'yardstick-sandbox-weekly-builder'
+    builders:
+        - shell:
+            !include-raw: ./yardstick.sh
diff --git a/jjb/sandbox/test.sh b/jjb/sandbox/test.sh
new file mode 100755 (executable)
index 0000000..3326855
--- /dev/null
@@ -0,0 +1,63 @@
+#!/bin/bash
+#set -o errexit
+#set -o nounset
+#set -o pipefail
+
+# get the job type
+# we only support verify, merge, daily and weekly jobs
+if [[ "$JOB_NAME" =~ (verify|merge|daily|weekly) ]]; then
+    JOB_TYPE=${BASH_REMATCH[0]}
+else
+    echo "Unable to determine job type!"
+    exit 1
+fi
+
+# do stuff differently based on the job type
+case "$JOB_TYPE" in
+    verify)
+        echo "Running as part of verify job"
+        ;;
+    merge)
+        echo "Running as part of merge job"
+        ;;
+    daily)
+        echo "Running as part of daily job"
+        ;;
+    weekly)
+        echo "Running as part of weekly job"
+        ;;
+    *)
+        echo "Job type $JOB_TYPE is not supported!"
+        exit 1
+esac
+
+# this just shows we can get the patch/commit information
+# no matter what job we are executed by
+cd $WORKSPACE
+echo
+echo "Commit Message is"
+echo "-------------------------------------"
+git log --format=%B -n 1 $(git rev-parse HEAD)
+echo "-------------------------------------"
+echo
+echo "Repo contents"
+echo "-------------------------------------"
+ls -al
+echo "-------------------------------------"
+echo
+echo "Changed files are"
+echo "-------------------------------------"
+git diff origin/master --name-only
+echo "-------------------------------------"
+echo
+echo "Change introduced"
+echo "-------------------------------------"
+git diff origin/master
+echo "-------------------------------------"
+echo
+echo "git show"
+echo "-------------------------------------"
+git show
+echo "-------------------------------------"
+
+sleep 60
diff --git a/jjb/sandbox/yardstick.sh b/jjb/sandbox/yardstick.sh
new file mode 100755 (executable)
index 0000000..2f9be27
--- /dev/null
@@ -0,0 +1,61 @@
+#!/bin/bash
+#set -o errexit
+#set -o nounset
+#set -o pipefail
+
+# get the job type
+# we only support verify, merge, daily and weekly jobs
+if [[ "$JOB_NAME" =~ (verify|merge|daily|weekly) ]]; then
+    JOB_TYPE=${BASH_REMATCH[0]}
+else
+    echo "Unable to determine job type!"
+    exit 1
+fi
+
+# do stuff differently based on the job type
+case "$JOB_TYPE" in
+    verify)
+        echo "Running as part of verify job"
+        ;;
+    merge)
+        echo "Running as part of merge job"
+        ;;
+    daily)
+        echo "Running as part of daily job"
+        ;;
+    weekly)
+        echo "Running as part of weekly job"
+        ;;
+    *)
+        echo "Job type $JOB_TYPE is not supported!"
+        exit 1
+esac
+
+# this just shows we can get the patch/commit information
+# no matter what job we are executed by
+cd $WORKSPACE
+echo
+echo "Commit Message is"
+echo "-------------------------------------"
+git log --format=%B -n 1 $(git rev-parse HEAD)
+echo "-------------------------------------"
+echo
+echo "Repo contents"
+echo "-------------------------------------"
+ls -al
+echo "-------------------------------------"
+echo
+echo "Changed files are"
+echo "-------------------------------------"
+git diff origin/master --name-only
+echo "-------------------------------------"
+echo
+echo "Change introduced"
+echo "-------------------------------------"
+git diff origin/master
+echo "-------------------------------------"
+echo
+echo "git show"
+echo "-------------------------------------"
+git show
+echo "-------------------------------------"
index 8b8ced1..6b92288 100644 (file)
             installer: fuel
             auto-trigger-name: 'daily-trigger-disabled'
             <<: *brahmaputra
-
-# just in case if things go wrong
-        - lf-pod2:
-            slave-label: '{pod}'
-            installer: fuel
-            auto-trigger-name: 'daily-trigger-disabled'
-            <<: *master
-
 # joid CI PODs
         - baremetal:
             slave-label: joid-baremetal
             default: '{docker-tag}'
             description: 'Tag to pull docker image'
         - string:
-            name: YARDSTICK_SUITE_NAME
-            default: opnfv_${{NODE_NAME}}_{testsuite}.yaml
-            description: 'Path to test suite'
+            name: YARDSTICK_SCENARIO_SUITE_NAME
+            default: opnfv_${{DEPLOY_SCENARIO}}_{testsuite}.yaml
+            description: 'Path to test scenario suite'
         - string:
             name: CI_DEBUG
             default: 'false'
index 176f1b9..e8df9be 100755 (executable)
@@ -33,7 +33,7 @@ docker pull opnfv/yardstick:$DOCKER_TAG >$redirect
 
 # Run docker
 cmd="sudo docker run ${opts} ${envs} ${labconfig} ${sshkey} opnfv/yardstick \
-    exec_tests.sh ${YARDSTICK_DB_BACKEND} ${YARDSTICK_SUITE_NAME}"
+    exec_tests.sh ${YARDSTICK_DB_BACKEND} ${YARDSTICK_SCENARIO_SUITE_NAME}"
 echo "Yardstick: Running docker cmd: ${cmd}"
 ${cmd}
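
As an illustration of the renamed suite variable (values hypothetical: DEPLOY_SCENARIO=os-nosdn-nofeature-ha, testsuite=daily; the DB backend is left symbolic), the suite name and the container entry point resolve roughly to:

    DEPLOY_SCENARIO=os-nosdn-nofeature-ha
    YARDSTICK_SCENARIO_SUITE_NAME="opnfv_${DEPLOY_SCENARIO}_daily.yaml"
    echo "${YARDSTICK_SCENARIO_SUITE_NAME}"
    # -> opnfv_os-nosdn-nofeature-ha_daily.yaml
    # inside the container this becomes:
    #   exec_tests.sh <YARDSTICK_DB_BACKEND> opnfv_os-nosdn-nofeature-ha_daily.yaml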
 
index c6f3173..7656c92 100644 (file)
@@ -31,7 +31,7 @@
             project: '{project}'
         - gerrit-parameter:
             branch: '{branch}'
-        - 'ericsson-build-defaults'
+        - 'opnfv-build-ubuntu-defaults'
 
     scm:
         - gerrit-trigger-scm:
@@ -70,7 +70,7 @@
             project: '{project}'
         - gerrit-parameter:
             branch: '{branch}'
-        - 'ericsson-build-defaults'
+        - 'opnfv-build-ubuntu-defaults'
         - string:
             name: GS_URL
             default: '$GS_BASE{gs-pathname}'
             sphinx-apidoc -o docs/apidocs yardstick
 
             # build docs
-            git clone ssh://gerrit.opnfv.org:29418/releng
-            GERRIT_COMMENT=gerrit_comment.txt ./releng/utils/docs-build.sh
+            git clone ssh://gerrit.opnfv.org:29418/opnfvdocs docs_build/_opnfvdocs
+            GERRIT_COMMENT=gerrit_comment.txt ./docs_build/_opnfvdocs/scripts/docs-build.sh
 
             deactivate
diff --git a/utils/docs-build.sh b/utils/docs-build.sh
deleted file mode 100755 (executable)
index 48037db..0000000
+++ /dev/null
@@ -1,234 +0,0 @@
-#!/bin/bash -e
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2016 NEC and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-export PATH=$PATH:/usr/local/bin/
-
-DOCS_DIR=${DOCS_DIR:-docs}
-INDEX_RST=${INDEX_RST:-index.rst}
-BUILD_DIR=${BUILD_DIR:-docs_build}
-OUTPUT_DIR=${OUTPUT_DIR:-docs_output}
-SRC_DIR=${SRC_DIR:-$BUILD_DIR/_src}
-VENV_DIR=${VENV_DIR:-$BUILD_DIR/_venv}
-RELENG_DIR=${RELENG_DIR:-releng}
-GERRIT_COMMENT=${GERRIT_COMMENT:-}
-
-get_title_script="
-import os
-from docutils import core, nodes
-
-try:
-    with open('index.rst', 'r') as file:
-        data = file.read()
-    doctree = core.publish_doctree(data,
-        settings_overrides={'report_level': 5,
-                            'halt_level': 5})
-    if isinstance(doctree[0], nodes.title):
-        title = doctree[0]
-    else:
-        for c in doctree.children:
-            if isinstance(c, nodes.section):
-                title = c[0]
-                break
-    print title.astext()
-except:
-    print 'None'"
-revision="$(git rev-parse --short HEAD)"
-rev_full="$(git rev-parse HEAD)"
-version="$(git describe --abbrev=0 2> /dev/null || echo draft) ($revision)"
-project="$(basename $(git rev-parse --show-toplevel))"
-html_notes="    Revision: $rev_full\n    Build date: $(date -u +'%Y-%m-%d')"
-default_conf='releng/docs/etc/conf.py'
-opnfv_logo='releng/docs/etc/opnfv-logo.png'
-
-function check_rst_doc() {
-    _src="$1"
-
-    # Note: This check may fail in many jobs for building project docs, since
-    #       the old sample has lines more than 120. We ignore failures on this
-    #       check right now, but these have to be fixed before OPNFV B release.
-    _out=$(doc8 --max-line-length 240 --ignore D000 "$_src") || {
-        _msg='Warning: rst validation (doc8) has failed, please fix the following error(s).'
-        _errs=$(echo "$_out" | sed -n -e "/^$_src/s/^/    /p")
-        echo
-        echo -e "$_msg\n$_errs"
-        echo
-        [[ -n "$GERRIT_COMMENT" ]] && echo -e "$_msg\n$_errs" >> "$GERRIT_COMMENT"
-    }
-}
-
-function add_html_notes() {
-    _src="$1"
-
-    find "$_src" -name '*.rst' | while read file
-    do
-        if grep -q -e ' _sha1_' "$file" ; then
-            # TODO: remove this, once old templates were removed from all repos.
-            echo
-            echo "Warn: '_sha1_' was found in [$file], use the latest document template."
-            echo "      See https://wiki.opnfv.org/documentation/tools ."
-            echo
-            sed -i "s/ _sha1_/ $git_sha1/g" "$file"
-        fi
-        sed -i -e "\$a\\\n..\n$html_notes" "$file"
-    done
-}
-
-function prepare_src_files() {
-    mkdir -p "$(dirname $SRC_DIR)"
-
-    [[ -e "$SRC_DIR" ]] && rm -rf "$SRC_DIR"
-    cp -r "$DOCS_DIR" "$SRC_DIR"
-    add_html_notes "$SRC_DIR"
-}
-
-function add_config() {
-    _conf="$1"
-    _param="$2"
-    _val="$3"
-
-    if ! grep -q -e "^$_param = " "$_conf" ; then
-        echo "Adding '$_param' into $_conf ..."
-        echo "$_param = $_val" >> "$_conf"
-    fi
-}
-
-function is_top_dir() {
-    [[ "$1" == "$DOCS_DIR" ]]
-}
-
-function generate_name_for_top_dir() {
-    for suffix in '' '.top' '.all' '.master' '_' '__' '___'
-    do
-        _name="$(basename $DOCS_DIR)$suffix"
-        [[ -e "$DOCS_DIR/$_name" ]] && continue
-        echo "$_name"
-        return
-    done
-
-    echo "Error: cannot find name for top directory [$DOCS_DIR]"
-    exit 1
-}
-
-function generate_name() {
-    _dir=$1
-
-    if is_top_dir "$_dir" ; then
-        _name=$(generate_name_for_top_dir $DOCS_DIR)
-    else
-        _name="${_dir#$DOCS_DIR/}"
-    fi
-    # Replace '/' by '_'
-    echo "${_name////_}"
-}
-
-
-check_rst_doc $DOCS_DIR
-
-if [[ ! -d "$RELENG_DIR" ]] ; then
-    echo "Error: $RELENG_DIR dir not found. See https://wiki.opnfv.org/documentation/tools ."
-    exit 1
-fi
-
-prepare_src_files
-
-if ! which virtualenv > /dev/null ; then
-    echo "Error: 'virtualenv' not found. Exec 'sudo pip install virtualenv' first."
-    exit 1
-fi
-
-virtualenv "$VENV_DIR"
-source "$VENV_DIR/bin/activate"
-pip install -r "$RELENG_DIR/docs/etc/requirements.txt"
-
-find $DOCS_DIR -name $INDEX_RST -printf '%h\n' | while read dir
-do
-    name=$(generate_name $dir)
-    if is_top_dir "$dir" ; then
-        src="$SRC_DIR"
-    else
-        src="$SRC_DIR/${dir#$DOCS_DIR/}"
-    fi
-    build="$BUILD_DIR/$name"
-    output="$OUTPUT_DIR/$name"
-    conf="$src/conf.py"
-
-    echo
-    echo "#################${dir//?/#}"
-    echo "Building DOCS in $dir"
-    echo "#################${dir//?/#}"
-    echo
-
-    [[ ! -f "$conf" ]] && cp "$default_conf" "$conf"
-    title=$(cd $src; python -c "$get_title_script")
-    latex_conf="[('index', '$name.tex', \"$title\", 'OPNFV', 'manual'),]"
-    add_config "$conf" 'latex_documents' "$latex_conf"
-    add_config "$conf" 'release' "u'$version'"
-    add_config "$conf" 'version' "u'$version'"
-    add_config "$conf" 'project' "u'$project'"
-    add_config "$conf" 'copyright' "u'$(date +%Y), OPNFV'"
-    cp -f $opnfv_logo "$src/opnfv-logo.png"
-
-    mkdir -p "$output"
-
-    sphinx-build -b html -t html -E "$src" "$output"
-
-    # Note: PDF creation may fail in project doc builds.
-    #       We allow this build job to be marked as succeeded with
-    #       failure in PDF creation, but leave message to fix it.
-    #       Any failure has to be fixed before OPNFV B release.
-    {
-        sphinx-build -b latex -t pdf -E "$src" "$build" && \
-            make -C "$build" LATEXOPTS='--interaction=nonstopmode' all-pdf
-    } && {
-        mv "$build/$name.pdf" "$output"
-    } || {
-        msg="Error: PDF creation for $dir has failed, please fix source rst file(s)."
-        echo
-        echo "$msg"
-        echo
-        [[ -n "$GERRIT_COMMENT" ]] && echo "$msg" >> "$GERRIT_COMMENT"
-    }
-
-    # TODO: failures in ODT creation should be handled error and
-    #       cause 'exit 1' before OPNFV B release.
-    tex=$(find $build -name '*.tex' | head -1)
-    odt="${tex%.tex}.odt"
-    if [[ -e $tex ]] && which pandoc > /dev/null ; then
-        (
-            cd $(dirname $tex)
-            pandoc $(basename $tex) -o $(basename $odt)
-        ) && {
-            mv $odt $output/
-        }|| {
-            msg="Error: ODT creation for $dir has failed."
-            echo
-            echo "$msg"
-            echo
-        }
-    else
-        echo "Warn: tex file and/or 'pandoc' are not found, skip ODT creation."
-    fi
-
-    if is_top_dir "$dir" ; then
-        # NOTE: Having top level document (docs/index.rst) is not recommended.
-        #       It may cause conflicts with other docs (mostly with HTML
-        #       folders for contents in top level docs and other document
-        #       folders). But, let's try merge of those contents into the top
-        #       docs directory.
-        (
-            cd $output
-            find . -type d -print | xargs -I d mkdir -p ../d
-            find . -type f -print | xargs -I f mv -b f ../f
-        )
-        rm -rf "$output"
-    fi
-
-done
-
-deactivate
old mode 100644 (file)
new mode 100755 (executable)
index 3afeda8..bb11f0d
@@ -7,15 +7,32 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
-
 function isinstalled {
-if rpm -q "$@" >/dev/null 2>&1; then
-  true
-    else
-      echo installing "$1"
-      sudo yum install "$1"
-  false
+
+source /etc/os-release; echo ${ID/*, /}
+
+if [[ ${ID/*, /} =~ "centos" ]]; then
+  if rpm -q "$@" >/dev/null 2>&1; then
+    true
+  else
+    echo installing "$1"
+    sudo yum install -y "$1"
+    false
+  fi
+
+elif [[ ${ID/*, /} =~ "ubuntu" ]]; then
+  if dpkg-query -W -f'${Status}' "$@" 2>/dev/null | grep -q "ok installed"; then
+    true
+  else
+    echo installing "$1"
+    sudo apt-get install -y "$1"
+    false
+  fi
+else
+  echo "Distro not supported"
+  exit 0
 fi
+
 }
 
 if ! isinstalled gnupg2; then
@@ -40,3 +57,4 @@ else
      rm -f "$NODE_NAME"-subkey
    fi
 fi
+
index 8c41620..d268a28 100755 (executable)
@@ -48,6 +48,14 @@ main () {
         exit 1
     fi
 
+    if [[ $(whoami) != "root" ]]; then
+      if grep -q "^Defaults requiretty" /etc/sudoers; then
+        echo "please comment out 'Defaults requiretty' in /etc/sudoers"
+        exit 1
+      fi
+    fi
+
+
     if [ -d /etc/monit/conf.d ]; then
         monitconfdir="/etc/monit/conf.d/"
     elif [ -d /etc/monit.d ]; then
@@ -87,7 +95,7 @@ main () {
         echo "Writing the following as monit config:"
         cat << EOF | tee $monitconfdir/jenkins
 check process jenkins with pidfile /var/run/$jenkinsuser/jenkins_jnlp_pid
-start program = "/usr/bin/sudo -u $jenkinsuser /bin/bash -c 'cd $dir; export started_monit=true; $0 $@'"
+start program = "/usr/bin/sudo -u $jenkinsuser /bin/bash -c 'cd $dir; export started_monit=true; $0 $@' with timeout 60 seconds"
 stop program = "/bin/bash -c '/bin/kill \$(/bin/cat /var/run/$jenkinsuser/jenkins_jnlp_pid)'"
 EOF
     }
@@ -96,7 +104,7 @@ EOF
         #test for diff
         if [[ "$(diff $monitconfdir/jenkins <(echo "\
 check process jenkins with pidfile /var/run/$jenkinsuser/jenkins_jnlp_pid
-start program = \"/usr/bin/sudo -u $jenkinsuser /bin/bash -c 'cd $dir; export started_monit=true; $0 $@'\"
+start program = \"/usr/bin/sudo -u $jenkinsuser /bin/bash -c 'cd $dir; export started_monit=true; $0 $@' with timeout 60 seconds\"
 stop program = \"/bin/bash -c '/bin/kill \$(/bin/cat /var/run/$jenkinsuser/jenkins_jnlp_pid)'\"\
 ") )" ]]; then
             echo "Updating monit config..."
diff --git a/utils/retention_script.sh b/utils/retention_script.sh
new file mode 100755 (executable)
index 0000000..7e50623
--- /dev/null
@@ -0,0 +1,39 @@
+#!/bin/bash
+# SPDX-license-identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2016 The Linux Foundation and others
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#  http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##############################################################################
+
+PATH=$PATH:/usr/local/bin/
+
+#These are the only projects that generate artifacts
+for x in armband ovsnfv fuel apex compass4nfv
+do
+
+  echo "Looking at artifacts for project $x"
+
+  while IFS= read -r artifact; do
+
+    artifact_date="$(gsutil ls -L $artifact | grep "Creation time:" | awk '{print $4,$5,$6}')"
+    age=$(($(date +%s)-$(date -d"$artifact_date" +%s)))
+    daysold=$(($age/86400))
+
+    if [[ "$daysold" -gt "10" ]]; then
+      echo "$daysold Days old deleting: $(basename $artifact)"
+    else
+      echo "$daysold Days old retaining: $(basename $artifact)"
+    fi
+
+  done < <(gsutil ls gs://artifacts.opnfv.org/"$x" |grep -v "/$")
+done
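+
+# NOTE: the age check above only reports; an actual cleanup would presumably
+# invoke something like the following alongside the "deleting" message
+# (hypothetical sketch, not executed by this script):
+#   gsutil rm "$artifact"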
index adbee36..622c375 100644 (file)
@@ -8,8 +8,6 @@
 #
 import datetime
 import jinja2
-import logging
-import os
 import requests
 import sys
 import time
@@ -21,17 +19,7 @@ import testCase as tc
 import scenarioResult as sr
 
 # Logger
-logFormatter = logging.Formatter("%(asctime)s [%(threadName)-12.12s] [%(levelname)-5.5s]  %(message)s")
-logger = logging.getLogger()
-
-fileHandler = logging.FileHandler("{0}/{1}".format('.', conf.LOG_FILE))
-fileHandler.setFormatter(logFormatter)
-logger.addHandler(fileHandler)
-
-consoleHandler = logging.StreamHandler()
-consoleHandler.setFormatter(logFormatter)
-logger.addHandler(consoleHandler)
-logger.setLevel(conf.LOG_LEVEL)
+logger = utils.getLogger("Status")
 
 # Initialization
 testValid = []
@@ -48,11 +36,11 @@ response = requests.get(cf)
 
 functest_yaml_config = yaml.load(response.text)
 
-logger.info("****************************************")
-logger.info("*   Generating reporting.....          *")
-logger.info("*   Data retention = %s days           *" % conf.PERIOD)
-logger.info("*                                      *")
-logger.info("****************************************")
+logger.info("*******************************************")
+logger.info("*   Generating reporting scenario status  *")
+logger.info("*   Data retention = %s days              *" % conf.PERIOD)
+logger.info("*                                         *")
+logger.info("*******************************************")
 
 # Retrieve test cases of Tier 1 (smoke)
 config_tiers = functest_yaml_config.get("tiers")
@@ -111,17 +99,22 @@ for version in conf.versions:
                 for test_case in testValid:
                     test_case.checkRunnable(installer, s,
                                             test_case.getConstraints())
-                    logger.debug("testcase %s is %s" % (test_case.getName(),
-                                                        test_case.isRunnable))
+                    logger.debug("testcase %s is %s" %
+                                 (test_case.getDisplayName(),
+                                  test_case.isRunnable))
                     time.sleep(1)
                     if test_case.isRunnable:
                         dbName = test_case.getDbName()
                         name = test_case.getName()
+                        displayName = test_case.getDisplayName()
                         project = test_case.getProject()
                         nb_test_runnable_for_this_scenario += 1
                         logger.info(" Searching results for case %s " %
-                                    (dbName))
+                                    (displayName))
                         result = utils.getResult(dbName, installer, s, version)
+                        # if no result, set the value to 0
+                        if result < 0:
+                            result = 0
                         logger.info(" >>>> Test score = " + str(result))
                         test_case.setCriteria(result)
                         test_case.setIsRunnable(True)
@@ -144,18 +137,23 @@ for version in conf.versions:
                     if test_case.isRunnable:
                         dbName = test_case.getDbName()
                         name = test_case.getName()
+                        displayName = test_case.getDisplayName()
                         project = test_case.getProject()
                         logger.info(" Searching results for case %s " %
-                                    (dbName))
+                                    (displayName))
                         result = utils.getResult(dbName, installer, s, version)
-                        test_case.setCriteria(result)
-                        test_case.setIsRunnable(True)
-                        testCases2BeDisplayed.append(tc.TestCase(name,
-                                                                 project,
-                                                                 "",
-                                                                 result,
-                                                                 True,
-                                                                 4))
+                        # at least 1 result for the test
+                        if result > -1:
+                            test_case.setCriteria(result)
+                            test_case.setIsRunnable(True)
+                            testCases2BeDisplayed.append(tc.TestCase(name,
+                                                                     project,
+                                                                     "",
+                                                                     result,
+                                                                     True,
+                                                                     4))
+                        else:
+                            logger.debug("No results found")
 
                     items[s] = testCases2BeDisplayed
             except:
@@ -182,7 +180,7 @@ for version in conf.versions:
             else:
                 logger.info(">>>>> scenario OK, save the information")
                 s_status = "OK"
-                path_validation_file = ("./release/" + version +
+                path_validation_file = (conf.REPORTING_PATH + "/release/" + version +
                                         "/validated_scenario_history.txt")
                 with open(path_validation_file, "a") as f:
                     time_format = "%Y-%m-%d %H:%M"
@@ -193,12 +191,10 @@ for version in conf.versions:
             scenario_result_criteria[s] = sr.ScenarioResult(s_status, s_score)
             logger.info("--------------------------")
 
-        templateLoader = jinja2.FileSystemLoader(os.path.dirname
-                                                 (os.path.abspath
-                                                  (__file__)))
+        templateLoader = jinja2.FileSystemLoader(conf.REPORTING_PATH)
         templateEnv = jinja2.Environment(loader=templateLoader)
 
-        TEMPLATE_FILE = "./template/index-status-tmpl.html"
+        TEMPLATE_FILE = "/template/index-status-tmpl.html"
         template = templateEnv.get_template(TEMPLATE_FILE)
 
         outputText = template.render(scenario_stats=scenario_stats,
@@ -208,6 +204,6 @@ for version in conf.versions:
                                      period=conf.PERIOD,
                                      version=version)
 
-        with open("./release/" + version +
+        with open(conf.REPORTING_PATH + "/release/" + version +
                   "/index-status-" + installer + ".html", "wb") as fh:
             fh.write(outputText)
index a065ef4..e3f4e33 100644 (file)
@@ -1,28 +1,44 @@
 from urllib2 import Request, urlopen, URLError
 import json
 import jinja2
-import os
+import reportingConf as conf
+import reportingUtils as utils
 
-installers = ["apex", "compass", "fuel", "joid"]
+installers = conf.installers
 items = ["tests", "Success rate", "duration"]
 
-PERIOD = 7
-print "Generate Tempest automatic reporting"
+PERIOD = conf.PERIOD
+criteria_nb_test = 165
+criteria_duration = 1800
+criteria_success_rate = 90
+
+logger = utils.getLogger("Tempest")
+logger.info("************************************************")
+logger.info("*   Generating reporting Tempest_smoke_serial  *")
+logger.info("*   Data retention = %s days                   *" % PERIOD)
+logger.info("*                                              *")
+logger.info("************************************************")
+
+logger.info("Success criteria:")
+logger.info("nb tests executed > %s s " % criteria_nb_test)
+logger.info("test duration < %s s " % criteria_duration)
+logger.info("success rate > %s " % criteria_success_rate)
+
 for installer in installers:
     # we consider the Tempest results of the last PERIOD days
-    url = "http://testresults.opnfv.org/test/api/v1/results?case=tempest_smoke_serial"
-    request = Request(url + '&period=' + str(PERIOD)
-                      + '&installer=' + installer + '&version=master')
-
+    url = conf.URL_BASE + "?case=tempest_smoke_serial"
+    request = Request(url + '&period=' + str(PERIOD) +
+                      '&installer=' + installer + '&version=master')
+    logger.info("Search tempest_smoke_serial results for installer %s"
+                % installer)
     try:
         response = urlopen(request)
         k = response.read()
         results = json.loads(k)
     except URLError, e:
-        print 'No kittez. Got an error code:', e
+        logger.error("Error code: %s" % e)
 
     test_results = results['results']
-    test_results.reverse()
 
     scenario_results = {}
     criteria = {}
@@ -48,8 +64,8 @@ for installer in installers:
             nb_tests_run = result['details']['tests']
             nb_tests_failed = result['details']['failures']
             if nb_tests_run != 0:
-                success_rate = 100*(int(nb_tests_run)
-                                    - int(nb_tests_failed))/int(nb_tests_run)
+                success_rate = 100*(int(nb_tests_run) -
+                                    int(nb_tests_failed)) / int(nb_tests_run)
             else:
                 success_rate = 0
 
@@ -63,40 +79,49 @@ for installer in installers:
             crit_time = False
 
             # Expect that at least 165 tests are run
-            if nb_tests_run >= 165:
+            if nb_tests_run >= criteria_nb_test:
                 crit_tests = True
 
             # Expect that at least 90% of success
-            if success_rate >= 90:
+            if success_rate >= criteria_success_rate:
                 crit_rate = True
 
             # Expect that the suite duration is inferior to 30m
-            if result['details']['duration'] < 1800:
+            if result['details']['duration'] < criteria_duration:
                 crit_time = True
 
             result['criteria'] = {'tests': crit_tests,
                                   'Success rate': crit_rate,
                                   'duration': crit_time}
-            # error management
+            try:
+                logger.debug("Scenario %s, Installer %s"
+                             % (s_result[1]['scenario'], installer))
+                logger.debug("Nb Test run: %s" % nb_tests_run)
+                logger.debug("Test duration: %s"
+                             % result['details']['duration'])
+                logger.debug("Success rate: %s" % success_rate)
+            except:
+                logger.error("Data format error")
+
+            # Error management
             # ****************
             try:
                 errors = result['details']['errors']
                 result['errors'] = errors.replace('{0}', '')
             except:
-                print "Error field not present (Brahamputra runs?)"
+                logger.error("Error field not present (Brahamputra runs?)")
 
-    mypath = os.path.abspath(__file__)
-    tplLoader = jinja2.FileSystemLoader(os.path.dirname(mypath))
-    templateEnv = jinja2.Environment(loader=tplLoader)
+    templateLoader = jinja2.FileSystemLoader(conf.REPORTING_PATH)
+    templateEnv = jinja2.Environment(loader=templateLoader)
 
-    TEMPLATE_FILE = "./template/index-tempest-tmpl.html"
+    TEMPLATE_FILE = "/template/index-tempest-tmpl.html"
     template = templateEnv.get_template(TEMPLATE_FILE)
 
     outputText = template.render(scenario_results=scenario_results,
                                  items=items,
                                  installer=installer)
 
-    with open("./release/master/index-tempest-" +
+    with open(conf.REPORTING_PATH + "/release/master/index-tempest-" +
               installer + ".html", "wb") as fh:
         fh.write(outputText)
-print "Tempest automatic reporting Done"
+logger.info("Tempest automatic reporting succesfully generated.")
index 4033687..d0436ed 100644 (file)
@@ -1,7 +1,11 @@
 from urllib2 import Request, urlopen, URLError
 import json
 import jinja2
-import os
+import reportingConf as conf
+import reportingUtils as utils
+
+logger = utils.getLogger("vIMS")
+
 
 def sig_test_format(sig_test):
     nbPassed = 0
@@ -9,7 +13,7 @@ def sig_test_format(sig_test):
     nbSkipped = 0
     for data_test in sig_test:
         if data_test['result'] == "Passed":
-            nbPassed+= 1
+            nbPassed += 1
         elif data_test['result'] == "Failed":
             nbFailures += 1
         elif data_test['result'] == "Skipped":
@@ -20,21 +24,29 @@ def sig_test_format(sig_test):
     total_sig_test_result['skipped'] = nbSkipped
     return total_sig_test_result
 
-installers = ["fuel", "compass", "joid", "apex"]
-step_order = ["initialisation", "orchestrator", "vIMS", "sig_test"]
+logger.info("****************************************")
+logger.info("*   Generating reporting vIMS          *")
+logger.info("*   Data retention = %s days           *" % conf.PERIOD)
+logger.info("*                                      *")
+logger.info("****************************************")
 
+installers = conf.installers
+step_order = ["initialisation", "orchestrator", "vIMS", "sig_test"]
+logger.info("Start processing....")
 for installer in installers:
-    request = Request('http://testresults.opnfv.org/test/api/v1/results?case=vims&installer=' + installer)
+    logger.info("Search vIMS results for installer %s" % installer)
+    request = Request(conf.URL_BASE + '?case=vims&installer=' + installer)
 
     try:
         response = urlopen(request)
         k = response.read()
         results = json.loads(k)
     except URLError, e:
-        print 'No kittez. Got an error code:', e
+        logger.error("Error code: %s" % e)
 
     test_results = results['results']
-    test_results.reverse()
+
+    logger.debug("Results found: %s" % test_results)
 
     scenario_results = {}
     for r in test_results:
@@ -44,6 +56,7 @@ for installer in installers:
 
     for s, s_result in scenario_results.items():
         scenario_results[s] = s_result[0:5]
+        logger.debug("Search for success criteria")
         for result in scenario_results[s]:
             result["start_date"] = result["start_date"].split(".")[0]
             sig_test = result['details']['sig_test']['result']
@@ -67,17 +80,34 @@ for installer in installers:
             result['pr_step_ok'] = 0
             if nb_step != 0:
                 result['pr_step_ok'] = (float(nb_step_ok)/nb_step)*100
-
-
-    templateLoader = jinja2.FileSystemLoader(os.path.dirname(os.path.abspath(__file__)))
-    templateEnv = jinja2.Environment( loader=templateLoader )
-
-    TEMPLATE_FILE = "./template/index-vims-tmpl.html"
-    template = templateEnv.get_template( TEMPLATE_FILE )
-
-    outputText = template.render( scenario_results = scenario_results, step_order = step_order, installer = installer)
-
-    with open("./release/master/index-vims-" + installer + ".html", "wb") as fh:
+            try:
+                logger.debug("Scenario %s, Installer %s"
+                             % (s_result[1]['scenario'], installer))
+                logger.debug("Orchestrator deployment: %s s"
+                             % result['details']['orchestrator']['duration'])
+                logger.debug("vIMS deployment: %s s"
+                             % result['details']['vIMS']['duration'])
+                logger.debug("Signaling testing: %s s"
+                             % result['details']['sig_test']['duration'])
+                logger.debug("Signaling testing results: %s"
+                             % format_result)
+            except:
+                logger.error("Data badly formatted")
+            logger.debug("------------------------------------------------")
+
+    templateLoader = jinja2.FileSystemLoader(conf.REPORTING_PATH)
+    templateEnv = jinja2.Environment(loader=templateLoader)
+
+    TEMPLATE_FILE = "/template/index-vims-tmpl.html"
+    template = templateEnv.get_template(TEMPLATE_FILE)
+
+    outputText = template.render(scenario_results=scenario_results,
+                                 step_order=step_order,
+                                 installer=installer)
+
+    with open(conf.REPORTING_PATH +
+              "/release/master/index-vims-" +
+              installer + ".html", "wb") as fh:
         fh.write(outputText)
 
-
+logger.info("vIMS report succesfully generated")
index 61410b4..a58eeec 100644 (file)
@@ -13,14 +13,16 @@ installers = ["apex", "compass", "fuel", "joid"]
 # installers = ["apex"]
 # list of test cases declared in testcases.yaml but that must not be
 # taken into account for the scoring
-blacklist = ["odl", "ovno", "security_scan"]
+blacklist = ["odl", "ovno", "security_scan", "copper", "moon"]
 # versions = ["brahmaputra", "master"]
 versions = ["master"]
 PERIOD = 10
 MAX_SCENARIO_CRITERIA = 18
 # get the last 5 test results to determinate the success criteria
 NB_TESTS = 5
+# REPORTING_PATH = "/usr/share/nginx/html/reporting/functest"
+REPORTING_PATH = "."
 URL_BASE = 'http://testresults.opnfv.org/test/api/v1/results'
 TEST_CONF = "https://git.opnfv.org/cgit/functest/plain/ci/testcases.yaml"
-LOG_LEVEL = "INFO"
-LOG_FILE = "reporting.log"
+LOG_LEVEL = "ERROR"
+LOG_FILE = REPORTING_PATH + "/reporting.log"
index 2f06b84..5051ffa 100644 (file)
@@ -7,8 +7,26 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 #
 from urllib2 import Request, urlopen, URLError
+import logging
 import json
-import reportingConf
+import reportingConf as conf
+
+
+def getLogger(module):
+    logFormatter = logging.Formatter("%(asctime)s [" +
+                                     module +
+                                     "] [%(levelname)-5.5s]  %(message)s")
+    logger = logging.getLogger()
+
+    fileHandler = logging.FileHandler("{0}/{1}".format('.', conf.LOG_FILE))
+    fileHandler.setFormatter(logFormatter)
+    logger.addHandler(fileHandler)
+
+    consoleHandler = logging.StreamHandler()
+    consoleHandler.setFormatter(logFormatter)
+    logger.addHandler(consoleHandler)
+    logger.setLevel(conf.LOG_LEVEL)
+    return logger
 
 
 def getApiResults(case, installer, scenario, version):
@@ -19,10 +37,10 @@ def getApiResults(case, installer, scenario, version):
     # urllib2.install_opener(opener)
     # url = "http://127.0.0.1:8000/results?case=" + case + \
     #       "&period=30&installer=" + installer
-    url = (reportingConf.URL_BASE + "?case=" + case +
-           "&period=" + str(reportingConf.PERIOD) + "&installer=" + installer +
+    url = (conf.URL_BASE + "?case=" + case +
+           "&period=" + str(conf.PERIOD) + "&installer=" + installer +
            "&scenario=" + scenario + "&version=" + version +
-           "&last=" + str(reportingConf.NB_TESTS))
+           "&last=" + str(conf.NB_TESTS))
     request = Request(url)
 
     try:
@@ -38,9 +56,8 @@ def getApiResults(case, installer, scenario, version):
 def getScenarios(case, installer, version):
 
     case = case.getName()
-    print case
-    url = (reportingConf.URL_BASE + "?case=" + case +
-           "&period=" + str(reportingConf.PERIOD) + "&installer=" + installer +
+    url = (conf.URL_BASE + "?case=" + case +
+           "&period=" + str(conf.PERIOD) + "&installer=" + installer +
            "&version=" + version)
     request = Request(url)
 
@@ -115,11 +132,16 @@ def getResult(testCase, installer, scenario, version):
         # 2: <4 successful consecutive runs but passing the criteria
         # 1: close to pass the success criteria
         # 0: 0% success, not passing
+        # -1: no run available
         test_result_indicator = 0
         nbTestOk = getNbtestOk(scenario_results)
+
         # print "Nb test OK (last 10 days):"+ str(nbTestOk)
         # check that we have at least 4 runs
-        if nbTestOk < 1:
+        if len(scenario_results) < 1:
+            # No results available
+            test_result_indicator = -1
+        elif nbTestOk < 1:
             test_result_indicator = 0
         elif nbTestOk < 2:
             test_result_indicator = 1
index 89a1d15..0c3fa94 100644 (file)
@@ -76,7 +76,7 @@
                             {% for test in items[scenario] -%}
                             <th>
                             {% if test.getCriteria() > -1 -%}
-                            {{test.getDbName() }}
+                            {{test.getDisplayName() }}
                             {%- endif %}
                                                        {% if test.getTier() > 3 -%}
                             *
index f0e8f59..e19853a 100644 (file)
@@ -19,6 +19,28 @@ class TestCase(object):
         self.criteria = criteria
         self.isRunnable = isRunnable
         self.tier = tier
+        display_name_matrix = {'healthcheck': 'healthcheck',
+                               'vping_ssh': 'vPing (ssh)',
+                               'vping_userdata': 'vPing (userdata)',
+                               'odl': 'ODL',
+                               'onos': 'ONOS',
+                               'ocl': 'OCL',
+                               'tempest_smoke_serial': 'Tempest (smoke)',
+                               'tempest_full_parallel': 'Tempest (full)',
+                               'rally_sanity': 'Rally (smoke)',
+                               'bgpvpn': 'bgpvpn',
+                               'rally_full': 'Rally (full)',
+                               'vims': 'vIMS',
+                               'doctor': 'Doctor',
+                               'promise': 'Promise',
+                               'moon': 'moon',
+                               'copper': 'copper',
+                               'security_scan': 'security'
+                               }
+        try:
+            self.displayName = display_name_matrix[self.name]
+        except:
+            self.displayName = "unknown"
 
     def getName(self):
         return self.name
@@ -74,10 +96,10 @@ class TestCase(object):
         self.isRunnable = is_runnable
 
     def toString(self):
-        testcase = ("Name=" + self.name + ";Criteria=" + str(self.criteria)
-                    + ";Project=" + self.project + ";Constraints="
-                    + str(self.constraints) + ";IsRunnable"
-                    + str(self.isRunnable))
+        testcase = ("Name=" + self.name + ";Criteria=" +
+                    str(self.criteria) + ";Project=" + self.project +
+                    ";Constraints=" + str(self.constraints) +
+                    ";IsRunnable" + str(self.isRunnable))
         return testcase
 
     def getDbName(self):
@@ -98,31 +120,15 @@ class TestCase(object):
                              'rally_full': 'rally_full',
                              'vims': 'vims',
                              'doctor': 'doctor-notification',
-                             'promise': 'promise'
+                             'promise': 'promise',
+                             'moon': 'moon',
+                             'copper': 'copper',
+                             'security_scan': 'security'
                              }
         try:
             return test_match_matrix[self.name]
         except:
             return "unknown"
 
-    def getTestDisplayName(self):
-        # Correspondance name of the test case / name in the DB
-        test_match_matrix = {'healthcheck': 'healthcheck',
-                             'vping_ssh': 'vPing (ssh)',
-                             'vping_userdata': 'vPing (userdata)',
-                             'odl': 'ODL',
-                             'onos': 'ONOS',
-                             'ocl': 'OCL',
-                             'tempest_smoke_serial': 'Tempest (smoke)',
-                             'tempest_full_parallel': 'Tempest (full)',
-                             'rally_sanity': 'Rally (smoke)',
-                             'bgpvpn': 'bgpvpn',
-                             'rally_full': 'Rally (full)',
-                             'vims': 'vIMS',
-                             'doctor': 'Doctor',
-                             'promise': 'Promise'
-                             }
-        try:
-            return test_match_matrix[self.name]
-        except:
-            return "unknown"
+    def getDisplayName(self):
+        return self.displayName
index 41b7fff..d3aef7e 100644 (file)
-# opnfv-testapi update
-
-## How to use:
-
-# backup mongodb,
-# arguments:
-# -u/--url: Mongo DB URL, default = mongodb://127.0.0.1:27017/
-# -o/--output_dir: Output directory for the backup, default = ./
-# the backup output will be put under dir/db__XXXX_XX_XX_XXXXXX/db
-# -d/--db: database for the backup, default = test_results_collection
-```
-python backup.py
-```
-
-# restore mongodb
-# arguments:
-# -u/--url: Mongo DB URL, default = mongodb://127.0.0.1:27017/
-# -i/--input_dir: Input directory for the Restore, must be specified
-# the restore input must be specified to dir/db__XXXX_XX_XX_XXXXXX/db
-# -d/--db: database name after the restore, default = basename of input_dir
-```
-python restore.py
-```
-
-# update mongodb
-# arguments:
-# -u/--url: Mongo DB URL, default = mongodb://127.0.0.1:27017/
-# -d/--db: database name to be updated, default = test_results_collection
-# changes need to be done:
-# change collection name, modify changes.collections_old2New
-# collections_old2New = {
-#     'old_collection': 'new_collection',
-# }
-# change field name, modify changes.fields_old2New
-# fields_old2New = {
-#     'collection': [(query, {'old_field': 'new_field'})]
-# }
-# change the doc, modify changes.docs_old2New
-# docs_old2New = {
-#     'test_results': [
-#         ({'field': 'old_value'}, {'field': 'new_value'}),
-#         (query, {'field': 'new_value'}),
-#     ]
-# }
-```
-python update.py
-```
-
-# update opnfv-testapi process
-# this script must be run right in this directory
-# and remember to change ../etc/config.ini before running this script
-# operations includes:
-# kill running test_collection_api & opnfv-testapi
-# install or update dependencies according to ../requirements.txt
-# install opnfv-testapi
-# run opnfv-testapi
+Welcome to TESTAPI Update!
+==========================
+
+
+This document describes how the testapi update works.
+
+----------
+
+How to use
+----------
+
+#### <i class="icon-file"></i> backup mongodb
+
+arguments:
+: -u/--url: Mongo DB URL, default = mongodb://127.0.0.1:27017/
+  -o/--output_dir: Output directory for the backup, default = ./
+  (the backup output will be put under dir/db__XXXX_XX_XX_XXXXXX/db)
+  -d/--db: database for the backup, default = test_results_collection
+
+usage:
+```
+python backup_mongodb.py
+```
+
+#### <i class="icon-file"></i> restore mongodb
+
+arguments:
+: -u/--url: Mongo DB URL, default = mongodb://127.0.0.1:27017/
+  -i/--input_dir: Input directory for the restore, must be specified
+  (it must point to dir/db__XXXX_XX_XX_XXXXXX/db)
+  -d/--db: database name after the restore, default = basename of input_dir
+
+usage:
+```
+python restore_mongodb.py
+```
+#### <i class="icon-file"></i> update mongodb
+
+arguments:
+: -u/--url: Mongo DB URL, default = mongodb://127.0.0.1:27017/
+  -d/--db: database name to be updated, default = test_results_collection
+
+changes that need to be done:
+
+to change a collection name, modify changes_in_mongodb.collections_old2New
+> collections_old2New = {
+>     'old_collection': 'new_collection',
+> }
+
+to change a field name, modify changes_in_mongodb.fields_old2New
+> fields_old2New = {
+>     'collection': [(query, {'old_field': 'new_field'})]
+> }
+
+to change a doc, modify changes_in_mongodb.docs_old2New
+> docs_old2New = {
+>     'test_results': [
+>         ({'field': 'old_value'}, {'field': 'new_value'}),
+>         (query, {'field': 'new_value'}),
+>     ]
+> }
+
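+usage (the script is invoked as update_mongodb.py, as update.yml below also does):
+```
+python update_mongodb.py
+```
+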
+#### <i class="icon-file"></i> update opnfv-testapi process
+This script must be run right in this directory and remember to
+change ../etc/config.ini before running this script.
+
+operations include:
+: kill the running test_collection_api & opnfv-testapi processes
+  install or update dependencies according to ../requirements.txt
+  install opnfv-testapi
+  run opnfv-testapi
+
+usage:
 ```
 python update_api.py
 ```
+#### <i class="icon-file"></i> update opnfv/testapi container
+Here ansible-playbook is used to implement auto update.
+Please make sure that the remote server is accessible via ssh.
+
+To install ansible, please refer to:
+```
+http://docs.ansible.com/ansible/intro_installation.html
+```
+run update.yml
+
+arguments:
+: host: remote server, must be provided
+  user: user used to access the remote server, defaults to root
+  port: exposed port used to access testapi, defaults to 8000
+  image: testapi docker image, defaults to opnfv/testapi:latest
+  update_path: templates directory on the remote server, defaults to /tmp/testapi
+  mongodb_url: url of mongodb, defaults to mongodb://172.17.0.1:27017 (the docker0 ip)
+  swagger_url: swagger access url, defaults to http://host:port
+
+usage:
+```
+ansible-playbook update.yml --extra-vars "
+host=10.63.243.17
+user=zte
+port=8000
+image=opnfv/testapi
+update_path=/tmp/testapi
+mongodb_url=mongodb://172.17.0.1:27017
+swagger_url=http://10.63.243.17:8000"
+```
+
+> **Note:**
+>
+> - If documents need to be changed, please modify the file
+>   templates/changes_in_mongodb.py and refer to section **update mongodb**;
+>   a minimal example is sketched below.
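+
+For instance, a hypothetical rename of a single field, following the
+fields_old2New structure above (collection and field names here are
+illustrative only, not taken from the real schema):
+> fields_old2New = {
+>     'results': [({}, {'creation_date': 'start_date'})]
+> }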
diff --git a/utils/test/result_collection_api/update/templates/__init__.py b/utils/test/result_collection_api/update/templates/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
@@ -6,9 +6,9 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
-import os
 import argparse
 import datetime
+import os
 
 from utils import execute, main, get_abspath
 
diff --git a/utils/test/result_collection_api/update/templates/rm_olds.sh b/utils/test/result_collection_api/update/templates/rm_olds.sh
new file mode 100644 (file)
index 0000000..c6bca18
--- /dev/null
@@ -0,0 +1,15 @@
+#!/bin/bash
+
+proc_number=`ps -ef | grep opnfv-testapi | grep -v grep | wc -l`
+if [ $proc_number -gt 0 ]; then
+    procs=`ps -ef | grep opnfv-testapi | grep -v grep`
+    echo "begin to kill opnfv-testapi $procs"
+    ps -ef | grep opnfv-testapi | grep -v grep | awk '{print $2}' | xargs kill -kill &>/dev/null
+fi
+
+number=`docker ps -a | awk 'NR != 1' | grep testapi | wc -l`
+if [ $number -gt 0 ]; then
+    containers=`docker ps -a | awk 'NR != 1' | grep testapi`
+    echo "begin to rm containers $containers"
+    docker ps -a | awk 'NR != 1' | grep testapi | awk '{print $1}' | xargs docker rm -f &>/dev/null
+fi
@@ -10,8 +10,8 @@ import argparse
 
 from pymongo import MongoClient
 
+from changes_in_mongodb import collections_old2New, fields_old2New, docs_old2New
 from utils import main, parse_mongodb_url
-from changes import collections_old2New, fields_old2New, docs_old2New
 
 parser = argparse.ArgumentParser(description='Update MongoDBs')
 
diff --git a/utils/test/result_collection_api/update/update.yml b/utils/test/result_collection_api/update/update.yml
new file mode 100644 (file)
index 0000000..0883956
--- /dev/null
@@ -0,0 +1,41 @@
+---
+- hosts: "{{ host }}"
+  remote_user: "{{ user }}"
+  become: yes
+  become_method: sudo
+  vars:
+    user: "root"
+    port: "8000"
+    update_path: "/tmp/testapi"
+    image: "opnfv/testapi"
+    mongodb_url: "mongodb://172.17.0.1:27017"
+    swagger_url: "http://{{ host }}:{{ port }}"
+  tasks:
+    - name: create temporary update directory
+      file:
+        path: "{{ update_path }}"
+        state: directory
+    - name: transfer files in templates
+      copy:
+        src: templates/
+        dest: "{{ update_path }}"
+    - name: backup mongodb database
+      command: "python {{ update_path }}/backup_mongodb.py -u {{ mongodb_url }} -o {{ update_path }}"
+    - name: stop and remove old versions
+      command: bash "{{ update_path }}/rm_olds.sh"
+      register: rm_result
+    - debug: msg="{{ rm_result.stderr }}"
+    - name: delete old docker images
+      command: docker rmi "{{ image }}"
+      ignore_errors: true
+    - name: update mongodb
+      command: "python {{ update_path }}/update_mongodb.py -u {{ mongodb_url }}"
+    - name: docker start testapi server
+      command: docker run -dti -p "{{ port }}:8000"
+               -e "mongodb_url={{ mongodb_url }}"
+               -e "swagger_url={{ swagger_url }}"
+               "{{ image }}"
+    - name: remove temporary update directory
+      file:
+        path: "{{ update_path }}"
+        state: absent
\ No newline at end of file