Merge "Changing urls of testapi api and resources"
authorSerena Feng <feng.xiaowei@zte.com.cn>
Fri, 23 Jun 2017 08:12:36 +0000 (08:12 +0000)
committerGerrit Code Review <gerrit@opnfv.org>
Fri, 23 Jun 2017 08:12:36 +0000 (08:12 +0000)
98 files changed:
UPSTREAM [new file with mode: 0644]
jjb/apex/apex-deploy.sh
jjb/apex/apex-download-artifact.sh
jjb/apex/apex-gs-cleanup.sh [deleted file]
jjb/apex/apex-iso-verify.sh
jjb/apex/apex-snapshot-create.sh
jjb/apex/apex-upload-artifact.sh
jjb/apex/apex.yml
jjb/apex/apex.yml.j2
jjb/apex/scenarios.yaml.hidden
jjb/armband/armband-ci-jobs.yml
jjb/barometer/barometer-build.sh [new file with mode: 0644]
jjb/barometer/barometer-upload-artifact.sh [new file with mode: 0644]
jjb/barometer/barometer.yml
jjb/bottlenecks/bottlenecks-run-suite.sh
jjb/ci_gate_security/anteater-clone-all-repos.sh [new file with mode: 0755]
jjb/ci_gate_security/anteater-report-to-gerrit.sh [new file with mode: 0644]
jjb/ci_gate_security/anteater-security-audit-weekly.sh [new file with mode: 0644]
jjb/ci_gate_security/anteater-security-audit.sh [new file with mode: 0644]
jjb/ci_gate_security/opnfv-ci-gate-security.yml [moved from jjb/securityaudit/opnfv-security-audit.yml with 50% similarity]
jjb/compass4nfv/compass-build.sh
jjb/compass4nfv/compass-ci-jobs.yml
jjb/compass4nfv/compass-deploy.sh
jjb/compass4nfv/compass-dovetail-jobs.yml
jjb/compass4nfv/compass-download-artifact.sh
jjb/compass4nfv/compass-upload-artifact.sh
jjb/compass4nfv/compass-verify-jobs.yml
jjb/daisy4nfv/daisy-deploy.sh
jjb/daisy4nfv/daisy-project-jobs.yml
jjb/daisy4nfv/daisy4nfv-merge-jobs.yml
jjb/daisy4nfv/daisy4nfv-verify-jobs.yml
jjb/doctor/doctor.yml
jjb/dovetail/dovetail-artifacts-upload.sh
jjb/dovetail/dovetail-artifacts-upload.yml
jjb/dovetail/dovetail-cleanup.sh
jjb/dovetail/dovetail-run.sh
jjb/fuel/fuel-build.sh
jjb/fuel/fuel-daily-jobs.yml
jjb/fuel/fuel-deploy.sh
jjb/fuel/fuel-download-artifact.sh
jjb/functest/functest-daily-jobs.yml
jjb/functest/functest-env-presetup.sh [new file with mode: 0755]
jjb/functest/functest-project-jobs.yml
jjb/functest/functest-suite.sh
jjb/functest/functest-weekly-jobs.yml
jjb/functest/set-functest-env.sh
jjb/global/installer-params.yml
jjb/global/releng-macros.yml
jjb/global/slave-params.yml
jjb/netready/netready.yml
jjb/qtip/helpers/validate-deploy.sh
jjb/releng/opnfv-docker.sh
jjb/releng/opnfv-docker.yml
jjb/releng/opnfv-lint.yml
jjb/storperf/storperf.yml
jjb/xci/bifrost-verify-jobs.yml
jjb/xci/bifrost-verify.sh
jjb/yardstick/yardstick-daily-jobs.yml [moved from jjb/yardstick/yardstick-ci-jobs.yml with 99% similarity]
modules/requirements.txt
modules/setup.py
modules/test-requirements.txt
prototypes/bifrost/scripts/bifrost-provision.sh
prototypes/xci/README.rst
prototypes/xci/config/env-vars
prototypes/xci/config/pinned-versions
prototypes/xci/config/user-vars
prototypes/xci/file/ansible-role-requirements.yml
prototypes/xci/file/install-ansible.sh
prototypes/xci/playbooks/provision-vm-nodes.yml
prototypes/xci/var/opnfv.yml
setup.py
utils/fetch_os_creds.sh
utils/push-test-logs.sh
utils/test/reporting/LICENSE.txt
utils/test/reporting/README.txt
utils/test/reporting/api/api/handlers/testcases.py
utils/test/reporting/functest/reporting-tempest.py
utils/test/reporting/pages/app/images/overview.png
utils/test/reporting/pages/app/scripts/controllers/testvisual.controller.js
utils/test/reporting/pages/app/styles/custome.css
utils/test/reporting/pages/app/views/commons/testCaseVisual.html
utils/test/reporting/pages/app/views/modal/testcasedetail.html
utils/test/testapi/3rd_party/static/testapi-ui/app.js
utils/test/testapi/3rd_party/static/testapi-ui/assets/img/OpenStack_Project_Refstack_mascot_90x90.png [deleted file]
utils/test/testapi/3rd_party/static/testapi-ui/assets/img/openstack-logo.png [deleted file]
utils/test/testapi/3rd_party/static/testapi-ui/assets/img/refstack-logo.png [deleted file]
utils/test/testapi/3rd_party/static/testapi-ui/assets/img/testapi-logo.png [new file with mode: 0644]
utils/test/testapi/3rd_party/static/testapi-ui/components/results/results.html
utils/test/testapi/3rd_party/static/testapi-ui/components/results/resultsController.js
utils/test/testapi/3rd_party/static/testapi-ui/config.json
utils/test/testapi/3rd_party/static/testapi-ui/shared/alerts/alertModalFactory.js
utils/test/testapi/etc/config.ini
utils/test/testapi/opnfv_testapi/common/config.py
utils/test/testapi/opnfv_testapi/resources/handlers.py
utils/test/testapi/opnfv_testapi/resources/result_handlers.py
utils/test/testapi/opnfv_testapi/router/url_mappings.py
utils/test/testapi/opnfv_testapi/tests/unit/fake_pymongo.py
utils/test/testapi/opnfv_testapi/ui/auth/sign.py

diff --git a/UPSTREAM b/UPSTREAM
new file mode 100644 (file)
index 0000000..65e44f5
--- /dev/null
+++ b/UPSTREAM
@@ -0,0 +1,448 @@
+# Upstream contributions, bitergia will crawl this and extract the relevant information
+# system is one of Gerrit, Bugzilla, Launchpad (insert more)
+#
+# All the contributions listed in this file are merged commits. Contributions under
+# review, in progress and abandoned are not included.
+# The latest contribution included in this file is 453130 and it is dated 2017-05-23.
+# Contributions from Yolanda Robla Mota have not been included yet.
+# The gerrit query used to list contributions is
+#   status:merged AND (owner: "Markos Chandras" OR owner: "Fatih Degirmenci")
+---
+-
+  url: https://review.openstack.org/#/c/453130/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/466422/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/466421/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/466249/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/465927/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/465686/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/465685/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/464759/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/464180/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/464629/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/463359/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/463313/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/463301/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/463300/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/462488/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/450970/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/463299/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/462863/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/461754/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/462859/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/462443/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/461755/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/461018/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/461017/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/461050/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/458616/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/460617/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/458797/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/453128/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/459984/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/460071/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/459779/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/459775/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/459332/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/459331/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/459330/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/459715/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/459702/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/459684/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/459599/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/459461/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/458801/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/457709/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/458246/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/458420/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/458419/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/450634/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/457695/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/455290/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/455461/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/448800/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/453609/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/453798/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/453577/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/453574/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/449183/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/452079/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/452160/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/450384/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/450210/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/451848/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/451426/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/450239/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/450095/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/448625/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/447444/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/447435/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/441041/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/444197/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/439938/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/444033/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/442224/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/428345/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/440614/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/439946/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/441042/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/439940/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/433517/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/435433/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/437010/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/436948/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/436000/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/436066/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/436085/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/435994/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/434328/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/433532/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/427682/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/428704/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/430864/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/428716/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/427744/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/426844/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/424853/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/410639/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/407970/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/387441/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/398317/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/401991/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/400150/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/404315/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/389171/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/401164/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/400712/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/392002/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/388242/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/392003/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/392986/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/392004/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/389079/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/388158/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/388840/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/388780/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/388847/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/388748/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/381576/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/381575/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/378532/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/386002/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/381574/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/381541/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/376303/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/379835/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/376534/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/375350/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/371602/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/370258/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/370584/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/363458/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/366835/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/363896/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/353165/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/361652/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/358477/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/359922/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/357268/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/357718/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/356964/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/355485/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/355431/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/355668/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/202056/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/202180/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/192397/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/192720/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/163149/
+  system: Gerrit
index d6bb485..3a2ca60 100755 (executable)
@@ -3,7 +3,7 @@ set -o errexit
 set -o nounset
 set -o pipefail
 
-APEX_PKGS="common undercloud" # removed onos for danube
+APEX_PKGS="common undercloud onos"
 IPV6_FLAG=False
 
 # log info to console
@@ -128,7 +128,7 @@ if [[ "$JOB_NAME" =~ "virtual" ]]; then
   if [[ "$JOB_NAME" == *csit* ]]; then
     DEPLOY_CMD="${DEPLOY_CMD} -e csit-environment.yaml"
   fi
-  if [[ "$JOB_NAME" == *promote* ]]; then
+  if [[ "$PROMOTE" == "True" ]]; then
     DEPLOY_CMD="${DEPLOY_CMD} --virtual-computes 2"
   fi
 else
index f253251..52c3c67 100755 (executable)
@@ -3,7 +3,7 @@ set -o errexit
 set -o nounset
 set -o pipefail
 
-APEX_PKGS="common undercloud" # removed onos for danube
+APEX_PKGS="common undercloud onos"
 
 # log info to console
 echo "Downloading the Apex artifact. This could take some time..."
@@ -23,14 +23,13 @@ if [[ "$ARTIFACT_VERSION" =~ dev ]]; then
   tar -xvf apex-${OPNFV_ARTIFACT_VERSION}.tar.gz
   popd > /dev/null
 else
-  echo "Will download RPMs and ISO..."
+  echo "Will download RPMs..."
 
   # Must be RPMs/ISO
-  export OPNFV_ARTIFACT_VERSION=$(date -u +"%Y-%m-%d")
-  echo "Downloading opnfv-${OPNFV_ARTIFACT_VERSION}.properties"
+  echo "Downloading latest properties file"
 
   # get the properties file in order to get info regarding artifacts
-  curl --fail -s -o $BUILD_DIRECTORY/opnfv.properties http://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties
+  curl --fail -s -o $BUILD_DIRECTORY/opnfv.properties http://$GS_URL/latest.properties
 
   # source the file so we get OPNFV vars
   source $BUILD_DIRECTORY/opnfv.properties
@@ -56,15 +55,6 @@ else
     echo "Unable to install new RPMs: $RPM_LIST"
     exit 1
   fi
-
-  # log info to console
-  echo "Downloading the ISO artifact using URL http://$OPNFV_ARTIFACT_URL"
-  echo "--------------------------------------------------------"
-  echo
-
-  # Download ISO
-  curl --fail -s -o $BUILD_DIRECTORY/apex.iso http://$OPNFV_ARTIFACT_URL > gsutil.iso.log 2>&1
-
 fi
 
 # TODO: Uncomment these lines to verify SHA512SUMs once the sums are
diff --git a/jjb/apex/apex-gs-cleanup.sh b/jjb/apex/apex-gs-cleanup.sh
deleted file mode 100755 (executable)
index 1629aa8..0000000
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-
-# log info to console
-echo "Cleaning Google Storage"
-echo "-----------------------"
-echo
-
-thirty_days_ago=$(date -d "30 days ago" +"%Y%m%d")
-
-for i in $(gsutil ls gs://$GS_URL/*201?*); do
-    filedate=$(date -d "$(echo $i | grep -Eo 201[0-9]-?[0-9][0-9]-?[0-9][0-9])" +"%Y%m%d")
-    if [ $filedate -lt $thirty_days_ago ]; then
-      # gsutil indicates what it is removing so no need for output here
-      gsutil rm $i
-    fi
-done
index cdeac04..f102421 100755 (executable)
@@ -8,8 +8,6 @@ echo "Starting the Apex iso verify."
 echo "--------------------------------------------------------"
 echo
 
-BUILD_DIRECTORY=$WORKSPACE/../$BUILD_DIRECTORY
-
 source $BUILD_DIRECTORY/../opnfv.properties
 
 if ! rpm -q virt-install > /dev/null; then
@@ -29,76 +27,37 @@ fi
 # Make sure a pre-existing iso-verify isn't there
 rm_apex_iso_verify
 
+#make sure there is not an existing console log file for the VM
+sudo rm -f /var/log/libvirt/qemu/apex-iso-verify-console.log
+
 # run an install from the iso
 # This streams a serial console to tcp port 3737 on localhost
 sudo virt-install -n apex-iso-verify -r 4096 --vcpus 4 --os-variant=rhel7 \
- --accelerate -v --noautoconsole --nographics \
+ --accelerate -v --noautoconsole \
  --disk path=/var/lib/libvirt/images/apex-iso-verify.qcow2,size=30,format=qcow2 \
  -l $BUILD_DIRECTORY/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso \
  --extra-args 'console=ttyS0 console=ttyS0,115200n8 serial inst.ks=file:/iso-verify.ks inst.stage2=hd:LABEL=OPNFV\x20CentOS\x207\x20x86_64:/' \
  --initrd-inject $BUILD_DIRECTORY/../ci/iso-verify.ks \
- --serial tcp,host=:3737,protocol=raw
-
-# Attach to tcpport 3737 and echo the output to stdout
-# watch for a 5 min time out, a power off message or a tcp disconnect
-python << EOP
-#!/usr/bin/env python
-
-import sys
-import socket
-from time import sleep
-from time import time
-
-
-TCP_IP = '127.0.0.1'
-TCP_PORT = 3737
-BUFFER_SIZE = 1024
-
-try:
-    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-    s.connect((TCP_IP, TCP_PORT))
-except Exception, e:
-    print "Failed to connect to the iso-verofy vm's serial console"
-    print "this probably means that the VM failed to start"
-    raise e
-
-activity = time()
-data = s.recv(BUFFER_SIZE)
-last_data = data
-while time() - activity < 300:
-    try:
-        if data != last_data:
-            activity = time()
-        last_data = data
-        data = s.recv(BUFFER_SIZE)
-        sys.stdout.write(data)
-        if 'Powering off' in data:
-            break
-        sleep(.5)
-    except socket.error, e:
-        # for now assuming that the connection was closed
-        # which is good, means the vm finished installing
-        # printing the error output just in case we need to debug
-        print "VM console connection lost: %s" % msg
-        break
-s.close()
-
-if time() - activity > 300:
-    print "failing due to console inactivity"
-    exit(1)
-else:
-    print "Success!"
-EOP
-
-# save the python return code for after cleanup
-python_rc=$?
+ --serial file,path=/var/log/libvirt/qemu/apex-iso-verify-console.log
+
+echo "Waiting for install to finish..."
+sleep 10
+end_time=$(($SECONDS+1500))
+while ! [[ `sudo tail -n1 /var/log/libvirt/qemu/apex-iso-verify-console.log` =~ 'Power down' ]]; do
+  if [ $SECONDS -gt $end_time ] || ! sudo virsh list --all | grep apex-iso-verify | grep running > /dev/null; then
+    sudo cat /var/log/libvirt/qemu/apex-iso-verify-console.log
+    sudo virsh list --all
+    echo "Error: Failed to find power down message after install"
+    exit 1
+  fi
+  sleep 10
+done
+
+sudo cat /var/log/libvirt/qemu/apex-iso-verify-console.log
 
 # clean up
 rm_apex_iso_verify
 
-# Exit with the RC of the Python job
-exit $python_rc
-
 echo
 echo "--------------------------------------------------------"
 echo "Done!"
index b2a3944..342896c 100644 (file)
@@ -13,7 +13,11 @@ set -o nounset
 set -o pipefail
 
 SSH_OPTIONS=(-o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null -o LogLevel=error)
-SNAP_TYPE=$(echo ${JOB_NAME} | sed -n 's/^apex-\(.\+\)-promote.*$/\1/p')
+
+if [ -z "$SNAP_TYPE" ]; then
+  echo "ERROR: SNAP_TYPE not provided...exiting"
+  exit 1
+fi
 
 echo "Creating Apex snapshot..."
 echo "-------------------------"
index 9d0b014..f53451d 100755 (executable)
@@ -75,7 +75,10 @@ uploadrpm () {
 uploadsnap () {
   # Uploads snapshot artifact and updated properties file
   echo "Uploading snapshot artifacts"
-  SNAP_TYPE=$(echo ${JOB_NAME} | sed -n 's/^apex-\(.\+\)-promote.*$/\1/p')
+  if [ -z "$SNAP_TYPE" ]; then
+    echo "ERROR: SNAP_TYPE not provided...exiting"
+    exit 1
+  fi
   gsutil cp $WORKSPACE/apex-${SNAP_TYPE}-snap-`date +%Y-%m-%d`.tar.gz gs://$GS_URL/ > gsutil.iso.log
   if [ "$SNAP_TYPE" == 'csit' ]; then
     gsutil cp $WORKSPACE/snapshot.properties gs://$GS_URL/snapshot.properties > gsutil.latest.log
@@ -123,13 +126,13 @@ elif [ "$ARTIFACT_TYPE" == 'rpm' ]; then
     RPM_INSTALL_PATH=$BUILD_DIRECTORY/noarch
     RPM_LIST=$RPM_INSTALL_PATH/$(basename $OPNFV_RPM_URL)
     VERSION_EXTENSION=$(echo $(basename $OPNFV_RPM_URL) | sed 's/opnfv-apex-//')
-    for pkg in common undercloud; do # removed onos for danube
+    for pkg in common undercloud onos; do
       RPM_LIST+=" ${RPM_INSTALL_PATH}/opnfv-apex-${pkg}-${VERSION_EXTENSION}"
     done
     SRPM_INSTALL_PATH=$BUILD_DIRECTORY
     SRPM_LIST=$SRPM_INSTALL_PATH/$(basename $OPNFV_SRPM_URL)
     VERSION_EXTENSION=$(echo $(basename $OPNFV_SRPM_URL) | sed 's/opnfv-apex-//')
-    for pkg in common undercloud; do # removed onos for danube
+    for pkg in common undercloud onos; do
       SRPM_LIST+=" ${SRPM_INSTALL_PATH}/opnfv-apex-${pkg}-${VERSION_EXTENSION}"
     done
 
index de565ed..a395cf2 100644 (file)
@@ -13,7 +13,7 @@
         - 'apex-csit-promote-daily-{stream}'
         - 'apex-fdio-promote-daily-{stream}'
         - 'apex-verify-iso-{stream}'
-        - 'apex-deploy-test-baremetal-{stream}'
+        - 'apex-run-deploy-test-baremetal-{stream}'
         - 'apex-upload-snapshot'
         - 'apex-create-snapshot'
     # stream:    branch with - in place of / (eg. stable-arno)
@@ -26,6 +26,7 @@
             virtual-slave: 'apex-virtual-master'
             baremetal-slave: 'apex-baremetal-master'
             verify-scenario: 'os-odl-nofeature-ha'
+            concurrent-builds: 3
 
         - danube:
             branch: 'stable/danube'
@@ -34,6 +35,7 @@
             virtual-slave: 'apex-virtual-danube'
             baremetal-slave: 'apex-baremetal-danube'
             verify-scenario: 'os-odl_l3-nofeature-ha'
+            concurrent-builds: 1
             disabled: false
 
     platform:
 
     properties:
         - logrotate-default
-        - build-blocker:
-            use-build-blocker: true
-            block-level: 'NODE'
-            blocking-jobs:
-                - 'apex-daily.*'
-                - 'apex-deploy.*'
-                - 'apex-runner.*'
         - throttle:
             max-per-node: 3
             max-total: 10
                   GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
                   GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
                 node-parameters: true
-                kill-phase-on: FAILURE
+                kill-phase-on: NEVER
                 abort-all-job: true
                 git-revision: false
 
 
     properties:
         - logrotate-default
-        - build-blocker:
-            use-build-blocker: true
-            block-level: 'NODE'
-            blocking-jobs:
-                - 'apex-daily.*'
-                - 'apex-deploy.*'
-                - 'apex-runner.*'
         - throttle:
             max-per-node: 3
             max-total: 10
                     GERRIT_REFSPEC=$GERRIT_REFSPEC
                     GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
                     GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+                    GERRIT_EVENT_COMMENT_TEXT=$GERRIT_EVENT_COMMENT_TEXT
                   node-parameters: true
                   kill-phase-on: FAILURE
                   abort-all-job: true
                   GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
                   GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
                 node-parameters: true
-                kill-phase-on: FAILURE
+                kill-phase-on: NEVER
                 abort-all-job: true
                 git-revision: false
 
                   node-parameters: false
                   current-parameters: true
                   predefined-parameters: |
+                    GERRIT_BRANCH=$GERRIT_BRANCH
+                    GERRIT_REFSPEC=
                     OPNFV_CLEAN=yes
                     DEPLOY_SCENARIO={verify-scenario}
                   kill-phase-on: FAILURE
                   abort-all-job: true
                   git-revision: false
         - multijob:
-            name: Functest
-            condition: ALWAYS
+            name: CPERF
+            condition: SUCCESSFUL
             projects:
-                - name: 'functest-apex-baremetal-daily-{stream}'
+                - name: 'cperf-apex-intel-pod2-daily-master'
                   node-parameters: true
                   current-parameters: false
                   predefined-parameters:
 
     wrappers:
         - timeout:
-            timeout: 90
+            timeout: 150
             fail: true
 
     properties:
         - logrotate-default
         - throttle:
-            max-per-node: 3
+            max-per-node: {concurrent-builds}
             max-total: 10
             option: 'project'
+        - build-blocker:
+            use-build-blocker: true
+            block-level: 'NODE'
+            blocking-jobs:
+                - 'apex-verify-iso-{stream}'
 
     builders:
         - 'apex-build'
 
     properties:
         - logrotate-default
-        - build-blocker:
-            use-build-blocker: true
-            block-level: 'NODE'
-            blocking-jobs:
-                - 'apex-deploy.*'
         - throttle:
             max-per-node: 1
             max-total: 10
 
 # Baremetal Deploy and Test
 - job-template:
-    name: 'apex-deploy-test-baremetal-{stream}'
+    name: 'apex-run-deploy-test-baremetal-{stream}'
 
     # Job template for daily build
     #
             block-level: 'NODE'
             blocking-jobs:
                 - 'apex-verify.*'
-                - 'apex-deploy.*'
                 - 'apex-runner.*'
                 - 'apex-.*-promote.*'
+                - 'apex-run.*'
     builders:
         - description-setter:
             description: "Deployed on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
                   current-parameters: true
                   predefined-parameters: |
                     OPNFV_CLEAN=yes
+                    GERRIT_BRANCH=$GERRIT_BRANCH
+                    GERRIT_REFSPEC=
+                    DEPLOY_SCENARIO=$DEPLOY_SCENARIO
                   kill-phase-on: FAILURE
                   abort-all-job: true
                   git-revision: false
                 - name: 'apex-verify-iso-danube'
                   current-parameters: false
                   predefined-parameters: |
-                    BUILD_DIRECTORY=apex-build-danube/.build
+                    BUILD_DIRECTORY=$WORKSPACE/../apex-build-danube/.build
                     GERRIT_BRANCH=$GERRIT_BRANCH
                     GERRIT_REFSPEC=
                     GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
             condition: SUCCESSFUL
             projects:
 
-                - name: 'apex-deploy-test-baremetal-danube'
+                - name: 'apex-run-deploy-test-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-deploy-test-baremetal-danube'
+                - name: 'apex-run-deploy-test-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-deploy-test-baremetal-danube'
+                - name: 'apex-run-deploy-test-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-deploy-test-baremetal-danube'
+                - name: 'apex-run-deploy-test-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-deploy-test-baremetal-danube'
+                - name: 'apex-run-deploy-test-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-deploy-test-baremetal-danube'
+                - name: 'apex-run-deploy-test-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-deploy-test-baremetal-danube'
+                - name: 'apex-run-deploy-test-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-deploy-test-baremetal-danube'
+                - name: 'apex-run-deploy-test-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-deploy-test-baremetal-danube'
+                - name: 'apex-run-deploy-test-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-deploy-test-baremetal-danube'
+                - name: 'apex-run-deploy-test-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-deploy-test-baremetal-danube'
+                - name: 'apex-run-deploy-test-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-deploy-test-baremetal-danube'
+                - name: 'apex-run-deploy-test-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-odl_l2-netvirt_gbp_fdio-noha
+                    DEPLOY_SCENARIO=os-odl_netvirt-fdio-noha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-deploy-test-baremetal-danube'
+                - name: 'apex-run-deploy-test-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-deploy-test-baremetal-danube'
+                - name: 'apex-run-deploy-test-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-deploy-test-baremetal-danube'
+                - name: 'apex-run-deploy-test-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-deploy-test-baremetal-danube'
+                - name: 'apex-run-deploy-test-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-deploy-test-baremetal-danube'
+                - name: 'apex-run-deploy-test-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-deploy-test-baremetal-danube'
+                - name: 'apex-run-deploy-test-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-deploy-test-baremetal-danube'
+                - name: 'apex-run-deploy-test-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-deploy-test-baremetal-danube'
+                - name: 'apex-run-deploy-test-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-deploy-test-baremetal-danube'
+                - name: 'apex-run-deploy-test-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-deploy-test-baremetal-danube'
+                - name: 'apex-run-deploy-test-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-deploy-test-baremetal-danube'
+                - name: 'apex-run-deploy-test-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-deploy-test-baremetal-danube'
+                - name: 'apex-run-deploy-test-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-deploy-test-baremetal-danube'
+                - name: 'apex-run-deploy-test-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-deploy-test-baremetal-danube'
+                - name: 'apex-run-deploy-test-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
                 - name: 'apex-verify-iso-master'
                   current-parameters: false
                   predefined-parameters: |
-                    BUILD_DIRECTORY=apex-build-master/.build
+                    BUILD_DIRECTORY=$WORKSPACE/../apex-build-master/.build
                     GERRIT_BRANCH=$GERRIT_BRANCH
                     GERRIT_REFSPEC=
                     GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
             condition: SUCCESSFUL
             projects:
 
-                - name: 'apex-deploy-test-baremetal-master'
+                - name: 'apex-run-deploy-test-baremetal-master'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-deploy-test-baremetal-master'
+                - name: 'apex-run-deploy-test-baremetal-master'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-deploy-test-baremetal-master'
+                - name: 'apex-run-deploy-test-baremetal-master'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-deploy-test-baremetal-master'
+                - name: 'apex-run-deploy-test-baremetal-master'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
                     GERRIT_REFSPEC=$GERRIT_REFSPEC
                     GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
                     GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+                    PROMOTE=True
                   node-parameters: true
                   kill-phase-on: FAILURE
                   abort-all-job: true
             condition: SUCCESSFUL
             projects:
               - name: 'apex-create-snapshot'
-                current-parameters: true
+                current-parameters: false
+                predefined-parameters: |
+                  SNAP_TYPE=csit
                 node-parameters: true
                 kill-phase-on: FAILURE
                 abort-all-job: true
             condition: SUCCESSFUL
             projects:
               - name: 'apex-upload-snapshot'
-                current-parameters: true
+                current-parameters: false
+                predefined-parameters: |
+                  SNAP_TYPE=csit
                 node-parameters: true
                 kill-phase-on: FAILURE
                 abort-all-job: true
                 - name: 'apex-deploy-virtual-{stream}'
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-odl_l2-netvirt_gbp_fdio-noha
+                    DEPLOY_SCENARIO=os-odl_netvirt-fdio-noha
                     OPNFV_CLEAN=yes
                     GERRIT_BRANCH=$GERRIT_BRANCH
                     GERRIT_REFSPEC=$GERRIT_REFSPEC
                     GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
                     GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+                    PROMOTE=True
                   node-parameters: true
                   kill-phase-on: FAILURE
                   abort-all-job: true
             condition: SUCCESSFUL
             projects:
               - name: 'apex-create-snapshot'
-                current-parameters: true
+                current-parameters: false
+                predefined-parameters: |
+                  SNAP_TYPE=fdio
                 node-parameters: true
                 kill-phase-on: FAILURE
                 abort-all-job: true
             condition: SUCCESSFUL
             projects:
               - name: 'apex-upload-snapshot'
-                current-parameters: true
+                current-parameters: false
+                predefined-parameters: |
+                  SNAP_TYPE=fdio
                 node-parameters: true
                 kill-phase-on: FAILURE
                 abort-all-job: true
                 git-revision: false
 
-- job-template:
-    name: 'apex-gs-clean-{stream}'
-
-    # Job template for clean
-    #
-    # Required Variables:
-    #     stream:    branch with - in place of / (eg. stable)
-    node: '{slave}'
-
-    disabled: false
-
-    parameters:
-        - project-parameter:
-            project: '{project}'
-            branch: '{branch}'
-        - apex-parameter:
-            gs-pathname: '{gs-pathname}'
-
-    builders:
-        - 'apex-gs-clean'
-
-    triggers:
-        - 'apex-gs-clean-{stream}'
-
 ########################
 # parameter macros
 ########################
             name: GS_URL
             default: $GS_BASE{gs-pathname}
             description: "URL to Google Storage."
+        - string:
+            name: PROMOTE
+            default: 'False'
+            description: "Flag to know if we should promote/upload snapshot artifacts."
 
 ########################
 # builder macros
         - shell:
             !include-raw: ./apex-download-artifact.sh
 
-- builder:
-    name: 'apex-gs-cleanup'
-    builders:
-        - shell:
-            !include-raw: ./apex-gs-cleanup.sh
-
 - builder:
     name: 'apex-deploy'
     builders:
     name: 'apex-danube'
     triggers:
         - timed: '0 12 * * *'
-- trigger:
-    name: 'apex-gs-clean-{stream}'
-    triggers:
-        - timed: '0 2 * * *'
\ No newline at end of file
index d739a70..752cf28 100644 (file)
@@ -13,7 +13,7 @@
         - 'apex-csit-promote-daily-{stream}'
         - 'apex-fdio-promote-daily-{stream}'
         - 'apex-verify-iso-{stream}'
-        - 'apex-deploy-test-baremetal-{stream}'
+        - 'apex-run-deploy-test-baremetal-{stream}'
         - 'apex-upload-snapshot'
         - 'apex-create-snapshot'
     # stream:    branch with - in place of / (eg. stable-arno)
@@ -26,6 +26,7 @@
             virtual-slave: 'apex-virtual-master'
             baremetal-slave: 'apex-baremetal-master'
             verify-scenario: 'os-odl-nofeature-ha'
+            concurrent-builds: 3
 
         - danube:
             branch: 'stable/danube'
@@ -34,6 +35,7 @@
             virtual-slave: 'apex-virtual-danube'
             baremetal-slave: 'apex-baremetal-danube'
             verify-scenario: 'os-odl_l3-nofeature-ha'
+            concurrent-builds: 1
             disabled: false
 
     platform:
 
     properties:
         - logrotate-default
-        - build-blocker:
-            use-build-blocker: true
-            block-level: 'NODE'
-            blocking-jobs:
-                - 'apex-daily.*'
-                - 'apex-deploy.*'
-                - 'apex-runner.*'
         - throttle:
             max-per-node: 3
             max-total: 10
                   GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
                   GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
                 node-parameters: true
-                kill-phase-on: FAILURE
+                kill-phase-on: NEVER
                 abort-all-job: true
                 git-revision: false
 
 
     properties:
         - logrotate-default
-        - build-blocker:
-            use-build-blocker: true
-            block-level: 'NODE'
-            blocking-jobs:
-                - 'apex-daily.*'
-                - 'apex-deploy.*'
-                - 'apex-runner.*'
         - throttle:
             max-per-node: 3
             max-total: 10
                     GERRIT_REFSPEC=$GERRIT_REFSPEC
                     GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
                     GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+                    GERRIT_EVENT_COMMENT_TEXT=$GERRIT_EVENT_COMMENT_TEXT
                   node-parameters: true
                   kill-phase-on: FAILURE
                   abort-all-job: true
                   GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
                   GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
                 node-parameters: true
-                kill-phase-on: FAILURE
+                kill-phase-on: NEVER
                 abort-all-job: true
                 git-revision: false
 
                   node-parameters: false
                   current-parameters: true
                   predefined-parameters: |
+                    GERRIT_BRANCH=$GERRIT_BRANCH
+                    GERRIT_REFSPEC=
                     OPNFV_CLEAN=yes
                     DEPLOY_SCENARIO={verify-scenario}
                   kill-phase-on: FAILURE
                   abort-all-job: true
                   git-revision: false
         - multijob:
-            name: Functest
-            condition: ALWAYS
+            name: CPERF
+            condition: SUCCESSFUL
             projects:
-                - name: 'functest-apex-baremetal-daily-{stream}'
+                - name: 'cperf-apex-intel-pod2-daily-master'
                   node-parameters: true
                   current-parameters: false
                   predefined-parameters:
 
     wrappers:
         - timeout:
-            timeout: 90
+            timeout: 150
             fail: true
 
     properties:
         - logrotate-default
         - throttle:
-            max-per-node: 3
+            max-per-node: {concurrent-builds}
             max-total: 10
             option: 'project'
+        - build-blocker:
+            use-build-blocker: true
+            block-level: 'NODE'
+            blocking-jobs:
+                - 'apex-verify-iso-{stream}'
 
     builders:
         - 'apex-build'
 
     properties:
         - logrotate-default
-        - build-blocker:
-            use-build-blocker: true
-            block-level: 'NODE'
-            blocking-jobs:
-                - 'apex-deploy.*'
         - throttle:
             max-per-node: 1
             max-total: 10
 
 # Baremetal Deploy and Test
 - job-template:
-    name: 'apex-deploy-test-baremetal-{stream}'
+    name: 'apex-run-deploy-test-baremetal-{stream}'
 
     # Job template for daily build
     #
             block-level: 'NODE'
             blocking-jobs:
                 - 'apex-verify.*'
-                - 'apex-deploy.*'
                 - 'apex-runner.*'
                 - 'apex-.*-promote.*'
+                - 'apex-run.*'
     builders:
         - description-setter:
             description: "Deployed on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
                   current-parameters: true
                   predefined-parameters: |
                     OPNFV_CLEAN=yes
+                    GERRIT_BRANCH=$GERRIT_BRANCH
+                    GERRIT_REFSPEC=
+                    DEPLOY_SCENARIO=$DEPLOY_SCENARIO
                   kill-phase-on: FAILURE
                   abort-all-job: true
                   git-revision: false
                 - name: 'apex-verify-iso-{{ stream }}'
                   current-parameters: false
                   predefined-parameters: |
-                    BUILD_DIRECTORY=apex-build-{{ stream }}/.build
+                    BUILD_DIRECTORY=$WORKSPACE/../apex-build-{{ stream }}/.build
                     GERRIT_BRANCH=$GERRIT_BRANCH
                     GERRIT_REFSPEC=
                     GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
             condition: SUCCESSFUL
             projects:
 {% for scenario in scenarios[stream] %}
-                - name: 'apex-deploy-test-baremetal-{{ stream }}'
+                - name: 'apex-run-deploy-test-baremetal-{{ stream }}'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
                     GERRIT_REFSPEC=$GERRIT_REFSPEC
                     GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
                     GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+                    PROMOTE=True
                   node-parameters: true
                   kill-phase-on: FAILURE
                   abort-all-job: true
             condition: SUCCESSFUL
             projects:
               - name: 'apex-create-snapshot'
-                current-parameters: true
+                current-parameters: false
+                predefined-parameters: |
+                  SNAP_TYPE=csit
                 node-parameters: true
                 kill-phase-on: FAILURE
                 abort-all-job: true
             condition: SUCCESSFUL
             projects:
               - name: 'apex-upload-snapshot'
-                current-parameters: true
+                current-parameters: false
+                predefined-parameters: |
+                  SNAP_TYPE=csit
                 node-parameters: true
                 kill-phase-on: FAILURE
                 abort-all-job: true
                 - name: 'apex-deploy-virtual-{stream}'
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-odl_l2-netvirt_gbp_fdio-noha
+                    DEPLOY_SCENARIO=os-odl_netvirt-fdio-noha
                     OPNFV_CLEAN=yes
                     GERRIT_BRANCH=$GERRIT_BRANCH
                     GERRIT_REFSPEC=$GERRIT_REFSPEC
                     GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
                     GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+                    PROMOTE=True
                   node-parameters: true
                   kill-phase-on: FAILURE
                   abort-all-job: true
             condition: SUCCESSFUL
             projects:
               - name: 'apex-create-snapshot'
-                current-parameters: true
+                current-parameters: false
+                predefined-parameters: |
+                  SNAP_TYPE=fdio
                 node-parameters: true
                 kill-phase-on: FAILURE
                 abort-all-job: true
             condition: SUCCESSFUL
             projects:
               - name: 'apex-upload-snapshot'
-                current-parameters: true
+                current-parameters: false
+                predefined-parameters: |
+                  SNAP_TYPE=fdio
                 node-parameters: true
                 kill-phase-on: FAILURE
                 abort-all-job: true
                 git-revision: false
 
-- job-template:
-    name: 'apex-gs-clean-{stream}'
-
-    # Job template for clean
-    #
-    # Required Variables:
-    #     stream:    branch with - in place of / (eg. stable)
-    node: '{slave}'
-
-    disabled: false
-
-    parameters:
-        - project-parameter:
-            project: '{project}'
-            branch: '{branch}'
-        - apex-parameter:
-            gs-pathname: '{gs-pathname}'
-
-    builders:
-        - 'apex-gs-clean'
-
-    triggers:
-        - 'apex-gs-clean-{stream}'
-
 ########################
 # parameter macros
 ########################
             name: GS_URL
             default: $GS_BASE{gs-pathname}
             description: "URL to Google Storage."
+        - string:
+            name: PROMOTE
+            default: 'False'
+            description: "Flag to know if we should promote/upload snapshot artifacts."
 
 ########################
 # builder macros
         - shell:
             !include-raw: ./apex-download-artifact.sh
 
-- builder:
-    name: 'apex-gs-cleanup'
-    builders:
-        - shell:
-            !include-raw: ./apex-gs-cleanup.sh
-
 - builder:
     name: 'apex-deploy'
     builders:
     name: 'apex-danube'
     triggers:
         - timed: '0 12 * * *'
-- trigger:
-    name: 'apex-gs-clean-{stream}'
-    triggers:
-        - timed: '0 2 * * *'
+
index bff7d3b..dc9107a 100644 (file)
@@ -15,7 +15,7 @@ danube:
   - 'os-nosdn-kvm-noha'
   - 'os-odl_l2-fdio-noha'
   - 'os-odl_l2-fdio-ha'
-  - 'os-odl_l2-netvirt_gbp_fdio-noha'
+  - 'os-odl_netvirt-fdio-noha'
   - 'os-odl_l2-sfc-noha'
   - 'os-odl_l3-nofeature-noha'
   - 'os-odl_l3-nofeature-ha'
index 17d5204..55d8ff9 100644 (file)
                 build-step-failure-threshold: 'never'
                 failure-threshold: 'never'
                 unstable-threshold: 'FAILURE'
-        # 1.dovetail only master by now, not sync with A/B/C branches
+        # 1.dovetail only master, based on D release
         # 2.here the stream means the SUT stream, dovetail stream is defined in its own job
-        # 3.only debug testsuite here(includes 3 basic testcase,
-        #   i.e. one tempest smoke ipv6, two vping from functest)
+        # 3.only proposed_tests testsuite here(refstack, ha, ipv6, bgpvpn)
         # 4.not used for release criteria or compliance,
         #   only to debug the dovetail tool bugs with arm pods
-        - trigger-builds:
-            - project: 'dovetail-{installer}-{pod}-proposed_tests-{stream}'
-              current-parameters: false
-              predefined-parameters:
-                DEPLOY_SCENARIO={scenario}
-              block: true
-              same-node: true
-              block-thresholds:
-                build-step-failure-threshold: 'never'
-                failure-threshold: 'never'
-                unstable-threshold: 'FAILURE'
+        # 5.only run against scenario os-(nosdn|odl_l2)-(nofeature-bgpvpn)-ha
+        - conditional-step:
+            condition-kind: regex-match
+            regex: os-(nosdn|odl_l2)-(nofeature|bgpvpn)-ha
+            label: '{scenario}'
+            steps:
+                - trigger-builds:
+                    - project: 'dovetail-{installer}-{pod}-proposed_tests-{stream}'
+                      current-parameters: false
+                      predefined-parameters:
+                        DEPLOY_SCENARIO={scenario}
+                      block: true
+                      same-node: true
+                      block-thresholds:
+                        build-step-failure-threshold: 'never'
+                        failure-threshold: 'never'
+                        unstable-threshold: 'FAILURE'
 
 - job-template:
     name: '{installer}-deploy-{pod}-daily-{stream}'
 - trigger:
     name: 'fuel-os-odl_l2-nofeature-ha-armband-baremetal-master-trigger'
     triggers:
-        - timed: '0 0 * * 1'
+        - timed: ''
 - trigger:
     name: 'fuel-os-nosdn-nofeature-ha-armband-baremetal-master-trigger'
     triggers:
-        - timed: '0 0 * * 2'
+        - timed: ''
 - trigger:
     name: 'fuel-os-odl_l3-nofeature-ha-armband-baremetal-master-trigger'
     triggers:
-        - timed: '0 0 * * 3'
+        - timed: ''
 - trigger:
     name: 'fuel-os-odl_l2-bgpvpn-ha-armband-baremetal-master-trigger'
     triggers:
-        - timed: '0 0 * * 4'
+        - timed: ''
 - trigger:
     name: 'fuel-os-odl_l2-nofeature-noha-armband-baremetal-master-trigger'
     triggers:
-        - timed: '0 0 * * 5'
+        - timed: ''
 - trigger:
     name: 'fuel-os-odl_l2-sfc-ha-armband-baremetal-master-trigger'
     triggers:
-        - timed: '0 0,20 * * 6'
+        - timed: ''
 - trigger:
     name: 'fuel-os-odl_l2-sfc-noha-armband-baremetal-master-trigger'
     triggers:
-        - timed: '0 0,20 * * 7'
+        - timed: ''
 
 #----------------------------------------------------------------------
 # Enea Armband CI Baremetal Triggers running against danube branch
 - trigger:
     name: 'fuel-os-odl_l2-nofeature-ha-armband-baremetal-danube-trigger'
     triggers:
-        - timed: '0 4 * * 1,2,3,4,5'
+        - timed: '0 0,16 * * 2,4'
 - trigger:
     name: 'fuel-os-nosdn-nofeature-ha-armband-baremetal-danube-trigger'
     triggers:
-        - timed: '0 8 * * 1,2,3,4,5'
+        - timed: '0 0 * * 1,5,7'
 - trigger:
     name: 'fuel-os-odl_l2-bgpvpn-ha-armband-baremetal-danube-trigger'
     triggers:
-        - timed: '0 12 * * 1,2,3,4,5'
+        - timed: '0 16 * * 1,5,7'
 - trigger:
     name: 'fuel-os-odl_l3-nofeature-ha-armband-baremetal-danube-trigger'
     triggers:
-        - timed: '0 16 * * 1,2,3,4,5'
+        - timed: '0 8 * * 2,4,6'
 - trigger:
     name: 'fuel-os-odl_l2-nofeature-noha-armband-baremetal-danube-trigger'
     triggers:
-        - timed: '0 20 * * 1,2,3,4,5'
+        - timed: '0 8 * * 1,3,5,7'
 - trigger:
     name: 'fuel-os-odl_l2-sfc-ha-armband-baremetal-danube-trigger'
     triggers:
-        - timed: '0 4,8 * * 6,7'
+        - timed: '0 0 * * 3,6'
 - trigger:
     name: 'fuel-os-odl_l2-sfc-noha-armband-baremetal-danube-trigger'
     triggers:
-        - timed: '0 12,16 * * 6,7'
+        - timed: '0 16 * * 3,6'
 #---------------------------------------------------------------
 # Enea Armband CI Virtual Triggers running against master branch
 #---------------------------------------------------------------
diff --git a/jjb/barometer/barometer-build.sh b/jjb/barometer/barometer-build.sh
new file mode 100644 (file)
index 0000000..e40841b
--- /dev/null
@@ -0,0 +1,21 @@
+set -x
+
+OPNFV_ARTIFACT_VERSION=$(date -u +"%Y-%m-%d_%H-%M-%S")
+OPNFV_ARTIFACT_URL="$GS_URL/$OPNFV_ARTIFACT_VERSION/"
+
+# log info to console
+echo "Starting the build of Barometer RPMs"
+echo "------------------------------------"
+echo
+
+cd ci
+./install_dependencies.sh
+./build_rpm.sh
+cd $WORKSPACE
+
+# save information regarding artifact into file
+(
+    echo "OPNFV_ARTIFACT_VERSION=$OPNFV_ARTIFACT_VERSION"
+    echo "OPNFV_ARTIFACT_URL=$OPNFV_ARTIFACT_URL"
+) > $WORKSPACE/opnfv.properties
+
diff --git a/jjb/barometer/barometer-upload-artifact.sh b/jjb/barometer/barometer-upload-artifact.sh
new file mode 100644 (file)
index 0000000..996de48
--- /dev/null
@@ -0,0 +1,40 @@
+#!/bin/bash
+set -o nounset
+set -o pipefail
+
+RPM_WORKDIR=$WORKSPACE/rpmbuild
+RPM_DIR=$RPM_WORKDIR/RPMS/x86_64/
+cd $WORKSPACE/
+
+# source the opnfv.properties to get ARTIFACT_VERSION
+source $WORKSPACE/opnfv.properties
+
+# upload property files
+gsutil cp $WORKSPACE/opnfv.properties gs://$OPNFV_ARTIFACT_URL/opnfv.properties > gsutil.properties.log 2>&1
+gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/latest.properties > gsutil.latest.log 2>&1
+
+echo "Uploading the barometer RPMs to artifacts.opnfv.org"
+echo "---------------------------------------------------"
+echo
+
+gsutil -m cp -r $RPM_DIR/* $OPNFV_ARTIFACT_URL > $WORKSPACE/gsutil.log 2>&1
+
+# Check if the RPMs were pushed
+gsutil ls $OPNFV_ARTIFACT_URL > /dev/null 2>&1
+if [[ $? -ne 0 ]]; then
+  echo "Problem while uploading barometer RPMs to $OPNFV_ARTIFACT_URL!"
+  echo "Check log $WORKSPACE/gsutil.log on the appropriate build server"
+  exit 1
+fi
+
+gsutil -m setmeta \
+    -h "Cache-Control:private, max-age=0, no-transform" \
+    gs://$OPNFV_ARTIFACT_URL/*.rpm > /dev/null 2>&1
+
+echo
+echo "--------------------------------------------------------"
+echo "Done!"
+echo "Artifact is available at $OPNFV_ARTIFACT_URL"
+
+#cleanup the RPM repo from the build machine.
+rm -rf $RPM_WORKDIR
index 9ec30e8..c8fb9e2 100644 (file)
 
     disabled: '{obj:disabled}'
 
-    concurrent: true
+    concurrent: false
 
     properties:
         - logrotate-default
-        - throttle:
-            enabled: true
-            max-total: 3
-            max-per-node: 2
-            option: 'project'
 
     parameters:
         - project-parameter:
             project: '{project}'
             branch: '{branch}'
-        - 'opnfv-build-ubuntu-defaults'
+        - 'opnfv-build-centos-defaults'
 
     scm:
         - git-scm
          - timed: '@midnight'
 
     builders:
-        - shell: |
-            pwd
-            cd src
-            ./install_build_deps.sh
-            make clobber
-            make
+        - shell:
+            !include-raw-escape: ./barometer-build.sh
+        - shell:
+            !include-raw-escape: ./barometer-upload-artifact.sh
+
+########################
+# parameter macros
+########################
+- parameter:
+    name: barometer-project-parameter
+    parameters:
+        - string:
+            name: GS_URL
+            default: '$GS_BASE{gs-pathname}'
+            description: "URL to Google Storage."
index 0df659a..e6f8d1b 100644 (file)
@@ -2,6 +2,7 @@
 #set -e
 [[ $GERRIT_REFSPEC_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
 BOTTLENECKS_IMAGE=opnfv/bottlenecks
+REPORT="True"
 
 if [[ $SUITE_NAME == rubbos || $SUITE_NAME == vstf ]]; then
     echo "Bottlenecks: to pull image $BOTTLENECKS_IMAGE:${DOCKER_TAG}"
@@ -53,11 +54,11 @@ else
         echo "Bottlenecks: pulling tutum/influxdb for yardstick"
         docker pull tutum/influxdb:0.13
         sleep 5
-        docker exec bottleneckcompose_bottlenecks_1 python ${POSCA_SCRIPT}/run_posca.py testcase $TEST_CASE
+        docker exec bottleneckcompose_bottlenecks_1 python ${POSCA_SCRIPT}/run_posca.py testcase $TEST_CASE $REPORT
     elif [[ $SUITE_NAME == posca_stress_ping ]]; then
         TEST_CASE=posca_factor_ping
         sleep 5
-        docker exec bottleneckcompose_bottlenecks_1 python ${POSCA_SCRIPT}/run_posca.py testcase $TEST_CASE
+        docker exec bottleneckcompose_bottlenecks_1 python ${POSCA_SCRIPT}/run_posca.py testcase $TEST_CASE $REPORT
     fi
 
     echo "Bottlenecks: cleaning up docker-compose images and dockers"
diff --git a/jjb/ci_gate_security/anteater-clone-all-repos.sh b/jjb/ci_gate_security/anteater-clone-all-repos.sh
new file mode 100755 (executable)
index 0000000..8a9e73d
--- /dev/null
@@ -0,0 +1,33 @@
+#!/bin/bash
+# SPDX-license-identifier: Apache-2.0
+set -o errexit
+set -o pipefail
+set -o nounset
+export PATH=$PATH:/usr/local/bin/
+
+
+#WORKSPACE="$(pwd)"
+
+cd $WORKSPACE
+if [ ! -d "$WORKSPACE/allrepos" ]; then
+  mkdir $WORKSPACE/allrepos
+fi
+
+cd $WORKSPACE/allrepos
+
+declare -a PROJECT_LIST
+EXCLUDE_PROJECTS="All-Projects|All-Users|securedlab"
+
+PROJECT_LIST=($(ssh gerrit.opnfv.org -p 29418 gerrit ls-projects | egrep -v $EXCLUDE_PROJECTS))
+echo "PROJECT_LIST=(${PROJECT_LIST[*]})" > $WORKSPACE/opnfv-projects.sh
+
+for PROJECT in ${PROJECT_LIST[@]}; do
+  echo "> Cloning $PROJECT"
+  if [ ! -d "$PROJECT" ]; then
+    git clone "https://gerrit.opnfv.org/gerrit/$PROJECT.git"
+  else
+    pushd "$PROJECT" > /dev/null
+    git pull -f
+    popd > /dev/null
+  fi
+done
diff --git a/jjb/ci_gate_security/anteater-report-to-gerrit.sh b/jjb/ci_gate_security/anteater-report-to-gerrit.sh
new file mode 100644 (file)
index 0000000..fc3018f
--- /dev/null
@@ -0,0 +1,25 @@
+#!/bin/bash
+# SPDX-license-identifier: Apache-2.0
+set -o pipefail
+export PATH=$PATH:/usr/local/bin/
+EXITSTATUS=0
+
+# securityaudit.log is produced whenever the audit ran; post a report only if it exists
+if [[ -e securityaudit.log ]] ; then
+    echo -e "\nposting security audit report to gerrit...\n"
+
+    #check if log has errors
+    if grep ERROR securityaudit.log; then
+        EXITSTATUS=1
+    fi
+    
+    cat securityaudit.log  | awk -F"ERROR - " '{print $2}' > shortlog
+    
+    ssh -p 29418 gerrit.opnfv.org \
+        "gerrit review -p $GERRIT_PROJECT \
+        -m \"$(cat shortlog)\" \
+        $GERRIT_PATCHSET_REVISION \
+        --notify NONE"
+    
+    exit $EXITSTATUS
+fi
diff --git a/jjb/ci_gate_security/anteater-security-audit-weekly.sh b/jjb/ci_gate_security/anteater-security-audit-weekly.sh
new file mode 100644 (file)
index 0000000..436a173
--- /dev/null
@@ -0,0 +1,37 @@
+#!/bin/bash
+# SPDX-license-identifier: Apache-2.0
+
+echo "--------------------------------------------------------"
+vols="-v $WORKSPACE/allrepos/:/home/opnfv/anteater/allrepos/"
+echo "Pulling releng-anteater docker image"
+echo "--------------------------------------------------------"
+docker pull opnfv/releng-anteater
+echo "--------------------------------------------------------"
+cmd="docker run -id $vols opnfv/releng-anteater /bin/bash"
+echo "Running docker command $cmd"
+container_id=$($cmd)
+echo "Container ID is $container_id"
+source $WORKSPACE/opnfv-projects.sh
+for project in "${PROJECT_LIST[@]}"
+
+do
+  cmd="anteater --project testproj --path /home/opnfv/anteater/allrepos/$project"
+  echo "Executing command inside container"
+  echo "$cmd"
+  echo "--------------------------------------------------------"
+  docker exec $container_id $cmd > $WORKSPACE/"$project".securityaudit.log 2>&1
+done
+
+exit_code=$?
+echo "--------------------------------------------------------"
+echo "Stopping docker container with ID $container_id"
+docker stop $container_id
+
+
+#gsutil cp $WORKSPACE/securityaudit.log \
+#    gs://$GS_URL/$PROJECT-securityaudit-weekly.log 2>&1
+#
+#gsutil -m setmeta \
+#    -h "Content-Type:text/html" \
+#    -h "Cache-Control:private, max-age=0, no-transform" \
+#    gs://$GS_URL/$PROJECT-securityaudit-weekly.log > /dev/null 2>&1
diff --git a/jjb/ci_gate_security/anteater-security-audit.sh b/jjb/ci_gate_security/anteater-security-audit.sh
new file mode 100644 (file)
index 0000000..2b5c26a
--- /dev/null
@@ -0,0 +1,28 @@
+#!/bin/bash
+cd $WORKSPACE
+echo "Generating patchset file to list changed files"
+git diff HEAD^1 --name-only | sed "s#^#/home/opnfv/anteater/$PROJECT/#" > $WORKSPACE/patchset
+echo "Changed files are"
+echo "--------------------------------------------------------"
+cat $WORKSPACE/patchset
+echo "--------------------------------------------------------"
+
+vols="-v $WORKSPACE:/home/opnfv/anteater/$PROJECT"
+envs="-e PROJECT=$PROJECT"
+
+echo "Pulling releng-anteater docker image"
+echo "--------------------------------------------------------"
+docker pull opnfv/releng-anteater
+echo "--------------------------------------------------------"
+
+cmd="docker run -i $envs $vols --rm opnfv/releng-anteater \
+anteater --project $PROJECT --patchset /home/opnfv/anteater/$PROJECT/patchset"
+echo "Running docker container"
+echo "$cmd"
+$cmd > $WORKSPACE/securityaudit.log 2>&1
+exit_code=$?
+echo "--------------------------------------------------------"
+echo "Docker container exited with code: $exit_code"
+echo "--------------------------------------------------------"
+cat securityaudit.log
+exit 0
similarity index 50%
rename from jjb/securityaudit/opnfv-security-audit.yml
rename to jjb/ci_gate_security/opnfv-ci-gate-security.yml
index 732df89..7190352 100644 (file)
@@ -1,5 +1,6 @@
+# SPDX-license-identifier: Apache-2.0
 ########################
-# Job configuration for opnfv-lint
+# Job configuration for opnfv-anteater (security audit)
 ########################
 - project:
 
@@ -9,6 +10,7 @@
 
     jobs:
         - 'opnfv-security-audit-verify-{stream}'
+        - 'opnfv-security-audit-weekly-{stream}'
 
     stream:
         - master:
 ########################
 # job templates
 ########################
+- job-template:
+    name: 'opnfv-security-audit-weekly-{stream}'
+
+    disabled: '{obj:disabled}'
+
+    parameters:
+        - label:
+            name: SLAVE_LABEL
+            default: 'ericsson-build3'
+            description: 'Slave label on Jenkins'
+        - project-parameter:
+            project: releng
+            branch: '{branch}'
+
+    triggers:
+        - timed: '@weekly'
+
+    builders:
+        - anteater-security-audit-weekly
+
 - job-template:
     name: 'opnfv-security-audit-verify-{stream}'
 
     disabled: '{obj:disabled}'
 
     parameters:
+        - label:
+            name: SLAVE_LABEL
+            default: 'ericsson-build3'
+            description: 'Slave label on Jenkins'
         - project-parameter:
             project: $GERRIT_PROJECT
             branch: '{branch}'
+        - string:
+            name: GIT_BASE
+            default: https://gerrit.opnfv.org/gerrit/$PROJECT
+            description: "Used for overriding the GIT URL coming from the Global Jenkins configuration in case the job runs on non-LF hardware."
 
     scm:
         - git-scm-gerrit
                     comment-contains-value: 'reverify'
             projects:
               - project-compare-type: 'REG_EXP'
-                project-pattern: 'sandbox'
+                project-pattern: 'sandbox|releng'
                 branches:
                   - branch-compare-type: 'ANT'
                     branch-pattern: '**/{branch}'
                 file-paths:
                   - compare-type: ANT
-                    pattern: '**/*.py'
-          skip-vote:
-            successful: true
-            failed: true
-            unstable: true
-            notbuilt: true
+                    pattern: '**'
+            skip-vote:
+                successful: true
+                failed: true
+                unstable: true
+                notbuilt: true
 
     builders:
-        - security-audit-python-code
+        - anteater-security-audit
         - report-security-audit-result-to-gerrit
 ########################
 # builder macros
 ########################
 - builder:
-    name: security-audit-python-code
+    name: anteater-security-audit
     builders:
-        - shell: |
-            #!/bin/bash
-            set -o errexit
-            set -o pipefail
-            set -o xtrace
-            export PATH=$PATH:/usr/local/bin/
-
-            # this is where the security/license audit script will be executed
-            echo "Hello World!"
+        - shell:
+            !include-raw: ./anteater-security-audit.sh
+
 - builder:
     name: report-security-audit-result-to-gerrit
     builders:
-        - shell: |
-            #!/bin/bash
-            set -o errexit
-            set -o pipefail
-            set -o xtrace
-            export PATH=$PATH:/usr/local/bin/
-
-            # If no violations were found, no lint log will exist.
-            if [[ -e securityaudit.log ]] ; then
-                echo -e "\nposting security audit report to gerrit...\n"
-
-                cat securityaudit.log
-                echo
-
-                ssh -p 29418 gerrit.opnfv.org \
-                    "gerrit review -p $GERRIT_PROJECT \
-                     -m \"$(cat securityaudit.log)\" \
-                     $GERRIT_PATCHSET_REVISION \
-                     --notify NONE"
-
-                exit 1
-            fi
+        - shell:
+            !include-raw: ./anteater-report-to-gerrit.sh
+
+- builder:
+    name: anteater-security-audit-weekly
+    builders:
+        - shell:
+            !include-raw:
+                - ./anteater-clone-all-repos.sh
+                - ./anteater-security-audit-weekly.sh
index 093debb..673a9f1 100644 (file)
@@ -24,7 +24,16 @@ then
 fi
 
 cd $WORKSPACE/
-./build.sh  --iso-dir $BUILD_DIRECTORY/ --iso-name compass.iso -c $CACHE_DIRECTORY
+
+if [[ "$BRANCH" == 'stable/danube' ]]; then
+    ./build.sh  --iso-dir $BUILD_DIRECTORY/ --iso-name compass.iso -c $CACHE_DIRECTORY
+    OPNFV_ARTIFACT_SHA512SUM=$(sha512sum $BUILD_DIRECTORY/compass.iso | cut -d' ' -f1)
+    OPNFV_ARTIFACT_URL=$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso
+else
+    ./build.sh --tar-dir $BUILD_DIRECTORY/ --tar-name compass.tar.gz -c $CACHE_DIRECTORY
+    OPNFV_ARTIFACT_SHA512SUM=$(sha512sum $BUILD_DIRECTORY/compass.tar.gz | cut -d' ' -f1)
+    OPNFV_ARTIFACT_URL=$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.tar.gz
+fi
 
 # list the build artifacts
 ls -al $BUILD_DIRECTORY
@@ -34,8 +43,8 @@ ls -al $BUILD_DIRECTORY
     echo "OPNFV_ARTIFACT_VERSION=$OPNFV_ARTIFACT_VERSION"
     echo "OPNFV_GIT_URL=$(git config --get remote.origin.url)"
     echo "OPNFV_GIT_SHA1=$(git rev-parse HEAD)"
-    echo "OPNFV_ARTIFACT_URL=$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso"
-    echo "OPNFV_ARTIFACT_SHA512SUM=$(sha512sum $BUILD_DIRECTORY/compass.iso | cut -d' ' -f1)"
+    echo "OPNFV_ARTIFACT_URL=$OPNFV_ARTIFACT_URL"
+    echo "OPNFV_ARTIFACT_SHA512SUM=$OPNFV_ARTIFACT_SHA512SUM"
     echo "OPNFV_BUILD_URL=$BUILD_URL"
 ) > $BUILD_DIRECTORY/opnfv.properties
 echo
index e1e760d..f4f49b6 100644 (file)
         branch: '{stream}'
         gs-pathname: ''
         disabled: false
+        openstack-version: ocata
     danube: &danube
         stream: danube
         branch: 'stable/{stream}'
         gs-pathname: '/{stream}'
         disabled: false
+        openstack-version: newton
 #--------------------------------
 # POD, INSTALLER, AND BRANCH MAPPING
 #--------------------------------
 #--------------------------------
     pod:
         - baremetal:
-            slave-label: compass-baremetal
+            slave-label: compass-baremetal-master
             os-version: 'xenial'
             <<: *master
         - virtual:
-            slave-label: compass-virtual
+            slave-label: compass-virtual-master
             os-version: 'xenial'
             <<: *master
         - baremetal:
-            slave-label: compass-baremetal
+            slave-label: compass-baremetal-branch
             os-version: 'xenial'
             <<: *danube
         - virtual:
-            slave-label: compass-virtual
+            slave-label: compass-virtual-branch
             os-version: 'xenial'
             <<: *danube
 #--------------------------------
     wrappers:
         - build-name:
             name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
+        - fix-workspace-permissions
 
     parameters:
         - project-parameter:
               predefined-parameters: |
                 DEPLOY_SCENARIO={scenario}
                 COMPASS_OS_VERSION={os-version}
+                COMPASS_OPENSTACK_VERSION={openstack-version}
               same-node: true
               block: true
         - trigger-builds:
         - build-name:
             name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
         - timeout:
-            timeout: 120
+            timeout: 240
             abort: true
+        - fix-workspace-permissions
 
     parameters:
         - project-parameter:
     scm:
         - git-scm
 
-    wrappers:
-        - build-name:
-            name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
-
-
     builders:
         - description-setter:
             description: "POD: $NODE_NAME"
             name: GS_URL
             default: '$GS_BASE{gs-pathname}'
             description: "URL to Google Storage."
-        - choice:
-            name: COMPASS_OPENSTACK_VERSION
-            choices:
-                - 'newton'
 
 ########################
 # trigger macros
index 534e17e..2668ccd 100644 (file)
@@ -23,7 +23,11 @@ fi
 echo 1 > /proc/sys/vm/drop_caches
 
 export CONFDIR=$WORKSPACE/deploy/conf
-export ISO_URL=file://$BUILD_DIRECTORY/compass.iso
+if [[ "$BRANCH" = 'stable/danube' ]]; then
+    export ISO_URL=file://$BUILD_DIRECTORY/compass.iso
+else
+    export ISO_URL=file://$BUILD_DIRECTORY/compass.tar.gz
+fi
 
 cd $WORKSPACE
 
index c321655..67d1e4e 100644 (file)
@@ -55,6 +55,7 @@
     wrappers:
         - build-name:
             name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
+        - fix-workspace-permissions
 
     triggers:
         - '{auto-trigger-name}'
         - build-name:
             name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
         - timeout:
-            timeout: 120
+            timeout: 240
             abort: true
+        - fix-workspace-permissions
 
     parameters:
         - project-parameter:
     scm:
         - git-scm
 
-    wrappers:
-        - build-name:
-            name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
-
-
     builders:
         - description-setter:
             description: "POD: $NODE_NAME"
index 5a63c4a..f891564 100644 (file)
@@ -18,12 +18,18 @@ curl -s -o $BUILD_DIRECTORY/latest.properties http://$GS_URL/latest.properties
 # source the file so we get OPNFV vars
 source $BUILD_DIRECTORY/latest.properties
 
-# download the file
-curl -s -o $BUILD_DIRECTORY/compass.iso http://$OPNFV_ARTIFACT_URL > gsutil.iso.log 2>&1
-
-# list the file
-ls -al $BUILD_DIRECTORY/compass.iso
+if [[ "$BRANCH" == 'stable/danube' ]]; then
+    # download the file
+    curl -s -o $BUILD_DIRECTORY/compass.iso http://$OPNFV_ARTIFACT_URL > gsutil.iso.log 2>&1
+    # list the file
+    ls -al $BUILD_DIRECTORY/compass.iso
+else
+    # download the file
+    curl -s -o $BUILD_DIRECTORY/compass.tar.gz http://$OPNFV_ARTIFACT_URL > gsutil.tar.gz.log 2>&1
+    # list the file
+    ls -al $BUILD_DIRECTORY/compass.tar.gz
+fi
 
 echo
 echo "--------------------------------------------------------"
-echo "Done!"
\ No newline at end of file
+echo "Done!"
index 73b7f07..87a9334 100644 (file)
@@ -7,6 +7,11 @@ echo "Uploading the $INSTALLER_TYPE artifact. This could take some time..."
 echo "--------------------------------------------------------"
 echo
 
+if [[ "$BRANCH" == 'stable/danube' ]]; then
+    FILETYPE='iso'
+else
+    FILETYPE='tar.gz'
+fi
 # source the opnfv.properties to get ARTIFACT_VERSION
 source $BUILD_DIRECTORY/opnfv.properties
 
@@ -23,16 +28,16 @@ signiso () {
 time gpg2 -vvv --batch --yes --no-tty \
   --default-key opnfv-helpdesk@rt.linuxfoundation.org  \
   --passphrase besteffort \
-  --detach-sig $BUILD_DIRECTORY/compass.iso
+  --detach-sig $BUILD_DIRECTORY/compass.$FILETYPE
 
-gsutil cp $BUILD_DIRECTORY/compass.iso.sig gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso.sig
+gsutil cp $BUILD_DIRECTORY/compass.$FILETYPE.sig gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.$FILETYPE.sig
 echo "ISO signature Upload Complete!"
 }
 
 signiso
 
 # upload artifact and additional files to google storage
-gsutil cp $BUILD_DIRECTORY/compass.iso gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso > gsutil.iso.log 2>&1
+gsutil cp $BUILD_DIRECTORY/compass.$FILETYPE gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.$FILETYPE > gsutil.$FILETYPE.log 2>&1
 gsutil cp $BUILD_DIRECTORY/opnfv.properties gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties > gsutil.properties.log 2>&1
 gsutil cp $BUILD_DIRECTORY/opnfv.properties gs://$GS_URL/latest.properties > gsutil.latest.log 2>&1
 
@@ -44,19 +49,19 @@ gsutil -m setmeta \
 
 gsutil -m setmeta \
     -h "Cache-Control:private, max-age=0, no-transform" \
-    gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso > /dev/null 2>&1
+    gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.$FILETYPE > /dev/null 2>&1
 
 # disabled errexit due to gsutil setmeta complaints
 #   BadRequestException: 400 Invalid argument
 # check if we uploaded the file successfully to see if things are fine
-gsutil ls gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso > /dev/null 2>&1
+gsutil ls gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.$FILETYPE > /dev/null 2>&1
 if [[ $? -ne 0 ]]; then
     echo "Problem while uploading artifact!"
-    echo "Check log $WORKSPACE/gsutil.iso.log on the machine where this build is done."
+    echo "Check log $WORKSPACE/gsutil.$FILETYPE.log on the machine where this build is done."
     exit 1
 fi
 
 echo
 echo "--------------------------------------------------------"
 echo "Done!"
-echo "Artifact is available as http://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso"
+echo "Artifact is available as http://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.$FILETYPE"
index 56f54d8..e43f976 100644 (file)
             gs-pathname: ''
             ppa-pathname: '/{stream}'
             disabled: false
+            openstack-version: 'ocata'
+            branch-type: 'master'
         - danube:
             branch: 'stable/{stream}'
             gs-pathname: '/{stream}'
             ppa-pathname: '/{stream}'
             disabled: false
+            openstack-version: 'newton'
+            branch-type: 'branch'
 
     distro:
         - 'xenial':
                 - 'compass-os-.*?-virtual-daily-.*?'
             block-level: 'NODE'
 
-    scm:
-        - git-scm-gerrit
-
     wrappers:
         - ssh-agent-wrapper
         - timeout:
-            timeout: 120
+            timeout: 240
             fail: true
+        - fix-workspace-permissions
+
+    scm:
+        - git-scm-gerrit
 
     triggers:
         - gerrit:
         - project-parameter:
             project: '{project}'
             branch: '{branch}'
-        - 'compass-virtual-defaults'
+        - 'compass-virtual-{branch-type}-defaults'
         - '{installer}-defaults'
         - 'compass-verify-defaults':
             installer: '{installer}'
                   current-parameters: true
                   predefined-parameters: |
                     COMPASS_OS_VERSION={os-version}
+                    COMPASS_OPENSTACK_VERSION={openstack-version}
                   node-parameters: true
                   kill-phase-on: FAILURE
                   abort-all-job: true
                 - 'compass-os-.*?-virtual-daily-.*?'
             block-level: 'NODE'
 
-    scm:
-        - git-scm-gerrit
-
     wrappers:
         - ssh-agent-wrapper
         - timeout:
-            timeout: 120
+            timeout: 240
             fail: true
+        - fix-workspace-permissions
+
+    scm:
+        - git-scm-gerrit
 
     triggers:
         - gerrit:
         - project-parameter:
             project: '{project}'
             branch: '{branch}'
-        - 'compass-virtual-defaults'
+        - 'compass-virtual-{branch-type}-defaults'
         - '{installer}-defaults'
         - 'compass-verify-defaults':
             installer: '{installer}'
                 - 'functest-compass-virtual.*'
             block-level: 'NODE'
 
-    scm:
-        - git-scm-gerrit
-
     wrappers:
         - ssh-agent-wrapper
         - timeout:
-            timeout: 120
+            timeout: 240
             fail: true
+        - fix-workspace-permissions
+
+    scm:
+        - git-scm-gerrit
 
     builders:
         - description-setter:
         - string:
             name: PPA_CACHE
             default: "$WORKSPACE/work/repo/"
-        - choice:
-            name: COMPASS_OPENSTACK_VERSION
-            choices:
-                - 'newton'
         - choice:
             name: COMPASS_OS_VERSION
             choices:
index b512e3f..785f3a5 100755 (executable)
@@ -6,7 +6,7 @@ echo "--------------------------------------------------------"
 echo "This is $INSTALLER_TYPE deploy job!"
 echo "--------------------------------------------------------"
 
-DEPLOY_SCENARIO=${DEPLOY_SCENARIO:-"os-nosdn-nofeature-ha"}
+DEPLOY_SCENARIO=${DEPLOY_SCENARIO:-"os-nosdn-nofeature-noha"}
 BRIDGE=${BRIDGE:-pxebr}
 LAB_NAME=${NODE_NAME/-*}
 POD_NAME=${NODE_NAME/*-}
@@ -29,7 +29,7 @@ git clone ssh://jenkins-zte@gerrit.opnfv.org:29418/securedlab --quiet \
 cp -r securedlab/labs .
 
 DEPLOY_COMMAND="sudo ./ci/deploy/deploy.sh -b $BASE_DIR \
-                -l $LAB_NAME -p $POD_NAME -B $BRIDGE"
+                -l $LAB_NAME -p $POD_NAME -B $BRIDGE -s $DEPLOY_SCENARIO"
 
 # log info to console
 echo """
index e631ee9..0a9d43d 100644 (file)
@@ -71,7 +71,7 @@
             project: '{project}'
             branch: '{branch}'
         - 'opnfv-build-centos-defaults'
-        - 'daisy-defaults'
+        - '{installer}-defaults'
         - '{installer}-project-parameter':
             gs-pathname: '{gs-pathname}'
 
         - project-parameter:
             project: '{project}'
             branch: '{branch}'
-        - 'daisy-defaults'
+        - '{installer}-defaults'
+        - '{slave-label}-defaults'
         - string:
             name: GIT_BASE
             default: https://gerrit.opnfv.org/gerrit/$PROJECT
         - string:
             name: DEPLOY_SCENARIO
             default: 'os-nosdn-nofeature-ha'
-        - 'daisy-defaults'
-        - '{slave-label}-defaults'
         - '{installer}-project-parameter':
             gs-pathname: '{gs-pathname}'
 
index 9e7b867..561ffbe 100644 (file)
             enabled: true
             max-total: 4
             option: 'project'
+        - build-blocker:
+            use-build-blocker: true
+            blocking-jobs:
+                - '{alias}-merge-(master|danube)'
+            block-level: 'NODE'
 
     scm:
         - git-scm
         - build-blocker:
             use-build-blocker: true
             blocking-jobs:
-                - '{alias}-merge-(master|danube)'
+                - '{alias}-merge-{phase}-.*'
             block-level: 'NODE'
 
     scm:
         - project-parameter:
             project: '{project}'
             branch: '{branch}'
+        - '{installer}-defaults'
         - '{slave-label}-defaults'
         - '{alias}-merge-defaults':
             gs-pathname: '{gs-pathname}'
index a0ec2eb..dff0ff0 100644 (file)
             enabled: true
             max-total: 4
             option: 'project'
+        - build-blocker:
+            use-build-blocker: true
+            blocking-jobs:
+                - '{installer}-merge-build-.*'
+            block-level: 'NODE'
+
     scm:
-        - git-scm
+        - git-scm-gerrit
     wrappers:
         - ssh-agent-wrapper
         - timeout:
             name: unit
             condition: SUCCESSFUL
             projects:
-                - name: '{alias}-verify-{name}-{stream}'
-                  current-parameters: true
+                - name: '{alias}-verify-unit-{stream}'
+                  current-parameters: false
+                  predefined-parameters: |
+                    BRANCH=$BRANCH
+                    GERRIT_REFSPEC=$GERRIT_REFSPEC
+                    GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+                    GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
                   node-parameters: false
                   kill-phase-on: FAILURE
                   abort-all-job: true
             enabled: true
             max-total: 6
             option: 'project'
+        - build-blocker:
+            use-build-blocker: true
+            blocking-jobs:
+                - '{installer}-merge-build-.*'
+                - '{alias}-verify-build-.*'
+            block-level: 'NODE'
+
     scm:
-        - git-scm
+        - git-scm-gerrit
     wrappers:
         - ssh-agent-wrapper
         - timeout:
         - project-parameter:
             project: '{project}'
             branch: '{branch}'
+        - '{installer}-defaults'
         - '{slave-label}-defaults'
         - '{alias}-verify-defaults':
             gs-pathname: '{gs-pathname}'
+
     builders:
         - description-setter:
             description: "Built on $NODE_NAME"
index 807d436..eb230b5 100644 (file)
             profiler: 'poc'
             auto-trigger-name: 'experimental'
 
+    pod:
+        - arm-pod2:
+            slave-label: '{pod}'
+        - arm-pod3:
+            slave-label: '{pod}'
+
     jobs:
         - 'doctor-verify-{stream}'
+        - 'doctor-{task}-{installer}-{inspector}-{pod}-{stream}'
         - 'doctor-{task}-{installer}-{inspector}-{stream}'
 
 - job-template:
     builders:
         - shell: "[ -e tests/run.sh ] && bash -n ./tests/run.sh"
 
+- job-template:
+    name: 'doctor-{task}-{installer}-{inspector}-{pod}-{stream}'
+
+    node: '{slave-label}'
+
+    disabled: '{obj:disabled}'
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+            branch: '{branch}'
+        - 'opnfv-build-ubuntu-defaults'
+
+    scm:
+        - git-scm-gerrit
+
+
+    triggers:
+        - '{auto-trigger-name}':
+            project: '{project}'
+            branch: '{branch}'
+
+    builders:
+        - shell: "[ -e tests/run.sh ] && bash -n ./tests/run.sh"
+
+
 - job-template:
     name: 'doctor-{task}-{installer}-{inspector}-{stream}'
 
index b23deca..f1a9e72 100755 (executable)
@@ -52,7 +52,7 @@ echo "signature Upload Complete!"
 
 upload () {
 # log info to console
-echo "Uploading to artifact. This could take some time..."
+echo "Uploading ${STORE_FILE_NAME} to artifact. This could take some time..."
 echo
 
 cd $WORKSPACE
index 3d9af5e..0c8efbe 100644 (file)
@@ -19,6 +19,8 @@
         - 'dovetail'
         - 'functest'
         - 'yardstick'
+        - 'testapi'
+        - 'mongo'
 
 #############################################
 # job template
@@ -55,7 +57,8 @@
 
     builders:
         - 'dovetail-builder-artifacts-upload'
-        - 'dovetail-workspace-cleanup'
+        - 'dovetail-upload-artifacts-cache-cleanup'
+        - 'dovetail-images-cleanup'
 
 ####################
 # parameter macros
@@ -94,7 +97,7 @@
             !include-raw: ./dovetail-artifacts-upload.sh
 
 - builder:
-    name: dovetail-workspace-cleanup
+    name: dovetail-upload-artifacts-cache-cleanup
     builders:
         - shell: |
             #!/bin/bash
 
             /bin/rm -rf $CACHE_DIR
 
-            # Remove previous running containers if exist
-            if [[ -n "$(docker ps -a | grep $DOCKER_REPO_NAME)" ]]; then
-                echo "Removing existing $DOCKER_REPO_NAME containers..."
-                docker ps -a | grep $DOCKER_REPO_NAME | awk '{print $1}' | xargs docker rm -f
-                t=60
-                # Wait max 60 sec for containers to be removed
-                while [[ $t -gt 0 ]] && [[ -n "$(docker ps| grep $DOCKER_REPO_NAME)" ]]; do
-                    sleep 1
-                    let t=t-1
-                done
-            fi
-
-            # Remove existing images if exist
-            if [[ -n "$(docker images | grep $DOCKER_REPO_NAME)" ]]; then
-                echo "Docker images to remove:"
-                docker images | head -1 && docker images | grep $DOCKER_REPO_NAME
-                image_tags=($(docker images | grep $DOCKER_REPO_NAME | awk '{print $2}'))
-                for tag in "${image_tags[@]}"; do
-                    if [[ -n "$(docker images|grep $DOCKER_REPO_NAME|grep $tag)" ]]; then
-                        echo "Removing docker image $DOCKER_REPO_NAME:$tag..."
-                        docker rmi -f $DOCKER_REPO_NAME:$tag
-                    fi
-                done
-            fi
+- builder:
+    name: dovetail-images-cleanup
+    builders:
+        - shell:
+            !include-raw: ./dovetail-cleanup.sh
index 22b2ba2..0ee789a 100755 (executable)
@@ -2,8 +2,8 @@
 
 [[ $CI_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
 
-#clean up dependent project docker images, which has no containers and image tag None
-clean_images=(opnfv/functest opnfv/yardstick)
+# clean up dangling docker images of dependent projects (no containers, image tag <none>)
+clean_images=(opnfv/functest opnfv/yardstick opnfv/testapi mongo)
 for clean_image in "${clean_images[@]}"; do
     echo "Removing image $image_id, which has no containers and image tag is None"
     dangling_images=($(docker images -f "dangling=true" | grep ${clean_image} | awk '{print $3}'))
@@ -14,7 +14,7 @@ for clean_image in "${clean_images[@]}"; do
     fi
 done
 
-echo "Remove containers with image dovetail:<None>..."
+echo "Remove containers with image opnfv/dovetail:<None>..."
 dangling_images=($(docker images -f "dangling=true" | grep opnfv/dovetail | awk '{print $3}'))
 if [[ -n ${dangling_images} ]]; then
     for image_id in "${dangling_images[@]}"; do
@@ -24,7 +24,7 @@ if [[ -n ${dangling_images} ]]; then
     done
 fi
 
-echo "Cleaning up dovetail docker containers/images..."
+echo "Cleaning up dovetail docker containers..."
 if [[ ! -z $(docker ps -a | grep opnfv/dovetail) ]]; then
     echo "Removing existing opnfv/dovetail containers..."
     docker ps -a | grep opnfv/dovetail | awk '{print $1}' | xargs docker rm -f >${redirect}
index 3ad67b6..dce7e58 100755 (executable)
@@ -6,9 +6,19 @@
 set -e
 [[ $CI_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
 
+DOVETAIL_HOME=${WORKSPACE}/cvp
+if [ -d ${DOVETAIL_HOME} ]; then
+    sudo rm -rf ${DOVETAIL_HOME}/*
+else
+    sudo mkdir -p ${DOVETAIL_HOME}
+fi
+
+DOVETAIL_CONFIG=${DOVETAIL_HOME}/pre_config
+sudo mkdir -p ${DOVETAIL_CONFIG}
+
 sshkey=""
 # The path of openrc.sh is defined in fetch_os_creds.sh
-OPENRC=$WORKSPACE/opnfv-openrc.sh
+OPENRC=${DOVETAIL_CONFIG}/env_config.sh
 if [[ ${INSTALLER_TYPE} == 'apex' ]]; then
     instack_mac=$(sudo virsh domiflist undercloud | grep default | \
                   grep -Eo "[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+")
@@ -37,14 +47,15 @@ releng_repo=${WORKSPACE}/releng
 git clone https://gerrit.opnfv.org/gerrit/releng ${releng_repo} >/dev/null
 
 if [[ ${INSTALLER_TYPE} != 'joid' ]]; then
-    ${releng_repo}/utils/fetch_os_creds.sh -d ${OPENRC} -i ${INSTALLER_TYPE} -a ${INSTALLER_IP} >${redirect}
+    sudo /bin/bash ${releng_repo}/utils/fetch_os_creds.sh -d ${OPENRC} -i ${INSTALLER_TYPE} -a ${INSTALLER_IP} >${redirect}
 fi
 
 if [[ -f $OPENRC ]]; then
     echo "INFO: openstack credentials path is $OPENRC"
     cat $OPENRC
 else
-    echo "ERROR: file $OPENRC does not exist."
+    echo "ERROR: cannot find file $OPENRC. Please check if it is existing."
+    sudo ls -al ${DOVETAIL_CONFIG}
     exit 1
 fi
 
@@ -65,13 +76,8 @@ else
     echo "HA test cases may not run properly."
 fi
 
-pod_file_dir="/home/opnfv/dovetail/userconfig"
-if [ -d ${pod_file_dir} ]; then
-    sudo rm -rf ${pod_file_dir}/*
-else
-    sudo mkdir -p ${pod_file_dir}
-fi
-cmd="sudo python ${releng_repo}/utils/create_pod_file.py -t ${INSTALLER_TYPE} -i ${INSTALLER_IP} ${options} -f ${pod_file_dir}/pod.yaml"
+cmd="sudo python ${releng_repo}/utils/create_pod_file.py -t ${INSTALLER_TYPE} \
+     -i ${INSTALLER_IP} ${options} -f ${DOVETAIL_CONFIG}/pod.yaml"
 echo ${cmd}
 ${cmd}
 
@@ -79,11 +85,12 @@ deactivate
 
 cd ${WORKSPACE}
 
-if [ -f ${pod_file_dir}/pod.yaml ]; then
-    echo "file ${pod_file_dir}/pod.yaml:"
-    cat ${pod_file_dir}/pod.yaml
+if [ -f ${DOVETAIL_CONFIG}/pod.yaml ]; then
+    echo "file ${DOVETAIL_CONFIG}/pod.yaml:"
+    cat ${DOVETAIL_CONFIG}/pod.yaml
 else
-    echo "Error: There doesn't exist file ${pod_file_dir}/pod.yaml."
+    echo "Error: cannot find file ${DOVETAIL_CONFIG}/pod.yaml. Please check if it is existing."
+    sudo ls -al ${DOVETAIL_CONFIG}
     echo "HA test cases may not run properly."
 fi
 
@@ -91,20 +98,22 @@ ssh_options="-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
 
 if [ "$INSTALLER_TYPE" == "fuel" ]; then
     echo "Fetching id_rsa file from jump_server $INSTALLER_IP..."
-    sshpass -p r00tme sudo scp $ssh_options root@${INSTALLER_IP}:~/.ssh/id_rsa ${pod_file_dir}/id_rsa
+    sshpass -p r00tme sudo scp $ssh_options root@${INSTALLER_IP}:~/.ssh/id_rsa ${DOVETAIL_CONFIG}/id_rsa
 fi
 
+# sdnvpn test case needs to download this image first before running
+sudo wget -nc http://artifacts.opnfv.org/sdnvpn/ubuntu-16.04-server-cloudimg-amd64-disk1.img -P ${DOVETAIL_CONFIG}
+
 opts="--privileged=true -id"
-results_envs="-v /var/run/docker.sock:/var/run/docker.sock \
-              -v /home/opnfv/dovetail/results:/home/opnfv/dovetail/results"
-openrc_volume="-v ${OPENRC}:${OPENRC}"
-userconfig_volume="-v ${pod_file_dir}:${pod_file_dir}"
+
+docker_volume="-v /var/run/docker.sock:/var/run/docker.sock"
+dovetail_home_volume="-v ${DOVETAIL_HOME}:${DOVETAIL_HOME}"
 
 # Pull the image with correct tag
 echo "Dovetail: Pulling image opnfv/dovetail:${DOCKER_TAG}"
 docker pull opnfv/dovetail:$DOCKER_TAG >$redirect
 
-cmd="docker run ${opts} ${results_envs} ${openrc_volume} ${userconfig_volume} \
+cmd="docker run ${opts} -e DOVETAIL_HOME=${DOVETAIL_HOME} ${docker_volume} ${dovetail_home_volume} \
      ${sshkey} opnfv/dovetail:${DOCKER_TAG} /bin/bash"
 echo "Dovetail: running docker run command: ${cmd}"
 ${cmd} >${redirect}
@@ -126,13 +135,13 @@ if [ $(docker ps | grep "opnfv/dovetail:${DOCKER_TAG}" | wc -l) == 0 ]; then
 fi
 
 list_cmd="dovetail list ${TESTSUITE}"
-run_cmd="dovetail run --openrc ${OPENRC} --testsuite ${TESTSUITE} -d"
+run_cmd="dovetail run --testsuite ${TESTSUITE} -d"
 echo "Container exec command: ${list_cmd}"
 docker exec $container_id ${list_cmd}
 echo "Container exec command: ${run_cmd}"
 docker exec $container_id ${run_cmd}
 
-sudo cp -r ${DOVETAIL_REPO_DIR}/results ./
+sudo cp -r ${DOVETAIL_HOME}/results ./
 # To make sure the file owner is the current user, for the copied results files in the above line
 # if not, there will be error when next time to wipe workspace
 # CURRENT_USER=${SUDO_USER:-$USER}
index e1a4c02..2c0d12a 100755 (executable)
@@ -7,6 +7,15 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+
+# disable Fuel ISO build for master branch
+if [[ "$BRANCH" == 'master' ]]; then
+    touch $WORKSPACE/.noupload
+    echo "--------------------------------------------------------"
+    echo "Done!"
+    exit 0
+fi
+
 set -o errexit
 set -o nounset
 set -o pipefail
index 2fa8687..6867708 100644 (file)
@@ -73,8 +73,8 @@
             auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
         - 'os-odl_l2-sfc-ha':
             auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
-        - 'os-odl_l2-bgpvpn-ha':
-        #    auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
+        - 'os-odl_l2-bgpvpn-ha':
+            auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
         - 'os-nosdn-kvm-ha':
             auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
         - 'os-nosdn-ovs-ha':
     jobs:
         - 'fuel-{scenario}-{pod}-daily-{stream}'
         - 'fuel-deploy-{pod}-daily-{stream}'
-        - 'fuel-os-odl_l2-bgpvpn-ha-{pod}-daily-{stream}'
 
 ########################
 # job templates
                 build-step-failure-threshold: 'never'
                 failure-threshold: 'never'
                 unstable-threshold: 'FAILURE'
-
-    publishers:
-        - email:
-            recipients: peter.barabas@ericsson.com fzhadaev@mirantis.com
-
-- job-template:
-    name: 'fuel-os-odl_l2-bgpvpn-ha-{pod}-daily-{stream}'
-
-    disabled: '{obj:disabled}'
-
-    concurrent: false
-
-    properties:
-        - logrotate-default
-        - throttle:
-            enabled: true
-            max-total: 4
-            max-per-node: 1
-            option: 'project'
-        - build-blocker:
-            use-build-blocker: true
-            blocking-jobs:
-                - 'fuel-os-.*?-{pod}-daily-.*'
-                - 'fuel-os-.*?-{pod}-weekly-.*'
-            block-level: 'NODE'
-
-    wrappers:
-        - build-name:
-            name: '$BUILD_NUMBER - Scenario: os-odl_l2-bgpvpn-ha'
-
-    triggers:
-        - 'fuel-os-odl_l2-bgpvpn-ha-{pod}-daily-{stream}-trigger'
-
-    parameters:
-        - project-parameter:
-            project: '{project}'
-            branch: '{branch}'
-        - '{installer}-defaults'
-        - '{slave-label}-defaults':
-            installer: '{installer}'
-        - string:
-            name: DEPLOY_SCENARIO
-            default: "os-odl_l2-bgpvpn-ha"
-        - fuel-ci-parameter:
-            gs-pathname: '{gs-pathname}'
-
-    builders:
-        - description-setter:
-            description: "Built on $NODE_NAME"
-        - trigger-builds:
-            - project: 'fuel-deploy-{pod}-daily-{stream}'
-              current-parameters: false
-              predefined-parameters:
-                DEPLOY_SCENARIO=os-odl_l2-bgpvpn-ha
-              same-node: true
-              block: true
-        - trigger-builds:
-            - project: 'functest-fuel-{pod}-daily-{stream}'
-              current-parameters: false
-              predefined-parameters:
-                DEPLOY_SCENARIO=os-odl_l2-bgpvpn-ha
-              same-node: true
-              block: true
-              block-thresholds:
-                build-step-failure-threshold: 'never'
-                failure-threshold: 'never'
-                unstable-threshold: 'FAILURE'
-        - trigger-builds:
-            - project: 'yardstick-fuel-{pod}-daily-{stream}'
-              current-parameters: false
-              predefined-parameters:
-                DEPLOY_SCENARIO=os-odl_l2-bgpvpn-ha
-              block: true
-              same-node: true
-              block-thresholds:
-                build-step-failure-threshold: 'never'
-                failure-threshold: 'never'
-                unstable-threshold: 'FAILURE'
-        # 1.dovetail only master by now, not sync with A/B/C branches
+        # 1. dovetail only has a master branch, based on the D release
         # 2.here the stream means the SUT stream, dovetail stream is defined in its own job
-        # 3.only debug testsuite here(includes basic testcase,
-        #   i.e. refstack ipv6 vpn test cases from functest, HA test case
-        #   from yardstick)
+        # 3. only debug the testsuite here (refstack, ha, ipv6, bgpvpn)
         # 4.not used for release criteria or compliance,
-        #   only to debug the dovetail tool bugs with fuel bgpvpn scenario
-        - trigger-builds:
-            - project: 'dovetail-fuel-{pod}-proposed_tests-{stream}'
-              current-parameters: false
-              predefined-parameters:
-                DEPLOY_SCENARIO=os-odl_l2-bgpvpn-ha
-              block: true
-              same-node: true
-              block-thresholds:
-                build-step-failure-threshold: 'never'
-                failure-threshold: 'never'
-                unstable-threshold: 'FAILURE'
+        #   only to debug the dovetail tool bugs with bgpvpn
+        # 5. only run against scenario os-odl_l2-bgpvpn-ha (regex used here, can be extended to more scenarios in the future)
+        - conditional-step:
+            condition-kind: regex-match
+            regex: os-odl_l2-bgpvpn-ha
+            label: '{scenario}'
+            steps:
+                - trigger-builds:
+                    - project: 'dovetail-fuel-{pod}-proposed_tests-{stream}'
+                      current-parameters: false
+                      predefined-parameters:
+                        DEPLOY_SCENARIO={scenario}
+                      block: true
+                      same-node: true
+                      block-thresholds:
+                        build-step-failure-threshold: 'never'
+                        failure-threshold: 'never'
+                        unstable-threshold: 'FAILURE'
 
     publishers:
         - email:
-            recipients: peter.barabas@ericsson.com fzhadaev@mirantis.com matthew.lijun@huawei.com
-
+            recipients: peter.barabas@ericsson.com fzhadaev@mirantis.com
 
 - job-template:
     name: 'fuel-deploy-{pod}-daily-{stream}'
             name: GS_URL
             default: artifacts.opnfv.org/$PROJECT{gs-pathname}
             description: "URL to Google Storage."
+        - string:
+            name: SSH_KEY
+            default: "/tmp/mcp.rsa"
+            description: "Path to private SSH key to access environment nodes. For MCP deployments only."
 ########################
 # trigger macros
 ########################
 - trigger:
     name: 'fuel-os-nosdn-nofeature-ha-baremetal-daily-master-trigger'
     triggers:
-        - timed: '5 20 * * *'
+        - timed: '' # '5 20 * * *'
 - trigger:
     name: 'fuel-os-odl_l2-nofeature-ha-baremetal-daily-master-trigger'
     triggers:
-        - timed: '5 23 * * *'
+        - timed: '' # '5 23 * * *'
 - trigger:
     name: 'fuel-os-odl_l3-nofeature-ha-baremetal-daily-master-trigger'
     triggers:
-        - timed: '5 2 * * *'
+        - timed: '' # '5 2 * * *'
 - trigger:
     name: 'fuel-os-nosdn-ovs-ha-baremetal-daily-master-trigger'
     triggers:
-        - timed: '5 5 * * *'
+        - timed: '' # '5 5 * * *'
 - trigger:
     name: 'fuel-os-onos-sfc-ha-baremetal-daily-master-trigger'
     triggers:
 - trigger:
     name: 'fuel-os-odl_l2-sfc-ha-baremetal-daily-master-trigger'
     triggers:
-        - timed: '5 11 * * *'
+        - timed: '' # '5 11 * * *'
 - trigger:
     name: 'fuel-os-odl_l2-bgpvpn-ha-baremetal-daily-master-trigger'
     triggers:
-        - timed: '5 14 * * *'
+        - timed: '' # '5 14 * * *'
 - trigger:
     name: 'fuel-os-nosdn-kvm-ha-baremetal-daily-master-trigger'
     triggers:
-        - timed: '5 17 * * *'
+        - timed: '' # '5 17 * * *'
 - trigger:
     name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-baremetal-daily-master-trigger'
     triggers:
-        - timed: '30 12 * * *'
+        - timed: '' # '30 12 * * *'
 - trigger:
     name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-baremetal-daily-master-trigger'
     triggers:
-        - timed: '30 8 * * *'
+        - timed: '' # '30 8 * * *'
 # NOHA Scenarios
 - trigger:
     name: 'fuel-os-nosdn-nofeature-noha-baremetal-daily-master-trigger'
 - trigger:
     name: 'fuel-os-odl_l2-nofeature-noha-virtual-daily-master-trigger'
     triggers:
-        - timed: '35 15 * * *'
+        - timed: '' # '35 15 * * *'
 - trigger:
     name: 'fuel-os-odl_l3-nofeature-noha-virtual-daily-master-trigger'
     triggers:
-        - timed: '5 18 * * *'
+        - timed: '' # '5 18 * * *'
 - trigger:
     name: 'fuel-os-onos-sfc-noha-virtual-daily-master-trigger'
     triggers:
 - trigger:
     name: 'fuel-os-odl_l2-sfc-noha-virtual-daily-master-trigger'
     triggers:
-        - timed: '35 1 * * *'
+        - timed: '' # '35 1 * * *'
 - trigger:
     name: 'fuel-os-odl_l2-bgpvpn-noha-virtual-daily-master-trigger'
     triggers:
-        - timed: '5 4 * * *'
+        - timed: '' # '5 4 * * *'
 - trigger:
     name: 'fuel-os-nosdn-kvm-noha-virtual-daily-master-trigger'
     triggers:
-        - timed: '35 6 * * *'
+        - timed: '' # '35 6 * * *'
 - trigger:
     name: 'fuel-os-nosdn-ovs-noha-virtual-daily-master-trigger'
     triggers:
-        - timed: '5 9 * * *'
+        - timed: '' # '5 9 * * *'
 - trigger:
     name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-virtual-daily-master-trigger'
     triggers:
-        - timed: '30 16 * * *'
+        - timed: '' # '30 16 * * *'
 - trigger:
     name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-virtual-daily-master-trigger'
     triggers:
-        - timed: '30 20 * * *'
+        - timed: '' # '30 20 * * *'
 #-----------------------------------------------
 # Triggers for job running on fuel-virtual against danube branch
 #-----------------------------------------------
index f5bbd18..2fb5c71 100755 (executable)
@@ -12,16 +12,18 @@ set -o pipefail
 
 export TERM="vt220"
 
-# source the file so we get OPNFV vars
-source latest.properties
+if [[ "$BRANCH" != 'master' ]]; then
+    # source the file so we get OPNFV vars
+    source latest.properties
 
-# echo the info about artifact that is used during the deployment
-echo "Using ${OPNFV_ARTIFACT_URL/*\/} for deployment"
+    # echo the info about artifact that is used during the deployment
+    echo "Using ${OPNFV_ARTIFACT_URL/*\/} for deployment"
+fi
 
 if [[ "$JOB_NAME" =~ "merge" ]]; then
     # set simplest scenario for virtual deploys to run for merges
     DEPLOY_SCENARIO="os-nosdn-nofeature-ha"
-else
+elif [[ "$BRANCH" != 'master' ]]; then
     # for none-merge deployments
     # checkout the commit that was used for building the downloaded artifact
     # to make sure the ISO and deployment mechanism uses same versions
@@ -75,7 +77,7 @@ echo "--------------------------------------------------------"
 echo "Scenario: $DEPLOY_SCENARIO"
 echo "Lab: $LAB_NAME"
 echo "POD: $POD_NAME"
-echo "ISO: ${OPNFV_ARTIFACT_URL/*\/}"
+[[ "$BRANCH" != 'master' ]] && echo "ISO: ${OPNFV_ARTIFACT_URL/*\/}"
 echo
 echo "Starting the deployment using $INSTALLER_TYPE. This could take some time..."
 echo "--------------------------------------------------------"
index 8cc552e..c3b8253 100755 (executable)
@@ -10,6 +10,9 @@
 set -o errexit
 set -o pipefail
 
+# disable Fuel ISO download for master branch
+[[ "$BRANCH" == 'master' ]] && exit 0
+
 # use proxy url to replace the nomral URL, for googleusercontent.com will be blocked randomly
 [[ "$NODE_NAME" =~ (zte) ]] && GS_URL=${GS_BASE_PROXY%%/*}/$GS_URL
 
index b180f59..8de092d 100644 (file)
         - 'functest-cleanup'
         - 'set-functest-env'
         - 'functest-suite'
+        - 'functest-store-results'
+        - 'functest-exit'
 
 - builder:
     name: functest-daily
     name: set-functest-env
     builders:
         - shell:
-            !include-raw: ./set-functest-env.sh
+            !include-raw:
+                - ./functest-env-presetup.sh
+                - ../../utils/fetch_os_creds.sh
+                - ./set-functest-env.sh
 
 - builder:
     name: functest-store-results
diff --git a/jjb/functest/functest-env-presetup.sh b/jjb/functest/functest-env-presetup.sh
new file mode 100755 (executable)
index 0000000..7a9b09d
--- /dev/null
@@ -0,0 +1,31 @@
+#!/usr/bin/env bash
+set -o errexit
+set -o nounset
+set -o pipefail
+
+# Fetch INSTALLER_IP for APEX deployments
+if [[ ${INSTALLER_TYPE} == 'apex' ]]; then
+    echo "Gathering IP information for Apex installer VM"
+    ssh_options="-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
+    if sudo virsh list | grep undercloud; then
+        echo "Installer VM detected"
+        undercloud_mac=$(sudo virsh domiflist undercloud | grep default | \
+                      grep -Eo "[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+")
+        export INSTALLER_IP=$(/usr/sbin/arp -e | grep ${undercloud_mac} | awk {'print $1'})
+        export sshkey_vol="-v /root/.ssh/id_rsa:/root/.ssh/id_rsa"
+        sudo scp $ssh_options root@${INSTALLER_IP}:/home/stack/stackrc ${HOME}/stackrc
+        export stackrc_vol="-v ${HOME}/stackrc:/home/opnfv/functest/conf/stackrc"
+
+        if sudo iptables -C FORWARD -o virbr0 -j REJECT --reject-with icmp-port-unreachable 2> ${redirect}; then
+            sudo iptables -D FORWARD -o virbr0 -j REJECT --reject-with icmp-port-unreachable
+        fi
+        if sudo iptables -C FORWARD -i virbr0 -j REJECT --reject-with icmp-port-unreachable 2> ${redirect}; then
+          sudo iptables -D FORWARD -i virbr0 -j REJECT --reject-with icmp-port-unreachable
+        fi
+        echo "Installer ip is ${INSTALLER_IP}"
+    else
+        echo "No available installer VM exists and no credentials provided...exiting"
+        exit 1
+    fi
+fi
+
index 14ad73a..7036f20 100644 (file)
@@ -88,4 +88,4 @@
     name: functest-unit-tests-and-docs-build
     builders:
         - shell: |
-            $WORKSPACE/run_unit_tests.sh
+            cd $WORKSPACE && tox
index 228cc3d..5d1ed28 100755 (executable)
@@ -15,4 +15,7 @@ for test in ${tests[@]}; do
     let global_ret_val+=$?
 done
 
-exit $global_ret_val
+ret_val_file="${HOME}/opnfv/functest/results/${BRANCH##*/}/return_value"
+echo ${global_ret_val}>${ret_val_file}
+
+exit 0
index f44f7b8..59d24cc 100644 (file)
         - shell:
             !include-raw: ./functest-cleanup.sh
         - shell:
-            !include-raw: ./set-functest-env.sh
+            !include-raw:
+                - ./functest-env-presetup.sh
+                - ../../utils/fetch_os_creds.sh
+                - ./set-functest-env.sh
         - shell:
             !include-raw: ./functest-loop.sh
         - shell:
index 89dabb0..558e248 100755 (executable)
@@ -1,50 +1,16 @@
 #!/bin/bash
 
 set -e
-[[ $CI_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
-# LAB_CONFIG is used only for joid
+set +u
+set +o pipefail
 
+[[ $CI_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
 
+# Prepare OpenStack credentials volume
 if [[ ${INSTALLER_TYPE} == 'joid' ]]; then
-    # If production lab then creds may be retrieved dynamically
-    # creds are on the jumphost, always in the same folder
     rc_file_vol="-v $LAB_CONFIG/admin-openrc:/home/opnfv/functest/conf/openstack.creds"
-    # If dev lab, credentials may not be the default ones, just provide a path to put them into docker
-    # replace the default one by the customized one provided by jenkins config
-fi
-
-if [[ ${RC_FILE_PATH} != '' ]] && [[ -f ${RC_FILE_PATH} ]] ; then
-    echo "Credentials file detected: ${RC_FILE_PATH}"
-    # volume if credentials file path is given to Functest
-    rc_file_vol="-v ${RC_FILE_PATH}:/home/opnfv/functest/conf/openstack.creds"
-    RC_FLAG=1
-fi
-
-
-if [[ ${INSTALLER_TYPE} == 'apex' ]]; then
-    ssh_options="-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
-    if sudo virsh list | grep undercloud; then
-        echo "Installer VM detected"
-        undercloud_mac=$(sudo virsh domiflist undercloud | grep default | \
-                      grep -Eo "[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+")
-        INSTALLER_IP=$(/usr/sbin/arp -e | grep ${undercloud_mac} | awk {'print $1'})
-        sshkey_vol="-v /root/.ssh/id_rsa:/root/.ssh/id_rsa"
-        sudo scp $ssh_options root@${INSTALLER_IP}:/home/stack/stackrc ${HOME}/stackrc
-        stackrc_vol="-v ${HOME}/stackrc:/home/opnfv/functest/conf/stackrc"
-
-        if sudo iptables -C FORWARD -o virbr0 -j REJECT --reject-with icmp-port-unreachable 2> ${redirect}; then
-            sudo iptables -D FORWARD -o virbr0 -j REJECT --reject-with icmp-port-unreachable
-        fi
-        if sudo iptables -C FORWARD -i virbr0 -j REJECT --reject-with icmp-port-unreachable 2> ${redirect}; then
-          sudo iptables -D FORWARD -i virbr0 -j REJECT --reject-with icmp-port-unreachable
-        fi
-    elif [[ "$RC_FLAG" == 1 ]]; then
-        echo "No available installer VM, but credentials provided...continuing"
-    else
-        echo "No available installer VM exists and no credentials provided...exiting"
-        exit 1
-    fi
-
+else
+    rc_file_vol="-v ${HOME}/opnfv-openrc.sh:/home/opnfv/functest/conf/openstack.creds"
 fi
 
 
@@ -58,11 +24,13 @@ DEPLOY_TYPE=baremetal
 
 echo "Functest: Start Docker and prepare environment"
 
-echo "Functest: Download images that will be used by test cases"
-images_dir="${HOME}/opnfv/functest/images"
-chmod +x ${WORKSPACE}/functest/ci/download_images.sh
-${WORKSPACE}/functest/ci/download_images.sh ${images_dir}
-images_vol="-v ${images_dir}:/home/opnfv/functest/images"
+if [ "$BRANCH" != 'stable/danube' ]; then
+  echo "Functest: Download images that will be used by test cases"
+  images_dir="${HOME}/opnfv/functest/images"
+  chmod +x ${WORKSPACE}/functest/ci/download_images.sh
+  ${WORKSPACE}/functest/ci/download_images.sh ${images_dir} 2> ${redirect}
+  images_vol="-v ${images_dir}:/home/opnfv/functest/images"
+fi
 
 dir_result="${HOME}/opnfv/functest/results/${BRANCH##*/}"
 mkdir -p ${dir_result}
@@ -85,7 +53,11 @@ if [[ ${INSTALLER_TYPE} == 'compass' && ${DEPLOY_SCENARIO} == *'os-nosdn-openo-h
     envs=${env}" -e OPENO_MSB_ENDPOINT=${openo_msb_endpoint}"
 fi
 
-volumes="${images_vol} ${results_vol} ${sshkey_vol} ${stackrc_vol} ${rc_file_vol}"
+if [ "$BRANCH" != 'stable/danube' ]; then
+  volumes="${images_vol} ${results_vol} ${sshkey_vol} ${stackrc_vol} ${rc_file_vol}"
+else
+  volumes="${results_vol} ${sshkey_vol} ${stackrc_vol} ${rc_file_vol}"
+fi
 
 HOST_ARCH=$(uname -m)
 FUNCTEST_IMAGE="opnfv/functest"
index fc9f34a..ee154af 100644 (file)
             name: INSTALLER_IP
             default: '10.20.0.2'
             description: 'IP of the installer'
+        - string:
+            name: SALT_MASTER_IP
+            default: '192.168.10.100'
+            description: 'IP of the salt master (for mcp deployments)'
+        - string:
+            name: SSH_KEY
+            default: '/tmp/mcp.rsa'
+            description: 'Path to private SSH key to access environment nodes'
         - string:
             name: INSTALLER_TYPE
             default: fuel
     parameters:
         - string:
             name: INSTALLER_IP
-            default: '10.20.0.2'
+            default: '10.20.7.3'
             description: 'IP of the installer'
         - string:
             name: INSTALLER_TYPE
             default: daisy
             description: 'Installer used for deploying OPNFV on this POD'
+        - string:
+            name: BRIDGE
+            default: 'br7'
+            description: 'pxe bridge for booting of Fuel master'
 
 - parameter:
     name: 'infra-defaults'
index ced335c..e4dfa8d 100644 (file)
         - pre-scm-buildstep:
           - shell: |
                 #!/bin/bash
-                sudo chown -R $USER $WORKSPACE || exit 1
+                sudo chown -R $USER:$USER $WORKSPACE || exit 1
 
 - builder:
     name: build-html-and-pdf-docs-output
             find "$local_path" | grep -e 'index.html$' -e 'pdf$' | \
                 sed -e "s|^$local_path|    http://$gs_path|" >> gerrit_comment.txt
 
+# To take advantage of this macro, have your build write
+# out the file 'gerrit_comment.txt' with information to post
+# back to gerrit and include this macro in the list of builders.
 - builder:
-    name: report-docs-build-result-to-gerrit
+    name: report-build-result-to-gerrit
     builders:
         - shell: |
             #!/bin/bash
     builders:
         - build-html-and-pdf-docs-output
         - upload-under-review-docs-to-opnfv-artifacts
-        - report-docs-build-result-to-gerrit
+        - report-build-result-to-gerrit
 
 - builder:
     name: upload-merged-docs
     builders:
         - build-html-and-pdf-docs-output
         - upload-generated-docs-to-opnfv-artifacts
-        - report-docs-build-result-to-gerrit
+        - report-build-result-to-gerrit
         - remove-old-docs-from-opnfv-artifacts
 
 - builder:
index 44666a7..5744222 100644 (file)
             name: GIT_BASE
             default: https://gerrit.opnfv.org/gerrit/$PROJECT
             description: 'Git URL to use on this Jenkins Slave'
+- parameter:
+    name: 'compass-baremetal-master-defaults'
+    parameters:
+        - label:
+            name: SLAVE_LABEL
+            default: 'compass-baremetal-master'
+        - string:
+            name: GIT_BASE
+            default: https://gerrit.opnfv.org/gerrit/$PROJECT
+            description: 'Git URL to use on this Jenkins Slave'
+- parameter:
+    name: 'compass-baremetal-branch-defaults'
+    parameters:
+        - label:
+            name: SLAVE_LABEL
+            default: 'compass-baremetal-branch'
+        - string:
+            name: GIT_BASE
+            default: https://gerrit.opnfv.org/gerrit/$PROJECT
+            description: 'Git URL to use on this Jenkins Slave'
 - parameter:
     name: 'fuel-baremetal-defaults'
     parameters:
         - label:
             name: SLAVE_LABEL
             default: 'daisy-baremetal'
+        - string:
+            name: INSTALLER_IP
+            default: '10.20.11.2'
+            description: 'IP of the installer'
         - string:
             name: GIT_BASE
             default: https://gerrit.opnfv.org/gerrit/$PROJECT
             name: GIT_BASE
             default: https://gerrit.opnfv.org/gerrit/$PROJECT
             description: 'Git URL to use on this Jenkins Slave'
+- parameter:
+    name: 'compass-virtual-master-defaults'
+    parameters:
+        - label:
+            name: SLAVE_LABEL
+            default: 'compass-virtual-master'
+        - string:
+            name: GIT_BASE
+            default: https://gerrit.opnfv.org/gerrit/$PROJECT
+            description: 'Git URL to use on this Jenkins Slave'
+- parameter:
+    name: 'compass-virtual-branch-defaults'
+    parameters:
+        - label:
+            name: SLAVE_LABEL
+            default: 'compass-virtual-branch'
+        - string:
+            name: GIT_BASE
+            default: https://gerrit.opnfv.org/gerrit/$PROJECT
+            description: 'Git URL to use on this Jenkins Slave'
 - parameter:
     name: 'fuel-virtual-defaults'
     parameters:
         - label:
             name: SLAVE_LABEL
             default: 'daisy-virtual'
+        - string:
+            name: INSTALLER_IP
+            default: '10.20.11.2'
+            description: 'IP of the installer'
+        - string:
+            name: BRIDGE
+            default: 'daisy1'
+            description: 'pxe bridge for booting of Fuel master'
         - string:
             name: GIT_BASE
             default: https://gerrit.opnfv.org/gerrit/$PROJECT
             description: 'Git URL to use on this Jenkins Slave'
         - string:
             name: INSTALLER_IP
-            default: '10.20.7.2'
+            default: '10.20.7.3'
             description: 'IP of the installer'
         - string:
             name: BRIDGE
index 9a4d885..2702c45 100644 (file)
@@ -58,7 +58,7 @@
 - job-template:
     name: 'netready-build-gluon-packages-daily-{stream}'
 
-    disabled: false
+    disabled: true
 
     concurrent: true
 
index 9f3dbe4..af8f8c2 100644 (file)
@@ -1,6 +1,6 @@
 #!/bin/bash
 ##############################################################################
-# Copyright (c) 2016 ZTE and others.
+# Copyright (c) 2017 ZTE and others.
 # All rights reserved. This program and the accompanying materials
 # are made available under the terms of the Apache License, Version 2.0
 # which accompanies this distribution, and is available at
@@ -8,40 +8,20 @@
 ##############################################################################
 set -e
 
-envs="INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP}
--e NODE_NAME=${NODE_NAME} -e CI_DEBUG=${CI_DEBUG}"
-ramfs=/tmp/qtip/ramfs
-cfg_dir=$(dirname $ramfs)
-dir_imgstore="${HOME}/imgstore"
-ramfs_volume="$ramfs:/mnt/ramfs"
-
 echo "--------------------------------------------------------"
 echo "POD: $NODE_NAME"
-echo "INSTALLER: $INSTALLER_TYPE"
 echo "Scenario: $DEPLOY_SCENARIO"
+echo "INSTALLER: $INSTALLER_TYPE"
+echo "INSTALLER_IP: $INSTALLER_IP"
 echo "--------------------------------------------------------"
 
 echo "Qtip: Pulling docker image: opnfv/qtip:${DOCKER_TAG}"
-docker pull opnfv/qtip:$DOCKER_TAG
-
-# use ramfs to fix docker socket connection issue with overlay mode in centos
-if [ ! -d $ramfs ]; then
-    mkdir -p $ramfs
-fi
+docker pull opnfv/qtip:$DOCKER_TAG >/dev/null
 
-if [ ! -z "$(df $ramfs | tail -n -1 | grep $ramfs)" ]; then
-    sudo mount -t tmpfs -o size=32M tmpfs $ramfs
-fi
-
-# enable contro path in docker
-cat <<EOF > ${cfg_dir}/ansible.cfg
-[defaults]
-callback_whitelist = profile_tasks
-[ssh_connection]
-control_path=/mnt/ramfs/ansible-ssh-%%h-%%p-%%r
-EOF
+envs="INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP}
+-e POD_NAME=${NODE_NAME} -e SCENARIO=${DEPLOY_SCENARIO}"
 
-cmd=" docker run -id -e $envs -v ${ramfs_volume} opnfv/qtip:${DOCKER_TAG} /bin/bash"
+cmd=" docker run -id -e $envs opnfv/qtip:${DOCKER_TAG} /bin/bash"
 echo "Qtip: Running docker command: ${cmd}"
 ${cmd}
 
@@ -49,14 +29,12 @@ container_id=$(docker ps | grep "opnfv/qtip:${DOCKER_TAG}" | awk '{print $1}' |
 if [ $(docker ps | grep 'opnfv/qtip' | wc -l) == 0 ]; then
     echo "The container opnfv/qtip with ID=${container_id} has not been properly started. Exiting..."
     exit 1
-else
-    echo "The container ID is: ${container_id}"
-    QTIP_REPO=/home/opnfv/repos/qtip
-    docker cp ${cfg_dir}/ansible.cfg ${container_id}:/home/opnfv/.ansible.cfg
-# TODO(zhihui_wu): use qtip cli to execute benchmark test in the future
-    docker exec -t ${container_id} bash -c "cd ${QTIP_REPO}/qtip/runner/ &&
-    python runner.py -d /home/opnfv/qtip/results/ -b all"
-
 fi
 
+echo "The container ID is: ${container_id}"
+QTIP_REPO=/home/opnfv/repos/qtip
+
+docker exec -t ${container_id} bash -c "bash ${QTIP_REPO}/tests/ci/run_ci.sh"
+
 echo "Qtip done!"
+exit 0
\ No newline at end of file
index 2aa52ad..ebd0c9f 100644 (file)
@@ -73,6 +73,8 @@ fi
 # Get tag version
 echo "Current branch: $BRANCH"
 
+BUILD_BRANCH=$BRANCH
+
 if [[ "$BRANCH" == "master" ]]; then
     DOCKER_TAG="latest"
 elif [[ -n "${RELEASE_VERSION-}" ]]; then
@@ -82,19 +84,17 @@ else
     DOCKER_TAG="stable"
 fi
 
+if [[ -n "${COMMIT_ID-}" && -n "${RELEASE_VERSION-}" ]]; then
+    DOCKER_TAG=$RELEASE_VERSION
+    BUILD_BRANCH=$COMMIT_ID
+fi
+
 # Start the build
 echo "Building docker image: $DOCKER_REPO_NAME:$DOCKER_TAG"
 echo "--------------------------------------------------------"
 echo
-if [[ $DOCKER_REPO_NAME == *"dovetail"* ]]; then
-    if [[ -n "${RELEASE_VERSION-}" ]]; then
-        DOCKER_TAG=${RELEASE_VERSION}
-    fi
-    cmd="docker build --no-cache -t $DOCKER_REPO_NAME:$DOCKER_TAG -f $DOCKERFILE ."
-else
-    cmd="docker build --no-cache -t $DOCKER_REPO_NAME:$DOCKER_TAG --build-arg BRANCH=$BRANCH
-        -f $DOCKERFILE ."
-fi
+cmd="docker build --no-cache -t $DOCKER_REPO_NAME:$DOCKER_TAG --build-arg BRANCH=$BUILD_BRANCH
+    -f $DOCKERFILE ."
 
 echo ${cmd}
 ${cmd}
index 3b7ec34..095ba41 100644 (file)
@@ -25,6 +25,9 @@
 
     project:
         # projects with jobs for master
+        - 'releng-anteater':
+            <<: *master
+            <<: *other-receivers
         - 'bottlenecks':
             <<: *master
             <<: *other-receivers
             name: DOCKER_REPO_NAME
             default: "opnfv/{project}"
             description: "Dockerhub repo to be pushed to."
+        - string:
+            name: COMMIT_ID
+            default: ""
+            description: "commit id to make a snapshot docker image"
         - string:
             name: RELEASE_VERSION
             default: ""
index 166aea8..8c231c3 100644 (file)
@@ -53,7 +53,7 @@
                     comment-contains-value: 'reverify'
             projects:
               - project-compare-type: 'REG_EXP'
-                project-pattern: 'functest|sdnvpn|qtip|daisy|sfc|escalator|releng'
+                project-pattern: 'functest|sdnvpn|qtip|daisy|sfc|escalator|releng|pharos|octopus|securedlab'
                 branches:
                   - branch-compare-type: 'ANT'
                     branch-pattern: '**/{branch}'
index 709a1eb..13186a1 100644 (file)
             branch: '{stream}'
             gs-pathname: ''
             disabled: false
+            docker-tag: 'latest'
         - danube:
             branch: 'stable/{stream}'
             gs-pathname: '/{stream}'
             disabled: false
+            docker-tag: 'stable'
 
 - job-template:
     name: 'storperf-verify-{stream}'
             project: '{project}'
             branch: '{branch}'
         - 'intel-pod9-defaults'
+        - string:
+            name: DEPLOY_SCENARIO
+            default: 'os-nosdn-nofeature-noha'
+        - string:
+            name: DOCKER_TAG
+            default: '{docker-tag}'
+            description: 'Tag to pull docker image'
+        - choice:
+            name: DISK_TYPE
+            choices:
+                - 'SSD'
+                - 'HDD'
+            default: 'HDD'
+            description: 'The type of hard disk that Cinder uses'
+        - string:
+            name: AGENT_COUNT
+            description: 'The number of slave agents to start. Defaults to the cinder node count'
+        - string:
+            name: VOLUME_SIZE
+            default: '4'
+            description: 'Size of Cinder volume (in GB)'
+        - string:
+            name: WORKLOADS
+            default: 'wr,rr,rw'
+            description: 'Workloads to run'
+        - string:
+            name: BLOCK_SIZES
+            default: '2048,16384'
+            description: 'Block sizes for VM I/O operations'
+        - string:
+            name: QUEUE_DEPTHS
+            default: '1,4'
+            description: 'Number of simultaneous I/O operations to keep active'
+        - string:
+            name: STEADY_STATE_SAMPLES
+            default: '10'
+            description: 'Number of samples to use (1 per minute) to measure steady state'
+        - string:
+            name: DEADLINE
+            description: 'Maximum run time in minutes if steady state cannot be found. Defaults to 3 times steady state samples'
+        - choice:
+            name: TEST_CASE
+            choices:
+                - 'snia_steady_state'
+            description: 'The test case to run'
 
     scm:
         - git-scm
 
     triggers:
-        - timed: 'H H * * *'
+        - timed: '0 22 * * *'
 
     builders:
         - shell: |
index c6da9f4..319f8eb 100644 (file)
@@ -20,9 +20,9 @@
 # distros
 #--------------------------------
     distro:
-        - 'trusty':
+        - 'xenial':
             disabled: false
-            dib-os-release: 'trusty'
+            dib-os-release: 'xenial'
             dib-os-element: 'ubuntu-minimal'
             dib-os-packages: 'vlan,vim,less,bridge-utils,language-pack-en,iputils-ping,rsyslog,curl'
             extra-dib-elements: 'openssh-server'
index 2e6f227..b522b89 100755 (executable)
@@ -89,7 +89,7 @@ function cleanup_and_upload() {
 }
 
 # check distro to see if we support it
-if [[ ! "$DISTRO" =~ (trusty|centos7|suse) ]]; then
+if [[ ! "$DISTRO" =~ (xenial|centos7|suse) ]]; then
     echo "Distro $DISTRO is not supported!"
     exit 1
 fi
similarity index 99%
rename from jjb/yardstick/yardstick-ci-jobs.yml
rename to jjb/yardstick/yardstick-daily-jobs.yml
index 5ff36f8..ff1d47e 100644 (file)
 
     publishers:
         - email:
-            recipients: jean.gaoliang@huawei.com limingjiang@huawei.com
+            recipients: jean.gaoliang@huawei.com limingjiang@huawei.com ross.b.brattain@intel.com
 
 ########################
 # builder macros
index 1eaf8d0..2793dd4 100644 (file)
@@ -1,3 +1,3 @@
-paramiko>=2.0.1
-mock==1.3.0
-requests==2.9.1
+paramiko>=2.0 # LGPLv2.1+
+mock>=2.0 # BSD
+requests!=2.12.2,>=2.10.0 # Apache-2.0
index 8ac5cea..0dd635f 100644 (file)
@@ -17,9 +17,9 @@ setup(
     package_data={
     },
     url="https://www.opnfv.org",
-    install_requires=["paramiko>=2.0.1",
-                      "mock==1.3.0",
-                      "nose==1.3.7",
-                      "coverage==4.1",
-                      "requests==2.9.1"]
+    install_requires=["paramiko>=2.0",
+                      "mock>=2.0",
+                      "requests!=2.12.2,>=2.10.0"],
+    test_requires=["nose",
+                   "coverage>=4.0"]
 )
index 99d7f13..c264540 100644 (file)
@@ -1,6 +1,2 @@
-# The order of packages is significant, because pip processes them in the order
-# of appearance. Changing the order has an impact on the overall integration
-# process, which may cause wedges in the gate later.
-
-nose
-coverage
+nose # LGPL
+coverage>=4.0 # Apache-2.0
index 0561962..2b90215 100755 (executable)
@@ -57,8 +57,8 @@ WRITE_INTERFACES_FILE=true
 export DIB_DEV_USER_PWDLESS_SUDO=yes
 export DIB_DEV_USER_PASSWORD=devuser
 
-# Settings for distro: trusty/ubuntu-minimal, 7/centos7, 42.2/suse
-export DIB_OS_RELEASE=${DIB_OS_RELEASE:-trusty}
+# Settings for distro: xenial/ubuntu-minimal, 7/centos7, 42.2/suse
+export DIB_OS_RELEASE=${DIB_OS_RELEASE:-xenial}
 export DIB_OS_ELEMENT=${DIB_OS_ELEMENT:-ubuntu-minimal}
 
 # DIB OS packages
index 6d7af0d..b65abde 100644 (file)
@@ -113,11 +113,17 @@ change into directory where the sandbox script is located
 
 execute sandbox script
 
-    sudo -E ./xci-deploy.sh
+    ./xci-deploy.sh
 
 Issuing above command will start aio sandbox deployment and the sandbox
 should be ready between 1,5 and 2 hours depending on the host machine.
 
+Please remember that the user executing the XCI script will need to
+have an ssh key available, and stored in $HOME/.ssh directory.
+You can generate one by executing
+
+    ssh-keygen -t rsa
+
 Advanced Usage
 --------------
 
@@ -179,6 +185,23 @@ continuously chasing the HEAD of corresponding branches.
 Once a working version is identified, the versions of the upstream components
 are then bumped in releng repo.
 
+==================
+XCI developer tips
+==================
+
+It is possible to run XCI in development mode, in order to test the
+latest changes. When deploying on this mode, the script will use the working
+directories for releng/bifrost/OSA, instead of cloning the whole repositories
+on each run.
+To enable it, you need to export the different DEV_PATH vars:
+
+- export OPNFV_RELENG_DEV_PATH=/opt/releng/
+- export OPENSTACK_BIFROST_DEV_PATH=/opt/bifrost
+- export OPENSTACK_OSA_DEV_PATH=/opt/openstack-ansible
+
+This will cause the deployment to pick the development copies stored at the
+specified directories, and use them instead of cloning those on every run.
+
 ===========================================
 Limitations, Known Issues, and Improvements
 ===========================================
index cefb412..9d4c782 100755 (executable)
@@ -6,7 +6,6 @@ export OPNFV_RELENG_GIT_URL=https://gerrit.opnfv.org/gerrit/releng.git
 export OPENSTACK_BIFROST_GIT_URL=https://git.openstack.org/openstack/bifrost
 export OPENSTACK_OSA_GIT_URL=https://git.openstack.org/openstack/openstack-ansible
 export OPENSTACK_OSA_ETC_PATH=/etc/openstack_deploy
-export CLEAN_DIB_IMAGES=false
 export OPNFV_HOST_IP=192.168.122.2
 export XCI_FLAVOR_ANSIBLE_FILE_PATH=$OPNFV_RELENG_PATH/prototypes/xci/file/$XCI_FLAVOR
 export CI_LOOP=${CI_LOOP:-daily}
index e3b49c7..c426936 100755 (executable)
@@ -22,6 +22,6 @@
 # use releng from master until the development work with the sandbox is complete
 export OPNFV_RELENG_VERSION="master"
 # HEAD of "master" as of 04.04.2017
-export OPENSTACK_BIFROST_VERSION=${OPENSTACK_BIFROST_VERSION:-"6109f824e5510e794dbf1968c3859e8b6356d280"}
+export OPENSTACK_BIFROST_VERSION=${OPENSTACK_BIFROST_VERSION:-"7c9bb5e07c6bc3b42c9a9e8457e5eef511075b38"}
 # HEAD of "master" as of 04.04.2017
 export OPENSTACK_OSA_VERSION=${OPENSTACK_OSA_VERSION:-"d9e1330c7ff9d72a604b6b4f3af765f66a01b30e"}
index f9de940..fd11a58 100755 (executable)
@@ -54,3 +54,8 @@ export OPNFV_OSA_PLAYBOOK=${OPNFV_OSA_PLAYBOOK:-"$OPENSTACK_OSA_PATH/playbooks/s
 export ANSIBLE_VERBOSITY=${ANSIBLE_VERBOSITY-""}
 export LOG_PATH=${LOG_PATH:-${XCI_DEVEL_ROOT}/opnfv/logs}
 export RUN_TEMPEST=${RUN_TEMPEST:-false}
+# Set this to to true to force XCI to re-create the target OS images
+export CLEAN_DIB_IMAGES=${CLEAN_DIB_IMAGES:-false}
+# Set this to a full path pointing to extra config files (containing
+# group_vars/all)
+export XCI_EXTRA_VARS_PATH=${XCI_EXTRA_VARS_PATH:-""}
index 842bcc4..5a96e2a 100644 (file)
@@ -9,6 +9,10 @@
 ##############################################################################
 # these versions are extracted based on the osa commit d9e1330c7ff9d72a604b6b4f3af765f66a01b30e on 04.04.2017
 # https://review.openstack.org/gitweb?p=openstack/openstack-ansible.git;a=commit;h=d9e1330c7ff9d72a604b6b4f3af765f66a01b30e
+- name: ansible-hardening
+  scm: git
+  src: https://git.openstack.org/openstack/ansible-hardening
+  version: 051fe3195f59d1ee8db06fca5d2cce7a25e58861
 - name: apt_package_pinning
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-apt_package_pinning
index daa7f51..67a49b3 100644 (file)
@@ -131,6 +131,6 @@ fi
 
 PIP=$(which pip)
 
-sudo -H -E ${PIP} install "pip>6.0"
+${PIP} install --user "pip>6.0"
 
-pip install ansible==$XCI_ANSIBLE_PIP_VERSION
+${PIP} install --user --upgrade ansible==$XCI_ANSIBLE_PIP_VERSION
index 8be36c7..92b5c55 100644 (file)
         delete: yes
       when:
         - OPNFV_RELENG_DEV_PATH != ""
+    - name: Copy extra vars to releng and bifrost
+      synchronize:
+        src: "{{ XCI_EXTRA_VARS_PATH }}"
+        dest: "{{ item }}"
+      with_items:
+        - "{{ OPNFV_RELENG_PATH }}/prototypes/xci/playbooks"
+        - "{{ OPENSTACK_BIFROST_PATH }}/playbooks/inventory"
+      when:
+        - XCI_EXTRA_VARS_PATH != ""
 
 - hosts: localhost
   connection: local
index 85f532a..aeaface 100644 (file)
@@ -27,3 +27,4 @@ XCI_LOOP: "{{ lookup('env','XCI_LOOP') }}"
 LOG_PATH: "{{ lookup('env','LOG_PATH') }}"
 OPNFV_HOST_IP: "{{ lookup('env','OPNFV_HOST_IP') }}"
 OPNFV_SSH_HOST_KEYS_PATH: "{{ lookup('env', 'OPNFV_SSH_HOST_KEYS_PATH') }}"
+XCI_EXTRA_VARS_PATH: "{{ lookup('env', 'XCI_EXTRA_VARS_PATH') }}"
index 2d9246e..3c93408 100644 (file)
--- a/setup.py
+++ b/setup.py
@@ -3,7 +3,7 @@
 from setuptools import setup
 
 setup(
-    name="opnfv",
+    name="releng",
     version="master",
     url="https://www.opnfv.org",
 )
index 6a382a5..993c0b9 100755 (executable)
@@ -7,11 +7,14 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
-
+set -o errexit
+set -o nounset
+set -o pipefail
 
 usage() {
-    echo "usage: $0 [-v] -d <destination> -i <installer_type> -a <installer_ip>" >&2
+    echo "usage: $0 [-v] -d <destination> -i <installer_type> -a <installer_ip> [-s <ssh_key>]" >&2
     echo "[-v] Virtualized deployment" >&2
+    echo "[-s <ssh_key>] Path to ssh key. For MCP deployments only" >&2
 }
 
 info ()  {
@@ -51,11 +54,12 @@ swap_to_public() {
 : ${DEPLOY_TYPE:=''}
 
 #Get options
-while getopts ":d:i:a:h:v" optchar; do
+while getopts ":d:i:a:h:s:v" optchar; do
     case "${optchar}" in
         d) dest_path=${OPTARG} ;;
         i) installer_type=${OPTARG} ;;
         a) installer_ip=${OPTARG} ;;
+        s) ssh_key=${OPTARG} ;;
         v) DEPLOY_TYPE="virt" ;;
         *) echo "Non-option argument: '-${OPTARG}'" >&2
            usage
@@ -68,6 +72,9 @@ done
 dest_path=${dest_path:-$HOME/opnfv-openrc.sh}
 installer_type=${installer_type:-$INSTALLER_TYPE}
 installer_ip=${installer_ip:-$INSTALLER_IP}
+if [ "${installer_type}" == "fuel" ] && [ "${BRANCH}" == "master" ]; then
+    installer_ip=${SALT_MASTER_IP}
+fi
 
 if [ -z $dest_path ] || [ -z $installer_type ] || [ -z $installer_ip ]; then
     usage
@@ -87,40 +94,45 @@ ssh_options="-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
 
 # Start fetching the files
 if [ "$installer_type" == "fuel" ]; then
-    #ip_fuel="10.20.0.2"
     verify_connectivity $installer_ip
+    if [ "${BRANCH}" == "master" ]; then
+        ssh_key=${ssh_key:-$SSH_KEY}
+        if [ -z $ssh_key ] || [ ! -f $ssh_key ]; then
+            error "Please provide path to existing ssh key for mcp deployment."
+            exit 2
+        fi
+        ssh_options+=" -i ${ssh_key}"
 
-    env=$(sshpass -p r00tme ssh 2>/dev/null $ssh_options root@${installer_ip} \
-        'fuel env'|grep operational|head -1|awk '{print $1}') &> /dev/null
-    if [ -z $env ]; then
-        error "No operational environment detected in Fuel"
-    fi
-    env_id="${FUEL_ENV:-$env}"
-
-    # Check if controller is alive (online='True')
-    controller_ip=$(sshpass -p r00tme ssh 2>/dev/null $ssh_options root@${installer_ip} \
-        "fuel node --env ${env_id} | grep controller | grep 'True\|  1' | awk -F\| '{print \$5}' | head -1" | \
-        sed 's/ //g') &> /dev/null
+        # retrieving controller vip
+        controller_ip=$(ssh 2>/dev/null ${ssh_options} ubuntu@${installer_ip} \
+            "sudo salt --out txt 'ctl01*' pillar.get _param:openstack_control_address | awk '{print \$2}'" | \
+            sed 's/ //g') &> /dev/null
 
-    if [ -z $controller_ip ]; then
-        error "The controller $controller_ip is not up. Please check that the POD is correctly deployed."
-    fi
+        info "Fetching rc file from controller $controller_ip..."
+        ssh ${ssh_options} ubuntu@${controller_ip} "sudo cat /root/keystonercv3" > $dest_path
+    else
+        #ip_fuel="10.20.0.2"
+        env=$(sshpass -p r00tme ssh 2>/dev/null ${ssh_options} root@${installer_ip} \
+            'fuel env'|grep operational|head -1|awk '{print $1}') &> /dev/null
+        if [ -z $env ]; then
+            error "No operational environment detected in Fuel"
+        fi
+        env_id="${FUEL_ENV:-$env}"
 
-    info "Fetching rc file from controller $controller_ip..."
-    sshpass -p r00tme ssh 2>/dev/null $ssh_options root@${installer_ip} \
-        "scp $ssh_options ${controller_ip}:/root/openrc ." &> /dev/null
-    sshpass -p r00tme scp 2>/dev/null $ssh_options root@${installer_ip}:~/openrc $dest_path &> /dev/null
+        # Check if controller is alive (online='True')
+        controller_ip=$(sshpass -p r00tme ssh 2>/dev/null ${ssh_options} root@${installer_ip} \
+            "fuel node --env ${env_id} | grep controller | grep 'True\|  1' | awk -F\| '{print \$5}' | head -1" | \
+            sed 's/ //g') &> /dev/null
 
-    #This file contains the mgmt keystone API, we need the public one for our rc file
-    admin_ip=$(cat $dest_path | grep "OS_AUTH_URL" | sed 's/^.*\=//' | sed "s/^\([\"']\)\(.*\)\1\$/\2/g" | sed s'/\/$//')
-    public_ip=$(sshpass -p r00tme ssh $ssh_options root@${installer_ip} \
-        "ssh ${controller_ip} 'source openrc; openstack endpoint list'" \
-        | grep keystone | grep public | sed 's/ /\n/g' | grep ^http | head -1) &> /dev/null
-        #| grep http | head -1 | cut -d '|' -f 4 | sed 's/v1\/.*/v1\//' | sed 's/ //g') &> /dev/null
-    #NOTE: this is super ugly sed 's/v1\/.*/v1\//'OS_AUTH_URL
-    # but sometimes the output of endpoint-list is like this: http://172.30.9.70:8004/v1/%(tenant_id)s
-    # Fuel virtual need a fix
+        if [ -z $controller_ip ]; then
+            error "The controller $controller_ip is not up. Please check that the POD is correctly deployed."
+        fi
 
+        info "Fetching rc file from controller $controller_ip..."
+        sshpass -p r00tme ssh 2>/dev/null ${ssh_options} root@${installer_ip} \
+            "scp ${ssh_options} ${controller_ip}:/root/openrc ." &> /dev/null
+        sshpass -p r00tme scp 2>/dev/null ${ssh_options} root@${installer_ip}:~/openrc $dest_path &> /dev/null
+    fi
     #convert to v3 URL
     auth_url=$(cat $dest_path|grep AUTH_URL)
     if [[ -z `echo $auth_url |grep v3` ]]; then
@@ -157,8 +169,8 @@ elif [ "$installer_type" == "compass" ]; then
     sshpass -p root scp 2>/dev/null $ssh_options root@${installer_ip}:~/admin-openrc.sh $dest_path &> /dev/null
 
     info "This file contains the mgmt keystone API, we need the public one for our rc file"
-    grep "OS_AUTH_URL.*v2" $dest_path > /dev/null 2>&1
-    if [ $?  -eq 0 ] ; then
+
+    if grep "OS_AUTH_URL.*v2" $dest_path > /dev/null 2>&1 ; then
         public_ip=$(sshpass -p root ssh $ssh_options root@${installer_ip} \
             "ssh ${controller_ip} 'source /opt/admin-openrc.sh; openstack endpoint show identity '" \
             | grep publicurl | awk '{print $4}')
@@ -223,5 +235,3 @@ fi
 
 echo "-------- Credentials: --------"
 cat $dest_path
-
-exit 0
index 9099657..f0c488a 100644 (file)
@@ -22,6 +22,7 @@ dir_result="${HOME}/opnfv/$project/results/${branch}"
 # + intel-pod12 (vsperf)
 node_list=(\
 'lf-pod1' 'lf-pod2' 'intel-pod2' 'intel-pod12' \
+'lf-virtual2' 'lf-virtual3' \
 'intel-pod5' 'intel-pod6' 'intel-pod7' 'intel-pod8' \
 'ericsson-pod1' 'ericsson-pod2' \
 'ericsson-virtual1' 'ericsson-virtual2'  'ericsson-virtual3' \
index d447b56..4d0b33c 100644 (file)
@@ -1,5 +1,5 @@
-Creative Commons Attribution 3.0 Unported\r
-http://creativecommons.org/licenses/by/3.0/\r
+This work is licensed under a Creative Commons Attribution 4.0 International License.\r
+SPDX-License-Identifier: CC-BY-4.0\r
 \r
 License\r
 \r
index 31b8d04..049cf62 100644 (file)
@@ -1,6 +1,6 @@
 Phantom by HTML5 UP\r
 html5up.net | @ajlkn\r
-Free for personal and commercial use under the CCA 3.0 license (html5up.net/license)\r
+SPDX-License-Identifier: CC-BY-4.0\r
 \r
 \r
 This is Phantom, a simple design built around a grid of large, colorful, semi-interactive\r
@@ -30,4 +30,4 @@ Credits:
                html5shiv.js (@afarkas @jdalton @jon_neal @rem)\r
                Misc. Sass functions (@HugoGiraudel)\r
                Respond.js (j.mp/respondjs)\r
-               Skel (skel.io)
\ No newline at end of file
+               Skel (skel.io)\r
index 110ac4c..2b91186 100644 (file)
@@ -20,7 +20,7 @@ class TestCases(BaseHandler):
 
         url = '{}/projects/{}/cases'.format(conf.base_url, project)
         cases = requests.get(url).json().get('testcases', [])
-        data = [t['name'] for t in cases]
+        data = [{t['name']: t['catalog_description']} for t in cases]
         self.write(json_encode(data))
 
 
index 6e6585a..0304298 100755 (executable)
@@ -1,4 +1,15 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2017 Orange and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+# SPDX-license-identifier: Apache-2.0
+
 from urllib2 import Request, urlopen, URLError
+from datetime import datetime
 import json
 import jinja2
 import os
@@ -97,7 +108,13 @@ for version in rp_utils.get_config('general.versions'):
                     crit_rate = True
 
                 # Expect that the suite duration is inferior to 30m
-                if result['details']['duration'] < criteria_duration:
+                stop_date = datetime.strptime(result['stop_date'],
+                                              '%Y-%m-%d %H:%M:%S')
+                start_date = datetime.strptime(result['start_date'],
+                                               '%Y-%m-%d %H:%M:%S')
+
+                delta = stop_date - start_date
+                if (delta.total_seconds() < criteria_duration):
                     crit_time = True
 
                 result['criteria'] = {'tests': crit_tests,
index 14dbbff..f1688cf 100644 (file)
Binary files a/utils/test/reporting/pages/app/images/overview.png and b/utils/test/reporting/pages/app/images/overview.png differ
index 7082aed..def8e72 100644 (file)
 angular.module('opnfvApp')
     .controller('testVisualController', ['$scope', '$state', '$stateParams', 'TableFactory', 'ngDialog', '$http', '$loading',
         function($scope, $state, $stateParams, TableFactory, ngDialog, $http, $loading) {
-            $scope.dovet = "59,222,156,317";
-            $scope.functest = "203,163,334,365";
-            $scope.yardstick = "398,161,513,384";
-            $scope.vsperf = "567,163,673,350";
-            $scope.stor = "686,165,789,341";
-            $scope.qtip = "802,164,905,341";
-            $scope.bootleneck = "917,161,1022,338";
-            $scope.noPopArea1 = "30,11,1243,146";
-            $scope.noPopArea2 = "1041,157,1250,561";
-            $scope.noPopArea3 = "15,392,1027,561";
+            $scope.dovet = "50,168,177,443";
+            $scope.functest = "194,173,356,442";
+            $scope.yardstick = "377,183,521,412";
+            $scope.vsperf = "542,185,640,414";
+            $scope.stor = "658,187,750,410";
+            $scope.qtip = "769,190,852,416";
+            $scope.bootleneck = "870,192,983,419";
+            $scope.noPopArea1 = "26,8,1190,180";
+            $scope.noPopArea2 = "1018,193,1190,590";
+            $scope.noPopArea3 = "37,455,1003,584";
 
             init();
             $scope.showSelectValue = 0;
@@ -51,20 +51,41 @@ angular.module('opnfvApp')
                 $http.get(url, config).then(function(response) {
                     if (response.status == 200) {
                         $scope.tableData = response.data;
+
+                        $scope.tableData = constructObjectArray($scope.tableData);
+                        console.log($scope.tableData);
                         $loading.finish('Key');
 
 
+
                     }
                 })
             }
 
+            //construct key value for tableData
+            function constructObjectArray(array) {
+                var templateArray = [];
+                for (var i = 0; i < array.length; i++) {
+                    var key = Object.keys(array[i])[0];
+                    var value = array[i][key];
+                    var temp = {
+                        'key': key,
+                        'value': value
+                    };
+                    templateArray.push(temp);
+
+                }
+
+                return templateArray;
+            }
+
             function getDetail(casename) {
                 TableFactory.getProjectTestCaseDetail().get({
                     'project': $scope.modalName,
                     'testcase': casename
                 }).$promise.then(function(response) {
                     if (response != null) {
-                        $scope.project_name_modal = response.project_name;
+                        $scope.name_modal = response.name;
                         $scope.description_modal = response.description;
                         openTestDetail();
                     }
@@ -108,4 +129,4 @@ angular.module('opnfvApp')
 
 
         }
-    ]);
\ No newline at end of file
+    ]);
index 7ab869b..8e567ca 100644 (file)
@@ -71,6 +71,7 @@ html {
     border-radius: 5px 5px 5px 5px;
     background-color: #f3f3f4;
     opacity: 0.9;
+    width: 200px;
 }
 
 .ngdialog.ngdialog.ngdialog-theme-default .ngdialog-content {
index 9d146ba..74eb56e 100644 (file)
@@ -4,7 +4,7 @@
 <div class="row">
 
 
-    <div class="row  border-bottom white-bg dashboard-header" style="border-radius: 5px 5px 5px 5px ">
+    <div class="row  border-bottom white-bg dashboard-header" style="border-radius: 5px 5px 5px 5px;width:90%;margin-left:30px; ">
 
         <h3>OPNFV Test ecosystem
             <small> *mouse over display test case list</small>
@@ -70,7 +70,7 @@
     </div>
 
 
-    <div id="popup" class="popup" style="width: 20%;height: 35%" dw-loading="Key">
+    <div id="popup" class="popup" style="width: 40%;height: 35%" dw-loading="Key">
 
         <div ng-show="tableData.length==0">
             <center>
@@ -90,7 +90,8 @@
 
                 <tbody>
                     <tr dir-paginate="data in tableData | itemsPerPage: 8  track by $index ">
-                        <td><a ng-click="getDetail(data)"> {{data}}</a></td>
+                        <td ng-if="data.value!=null"><a ng-click="getDetail(data.key)"> {{data.value}}</a></td>
+                        <td ng-if="data.value==null"><a ng-click="getDetail(data.key)"> null</a></td>
                         <tr>
 
                 </tbody>
index 8918b3f..db6f712 100644 (file)
@@ -2,6 +2,6 @@
 <div class="hr-line-dashed"></div>
 
 
-<strong> name</strong>: {{project_name_modal}}<br>
+<strong> name</strong>: {{name_modal}}<br>
 
-<strong>description</strong>: {{description_modal}}<br>
\ No newline at end of file
+<strong>description</strong>: {{description_modal}}<br>
index 4a2f23a..8c701c3 100644 (file)
         $stateProvider.
             state('home', {
                 url: '/',
-                templateUrl: '/testapi-ui/components/home/home.html'
+                templateUrl: 'testapi-ui/components/home/home.html'
             }).
             state('about', {
                 url: '/about',
-                templateUrl: '/testapi-ui/components/about/about.html'
+                templateUrl: 'testapi-ui/components/about/about.html'
             }).
             state('guidelines', {
                 url: '/guidelines',
-                templateUrl: '/testapi-ui/components/guidelines/guidelines.html',
+                templateUrl: 'testapi-ui/components/guidelines/guidelines.html',
                 controller: 'GuidelinesController as ctrl'
             }).
             state('communityResults', {
                 url: '/community_results',
-                templateUrl: '/testapi-ui/components/results/results.html',
+                templateUrl: 'testapi-ui/components/results/results.html',
                 controller: 'ResultsController as ctrl'
             }).
             state('userResults', {
-                url: '/user_results',
+                url: 'user_results',
                 templateUrl: '/testapi-ui/components/results/results.html',
                 controller: 'ResultsController as ctrl'
             }).
             state('resultsDetail', {
                 url: '/results/:testID',
-                templateUrl: '/testapi-ui/components/results-report' +
+                templateUrl: 'testapi-ui/components/results-report' +
                              '/resultsReport.html',
                 controller: 'ResultsReportController as ctrl'
             }).
             }).
             state('authFailure', {
                 url: '/auth_failure',
-                templateUrl: '/testapi-ui/components/home/home.html',
+                templateUrl: 'testapi-ui/components/home/home.html',
                 controller: 'AuthFailureController as ctrl'
             }).
             state('logout', {
                 url: '/logout',
-                templateUrl: '/testapi-ui/components/logout/logout.html',
+                templateUrl: 'testapi-ui/components/logout/logout.html',
                 controller: 'LogoutController as ctrl'
             }).
             state('userVendors', {
diff --git a/utils/test/testapi/3rd_party/static/testapi-ui/assets/img/OpenStack_Project_Refstack_mascot_90x90.png b/utils/test/testapi/3rd_party/static/testapi-ui/assets/img/OpenStack_Project_Refstack_mascot_90x90.png
deleted file mode 100755 (executable)
index 4695090..0000000
Binary files a/utils/test/testapi/3rd_party/static/testapi-ui/assets/img/OpenStack_Project_Refstack_mascot_90x90.png and /dev/null differ
diff --git a/utils/test/testapi/3rd_party/static/testapi-ui/assets/img/openstack-logo.png b/utils/test/testapi/3rd_party/static/testapi-ui/assets/img/openstack-logo.png
deleted file mode 100644 (file)
index 826bf2e..0000000
Binary files a/utils/test/testapi/3rd_party/static/testapi-ui/assets/img/openstack-logo.png and /dev/null differ
diff --git a/utils/test/testapi/3rd_party/static/testapi-ui/assets/img/refstack-logo.png b/utils/test/testapi/3rd_party/static/testapi-ui/assets/img/refstack-logo.png
deleted file mode 100755 (executable)
index fc45f3e..0000000
Binary files a/utils/test/testapi/3rd_party/static/testapi-ui/assets/img/refstack-logo.png and /dev/null differ
diff --git a/utils/test/testapi/3rd_party/static/testapi-ui/assets/img/testapi-logo.png b/utils/test/testapi/3rd_party/static/testapi-ui/assets/img/testapi-logo.png
new file mode 100644 (file)
index 0000000..ff78eb1
Binary files /dev/null and b/utils/test/testapi/3rd_party/static/testapi-ui/assets/img/testapi-logo.png differ
index 2a43cd1..3056e1d 100644 (file)
 <div cg-busy="{promise:ctrl.resultsRequest,message:'Loading'}"></div>
 
 <div ng-show="ctrl.data" class="results-table">
-    <table ng-show="ctrl.data" class="table table-striped table-hover">
+    <table ng-data="ctrl.data.result" ng-show="ctrl.data" class="table table-striped table-hover">
         <thead>
             <tr>
-                <th ng-if="ctrl.isUserResults"></th>
-                <th>Upload Date</th>
-                <th>Test Run ID</th>
-                <th ng-if="ctrl.isUserResults">Vendor</th>
-                <th ng-if="ctrl.isUserResults">Product (version)</th>
-                <th ng-if="ctrl.isUserResults">Target Program</th>
-                <th ng-if="ctrl.isUserResults">Guideline</th>
-                <th ng-if="ctrl.isUserResults">Verified</th>
-                <th ng-if="ctrl.isUserResults">Shared</th>
+                <th>ID</th>
+                <th>Pod</th>
+                <th>Project</th>
+                <th>Test Case</th>
+                <th>Installer</th>
+                <th>Version</th>
+                <th>Scenario</th>
+                <th>Criteria</th>
+                <th>Start Date</th>
+                <th>Stop Date</th>
             </tr>
         </thead>
 
         <tbody>
             <tr ng-repeat-start="(index, result) in ctrl.data.results">
-                <td ng-if="ctrl.isUserResults">
-                    <a ng-if="!result.expanded"
-                       class="glyphicon glyphicon-plus"
-                       ng-click="result.expanded = true">
-                    </a>
-                    <a ng-if="result.expanded"
-                       class="glyphicon glyphicon-minus"
-                       ng-click="result.expanded = false">
-                    </a>
-                </td>
-                <td>{{result.created_at}}</td>
-                <td><a ui-sref="resultsDetail({testID: result.id})">
-                        {{result.id.slice(0, 8)}}...{{result.id.slice(-8)}}
-                    </a>
-                </td>
-                <td ng-if="ctrl.isUserResults">
-                    {{ctrl.vendors[result.product_version.product_info.organization_id].name || '-'}}
-                </td>
-                <td ng-if="ctrl.isUserResults">{{result.product_version.product_info.name || '-'}}
-                    <span ng-if="result.product_version.version">
-                        ({{result.product_version.version}})
-                    </span>
-                </td>
-                <td ng-if="ctrl.isUserResults">{{ctrl.targetMappings[result.meta.target] || '-'}}</td>
-                <td ng-if="ctrl.isUserResults">{{result.meta.guideline.slice(0, -5) || '-'}}</td>
-                <td ng-if="ctrl.isUserResults">
-                    <span ng-if="result.verification_status" class="glyphicon glyphicon-ok"></span>
-                    <span ng-if="!result.verification_status">-</span>
-
-                </td>
-                <td ng-if="ctrl.isUserResults">
-                    <span ng-show="result.meta.shared" class="glyphicon glyphicon-share"></span>
-                </td>
+                <td>{{ result._id }}</td>
+                <td>{{ result.pod_name }}</td>
+                <td>{{ result.project_name }}</td>
+                <td>{{ result.case_name }}</td>
+                <td>{{ result.installer }}</td>
+                <td>{{ result.version }}</td>
+                <td>{{ result.scenario }}</td>
+                <td>{{ result.criteria }}</td>
+                <td>{{ result.start_date }}</td>
+                <td>{{ result.stop_date }}</td>
             </tr>
-            <tr ng-if="result.expanded" ng-repeat-end>
-                <td></td>
-                <td colspan="3">
-                    <strong>Publicly Shared:</strong>
-                    <span ng-if="result.meta.shared == 'true' && !result.sharedEdit">Yes</span>
-                    <span ng-if="!result.meta.shared && !result.sharedEdit">
-                        <em>No</em>
-                    </span>
-                    <select ng-if="result.sharedEdit"
-                            ng-model="result.meta.shared"
-                            class="form-inline">
-                            <option value="true">Yes</option>
-                            <option value="">No</option>
-                    </select>
-                    <a ng-if="!result.sharedEdit"
-                       ng-click="result.sharedEdit = true"
-                       title="Edit"
-                       class="glyphicon glyphicon-pencil"></a>
-                    <a ng-if="result.sharedEdit"
-                       ng-click="ctrl.associateMeta(index,'shared',result.meta.shared)"
-                       title="Save"
-                       class="glyphicon glyphicon-floppy-disk"></a>
-                    <br />
-
-                    <strong>Associated Guideline:</strong>
-                    <span ng-if="!result.meta.guideline && !result.guidelineEdit">
-                        <em>None</em>
-                    </span>
-                    <span ng-if="result.meta.guideline && !result.guidelineEdit">
-                        {{result.meta.guideline.slice(0, -5)}}
-                    </span>
-                    <select ng-if="result.guidelineEdit"
-                            ng-model="result.meta.guideline"
-                            ng-options="o as o.slice(0, -5) for o in ctrl.versionList"
-                            class="form-inline">
-                        <option value="">None</option>
-                    </select>
-                    <a ng-if="!result.guidelineEdit"
-                       ng-click="ctrl.getVersionList();result.guidelineEdit = true"
-                       title="Edit"
-                       class="glyphicon glyphicon-pencil"></a>
-                    <a ng-if="result.guidelineEdit"
-                       ng-click="ctrl.associateMeta(index, 'guideline', result.meta.guideline)"
-                       title="Save"
-                       class="glyphicon glyphicon-floppy-disk">
-                    </a>
-                    <br />
-
-                    <strong>Associated Target Program:</strong>
-                    <span ng-if="!result.meta.target && !result.targetEdit">
-                        <em>None</em>
-                    </span>
-                    <span ng-if="result.meta.target && !result.targetEdit">
-                        {{ctrl.targetMappings[result.meta.target]}}</span>
-                    <select ng-if="result.targetEdit"
-                            ng-model="result.meta.target"
-                            class="form-inline">
-                        <option value="">None</option>
-                        <option value="platform">OpenStack Powered Platform</option>
-                        <option value="compute">OpenStack Powered Compute</option>
-                        <option value="object">OpenStack Powered Object Storage</option>
-                    </select>
-                    <a ng-if="!result.targetEdit"
-                       ng-click="result.targetEdit = true;"
-                       title="Edit"
-                       class="glyphicon glyphicon-pencil">
-                    </a>
-                    <a ng-if="result.targetEdit"
-                       ng-click="ctrl.associateMeta(index, 'target', result.meta.target)"
-                       title="Save"
-                       class="glyphicon glyphicon-floppy-disk">
-                    </a>
-                    <br />
-
-                    <strong>Associated Product:</strong>
-                    <span ng-if="!result.product_version && !result.productEdit">
-                        <em>None</em>
-                    </span>
-                    <span ng-if="result.product_version && !result.productEdit">
-                        <span ng-if="ctrl.products[result.product_version.product_info.id].product_type == 0">
-                            <a ui-sref="distro({id: result.product_version.product_info.id})">
-                                {{ctrl.products[result.product_version.product_info.id].name}}
-                                <small ng-if="result.product_version.version">
-                                    ({{result.product_version.version}})
-                                </small>
-                            </a>
-                        </span>
-                        <span ng-if="ctrl.products[result.product_version.product_info.id].product_type != 0">
-                            <a ui-sref="cloud({id: result.product_version.product_info.id})">
-                                {{ctrl.products[result.product_version.product_info.id].name}}
-                                <small ng-if="result.product_version.version">
-                                    ({{result.product_version.version}})
-                                </small>
-                            </a>
-                        </span>
-                    </span>
-
-                    <select ng-if="result.productEdit"
-                            ng-options="product as product.name for product in ctrl.products | arrayConverter | orderBy: 'name' track by product.id"
-                            ng-model="result.selectedProduct"
-                            ng-change="ctrl.getProductVersions(result)">
-                        <option value="">-- No Product --</option>
-                    </select>
-
-                    <span ng-if="result.productVersions.length && result.productEdit">
-                        <span class="glyphicon glyphicon-arrow-right" style="padding-right:3px;color:#303030;"></span>
-                        Version:
-                        <select ng-options="version as version.version for version in result.productVersions | orderBy: 'version' track by version.id"
-                                ng-model="result.selectedVersion">
-                        </select>
-
-                    </span>
-                    <a ng-if="!result.productEdit"
-                       ng-click="ctrl.prepVersionEdit(result)"
-                       title="Edit"
-                       class="glyphicon glyphicon-pencil">
-                    </a>
-                    <a ng-if="result.productEdit"
-                       ng-click="ctrl.associateProductVersion(result)"
-                       confirm="Once you associate this test to this product, ownership
-                                will be transferred to the product's vendor admins.
-                                Continue?"
-                       title="Save"
-                       class="glyphicon glyphicon-floppy-disk">
-                    </a>
-                    <br />
-                </td>
+            <tr ng-repeat-end=>
             </tr>
         </tbody>
     </table>
index 2b0338c..9e3540d 100644 (file)
@@ -38,7 +38,6 @@
         ctrl.associateMeta = associateMeta;
         ctrl.getVersionList = getVersionList;
         ctrl.getUserProducts = getUserProducts;
-        ctrl.getVendors = getVendors;
         ctrl.associateProductVersion = associateProductVersion;
         ctrl.getProductVersions = getProductVersions;
         ctrl.prepVersionEdit = prepVersionEdit;
             ctrl.update();
         }
 
-        ctrl.getVendors();
-
         /**
          * This will contact the TestAPI API to get a listing of test run
          * results.
             var start = $filter('date')(ctrl.startDate, 'yyyy-MM-dd');
             if (start) {
                 content_url =
-                    content_url + '&start_date=' + start + ' 00:00:00';
+                    content_url + '&from=' + start + ' 00:00:00';
             }
             var end = $filter('date')(ctrl.endDate, 'yyyy-MM-dd');
             if (end) {
-                content_url = content_url + '&end_date=' + end + ' 23:59:59';
+                content_url = content_url + '&to=' + end + ' 23:59:59';
             }
             if (ctrl.isUserResults) {
                 content_url = content_url + '&signed';
             ctrl.resultsRequest =
                 $http.get(content_url).success(function (data) {
                     ctrl.data = data;
-                    ctrl.totalItems = ctrl.data.pagination.total_pages *
-                        ctrl.itemsPerPage;
+                    ctrl.totalItems = ctrl.data.pagination.total_pages * ctrl.itemsPerPage;
                     ctrl.currentPage = ctrl.data.pagination.current_page;
                 }).error(function (error) {
                     ctrl.data = null;
                 });
         }
 
-        /**
-         * This will contact the TestAPI API to get a listing of
-         * vendors.
-         */
-        function getVendors() {
-            var contentUrl = testapiApiUrl + '/vendors';
-            ctrl.vendorsRequest =
-                $http.get(contentUrl).success(function (data) {
-                    ctrl.vendors = {};
-                    data.vendors.forEach(function(vendor) {
-                        ctrl.vendors[vendor.id] = vendor;
-                    });
-                }).error(function (error) {
-                    ctrl.vendors = null;
-                    ctrl.showError = true;
-                    ctrl.error =
-                        'Error retrieving vendor listing from server: ' +
-                        angular.toJson(error);
-                });
-        }
-
         /**
          * Send a PUT request to the API server to associate a product with
          * a test result.
index 5d48c7b..9fdd85f 100644 (file)
@@ -1 +1 @@
-{"testapiApiUrl": "http://localhost:8000/api/v1"}
+{"testapiApiUrl": "http://testresults.opnfv.org/test/api/v1"}
index 3ce76c9..929f543 100644 (file)
@@ -28,7 +28,7 @@
     function raiseAlert($uibModal) {
         return function(mode, title, text) {
             $uibModal.open({
-                templateUrl: '/shared/alerts/alertModal.html',
+                templateUrl: 'testapi-ui/shared/alerts/alertModal.html',
                 controller: 'RaiseAlertModalController as alert',
                 backdrop: true,
                 keyboard: true,
index 692e488..dad59d2 100644 (file)
@@ -8,8 +8,12 @@ dbname = test_results_collection
 
 [api]
 # Listening port
-url = http://localhost:8000/api/v1
+url = http://testresults.opnfv.org/test/api/v1
 port = 8000
+
+# Number of results for one page (integer value)
+#results_per_page = 20
+
 # With debug_on set to true, error traces will be shown in HTTP responses
 debug = True
 authenticate = False
@@ -18,7 +22,7 @@ authenticate = False
 base_url = http://localhost:8000
 
 [ui]
-url = http://localhost:8000
+url = http://testresults.opnfv.org/test
 
 [osid]
 
@@ -41,7 +45,7 @@ openid_ns = http://specs.openid.net/auth/2.0
 # Return endpoint in Refstack's API. Value indicating the endpoint
 # where the user should be returned to after signing in. Openstack Id
 # Idp only supports HTTPS address types. (string value)
-openid_return_to = /api/v1/auth/signin_return
+openid_return_to = v1/auth/signin_return
 
 # Claimed identifier. This value must be set to
 # "http://specs.openid.net/auth/2.0/identifier_select". or to user
index 46765ff..f73c0ab 100644 (file)
@@ -17,6 +17,7 @@ class Config(object):
     def __init__(self):
         self.file = self.CONFIG if self.CONFIG else self._default_config()
         self._parse()
+        self._parse_per_page()
         self.static_path = os.path.join(
             os.path.dirname(os.path.normpath(__file__)),
             os.pardir,
@@ -37,6 +38,10 @@ class Config(object):
         [setattr(self, '{}_{}'.format(section, k), self._parse_value(v))
          for k, v in config.items(section)]
 
+    def _parse_per_page(self):
+        if not hasattr(self, 'api_results_per_page'):
+            self.api_results_per_page = 20
+
     @staticmethod
     def _parse_value(value):
         try:
index 2fc31ca..42372e8 100644 (file)
@@ -104,17 +104,35 @@ class GenericApiHandler(web.RequestHandler):
         if query is None:
             query = {}
         data = []
+        sort = kwargs.get('sort')
+        page = kwargs.get('page')
+        last = kwargs.get('last')
+        per_page = kwargs.get('per_page')
+
         cursor = self._eval_db(self.table, 'find', query)
-        if 'sort' in kwargs:
-            cursor = cursor.sort(kwargs.get('sort'))
-        if 'last' in kwargs:
-            cursor = cursor.limit(kwargs.get('last'))
+        if sort:
+            cursor = cursor.sort(sort)
+        if last and last != 0:
+            cursor = cursor.limit(last)
+        if page:
+            records_count = yield cursor.count()
+            total_pages, remainder = divmod(records_count, per_page)
+            if remainder > 0:
+                total_pages += 1
+            cursor = cursor.skip((page - 1) * per_page).limit(per_page)
         while (yield cursor.fetch_next):
             data.append(self.format_data(cursor.next_object()))
         if res_op is None:
             res = {self.table: data}
         else:
             res = res_op(data, *args)
+        if page:
+            res.update({
+                'pagination': {
+                    'current_page': page,
+                    'total_pages': total_pages
+                }
+            })
         self.finish_request(res)
 
     @web.asynchronous
index 214706f..208af6d 100644 (file)
@@ -11,12 +11,15 @@ from datetime import timedelta
 
 from bson import objectid
 
+from opnfv_testapi.common import config
 from opnfv_testapi.common import message
 from opnfv_testapi.common import raises
 from opnfv_testapi.resources import handlers
 from opnfv_testapi.resources import result_models
 from opnfv_testapi.tornado_swagger import swagger
 
+CONF = config.Config()
+
 
 class GenericResultHandler(handlers.GenericApiHandler):
     def __init__(self, application, request, **kwargs):
@@ -35,6 +38,8 @@ class GenericResultHandler(handlers.GenericApiHandler):
 
     def set_query(self):
         query = dict()
+        date_range = dict()
+
         for k in self.request.query_arguments.keys():
             v = self.get_query_argument(k)
             if k == 'project' or k == 'pod' or k == 'case':
@@ -47,8 +52,14 @@ class GenericResultHandler(handlers.GenericApiHandler):
                     query['start_date'] = obj
             elif k == 'trust_indicator':
                 query[k + '.current'] = float(v)
-            elif k != 'last':
+            elif k == 'from':
+                date_range.update({'$gte': str(v)})
+            elif k == 'to':
+                date_range.update({'$lt': str(v)})
+            elif k != 'last' and k != 'page':
                 query[k] = v
+            if date_range:
+                query['start_date'] = date_range
         return query
 
 
@@ -64,9 +75,11 @@ class ResultsCLHandler(GenericResultHandler):
                  - case : case name
                  - pod : pod name
                  - version : platform version (Arno-R1, ...)
-                 - installer (fuel, ...)
+                 - installer : fuel/apex/compass/joid/daisy
                  - build_tag : Jenkins build tag name
-                 - period : x (x last days)
+                 - period : x last days, incompatible with from/to
+                 - from : starting time in 2016-01-01 or 2016-01-01 00:01:23
+                 - to : ending time in 2016-01-01 or 2016-01-01 00:01:23
                  - scenario : the test scenario (previously version)
                  - criteria : the global criteria status passed or failed
                  - trust_indicator : evaluate the stability of the test case
@@ -113,22 +126,40 @@ class ResultsCLHandler(GenericResultHandler):
             @type period: L{string}
             @in period: query
             @required period: False
+            @param from: i.e. 2016-01-01 or 2016-01-01 00:01:23
+            @type from: L{string}
+            @in from: query
+            @required from: False
+            @param to: i.e. 2016-01-01 or 2016-01-01 00:01:23
+            @type to: L{string}
+            @in to: query
+            @required to: False
             @param last: last records stored until now
             @type last: L{string}
             @in last: query
             @required last: False
+            @param page: which page to list
+            @type page: L{int}
+            @in page: query
+            @required page: False
             @param trust_indicator: must be float
             @type trust_indicator: L{float}
             @in trust_indicator: query
             @required trust_indicator: False
         """
+        limitations = {'sort': [('start_date', -1)]}
         last = self.get_query_argument('last', 0)
         if last is not None:
             last = self.get_int('last', last)
+            limitations.update({'last': last})
+
+        page = self.get_query_argument('page', 1)
+        if page is not None:
+            page = self.get_int('page', page)
+            limitations.update({'page': page,
+                                'per_page': CONF.api_results_per_page})
 
-        self._list(query=self.set_query(),
-                   sort=[('start_date', -1)],
-                   last=last)
+        self._list(query=self.set_query(), **limitations)
 
     @swagger.operation(nickname="createTestResult")
     def post(self):
index aea85a4..a2312de 100644 (file)
@@ -55,7 +55,7 @@ mappings = [
     (r"/api/v1/scenarios/([^/]+)", scenario_handlers.ScenarioGURHandler),
 
     # static path
-    (r'/(.*\.(css|png|gif|js|html|json|map))',
+    (r'/(.*\.(css|png|gif|js|html|json|map|woff2|woff|ttf))',
      tornado.web.StaticFileHandler,
      {'path': config.Config().static_path}),
 
index ef74a08..b2564a6 100644 (file)
@@ -20,18 +20,18 @@ def thread_execute(method, *args, **kwargs):
 class MemCursor(object):
     def __init__(self, collection):
         self.collection = collection
-        self.count = len(self.collection)
+        self.length = len(self.collection)
         self.sorted = []
 
     def _is_next_exist(self):
-        return self.count != 0
+        return self.length != 0
 
     @property
     def fetch_next(self):
         return thread_execute(self._is_next_exist)
 
     def next_object(self):
-        self.count -= 1
+        self.length -= 1
         return self.collection.pop()
 
     def sort(self, key_or_list):
@@ -48,10 +48,25 @@ class MemCursor(object):
 
     def limit(self, limit):
         if limit != 0 and limit < len(self.collection):
-            self.collection = self.collection[0:limit]
-            self.count = limit
+            self.collection = self.collection[0: limit]
+            self.length = limit
         return self
 
+    def skip(self, skip):
+        if skip < self.length and (skip > 0):
+            self.collection = self.collection[self.length - skip: -1]
+            self.length -= skip
+        elif skip >= self.length:
+            self.collection = []
+            self.length = 0
+        return self
+
+    def _count(self):
+        return self.length
+
+    def count(self):
+        return thread_execute(self._count)
+
 
 class MemDb(object):
 
index 7319084..6a9d94e 100644 (file)
@@ -32,6 +32,9 @@ class SigninHandler(base.BaseHandler):
 
 class SigninReturnHandler(base.BaseHandler):
     def get(self):
+        if self.get_query_argument(const.OPENID_MODE) == 'cancel':
+            self._auth_failure('Authentication canceled.')
+
         openid = self.get_query_argument(const.OPENID_CLAIMED_ID)
         user_info = {
             'openid': openid,
@@ -44,6 +47,12 @@ class SigninReturnHandler(base.BaseHandler):
             self.set_secure_cookie('openid', openid)
         self.redirect(url=CONF.ui_url)
 
+    def _auth_failure(self, message):
+        params = {'message': message}
+        url = parse.urljoin(CONF.ui_url,
+                            '/#/auth_failure?' + parse.urlencode(params))
+        self.redirect(url)
+
 
 class SignoutHandler(base.BaseHandler):
     def get(self):