Merge "Send Build Notifications to Jenkins Admins"
authorTrevor Bramwell <tbramwell@linuxfoundation.org>
Mon, 10 Jul 2017 17:27:21 +0000 (17:27 +0000)
committerGerrit Code Review <gerrit@opnfv.org>
Mon, 10 Jul 2017 17:27:21 +0000 (17:27 +0000)
105 files changed:
UPSTREAM [new file with mode: 0644]
jjb/apex/apex-deploy.sh
jjb/apex/apex-download-artifact.sh
jjb/apex/apex-upload-artifact.sh
jjb/apex/apex.yml
jjb/apex/apex.yml.j2
jjb/armband/armband-ci-jobs.yml
jjb/barometer/barometer-build.sh [new file with mode: 0644]
jjb/barometer/barometer-upload-artifact.sh [new file with mode: 0644]
jjb/barometer/barometer.yml
jjb/ci_gate_security/anteater-clone-all-repos.sh [new file with mode: 0755]
jjb/ci_gate_security/anteater-report-to-gerrit.sh [new file with mode: 0644]
jjb/ci_gate_security/anteater-security-audit-weekly.sh [new file with mode: 0644]
jjb/ci_gate_security/anteater-security-audit.sh [new file with mode: 0644]
jjb/ci_gate_security/opnfv-ci-gate-security.yml
jjb/compass4nfv/compass-build.sh
jjb/compass4nfv/compass-ci-jobs.yml
jjb/compass4nfv/compass-deploy.sh
jjb/compass4nfv/compass-dovetail-jobs.yml
jjb/compass4nfv/compass-download-artifact.sh
jjb/compass4nfv/compass-upload-artifact.sh
jjb/compass4nfv/compass-verify-jobs.yml
jjb/daisy4nfv/daisy-daily-jobs.yml
jjb/daisy4nfv/daisy-project-jobs.yml
jjb/doctor/doctor.yml
jjb/dovetail/dovetail-ci-jobs.yml
jjb/dovetail/dovetail-cleanup.sh
jjb/dovetail/dovetail-run.sh
jjb/fuel/fuel-daily-jobs.yml
jjb/fuel/fuel-deploy.sh
jjb/fuel/fuel-download-artifact.sh
jjb/functest/functest-daily-jobs.yml
jjb/functest/functest-loop.sh
jjb/functest/functest-project-jobs.yml
jjb/functest/functest-suite.sh
jjb/functest/set-functest-env.sh
jjb/global/installer-params.yml
jjb/global/releng-macros.yml
jjb/global/slave-params.yml
jjb/netready/netready.yml
jjb/releng/automate.yml [moved from jjb/releng/testapi-automate.yml with 77% similarity]
jjb/releng/docker-deploy.sh [moved from jjb/releng/testapi-docker-deploy.sh with 63% similarity]
jjb/releng/docker-update.sh [new file with mode: 0644]
jjb/releng/opnfv-docker.sh
jjb/releng/opnfv-docker.yml
jjb/releng/testapi-docker-update.sh [deleted file]
jjb/storperf/storperf.yml
jjb/xci/bifrost-periodic-jobs.yml
jjb/xci/bifrost-verify-jobs.yml
jjb/xci/bifrost-verify.sh
jjb/xci/osa-periodic-jobs.yml
jjb/yardstick/yardstick-daily-jobs.yml [moved from jjb/yardstick/yardstick-ci-jobs.yml with 99% similarity]
jjb/yardstick/yardstick-daily.sh
modules/requirements.txt
modules/setup.py
modules/test-requirements.txt
prototypes/xci/README.rst
prototypes/xci/config/pinned-versions
prototypes/xci/config/user-vars
prototypes/xci/file/ansible-role-requirements.yml
prototypes/xci/playbooks/provision-vm-nodes.yml
prototypes/xci/scripts/update-osa-version-files.sh [new file with mode: 0755]
prototypes/xci/var/opnfv.yml
prototypes/xci/xci-deploy.sh
utils/fetch_os_creds.sh
utils/jenkins-jnlp-connect.sh
utils/push-test-logs.sh
utils/test/reporting/api/api/handlers/testcases.py
utils/test/reporting/docker/reporting.sh
utils/test/reporting/docker/requirements.pip
utils/test/reporting/docker/supervisor.conf
utils/test/reporting/functest/reporting-status.py
utils/test/reporting/functest/reporting-tempest.py
utils/test/reporting/pages/angular.sh
utils/test/reporting/pages/app/images/overview.png
utils/test/reporting/pages/app/index.html
utils/test/reporting/pages/app/scripts/app.config.js [deleted file]
utils/test/reporting/pages/app/scripts/controllers/table.controller.js
utils/test/reporting/pages/app/scripts/controllers/testvisual.controller.js
utils/test/reporting/pages/app/scripts/factory/table.factory.js
utils/test/reporting/pages/app/styles/custome.css
utils/test/reporting/pages/app/views/commons/testCaseVisual.html
utils/test/reporting/pages/app/views/modal/testcasedetail.html
utils/test/reporting/pages/config.sh [new file with mode: 0755]
utils/test/reporting/run_test.sh [new file with mode: 0755]
utils/test/reporting/run_unit_tests.sh [deleted file]
utils/test/reporting/utils/reporting_utils.py
utils/test/testapi/3rd_party/static/testapi-ui/app.js
utils/test/testapi/3rd_party/static/testapi-ui/assets/img/OpenStack_Project_Refstack_mascot_90x90.png [deleted file]
utils/test/testapi/3rd_party/static/testapi-ui/assets/img/openstack-logo.png [deleted file]
utils/test/testapi/3rd_party/static/testapi-ui/assets/img/refstack-logo.png [deleted file]
utils/test/testapi/3rd_party/static/testapi-ui/assets/img/testapi-logo.png [new file with mode: 0644]
utils/test/testapi/3rd_party/static/testapi-ui/components/results/results.html
utils/test/testapi/3rd_party/static/testapi-ui/components/results/resultsController.js
utils/test/testapi/deployment/deploy.py
utils/test/testapi/deployment/docker-compose.yml.template
utils/test/testapi/docker/Dockerfile
utils/test/testapi/docker/prepare-env.sh
utils/test/testapi/etc/config.ini
utils/test/testapi/htmlize/htmlize.py
utils/test/testapi/opnfv_testapi/common/config.py
utils/test/testapi/opnfv_testapi/resources/handlers.py
utils/test/testapi/opnfv_testapi/resources/result_handlers.py
utils/test/testapi/opnfv_testapi/tests/unit/fake_pymongo.py
utils/upload-artifact.sh [new file with mode: 0644]

diff --git a/UPSTREAM b/UPSTREAM
new file mode 100644 (file)
index 0000000..65e44f5
--- /dev/null
+++ b/UPSTREAM
@@ -0,0 +1,448 @@
+# Upstream contributions, bitergia will crawl this and extract the relevant information
+# system is one of Gerrit, Bugzilla, Launchpad (insert more)
+#
+# All the contributions listed in this file are merged commits. Contributions under
+# review, in progress and abandoned are not included.
+# The latest contribution included in this file is 453130 and it is dated 2017-05-23.
+# Contributions from Yolanda Robla Mota have not been included yet.
+# The gerrit query used to list contributions is
+#   status:merged AND (owner: "Markos Chandras" OR owner: "Fatih Degirmenci")
+---
+-
+  url: https://review.openstack.org/#/c/453130/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/466422/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/466421/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/466249/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/465927/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/465686/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/465685/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/464759/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/464180/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/464629/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/463359/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/463313/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/463301/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/463300/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/462488/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/450970/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/463299/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/462863/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/461754/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/462859/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/462443/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/461755/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/461018/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/461017/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/461050/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/458616/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/460617/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/458797/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/453128/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/459984/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/460071/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/459779/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/459775/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/459332/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/459331/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/459330/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/459715/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/459702/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/459684/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/459599/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/459461/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/458801/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/457709/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/458246/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/458420/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/458419/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/450634/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/457695/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/455290/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/455461/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/448800/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/453609/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/453798/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/453577/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/453574/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/449183/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/452079/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/452160/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/450384/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/450210/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/451848/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/451426/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/450239/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/450095/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/448625/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/447444/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/447435/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/441041/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/444197/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/439938/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/444033/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/442224/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/428345/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/440614/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/439946/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/441042/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/439940/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/433517/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/435433/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/437010/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/436948/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/436000/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/436066/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/436085/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/435994/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/434328/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/433532/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/427682/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/428704/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/430864/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/428716/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/427744/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/426844/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/424853/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/410639/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/407970/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/387441/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/398317/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/401991/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/400150/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/404315/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/389171/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/401164/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/400712/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/392002/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/388242/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/392003/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/392986/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/392004/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/389079/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/388158/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/388840/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/388780/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/388847/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/388748/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/381576/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/381575/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/378532/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/386002/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/381574/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/381541/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/376303/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/379835/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/376534/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/375350/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/371602/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/370258/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/370584/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/363458/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/366835/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/363896/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/353165/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/361652/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/358477/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/359922/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/357268/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/357718/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/356964/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/355485/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/355431/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/355668/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/202056/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/202180/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/192397/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/192720/
+  system: Gerrit
+-
+  url: https://review.openstack.org/#/c/163149/
+  system: Gerrit
index 74f67ce..3a2ca60 100755 (executable)
@@ -3,7 +3,7 @@ set -o errexit
 set -o nounset
 set -o pipefail
 
-APEX_PKGS="common undercloud" # removed onos for danube
+APEX_PKGS="common undercloud onos"
 IPV6_FLAG=False
 
 # log info to console
index 206c627..52c3c67 100755 (executable)
@@ -3,7 +3,7 @@ set -o errexit
 set -o nounset
 set -o pipefail
 
-APEX_PKGS="common undercloud" # removed onos for danube
+APEX_PKGS="common undercloud onos"
 
 # log info to console
 echo "Downloading the Apex artifact. This could take some time..."
index 25870bb..f53451d 100755 (executable)
@@ -126,13 +126,13 @@ elif [ "$ARTIFACT_TYPE" == 'rpm' ]; then
     RPM_INSTALL_PATH=$BUILD_DIRECTORY/noarch
     RPM_LIST=$RPM_INSTALL_PATH/$(basename $OPNFV_RPM_URL)
     VERSION_EXTENSION=$(echo $(basename $OPNFV_RPM_URL) | sed 's/opnfv-apex-//')
-    for pkg in common undercloud; do # removed onos for danube
+    for pkg in common undercloud onos; do
       RPM_LIST+=" ${RPM_INSTALL_PATH}/opnfv-apex-${pkg}-${VERSION_EXTENSION}"
     done
     SRPM_INSTALL_PATH=$BUILD_DIRECTORY
     SRPM_LIST=$SRPM_INSTALL_PATH/$(basename $OPNFV_SRPM_URL)
     VERSION_EXTENSION=$(echo $(basename $OPNFV_SRPM_URL) | sed 's/opnfv-apex-//')
-    for pkg in common undercloud; do # removed onos for danube
+    for pkg in common undercloud onos; do
       SRPM_LIST+=" ${SRPM_INSTALL_PATH}/opnfv-apex-${pkg}-${VERSION_EXTENSION}"
     done
 
index 50502d7..a395cf2 100644 (file)
                   kill-phase-on: FAILURE
                   abort-all-job: true
                   git-revision: true
-#        - multijob:
-#            name: functest-smoke
-#            condition: SUCCESSFUL
-#            projects:
-#              - name: 'functest-apex-virtual-suite-{stream}'
-#                current-parameters: false
-#                predefined-parameters: |
-#                  DEPLOY_SCENARIO={verify-scenario}
-#                  FUNCTEST_SUITE_NAME=healthcheck
-#                  GERRIT_BRANCH=$GERRIT_BRANCH
-#                  GERRIT_REFSPEC=$GERRIT_REFSPEC
-#                  GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-#                  GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-#                node-parameters: true
-#                kill-phase-on: FAILURE
-#                abort-all-job: true
-#                git-revision: false
+        - multijob:
+            name: functest-smoke
+            condition: SUCCESSFUL
+            projects:
+              - name: 'functest-apex-virtual-suite-{stream}'
+                current-parameters: false
+                predefined-parameters: |
+                  DEPLOY_SCENARIO={verify-scenario}
+                  FUNCTEST_SUITE_NAME=healthcheck
+                  GERRIT_BRANCH=$GERRIT_BRANCH
+                  GERRIT_REFSPEC=$GERRIT_REFSPEC
+                  GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+                  GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+                node-parameters: true
+                kill-phase-on: NEVER
+                abort-all-job: true
+                git-revision: false
 
 # Verify Scenario Gate
 - job-template:
                     GERRIT_REFSPEC=$GERRIT_REFSPEC
                     GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
                     GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+                    GERRIT_EVENT_COMMENT_TEXT=$GERRIT_EVENT_COMMENT_TEXT
                   node-parameters: true
                   kill-phase-on: FAILURE
                   abort-all-job: true
                   git-revision: true
-#        - multijob:
-#            name: functest-smoke
-#            condition: SUCCESSFUL
-#            projects:
-#              - name: 'functest-apex-virtual-suite-{stream}'
-#                current-parameters: false
-#                predefined-parameters: |
-#                  DEPLOY_SCENARIO={verify-scenario}
-#                  FUNCTEST_SUITE_NAME=healthcheck
-#                  GERRIT_BRANCH=$GERRIT_BRANCH
-#                  GERRIT_REFSPEC=$GERRIT_REFSPEC
-#                  GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-#                  GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-#                node-parameters: true
-#                kill-phase-on: FAILURE
-#                abort-all-job: true
-#                git-revision: false
+        - multijob:
+            name: functest-smoke
+            condition: SUCCESSFUL
+            projects:
+              - name: 'functest-apex-virtual-suite-{stream}'
+                current-parameters: false
+                predefined-parameters: |
+                  DEPLOY_SCENARIO={verify-scenario}
+                  FUNCTEST_SUITE_NAME=healthcheck
+                  GERRIT_BRANCH=$GERRIT_BRANCH
+                  GERRIT_REFSPEC=$GERRIT_REFSPEC
+                  GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+                  GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+                node-parameters: true
+                kill-phase-on: NEVER
+                abort-all-job: true
+                git-revision: false
 
 - job-template:
     name: 'apex-runner-cperf-{stream}'
index 84a899f..752cf28 100644 (file)
                   kill-phase-on: FAILURE
                   abort-all-job: true
                   git-revision: true
-#        - multijob:
-#            name: functest-smoke
-#            condition: SUCCESSFUL
-#            projects:
-#              - name: 'functest-apex-virtual-suite-{stream}'
-#                current-parameters: false
-#                predefined-parameters: |
-#                  DEPLOY_SCENARIO={verify-scenario}
-#                  FUNCTEST_SUITE_NAME=healthcheck
-#                  GERRIT_BRANCH=$GERRIT_BRANCH
-#                  GERRIT_REFSPEC=$GERRIT_REFSPEC
-#                  GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-#                  GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-#                node-parameters: true
-#                kill-phase-on: FAILURE
-#                abort-all-job: true
-#                git-revision: false
+        - multijob:
+            name: functest-smoke
+            condition: SUCCESSFUL
+            projects:
+              - name: 'functest-apex-virtual-suite-{stream}'
+                current-parameters: false
+                predefined-parameters: |
+                  DEPLOY_SCENARIO={verify-scenario}
+                  FUNCTEST_SUITE_NAME=healthcheck
+                  GERRIT_BRANCH=$GERRIT_BRANCH
+                  GERRIT_REFSPEC=$GERRIT_REFSPEC
+                  GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+                  GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+                node-parameters: true
+                kill-phase-on: NEVER
+                abort-all-job: true
+                git-revision: false
 
 # Verify Scenario Gate
 - job-template:
                     GERRIT_REFSPEC=$GERRIT_REFSPEC
                     GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
                     GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+                    GERRIT_EVENT_COMMENT_TEXT=$GERRIT_EVENT_COMMENT_TEXT
                   node-parameters: true
                   kill-phase-on: FAILURE
                   abort-all-job: true
                   git-revision: true
-#        - multijob:
-#            name: functest-smoke
-#            condition: SUCCESSFUL
-#            projects:
-#              - name: 'functest-apex-virtual-suite-{stream}'
-#                current-parameters: false
-#                predefined-parameters: |
-#                  DEPLOY_SCENARIO={verify-scenario}
-#                  FUNCTEST_SUITE_NAME=healthcheck
-#                  GERRIT_BRANCH=$GERRIT_BRANCH
-#                  GERRIT_REFSPEC=$GERRIT_REFSPEC
-#                  GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-#                  GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-#                node-parameters: true
-#                kill-phase-on: FAILURE
-#                abort-all-job: true
-#                git-revision: false
+        - multijob:
+            name: functest-smoke
+            condition: SUCCESSFUL
+            projects:
+              - name: 'functest-apex-virtual-suite-{stream}'
+                current-parameters: false
+                predefined-parameters: |
+                  DEPLOY_SCENARIO={verify-scenario}
+                  FUNCTEST_SUITE_NAME=healthcheck
+                  GERRIT_BRANCH=$GERRIT_BRANCH
+                  GERRIT_REFSPEC=$GERRIT_REFSPEC
+                  GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+                  GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+                node-parameters: true
+                kill-phase-on: NEVER
+                abort-all-job: true
+                git-revision: false
 
 - job-template:
     name: 'apex-runner-cperf-{stream}'
index 1531ff1..55d8ff9 100644 (file)
 - trigger:
     name: 'fuel-os-odl_l2-nofeature-ha-armband-baremetal-master-trigger'
     triggers:
-        - timed: '0 0 * * 1'
+        - timed: ''
 - trigger:
     name: 'fuel-os-nosdn-nofeature-ha-armband-baremetal-master-trigger'
     triggers:
-        - timed: '0 0 * * 2'
+        - timed: ''
 - trigger:
     name: 'fuel-os-odl_l3-nofeature-ha-armband-baremetal-master-trigger'
     triggers:
-        - timed: '0 0 * * 3'
+        - timed: ''
 - trigger:
     name: 'fuel-os-odl_l2-bgpvpn-ha-armband-baremetal-master-trigger'
     triggers:
-        - timed: '0 0 * * 4'
+        - timed: ''
 - trigger:
     name: 'fuel-os-odl_l2-nofeature-noha-armband-baremetal-master-trigger'
     triggers:
-        - timed: '0 0 * * 5'
+        - timed: ''
 - trigger:
     name: 'fuel-os-odl_l2-sfc-ha-armband-baremetal-master-trigger'
     triggers:
-        - timed: '0 0,20 * * 6'
+        - timed: ''
 - trigger:
     name: 'fuel-os-odl_l2-sfc-noha-armband-baremetal-master-trigger'
     triggers:
-        - timed: '0 0,20 * * 7'
+        - timed: ''
 
 #----------------------------------------------------------------------
 # Enea Armband CI Baremetal Triggers running against danube branch
 - trigger:
     name: 'fuel-os-odl_l2-nofeature-ha-armband-baremetal-danube-trigger'
     triggers:
-        - timed: '0 4 * * 1,2,3,4,5'
+        - timed: '0 0,16 * * 2,4'
 - trigger:
     name: 'fuel-os-nosdn-nofeature-ha-armband-baremetal-danube-trigger'
     triggers:
-        - timed: '0 8 * * 1,2,3,4,5'
+        - timed: '0 0 * * 1,5,7'
 - trigger:
     name: 'fuel-os-odl_l2-bgpvpn-ha-armband-baremetal-danube-trigger'
     triggers:
-        - timed: '0 12 * * 1,2,3,4,5'
+        - timed: '0 16 * * 1,5,7'
 - trigger:
     name: 'fuel-os-odl_l3-nofeature-ha-armband-baremetal-danube-trigger'
     triggers:
-        - timed: '0 16 * * 1,2,3,4,5'
+        - timed: '0 8 * * 2,4,6'
 - trigger:
     name: 'fuel-os-odl_l2-nofeature-noha-armband-baremetal-danube-trigger'
     triggers:
-        - timed: '0 20 * * 1,2,3,4,5'
+        - timed: '0 8 * * 1,3,5,7'
 - trigger:
     name: 'fuel-os-odl_l2-sfc-ha-armband-baremetal-danube-trigger'
     triggers:
-        - timed: '0 4,8 * * 6,7'
+        - timed: '0 0 * * 3,6'
 - trigger:
     name: 'fuel-os-odl_l2-sfc-noha-armband-baremetal-danube-trigger'
     triggers:
-        - timed: '0 12,16 * * 6,7'
+        - timed: '0 16 * * 3,6'
 #---------------------------------------------------------------
 # Enea Armband CI Virtual Triggers running against master branch
 #---------------------------------------------------------------
diff --git a/jjb/barometer/barometer-build.sh b/jjb/barometer/barometer-build.sh
new file mode 100644 (file)
index 0000000..e40841b
--- /dev/null
@@ -0,0 +1,21 @@
+set -x
+
+OPNFV_ARTIFACT_VERSION=$(date -u +"%Y-%m-%d_%H-%M-%S")
+OPNFV_ARTIFACT_URL="$GS_URL/$OPNFV_ARTIFACT_VERSION/"
+
+# log info to console
+echo "Starting the build of Barometer RPMs"
+echo "------------------------------------"
+echo
+
+cd ci
+./install_dependencies.sh
+./build_rpm.sh
+cd $WORKSPACE
+
+# save information regarding artifact into file
+(
+    echo "OPNFV_ARTIFACT_VERSION=$OPNFV_ARTIFACT_VERSION"
+    echo "OPNFV_ARTIFACT_URL=$OPNFV_ARTIFACT_URL"
+) > $WORKSPACE/opnfv.properties
+
diff --git a/jjb/barometer/barometer-upload-artifact.sh b/jjb/barometer/barometer-upload-artifact.sh
new file mode 100644 (file)
index 0000000..817cc57
--- /dev/null
@@ -0,0 +1,46 @@
+#!/bin/bash
+set -o nounset
+set -o pipefail
+
+RPM_WORKDIR=$WORKSPACE/rpmbuild
+RPM_DIR=$RPM_WORKDIR/RPMS/x86_64/
+cd $WORKSPACE/
+
+# source the opnfv.properties to get ARTIFACT_VERSION
+source $WORKSPACE/opnfv.properties
+
+# upload property files
+gsutil cp $WORKSPACE/opnfv.properties gs://$OPNFV_ARTIFACT_URL/opnfv.properties > gsutil.properties.log 2>&1
+gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/latest.properties > gsutil.latest.log 2>&1
+
+echo "Uploading the barometer RPMs to artifacts.opnfv.org"
+echo "---------------------------------------------------"
+echo
+
+gsutil -m cp -r $RPM_DIR/* gs://$OPNFV_ARTIFACT_URL > $WORKSPACE/gsutil.log 2>&1
+
+# Check if the RPMs were pushed
+gsutil ls gs://$OPNFV_ARTIFACT_URL > /dev/null 2>&1
+if [[ $? -ne 0 ]]; then
+  echo "Problem while uploading barometer RPMs to gs://$OPNFV_ARTIFACT_URL!"
+  echo "Check log $WORKSPACE/gsutil.log on the appropriate build server"
+  exit 1
+fi
+
+gsutil -m setmeta \
+    -h "Cache-Control:private, max-age=0, no-transform" \
+    gs://$OPNFV_ARTIFACT_URL/*.rpm > /dev/null 2>&1
+
+gsutil -m setmeta \
+    -h "Content-Type:text/html" \
+    -h "Cache-Control:private, max-age=0, no-transform" \
+    gs://$GS_URL/latest.properties \
+    gs://$OPNFV_ARTIFACT_URL/opnfv.properties > /dev/null 2>&1
+
+echo
+echo "--------------------------------------------------------"
+echo "Done!"
+echo "Artifact is available at $OPNFV_ARTIFACT_URL"
+
+#cleanup the RPM repo from the build machine.
+rm -rf $RPM_WORKDIR
index 68b8a04..2d3e972 100644 (file)
         - shell: |
             pwd
             cd src
-            ./install_build_deps.sh
             make clobber
             make
 
         - project-parameter:
             project: '{project}'
             branch: '{branch}'
+        - barometer-project-parameter:
+            gs-pathname: '{gs-pathname}'
         - 'opnfv-build-centos-defaults'
 
     scm:
          - timed: '@midnight'
 
     builders:
-        - shell: |
-            pwd
-            cd ci
-            ./install_dependencies.sh
-            ./build_rpm.sh
+        - shell:
+            !include-raw-escape: ./barometer-build.sh
+        - shell:
+            !include-raw-escape: ./barometer-upload-artifact.sh
+
+########################
+# parameter macros
+########################
+- parameter:
+    name: barometer-project-parameter
+    parameters:
+        - string:
+            name: GS_URL
+            default: '$GS_BASE{gs-pathname}'
+            description: "URL to Google Storage."
diff --git a/jjb/ci_gate_security/anteater-clone-all-repos.sh b/jjb/ci_gate_security/anteater-clone-all-repos.sh
new file mode 100755 (executable)
index 0000000..8a9e73d
--- /dev/null
@@ -0,0 +1,33 @@
+#!/bin/bash
+# SPDX-license-identifier: Apache-2.0
+set -o errexit
+set -o pipefail
+set -o nounset
+export PATH=$PATH:/usr/local/bin/
+
+
+#WORKSPACE="$(pwd)"
+
+cd $WORKSPACE
+if [ ! -d "$WORKSPACE/allrepos" ]; then
+  mkdir $WORKSPACE/allrepos
+fi
+
+cd $WORKSPACE/allrepos
+
+declare -a PROJECT_LIST
+EXCLUDE_PROJECTS="All-Projects|All-Users|securedlab"
+
+PROJECT_LIST=($(ssh gerrit.opnfv.org -p 29418 gerrit ls-projects | egrep -v $EXCLUDE_PROJECTS))
+echo "PROJECT_LIST=(${PROJECT_LIST[*]})" > $WORKSPACE/opnfv-projects.sh
+
+for PROJECT in ${PROJECT_LIST[@]}; do
+  echo "> Cloning $PROJECT"
+  if [ ! -d "$PROJECT" ]; then
+    git clone "https://gerrit.opnfv.org/gerrit/$PROJECT.git"
+  else
+    pushd "$PROJECT" > /dev/null
+    git pull -f
+    popd > /dev/null
+  fi
+done
diff --git a/jjb/ci_gate_security/anteater-report-to-gerrit.sh b/jjb/ci_gate_security/anteater-report-to-gerrit.sh
new file mode 100644 (file)
index 0000000..fc3018f
--- /dev/null
@@ -0,0 +1,25 @@
+#!/bin/bash
+# SPDX-license-identifier: Apache-2.0
+set -o pipefail
+export PATH=$PATH:/usr/local/bin/
+EXITSTATUS=0
+
+# This Log should always exist
+if [[ -e securityaudit.log ]] ; then
+    echo -e "\nposting security audit report to gerrit...\n"
+
+    #check if log has errors
+    if grep ERROR securityaudit.log; then
+        EXITSTATUS=1
+    fi
+    
+    cat securityaudit.log  | awk -F"ERROR - " '{print $2}' > shortlog
+    
+    ssh -p 29418 gerrit.opnfv.org \
+        "gerrit review -p $GERRIT_PROJECT \
+        -m \"$(cat shortlog)\" \
+        $GERRIT_PATCHSET_REVISION \
+        --notify NONE"
+    
+    exit $EXITSTATUS
+fi
diff --git a/jjb/ci_gate_security/anteater-security-audit-weekly.sh b/jjb/ci_gate_security/anteater-security-audit-weekly.sh
new file mode 100644 (file)
index 0000000..436a173
--- /dev/null
@@ -0,0 +1,37 @@
+#!/bin/bash
+# SPDX-license-identifier: Apache-2.0
+
+echo "--------------------------------------------------------"
+vols="-v $WORKSPACE/allrepos/:/home/opnfv/anteater/allrepos/"
+echo "Pulling releng-anteater docker image"
+echo "--------------------------------------------------------"
+docker pull opnfv/releng-anteater
+echo "--------------------------------------------------------"
+cmd="docker run -id $vols opnfv/releng-anteater /bin/bash"
+echo "Running docker command $cmd"
+container_id=$($cmd)
+echo "Container ID is $container_id"
+source $WORKSPACE/opnfv-projects.sh
+for project in "${PROJECT_LIST[@]}"
+
+do
+  cmd="anteater --project testproj --path /home/opnfv/anteater/allrepos/$project"
+  echo "Executing command inside container"
+  echo "$cmd"
+  echo "--------------------------------------------------------"
+  docker exec $container_id $cmd > $WORKSPACE/"$project".securityaudit.log 2>&1
+done
+
+exit_code=$?
+echo "--------------------------------------------------------"
+echo "Stopping docker container with ID $container_id"
+docker stop $container_id
+
+
+#gsutil cp $WORKSPACE/securityaudit.log \
+#    gs://$GS_URL/$PROJECT-securityaudit-weekly.log 2>&1
+#
+#gsutil -m setmeta \
+#    -h "Content-Type:text/html" \
+#    -h "Cache-Control:private, max-age=0, no-transform" \
+#    gs://$GS_URL/$PROJECT-securityaudit-weekly.log > /dev/null 2>&1
diff --git a/jjb/ci_gate_security/anteater-security-audit.sh b/jjb/ci_gate_security/anteater-security-audit.sh
new file mode 100644 (file)
index 0000000..9bd3cc3
--- /dev/null
@@ -0,0 +1,28 @@
+#!/bin/bash
+cd $WORKSPACE
+echo "Generating patchset file to list changed files"
+git diff HEAD^1 --name-only | sed "s#^#/home/opnfv/anteater/$PROJECT/#" > $WORKSPACE/patchset
+echo "Changed files are"
+echo "--------------------------------------------------------"
+cat $WORKSPACE/patchset
+echo "--------------------------------------------------------"
+
+vols="-v $WORKSPACE:/home/opnfv/anteater/$PROJECT"
+envs="-e PROJECT=$PROJECT"
+
+echo "Pulling releng-anteater docker image"
+echo "--------------------------------------------------------"
+docker pull opnfv/releng-anteater
+echo "--------------------------------------------------------"
+
+cmd="docker run -i $envs $vols --rm opnfv/releng-anteater \
+/home/opnfv/venv/bin/anteater --project $PROJECT --patchset /home/opnfv/anteater/$PROJECT/patchset"
+echo "Running docker container"
+echo "$cmd"
+$cmd > $WORKSPACE/securityaudit.log 2>&1
+exit_code=$?
+echo "--------------------------------------------------------"
+echo "Docker container exited with code: $exit_code"
+echo "--------------------------------------------------------"
+cat securityaudit.log
+exit 0
index 732df89..e2f6ceb 100644 (file)
@@ -1,5 +1,6 @@
+# SPDX-license-identifier: Apache-2.0
 ########################
-# Job configuration for opnfv-lint
+# Job configuration for opnfv-anteater (security audit)
 ########################
 - project:
 
@@ -9,6 +10,7 @@
 
     jobs:
         - 'opnfv-security-audit-verify-{stream}'
+        - 'opnfv-security-audit-weekly-{stream}'
 
     stream:
         - master:
 ########################
 # job templates
 ########################
+- job-template:
+    name: 'opnfv-security-audit-weekly-{stream}'
+
+    disabled: '{obj:disabled}'
+
+    parameters:
+        - label:
+            name: SLAVE_LABEL
+            default: 'ericsson-build3'
+            description: 'Slave label on Jenkins'
+        - project-parameter:
+            project: releng
+            branch: '{branch}'
+
+    triggers:
+        - timed: '@weekly'
+
+    builders:
+        - anteater-security-audit-weekly
+
 - job-template:
     name: 'opnfv-security-audit-verify-{stream}'
 
     disabled: '{obj:disabled}'
 
     parameters:
+        - label:
+            name: SLAVE_LABEL
+            default: 'ericsson-build3'
+            description: 'Slave label on Jenkins'
         - project-parameter:
             project: $GERRIT_PROJECT
             branch: '{branch}'
+        - string:
+            name: GIT_BASE
+            default: https://gerrit.opnfv.org/gerrit/$PROJECT
+            description: "Used for overriding the GIT URL coming from Global Jenkins configuration in case the build is done on non-LF hardware."
 
     scm:
         - git-scm-gerrit
                     comment-contains-value: 'reverify'
             projects:
               - project-compare-type: 'REG_EXP'
-                project-pattern: 'sandbox'
+                project-pattern: 'apex|armband|bamboo|barometer|bottlenecks|calipso|compass4nfv|conductor|cooper|functest|octopus|pharos|releng|sandbox'
                 branches:
                   - branch-compare-type: 'ANT'
                     branch-pattern: '**/{branch}'
                 file-paths:
                   - compare-type: ANT
-                    pattern: '**/*.py'
-          skip-vote:
-            successful: true
-            failed: true
-            unstable: true
-            notbuilt: true
+                    pattern: '**'
+            skip-vote:
+                successful: true
+                failed: true
+                unstable: true
+                notbuilt: true
 
     builders:
-        - security-audit-python-code
+        - anteater-security-audit
         - report-security-audit-result-to-gerrit
 ########################
 # builder macros
 ########################
 - builder:
-    name: security-audit-python-code
+    name: anteater-security-audit
     builders:
-        - shell: |
-            #!/bin/bash
-            set -o errexit
-            set -o pipefail
-            set -o xtrace
-            export PATH=$PATH:/usr/local/bin/
-
-            # this is where the security/license audit script will be executed
-            echo "Hello World!"
+        - shell:
+            !include-raw: ./anteater-security-audit.sh
+
 - builder:
     name: report-security-audit-result-to-gerrit
     builders:
-        - shell: |
-            #!/bin/bash
-            set -o errexit
-            set -o pipefail
-            set -o xtrace
-            export PATH=$PATH:/usr/local/bin/
-
-            # If no violations were found, no lint log will exist.
-            if [[ -e securityaudit.log ]] ; then
-                echo -e "\nposting security audit report to gerrit...\n"
-
-                cat securityaudit.log
-                echo
-
-                ssh -p 29418 gerrit.opnfv.org \
-                    "gerrit review -p $GERRIT_PROJECT \
-                     -m \"$(cat securityaudit.log)\" \
-                     $GERRIT_PATCHSET_REVISION \
-                     --notify NONE"
-
-                exit 1
-            fi
+        - shell:
+            !include-raw: ./anteater-report-to-gerrit.sh
+
+- builder:
+    name: anteater-security-audit-weekly
+    builders:
+        - shell:
+            !include-raw:
+                - ./anteater-clone-all-repos.sh
+                - ./anteater-security-audit-weekly.sh
+
index 093debb..673a9f1 100644 (file)
@@ -24,7 +24,16 @@ then
 fi
 
 cd $WORKSPACE/
-./build.sh  --iso-dir $BUILD_DIRECTORY/ --iso-name compass.iso -c $CACHE_DIRECTORY
+
+if [[ "$BRANCH" == 'stable/danube' ]]; then
+    ./build.sh  --iso-dir $BUILD_DIRECTORY/ --iso-name compass.iso -c $CACHE_DIRECTORY
+    OPNFV_ARTIFACT_SHA512SUM=$(sha512sum $BUILD_DIRECTORY/compass.iso | cut -d' ' -f1)
+    OPNFV_ARTIFACT_URL=$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso
+else
+    ./build.sh --tar-dir $BUILD_DIRECTORY/ --tar-name compass.tar.gz -c $CACHE_DIRECTORY
+    OPNFV_ARTIFACT_SHA512SUM=$(sha512sum $BUILD_DIRECTORY/compass.tar.gz | cut -d' ' -f1)
+    OPNFV_ARTIFACT_URL=$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.tar.gz
+fi
 
 # list the build artifacts
 ls -al $BUILD_DIRECTORY
@@ -34,8 +43,8 @@ ls -al $BUILD_DIRECTORY
     echo "OPNFV_ARTIFACT_VERSION=$OPNFV_ARTIFACT_VERSION"
     echo "OPNFV_GIT_URL=$(git config --get remote.origin.url)"
     echo "OPNFV_GIT_SHA1=$(git rev-parse HEAD)"
-    echo "OPNFV_ARTIFACT_URL=$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso"
-    echo "OPNFV_ARTIFACT_SHA512SUM=$(sha512sum $BUILD_DIRECTORY/compass.iso | cut -d' ' -f1)"
+    echo "OPNFV_ARTIFACT_URL=$OPNFV_ARTIFACT_URL"
+    echo "OPNFV_ARTIFACT_SHA512SUM=$OPNFV_ARTIFACT_SHA512SUM"
     echo "OPNFV_BUILD_URL=$BUILD_URL"
 ) > $BUILD_DIRECTORY/opnfv.properties
 echo
index e1e760d..ddd90b6 100644 (file)
         branch: '{stream}'
         gs-pathname: ''
         disabled: false
+        openstack-version: ocata
     danube: &danube
         stream: danube
         branch: 'stable/{stream}'
         gs-pathname: '/{stream}'
         disabled: false
+        openstack-version: newton
 #--------------------------------
 # POD, INSTALLER, AND BRANCH MAPPING
 #--------------------------------
 #--------------------------------
     pod:
         - baremetal:
-            slave-label: compass-baremetal
+            slave-label: compass-baremetal-master
             os-version: 'xenial'
             <<: *master
         - virtual:
-            slave-label: compass-virtual
+            slave-label: compass-virtual-master
             os-version: 'xenial'
             <<: *master
         - baremetal:
-            slave-label: compass-baremetal
+            slave-label: compass-baremetal-branch
             os-version: 'xenial'
             <<: *danube
         - virtual:
-            slave-label: compass-virtual
+            slave-label: compass-virtual-branch
             os-version: 'xenial'
             <<: *danube
 #--------------------------------
     wrappers:
         - build-name:
             name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
+        - fix-workspace-permissions
 
     parameters:
         - project-parameter:
               predefined-parameters: |
                 DEPLOY_SCENARIO={scenario}
                 COMPASS_OS_VERSION={os-version}
+                COMPASS_OPENSTACK_VERSION={openstack-version}
               same-node: true
               block: true
         - trigger-builds:
                 unstable-threshold: 'FAILURE'
         # dovetail only master by now, not sync with A/B/C branches
         # here the stream means the SUT stream, dovetail stream is defined in its own job
-        # only run on os-(nosdn|odl_l2)-(nofeature|bgpvpn)-ha scenario
+        # only run on os-(nosdn|odl_l2|onos|odl_l3)-nofeature-ha scenario
+        # run against SUT master branch, dovetail docker image with latest tag
+        # run against SUT danube branch, dovetail docker image with latest tag (odd days) and cvp.X.X.X tag (even days)
         - conditional-step:
-            condition-kind: regex-match
-            regex: os-(nosdn|odl_l2)-(nofeature|bgpvpn)-ha
-            label: '{scenario}'
+            condition-kind: and
+            condition-operands:
+                - condition-kind: regex-match
+                  regex: danube
+                  label: '{stream}'
+                - condition-kind: regex-match
+                  regex: os-(nosdn|odl_l2|onos|odl_l3)-nofeature-ha
+                  label: '{scenario}'
+                - condition-kind: day-of-week
+                  day-selector: select-days
+                  days:
+                      MON: true
+                      WED: true
+                      FRI: true
+                      SUN: true
+                  use-build-time: true
+            steps:
+                - trigger-builds:
+                    - project: 'dovetail-compass-{pod}-proposed_tests-{stream}'
+                      current-parameters: false
+                      predefined-parameters: |
+                        DOCKER_TAG=latest
+                        DEPLOY_SCENARIO={scenario}
+                      block: true
+                      same-node: true
+                      block-thresholds:
+                        build-step-failure-threshold: 'never'
+                        failure-threshold: 'never'
+                        unstable-threshold: 'FAILURE'
+        - conditional-step:
+            condition-kind: and
+            condition-operands:
+                - condition-kind: regex-match
+                  regex: danube
+                  label: '{stream}'
+                - condition-kind: regex-match
+                  regex: os-(nosdn|odl_l2|onos|odl_l3)-nofeature-ha
+                  label: '{scenario}'
+                - condition-kind: day-of-week
+                  day-selector: select-days
+                  days:
+                      TUES: true
+                      THURS: true
+                      SAT: true
+                  use-build-time: true
             steps:
                 - trigger-builds:
                     - project: 'dovetail-compass-{pod}-proposed_tests-{stream}'
                         build-step-failure-threshold: 'never'
                         failure-threshold: 'never'
                         unstable-threshold: 'FAILURE'
+        - conditional-step:
+            condition-kind: and
+            condition-operands:
+                - condition-kind: regex-match
+                  regex: os-(nosdn|odl_l2|onos|odl_l3)-nofeature-ha
+                  label: '{scenario}'
+                - condition-kind: regex-match
+                  regex: master
+                  label: '{stream}'
+            steps:
+                - trigger-builds:
+                    - project: 'dovetail-compass-{pod}-proposed_tests-{stream}'
 
 - job-template:
     name: 'compass-deploy-{pod}-daily-{stream}'
         - build-name:
             name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
         - timeout:
-            timeout: 120
+            timeout: 240
             abort: true
+        - fix-workspace-permissions
 
     parameters:
         - project-parameter:
     scm:
         - git-scm
 
-    wrappers:
-        - build-name:
-            name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
-
-
     builders:
         - description-setter:
             description: "POD: $NODE_NAME"
             name: GS_URL
             default: '$GS_BASE{gs-pathname}'
             description: "URL to Google Storage."
-        - choice:
-            name: COMPASS_OPENSTACK_VERSION
-            choices:
-                - 'newton'
 
 ########################
 # trigger macros
 - trigger:
     name: 'compass-os-onos-nofeature-ha-baremetal-centos-master-trigger'
     triggers:
-        - timed: '0 7 * * *'
+        - timed: ''
 - trigger:
     name: 'compass-os-ocl-nofeature-ha-baremetal-centos-master-trigger'
     triggers:
-        - timed: '0 11 * * *'
+        - timed: ''
 - trigger:
     name: 'compass-os-onos-sfc-ha-baremetal-centos-master-trigger'
     triggers:
-        - timed: '0 3 * * *'
+        - timed: ''
 - trigger:
     name: 'compass-os-odl_l2-moon-ha-baremetal-centos-master-trigger'
     triggers:
 - trigger:
     name: 'compass-os-nosdn-openo-ha-baremetal-master-trigger'
     triggers:
-        - timed: '0 3 * * *'
+        - timed: ''
 - trigger:
     name: 'compass-os-odl_l2-nofeature-ha-baremetal-master-trigger'
     triggers:
 - trigger:
     name: 'compass-os-onos-nofeature-ha-baremetal-master-trigger'
     triggers:
-        - timed: '0 14 * * *'
+        - timed: ''
 - trigger:
     name: 'compass-os-ocl-nofeature-ha-baremetal-master-trigger'
     triggers:
-        - timed: '0 10 * * *'
+        - timed: ''
 - trigger:
     name: 'compass-os-onos-sfc-ha-baremetal-master-trigger'
     triggers:
-        - timed: '0 6 * * *'
+        - timed: ''
 - trigger:
     name: 'compass-os-odl_l2-moon-ha-baremetal-master-trigger'
     triggers:
 - trigger:
     name: 'compass-os-ocl-nofeature-ha-baremetal-danube-trigger'
     triggers:
-        - timed: '0 5 * * *'
+        - timed: ''
 - trigger:
     name: 'compass-os-onos-sfc-ha-baremetal-danube-trigger'
     triggers:
-        - timed: ''
+        - timed: '0 5 * * *'
 - trigger:
     name: 'compass-os-odl_l2-moon-ha-baremetal-danube-trigger'
     triggers:
 - trigger:
     name: 'compass-os-nosdn-openo-ha-virtual-master-trigger'
     triggers:
-        - timed: '0 22 * * *'
+        - timed: ''
 - trigger:
     name: 'compass-os-odl_l2-nofeature-ha-virtual-master-trigger'
     triggers:
 - trigger:
     name: 'compass-os-onos-nofeature-ha-virtual-master-trigger'
     triggers:
-        - timed: '0 18 * * *'
+        - timed: ''
 - trigger:
     name: 'compass-os-ocl-nofeature-ha-virtual-master-trigger'
     triggers:
-        - timed: '0 16 * * *'
+        - timed: ''
 - trigger:
     name: 'compass-os-onos-sfc-ha-virtual-master-trigger'
     triggers:
-        - timed: '0 15 * * *'
+        - timed: ''
 - trigger:
     name: 'compass-os-odl_l2-moon-ha-virtual-master-trigger'
     triggers:
-        - timed: '0 14 * * *'
+        - timed: ''
 - trigger:
     name: 'compass-os-nosdn-kvm-ha-virtual-master-trigger'
     triggers:
index 534e17e..2668ccd 100644 (file)
@@ -23,7 +23,11 @@ fi
 echo 1 > /proc/sys/vm/drop_caches
 
 export CONFDIR=$WORKSPACE/deploy/conf
-export ISO_URL=file://$BUILD_DIRECTORY/compass.iso
+if [[ "$BRANCH" = 'stable/danube' ]]; then
+    export ISO_URL=file://$BUILD_DIRECTORY/compass.iso
+else
+    export ISO_URL=file://$BUILD_DIRECTORY/compass.tar.gz
+fi
 
 cd $WORKSPACE
 
index c321655..67d1e4e 100644 (file)
@@ -55,6 +55,7 @@
     wrappers:
         - build-name:
             name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
+        - fix-workspace-permissions
 
     triggers:
         - '{auto-trigger-name}'
         - build-name:
             name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
         - timeout:
-            timeout: 120
+            timeout: 240
             abort: true
+        - fix-workspace-permissions
 
     parameters:
         - project-parameter:
     scm:
         - git-scm
 
-    wrappers:
-        - build-name:
-            name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
-
-
     builders:
         - description-setter:
             description: "POD: $NODE_NAME"
index 5a63c4a..f891564 100644 (file)
@@ -18,12 +18,18 @@ curl -s -o $BUILD_DIRECTORY/latest.properties http://$GS_URL/latest.properties
 # source the file so we get OPNFV vars
 source $BUILD_DIRECTORY/latest.properties
 
-# download the file
-curl -s -o $BUILD_DIRECTORY/compass.iso http://$OPNFV_ARTIFACT_URL > gsutil.iso.log 2>&1
-
-# list the file
-ls -al $BUILD_DIRECTORY/compass.iso
+if [[ "$BRANCH" == 'stable/danube' ]]; then
+    # download the file
+    curl -s -o $BUILD_DIRECTORY/compass.iso http://$OPNFV_ARTIFACT_URL > gsutil.iso.log 2>&1
+    # list the file
+    ls -al $BUILD_DIRECTORY/compass.iso
+else
+    # download the file
+    curl -s -o $BUILD_DIRECTORY/compass.tar.gz http://$OPNFV_ARTIFACT_URL > gsutil.tar.gz.log 2>&1
+    # list the file
+    ls -al $BUILD_DIRECTORY/compass.tar.gz
+fi
 
 echo
 echo "--------------------------------------------------------"
-echo "Done!"
\ No newline at end of file
+echo "Done!"
index 73b7f07..87a9334 100644 (file)
@@ -7,6 +7,11 @@ echo "Uploading the $INSTALLER_TYPE artifact. This could take some time..."
 echo "--------------------------------------------------------"
 echo
 
+if [[ "$BRANCH" == 'stable/danube' ]]; then
+    FILETYPE='iso'
+else
+    FILETYPE='tar.gz'
+fi
 # source the opnfv.properties to get ARTIFACT_VERSION
 source $BUILD_DIRECTORY/opnfv.properties
 
@@ -23,16 +28,16 @@ signiso () {
 time gpg2 -vvv --batch --yes --no-tty \
   --default-key opnfv-helpdesk@rt.linuxfoundation.org  \
   --passphrase besteffort \
-  --detach-sig $BUILD_DIRECTORY/compass.iso
+  --detach-sig $BUILD_DIRECTORY/compass.$FILETYPE
 
-gsutil cp $BUILD_DIRECTORY/compass.iso.sig gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso.sig
+gsutil cp $BUILD_DIRECTORY/compass.$FILETYPE.sig gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.$FILETYPE.sig
 echo "ISO signature Upload Complete!"
 }
 
 signiso
 
 # upload artifact and additional files to google storage
-gsutil cp $BUILD_DIRECTORY/compass.iso gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso > gsutil.iso.log 2>&1
+gsutil cp $BUILD_DIRECTORY/compass.$FILETYPE gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.$FILETYPE > gsutil.$FILETYPE.log 2>&1
 gsutil cp $BUILD_DIRECTORY/opnfv.properties gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties > gsutil.properties.log 2>&1
 gsutil cp $BUILD_DIRECTORY/opnfv.properties gs://$GS_URL/latest.properties > gsutil.latest.log 2>&1
 
@@ -44,19 +49,19 @@ gsutil -m setmeta \
 
 gsutil -m setmeta \
     -h "Cache-Control:private, max-age=0, no-transform" \
-    gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso > /dev/null 2>&1
+    gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.$FILETYPE > /dev/null 2>&1
 
 # disabled errexit due to gsutil setmeta complaints
 #   BadRequestException: 400 Invalid argument
 # check if we uploaded the file successfully to see if things are fine
-gsutil ls gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso > /dev/null 2>&1
+gsutil ls gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.$FILETYPE > /dev/null 2>&1
 if [[ $? -ne 0 ]]; then
     echo "Problem while uploading artifact!"
-    echo "Check log $WORKSPACE/gsutil.iso.log on the machine where this build is done."
+    echo "Check log $WORKSPACE/gsutil.$FILETYPE.log on the machine where this build is done."
     exit 1
 fi
 
 echo
 echo "--------------------------------------------------------"
 echo "Done!"
-echo "Artifact is available as http://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso"
+echo "Artifact is available as http://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.$FILETYPE"
index 56f54d8..f016b91 100644 (file)
             gs-pathname: ''
             ppa-pathname: '/{stream}'
             disabled: false
+            openstack-version: 'ocata'
+            branch-type: 'master'
         - danube:
             branch: 'stable/{stream}'
             gs-pathname: '/{stream}'
             ppa-pathname: '/{stream}'
             disabled: false
+            openstack-version: 'newton'
+            branch-type: 'branch'
 
     distro:
         - 'xenial':
@@ -25,7 +29,7 @@
             os-version: 'xenial'
             openstack-os-version: ''
         - 'centos7':
-            disabled: false
+            disabled: true
             os-version: 'centos7'
             openstack-os-version: ''
 #####################################
                 - 'compass-os-.*?-virtual-daily-.*?'
             block-level: 'NODE'
 
-    scm:
-        - git-scm-gerrit
-
     wrappers:
         - ssh-agent-wrapper
         - timeout:
-            timeout: 120
+            timeout: 240
             fail: true
+        - fix-workspace-permissions
+
+    scm:
+        - git-scm-gerrit
 
     triggers:
         - gerrit:
         - project-parameter:
             project: '{project}'
             branch: '{branch}'
-        - 'compass-virtual-defaults'
+        - 'compass-virtual-{branch-type}-defaults'
         - '{installer}-defaults'
         - 'compass-verify-defaults':
             installer: '{installer}'
                   current-parameters: true
                   predefined-parameters: |
                     COMPASS_OS_VERSION={os-version}
+                    COMPASS_OPENSTACK_VERSION={openstack-version}
                   node-parameters: true
                   kill-phase-on: FAILURE
                   abort-all-job: true
             condition: SUCCESSFUL
             projects:
                 - name: 'functest-compass-virtual-suite-{stream}'
-                  current-parameters: true
+                  current-parameters: false
                   predefined-parameters:
                     FUNCTEST_SUITE_NAME=healthcheck
                   node-parameters: true
                   kill-phase-on: NEVER
                   abort-all-job: true
                 - name: 'functest-compass-virtual-suite-{stream}'
-                  current-parameters: true
+                  current-parameters: false
                   predefined-parameters:
                     FUNCTEST_SUITE_NAME=vping_ssh
                   node-parameters: true
                 - 'compass-os-.*?-virtual-daily-.*?'
             block-level: 'NODE'
 
-    scm:
-        - git-scm-gerrit
-
     wrappers:
         - ssh-agent-wrapper
         - timeout:
-            timeout: 120
+            timeout: 240
             fail: true
+        - fix-workspace-permissions
+
+    scm:
+        - git-scm-gerrit
 
     triggers:
         - gerrit:
         - project-parameter:
             project: '{project}'
             branch: '{branch}'
-        - 'compass-virtual-defaults'
+        - 'compass-virtual-{branch-type}-defaults'
         - '{installer}-defaults'
         - 'compass-verify-defaults':
             installer: '{installer}'
                 - 'functest-compass-virtual.*'
             block-level: 'NODE'
 
-    scm:
-        - git-scm-gerrit
-
     wrappers:
         - ssh-agent-wrapper
         - timeout:
-            timeout: 120
+            timeout: 240
             fail: true
+        - fix-workspace-permissions
+
+    scm:
+        - git-scm-gerrit
 
     builders:
         - description-setter:
         - string:
             name: PPA_CACHE
             default: "$WORKSPACE/work/repo/"
-        - choice:
-            name: COMPASS_OPENSTACK_VERSION
-            choices:
-                - 'newton'
         - choice:
             name: COMPASS_OS_VERSION
             choices:
index aac76ba..592e54d 100644 (file)
         # NOHA scenarios
         - 'os-nosdn-nofeature-noha':
             auto-trigger-name: 'daisy-{scenario}-{pod}-daily-{stream}-trigger'
+        # ODL_L3 scenarios
+        - 'os-odl_l3-nofeature-noha':
+            auto-trigger-name: 'daisy-{scenario}-{pod}-daily-{stream}-trigger'
+        # ODL_L2 scenarios
+        - 'os-odl_l2-nofeature-noha':
+            auto-trigger-name: 'daisy-{scenario}-{pod}-daily-{stream}-trigger'
 
     jobs:
         - '{project}-{scenario}-{pod}-daily-{stream}'
             installer: '{installer}'
         - string:
             name: DEPLOY_SCENARIO
-            default: 'os-nosdn-nofeature-ha'
+            default: 'os-nosdn-nofeature-noha'
         - 'daisy-project-parameter':
             gs-pathname: '{gs-pathname}'
         - string:
 # NOHA Scenarios
 - trigger:
     name: 'daisy-os-nosdn-nofeature-noha-baremetal-daily-master-trigger'
+    triggers:
+        - timed: 'H 12 * * *'
+# ODL_L3 Scenarios
+- trigger:
+    name: 'daisy-os-odl_l3-nofeature-noha-baremetal-daily-master-trigger'
+    triggers:
+        - timed: 'H 16 * * *'
+# ODL_L2 Scenarios
+- trigger:
+    name: 'daisy-os-odl_l2-nofeature-noha-baremetal-daily-master-trigger'
     triggers:
         - timed: ''
 #-----------------------------------------------
 - trigger:
     name: 'daisy-os-nosdn-nofeature-noha-virtual-daily-master-trigger'
     triggers:
-        - timed: 'H 8,22 * * *'
+        - timed: 'H 12 * * *'
+# ODL_L3 Scenarios
+- trigger:
+    name: 'daisy-os-odl_l3-nofeature-noha-virtual-daily-master-trigger'
+    triggers:
+        - timed: 'H 16 * * *'
+# ODL_L2 Scenarios
+- trigger:
+    name: 'daisy-os-odl_l2-nofeature-noha-virtual-daily-master-trigger'
+    triggers:
+        - timed: ''
 
index 0a9d43d..57e44e3 100644 (file)
             branch: '{stream}'
             gs-pathname: ''
             disabled: false
-        - danube:
-            branch: 'stable/{stream}'
-            gs-pathname: '/{stream}'
-            disabled: false
 
     phase:
         - 'build':
@@ -64,7 +60,7 @@
         - git-scm
 
     triggers:
-        - timed: '0 H/8 * * *'
+        - timed: '0 8 * * *'
 
     parameters:
         - project-parameter:
             description: 'Git URL to use on this Jenkins Slave'
         - string:
             name: DEPLOY_SCENARIO
-            default: 'os-nosdn-nofeature-ha'
+            default: 'os-nosdn-nofeature-noha'
         - '{installer}-project-parameter':
             gs-pathname: '{gs-pathname}'
 
index 807d436..c5454c7 100644 (file)
 
     installer:
         - apex:
-            slave-label: 'ool-virtual1'
-            pod: 'ool-virtual1'
+            slave-label: 'doctor-apex-verify'
         - fuel:
-            slave-label: 'ool-virtual2'
-            pod: 'ool-virtual2'
+            slave-label: 'doctor-fuel-verify'
         #- joid:
         #    slave-label: 'ool-virtual3'
         #    pod: 'ool-virtual3'
         - verify:
             profiler: 'none'
             auto-trigger-name: 'doctor-verify'
+            is-python: false
         - profiling:
             profiler: 'poc'
             auto-trigger-name: 'experimental'
+            is-python: false
+        - python-verify:
+            profiler: 'none'
+            auto-trigger-name: 'doctor-verify'
+            is-python: true
+
+    pod:
+        - arm-pod2:
+            slave-label: '{pod}'
+        - arm-pod3:
+            slave-label: '{pod}'
 
     jobs:
         - 'doctor-verify-{stream}'
+        - 'doctor-{task}-{installer}-{inspector}-{pod}-{stream}'
         - 'doctor-{task}-{installer}-{inspector}-{stream}'
 
 - job-template:
     builders:
         - shell: "[ -e tests/run.sh ] && bash -n ./tests/run.sh"
 
+- job-template:
+    name: 'doctor-{task}-{installer}-{inspector}-{pod}-{stream}'
+
+    node: '{slave-label}'
+
+    disabled: '{obj:disabled}'
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+            branch: '{branch}'
+        - 'opnfv-build-ubuntu-defaults'
+
+    scm:
+        - git-scm-gerrit
+
+
+    triggers:
+        - '{auto-trigger-name}':
+            project: '{project}'
+            branch: '{branch}'
+
+    builders:
+        - shell: "[ -e tests/run.sh ] && bash -n ./tests/run.sh"
+
+
 - job-template:
     name: 'doctor-{task}-{installer}-{inspector}-{stream}'
 
             default: 'doctor-notification'
         - string:
             name: TESTCASE_OPTIONS
-            default: '-e INSPECTOR_TYPE={inspector} -e PROFILER_TYPE={profiler} -v $WORKSPACE:/home/opnfv/repos/doctor'
+            default: '-e INSPECTOR_TYPE={inspector} -e PROFILER_TYPE={profiler} -e PYTHON_ENABLE={is-python} -v $WORKSPACE:/home/opnfv/repos/doctor'
             description: 'Addtional parameters specific to test case(s)'
         # functest-parameter
         - string:
index 682948d..43978f6 100644 (file)
@@ -25,7 +25,7 @@
         branch: 'stable/{stream}'
         dovetail-branch: master
         gs-pathname: '/{stream}'
-        docker-tag: 'latest'
+        docker-tag: 'cvp.0.2.0'
 
 #-----------------------------------
 # POD, PLATFORM, AND BRANCH MAPPING
             name: DOVETAIL_REPO_DIR
             default: "/home/opnfv/dovetail"
             description: "Directory where the dovetail repository is cloned"
+        - string:
+            name: SUT_BRANCH
+            default: '{branch}'
+            description: "SUT branch"
 
     scm:
         - git-scm
index 0ee789a..3ae0cbc 100755 (executable)
@@ -1,4 +1,11 @@
 #!/bin/bash
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
 
 [[ $CI_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
 
index dce7e58..bf96fd4 100755 (executable)
@@ -1,4 +1,11 @@
 #!/bin/bash
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
 
 #the noun INSTALLER is used in community, here is just the example to run.
 #multi-platforms are supported.
@@ -7,14 +14,12 @@ set -e
 [[ $CI_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
 
 DOVETAIL_HOME=${WORKSPACE}/cvp
-if [ -d ${DOVETAIL_HOME} ]; then
-    sudo rm -rf ${DOVETAIL_HOME}/*
-else
-    sudo mkdir -p ${DOVETAIL_HOME}
-fi
+[ -d ${DOVETAIL_HOME} ] && sudo rm -rf ${DOVETAIL_HOME}
+
+mkdir -p ${DOVETAIL_HOME}
 
 DOVETAIL_CONFIG=${DOVETAIL_HOME}/pre_config
-sudo mkdir -p ${DOVETAIL_CONFIG}
+mkdir -p ${DOVETAIL_CONFIG}
 
 sshkey=""
 # The path of openrc.sh is defined in fetch_os_creds.sh
@@ -47,7 +52,12 @@ releng_repo=${WORKSPACE}/releng
 git clone https://gerrit.opnfv.org/gerrit/releng ${releng_repo} >/dev/null
 
 if [[ ${INSTALLER_TYPE} != 'joid' ]]; then
-    sudo /bin/bash ${releng_repo}/utils/fetch_os_creds.sh -d ${OPENRC} -i ${INSTALLER_TYPE} -a ${INSTALLER_IP} >${redirect}
+    echo "SUT branch is $SUT_BRANCH"
+    echo "dovetail branch is $BRANCH"
+    BRANCH_BACKUP=$BRANCH
+    export BRANCH=$SUT_BRANCH
+    ${releng_repo}/utils/fetch_os_creds.sh -d ${OPENRC} -i ${INSTALLER_TYPE} -a ${INSTALLER_IP} >${redirect}
+    export BRANCH=$BRANCH_BACKUP
 fi
 
 if [[ -f $OPENRC ]]; then
@@ -59,6 +69,8 @@ else
     exit 1
 fi
 
+set +e
+
 sudo pip install virtualenv
 
 cd ${releng_repo}/modules
@@ -83,6 +95,8 @@ ${cmd}
 
 deactivate
 
+set -e
+
 cd ${WORKSPACE}
 
 if [ -f ${DOVETAIL_CONFIG}/pod.yaml ]; then
@@ -102,7 +116,8 @@ if [ "$INSTALLER_TYPE" == "fuel" ]; then
 fi
 
 # sdnvpn test case needs to download this image first before running
-sudo wget -nc http://artifacts.opnfv.org/sdnvpn/ubuntu-16.04-server-cloudimg-amd64-disk1.img -P ${DOVETAIL_CONFIG}
+echo "Download image ubuntu-16.04-server-cloudimg-amd64-disk1.img ..."
+wget -q -nc http://artifacts.opnfv.org/sdnvpn/ubuntu-16.04-server-cloudimg-amd64-disk1.img -P ${DOVETAIL_CONFIG}
 
 opts="--privileged=true -id"
 
index dd0590c..6867708 100644 (file)
             name: GS_URL
             default: artifacts.opnfv.org/$PROJECT{gs-pathname}
             description: "URL to Google Storage."
+        - string:
+            name: SSH_KEY
+            default: "/tmp/mcp.rsa"
+            description: "Path to private SSH key to access environment nodes. For MCP deployments only."
 ########################
 # trigger macros
 ########################
index 4d48ee5..2fb5c71 100755 (executable)
@@ -12,11 +12,13 @@ set -o pipefail
 
 export TERM="vt220"
 
-# source the file so we get OPNFV vars
-source latest.properties
+if [[ "$BRANCH" != 'master' ]]; then
+    # source the file so we get OPNFV vars
+    source latest.properties
 
-# echo the info about artifact that is used during the deployment
-echo "Using ${OPNFV_ARTIFACT_URL/*\/} for deployment"
+    # echo the info about artifact that is used during the deployment
+    echo "Using ${OPNFV_ARTIFACT_URL/*\/} for deployment"
+fi
 
 if [[ "$JOB_NAME" =~ "merge" ]]; then
     # set simplest scenario for virtual deploys to run for merges
@@ -75,7 +77,7 @@ echo "--------------------------------------------------------"
 echo "Scenario: $DEPLOY_SCENARIO"
 echo "Lab: $LAB_NAME"
 echo "POD: $POD_NAME"
-echo "ISO: ${OPNFV_ARTIFACT_URL/*\/}"
+[[ "$BRANCH" != 'master' ]] && echo "ISO: ${OPNFV_ARTIFACT_URL/*\/}"
 echo
 echo "Starting the deployment using $INSTALLER_TYPE. This could take some time..."
 echo "--------------------------------------------------------"
index 8cc552e..c3b8253 100755 (executable)
@@ -10,6 +10,9 @@
 set -o errexit
 set -o pipefail
 
+# disable Fuel ISO download for master branch
+[[ "$BRANCH" == 'master' ]] && exit 0
+
 # use the proxy URL to replace the normal URL, as googleusercontent.com will be blocked randomly
 [[ "$NODE_NAME" =~ (zte) ]] && GS_URL=${GS_BASE_PROXY%%/*}/$GS_URL
 
index 8de092d..fdef6f4 100644 (file)
             installer: fuel
             <<: *danube
 # PODs for verify jobs triggered by each patch upload
-        - ool-virtual1:
-            slave-label: '{pod}'
-            installer: apex
-            <<: *master
+#        - ool-virtual1:
+#            slave-label: '{pod}'
+#            installer: apex
+#            <<: *master
 #--------------------------------
 
     testsuite:
index 6768906..00a5f13 100755 (executable)
@@ -2,7 +2,11 @@
 set +e
 
 [[ "$PUSH_RESULTS_TO_DB" == "true" ]] && flags+="-r"
-cmd="python ${FUNCTEST_REPO_DIR}/functest/ci/run_tests.py -t all ${flags}"
+if [ "$BRANCH" == 'master' ]; then
+    cmd="run_tests -t all ${flags}"
+else
+    cmd="python ${FUNCTEST_REPO_DIR}/functest/ci/run_tests.py -t all ${flags}"
+fi
 
 container_id=$(docker ps -a | grep opnfv/functest | awk '{print $1}' | head -1)
 docker exec $container_id $cmd
index 7036f20..353423d 100644 (file)
@@ -9,6 +9,7 @@
 
     jobs:
         - 'functest-verify-{stream}'
+        - 'functest-docs-upload-{stream}'
 
     stream:
         - master:
@@ -18,7 +19,7 @@
         - danube:
             branch: 'stable/{stream}'
             gs-pathname: '/{stream}'
-            disabled: false
+            disabled: true
 
 - job-template:
     name: 'functest-verify-{stream}'
                     unhealthy: 40
                     failing: 30
 
+- job-template:
+    name: 'functest-docs-upload-{stream}'
+
+    disabled: '{obj:disabled}'
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+            branch: '{branch}'
+        - 'opnfv-build-ubuntu-defaults'
+
+    scm:
+        - git-scm
+
+    triggers:
+        - gerrit:
+            server-name: 'gerrit.opnfv.org'
+            trigger-on:
+                - change-merged-event
+                - comment-added-contains-event:
+                    comment-contains-value: 'remerge'
+            projects:
+              - project-compare-type: 'ANT'
+                project-pattern: '{project}'
+                branches:
+                  - branch-compare-type: 'ANT'
+                    branch-pattern: '**/{branch}'
+                disable-strict-forbidden-file-verification: 'true'
+                forbidden-file-paths:
+                  - compare-type: ANT
+                    pattern: 'docs/**|.gitignore'
+
+    builders:
+        - functest-upload-doc-artifact
+
 ################################
 # job builders
 ################################
     builders:
         - shell: |
             cd $WORKSPACE && tox
+
+- builder:
+    name: functest-upload-doc-artifact
+    builders:
+        - shell: |
+            cd $WORKSPACE && tox -edocs
+            wget -O - https://git.opnfv.org/releng/plain/utils/upload-artifact.sh | bash -s "functest/api/_build" "docs"
index 5d1ed28..9b7f135 100755 (executable)
@@ -10,7 +10,11 @@ global_ret_val=0
 
 tests=($(echo $FUNCTEST_SUITE_NAME | tr "," "\n"))
 for test in ${tests[@]}; do
-    cmd="python /home/opnfv/repos/functest/functest/ci/run_tests.py -t $test"
+    if [ "$BRANCH" == 'master' ]; then
+        cmd="run_tests -t $test"
+    else
+        cmd="python /home/opnfv/repos/functest/functest/ci/run_tests.py -t $test"
+    fi
     docker exec $container_id $cmd
     let global_ret_val+=$?
 done
index 558e248..5f936f5 100755 (executable)
@@ -90,7 +90,12 @@ if [ $(docker ps | grep "${FUNCTEST_IMAGE}:${DOCKER_TAG}" | wc -l) == 0 ]; then
     exit 1
 fi
 
-cmd="python ${FUNCTEST_REPO_DIR}/functest/ci/prepare_env.py start"
+if [ "$BRANCH" == 'master' ]; then
+    cmd="prepare_env start"
+else
+    cmd="python ${FUNCTEST_REPO_DIR}/functest/ci/prepare_env.py start"
+fi
+
 
 echo "Executing command inside the docker: ${cmd}"
 docker exec ${container_id} ${cmd}
index 40fc42c..ee154af 100644 (file)
             name: INSTALLER_IP
             default: '10.20.0.2'
             description: 'IP of the installer'
+        - string:
+            name: SALT_MASTER_IP
+            default: '192.168.10.100'
+            description: 'IP of the salt master (for mcp deployments)'
+        - string:
+            name: SSH_KEY
+            default: '/tmp/mcp.rsa'
+            description: 'Path to private SSH key to access environment nodes'
         - string:
             name: INSTALLER_TYPE
             default: fuel
index ce89905..92199d6 100644 (file)
         - pre-scm-buildstep:
           - shell: |
                 #!/bin/bash
-                sudo chown -R $USER $WORKSPACE || exit 1
+                sudo chown -R $USER:$USER $WORKSPACE || exit 1
 
 - builder:
     name: build-html-and-pdf-docs-output
             find "$local_path" | grep -e 'index.html$' -e 'pdf$' | \
                 sed -e "s|^$local_path|    http://$gs_path|" >> gerrit_comment.txt
 
+# To take advantage of this macro, have your build write
+# out the file 'gerrit_comment.txt' with information to post
+# back to gerrit and include this macro in the list of builders.
 - builder:
-    name: report-docs-build-result-to-gerrit
+    name: report-build-result-to-gerrit
     builders:
         - shell: |
             #!/bin/bash
     builders:
         - build-html-and-pdf-docs-output
         - upload-under-review-docs-to-opnfv-artifacts
-        - report-docs-build-result-to-gerrit
+        - report-build-result-to-gerrit
 
 - builder:
     name: upload-merged-docs
     builders:
         - build-html-and-pdf-docs-output
         - upload-generated-docs-to-opnfv-artifacts
-        - report-docs-build-result-to-gerrit
+        - report-build-result-to-gerrit
         - remove-old-docs-from-opnfv-artifacts
 
 - builder:
index cdc08ea..50859c4 100644 (file)
             name: GIT_BASE
             default: https://gerrit.opnfv.org/gerrit/$PROJECT
             description: 'Git URL to use on this Jenkins Slave'
+- parameter:
+    name: 'compass-baremetal-master-defaults'
+    parameters:
+        - label:
+            name: SLAVE_LABEL
+            default: 'compass-baremetal-master'
+        - string:
+            name: GIT_BASE
+            default: https://gerrit.opnfv.org/gerrit/$PROJECT
+            description: 'Git URL to use on this Jenkins Slave'
+- parameter:
+    name: 'compass-baremetal-branch-defaults'
+    parameters:
+        - label:
+            name: SLAVE_LABEL
+            default: 'compass-baremetal-branch'
+        - string:
+            name: GIT_BASE
+            default: https://gerrit.opnfv.org/gerrit/$PROJECT
+            description: 'Git URL to use on this Jenkins Slave'
 - parameter:
     name: 'fuel-baremetal-defaults'
     parameters:
             default: 'daisy-baremetal'
         - string:
             name: INSTALLER_IP
-            default: '10.20.11.2'
+            default: '10.20.7.3'
             description: 'IP of the installer'
         - string:
             name: GIT_BASE
             name: GIT_BASE
             default: https://gerrit.opnfv.org/gerrit/$PROJECT
             description: 'Git URL to use on this Jenkins Slave'
+- parameter:
+    name: 'compass-virtual-master-defaults'
+    parameters:
+        - label:
+            name: SLAVE_LABEL
+            default: 'compass-virtual-master'
+        - string:
+            name: GIT_BASE
+            default: https://gerrit.opnfv.org/gerrit/$PROJECT
+            description: 'Git URL to use on this Jenkins Slave'
+- parameter:
+    name: 'compass-virtual-branch-defaults'
+    parameters:
+        - label:
+            name: SLAVE_LABEL
+            default: 'compass-virtual-branch'
+        - string:
+            name: GIT_BASE
+            default: https://gerrit.opnfv.org/gerrit/$PROJECT
+            description: 'Git URL to use on this Jenkins Slave'
 - parameter:
     name: 'fuel-virtual-defaults'
     parameters:
             default: https://gerrit.opnfv.org/gerrit/$PROJECT
             description: 'Git URL to use on this Jenkins Slave'
 - parameter:
-    name: 'ool-defaults'
+    name: 'doctor-defaults'
     parameters:
         - node:
             name: SLAVE_NAME
             description: 'Slave name on Jenkins'
             allowed-slaves:
-                - ool-virtual1
-                - ool-virtual2
-                - ool-virtual3
+                - '{default-slave}'
             default-slaves:
                 - '{default-slave}'
         - string:
             default: /root/.ssh/id_rsa
             description: 'SSH key to be used'
 - parameter:
-    name: 'ool-virtual1-defaults'
+    name: 'doctor-apex-verify-defaults'
     parameters:
-        - 'ool-defaults':
-            default-slave: 'ool-virtual1'
+        - 'doctor-defaults':
+            default-slave: 'doctor-apex-verify'
 - parameter:
-    name: 'ool-virtual2-defaults'
+    name: 'doctor-fuel-verify-defaults'
     parameters:
-        - 'ool-defaults':
-            default-slave: 'ool-virtual2'
+        - 'doctor-defaults':
+            default-slave: 'doctor-fuel-verify'
 - parameter:
-    name: 'ool-virtual3-defaults'
+    name: 'doctor-joid-verify-defaults'
     parameters:
-        - 'ool-defaults':
-            default-slave: 'ool-virtual3'
+        - 'doctor-defaults':
+            default-slave: 'doctor-joid-verify'
 - parameter:
     name: 'multisite-virtual-defaults'
     parameters:
index 9a4d885..2702c45 100644 (file)
@@ -58,7 +58,7 @@
 - job-template:
     name: 'netready-build-gluon-packages-daily-{stream}'
 
-    disabled: false
+    disabled: true
 
     concurrent: true
 
similarity index 77%
rename from jjb/releng/testapi-automate.yml
rename to jjb/releng/automate.yml
index dd76538..73bef3e 100644 (file)
@@ -1,20 +1,22 @@
 - project:
-    name: testapi-automate
+    name: utils-automate
     stream:
         - master:
             branch: '{stream}'
-            gs-pathname: ''
+
+    module:
+        - 'testapi'
+        - 'reporting'
 
     phase:
-        - 'docker-update'
         - 'docker-deploy':
             slave-label: 'testresults'
         - 'generate-doc'
 
     jobs:
-        - 'testapi-automate-{stream}'
-        - 'testapi-automate-{phase}-{stream}'
-        - 'testapi-verify-{stream}'
+        - '{module}-automate-{stream}'
+        - '{module}-automate-{phase}-{stream}'
+        - '{module}-verify-{stream}'
 
     project: 'releng'
 
@@ -44,7 +46,7 @@
         - mongodb-backup
 
 - job-template:
-    name: 'testapi-verify-{stream}'
+    name: '{module}-verify-{stream}'
 
     parameters:
         - project-parameter:
                     branch-pattern: '**/{branch}'
                 file-paths:
                   - compare-type: 'ANT'
-                    pattern: 'utils/test/testapi/**'
+                    pattern: 'utils/test/{module}/**'
 
     builders:
-        - run-unit-tests
+        - shell: |
+            cd ./utils/test/{module}/
+            bash run_test.sh
+            cp *.xml $WORKSPACE
 
     publishers:
         - junit:
                     failing: 30
 
 - job-template:
-    name: 'testapi-automate-{stream}'
+    name: '{module}-automate-{stream}'
 
     project-type: multijob
 
             branch: '{branch}'
         - string:
             name: DOCKER_TAG
-            default: "latest"
-            description: "Tag name for testapi docker image"
+            default: 'latest'
+            description: 'Tag name for {module} docker image'
+        - string:
+            name: MODULE_NAME
+            default: '{module}'
+            description: "Name of the module"
         - 'opnfv-build-defaults'
 
     scm:
                     branch-pattern: '**/{branch}'
                 file-paths:
                   - compare-type: 'ANT'
-                    pattern: 'utils/test/testapi/**'
+                    pattern: 'utils/test/{module}/**'
 
     builders:
         - description-setter:
             description: "Built on $NODE_NAME"
-        - multijob:
-            name: docker-update
-            condition: SUCCESSFUL
-            projects:
-                - name: 'testapi-automate-docker-update-{stream}'
-                  current-parameters: true
-                  kill-phase-on: FAILURE
-                  abort-all-job: true
+        - docker-update
         - multijob:
             name: docker-deploy
             condition: SUCCESSFUL
             projects:
-                - name: 'testapi-automate-docker-deploy-{stream}'
+                - name: '{module}-automate-docker-deploy-{stream}'
                   current-parameters: false
                   predefined-parameters: |
                     GIT_BASE=$GIT_BASE
             name: generate-doc
             condition: SUCCESSFUL
             projects:
-                - name: 'testapi-automate-generate-doc-{stream}'
+                - name: '{module}-automate-generate-doc-{stream}'
                   current-parameters: true
                   kill-phase-on: FAILURE
                   abort-all-job: true
         - 'email-publisher'
 
 - job-template:
-    name: 'testapi-automate-{phase}-{stream}'
+    name: '{module}-automate-{phase}-{stream}'
 
     properties:
         - throttle:
         - project-parameter:
             project: '{project}'
             branch: '{branch}'
-        - string:
-            name: DOCKER_TAG
-            default: "latest"
-            description: "Tag name for testapi docker image"
 
     wrappers:
         - ssh-agent-wrapper
     builders:
         - description-setter:
             description: "Built on $NODE_NAME"
-        - 'testapi-automate-{phase}-macro'
+        - '{module}-automate-{phase}-macro'
 
 ################################
 # job builders
 ################################
 - builder:
-    name: mongodb-backup
-    builders:
-        - shell: |
-            bash ./jjb/releng/testapi-backup-mongodb.sh
-
-- builder:
-    name: 'run-unit-tests'
+    name: 'docker-update'
     builders:
-        - shell: |
-            bash ./utils/test/testapi/run_test.sh
-
-- builder:
-    name: 'testapi-automate-docker-update-macro'
-    builders:
-        - shell: |
-            bash ./jjb/releng/testapi-docker-update.sh
+        - shell:
+            !include-raw: ./docker-update.sh
 
 - builder:
     name: 'testapi-automate-generate-doc-macro'
         - shell: |
             bash ./utils/test/testapi/htmlize/push-doc-artifact.sh
 
+- builder:
+    name: 'reporting-automate-generate-doc-macro'
+    builders:
+        - shell: echo "To Be Done"
+
 - builder:
     name: 'testapi-automate-docker-deploy-macro'
     builders:
         - shell: |
-            bash ./jjb/releng/testapi-docker-deploy.sh
+            bash ./jjb/releng/docker-deploy.sh 'sudo docker run -dti -p 8082:8000 -e mongodb_url=mongodb://172.17.0.1:27017 -e base_url=http://testresults.opnfv.org/test opnfv/testapi' "http://testresults.opnfv.org/test/swagger/APIs"
+- builder:
+    name: 'reporting-automate-docker-deploy-macro'
+    builders:
+        - shell: |
+            bash ./jjb/releng/docker-deploy.sh 'sudo docker run -itd -p 8084:8000 -e SERVER_URL=http://testresults.opnfv.org/reporting2:8084 opnfv/reporting' "http://testresults.opnfv.org/reporting2/reporting/index.html"
+
+- builder:
+    name: mongodb-backup
+    builders:
+        - shell: |
+            bash ./jjb/releng/testapi-backup-mongodb.sh
 
 ################################
 # job publishers
     name: 'email-publisher'
     publishers:
         - email:
-            recipients: rohitsakala@gmail.com feng.xiaowei@zte.com.cn
+            recipients: rohitsakala@gmail.com feng.xiaowei@zte.com.cn morgan.richomme@orange.com
             notify-every-unstable-build: false
             send-to-individuals: true
similarity index 63%
rename from jjb/releng/testapi-docker-deploy.sh
rename to jjb/releng/docker-deploy.sh
index b4e60b0..b3b930f 100644 (file)
@@ -1,10 +1,30 @@
 #!/bin/bash
+#  Licensed to the Apache Software Foundation (ASF) under one   *
+#  or more contributor license agreements.  See the NOTICE file *
+#  distributed with this work for additional information        *
+#  regarding copyright ownership.  The ASF licenses this file   *
+#  to you under the Apache License, Version 2.0 (the            *
+#  "License"); you may not use this file except in compliance   *
+#  with the License.  You may obtain a copy of the License at   *
+#                                                               *
+#    http://www.apache.org/licenses/LICENSE-2.0                 *
+#                                                               *
+#  Unless required by applicable law or agreed to in writing,   *
+#  software distributed under the License is distributed on an  *
+#  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY       *
+#  KIND, either express or implied.  See the License for the    *
+#  specific language governing permissions and limitations      *
+#  under the License.                                           *
+
+# Assigning Variables
+command=$1
+url=$2
 
 function check() {
 
     # Verify hosted
     sleep 5
-    cmd=`curl -s --head  --request GET http://testresults.opnfv.org/test/swagger/spec | grep '200 OK' > /dev/null`
+    cmd=`curl -s --head  --request GET ${url} | grep '200 OK' > /dev/null`
     rc=$?
     echo $rc
 
@@ -63,7 +83,7 @@ else
 fi
 
 echo "Running a container with the new image"
-sudo docker run -dti -p "8082:8000" -e "mongodb_url=mongodb://172.17.0.1:27017" -e "swagger_url=http://testresults.opnfv.org/test" opnfv/testapi:latest
+$command:latest
 
 if check; then
     echo "TestResults Hosted."
@@ -71,7 +91,7 @@ else
     echo "TestResults Hosting Failed"
     if [[ $(sudo docker images | grep "opnfv/testapi" | grep "old" | awk '{print $3}') ]]; then
         echo "Running old Image"
-        sudo docker run -dti -p "8082:8000" -e "mongodb_url=mongodb://172.17.0.1:27017" -e "swagger_url=http://testresults.opnfv.org/test" opnfv/testapi:old
+        $command:old
         exit 1
     fi
 fi
diff --git a/jjb/releng/docker-update.sh b/jjb/releng/docker-update.sh
new file mode 100644 (file)
index 0000000..559ac83
--- /dev/null
@@ -0,0 +1,34 @@
+#!/bin/bash
+#  Licensed to the Apache Software Foundation (ASF) under one   *
+#  or more contributor license agreements.  See the NOTICE file *
+#  distributed with this work for additional information        *
+#  regarding copyright ownership.  The ASF licenses this file   *
+#  to you under the Apache License, Version 2.0 (the            *
+#  "License"); you may not use this file except in compliance   *
+#  with the License.  You may obtain a copy of the License at   *
+#                                                               *
+#    http://www.apache.org/licenses/LICENSE-2.0                 *
+#                                                               *
+#  Unless required by applicable law or agreed to in writing,   *
+#  software distributed under the License is distributed on an  *
+#  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY       *
+#  KIND, either express or implied.  See the License for the    *
+#  specific language governing permissions and limitations      *
+#  under the License.                                           *
+
+set -o errexit
+set -o nounset
+
+cd $WORKSPACE/utils/test/$MODULE_NAME/docker/
+
+# Remove previous containers
+docker ps -a | grep "opnfv/$MODULE_NAME" | awk '{ print $1 }' | xargs -r docker rm -f
+
+# Remove previous images
+docker images | grep "opnfv/$MODULE_NAME" | awk '{ print $3 }' | xargs -r docker rmi -f
+
+# Start build
+docker build --no-cache -t opnfv/$MODULE_NAME:$DOCKER_TAG .
+
+# Push Image
+docker push opnfv/$MODULE_NAME:$DOCKER_TAG
index 2aa52ad..ebd0c9f 100644 (file)
@@ -73,6 +73,8 @@ fi
 # Get tag version
 echo "Current branch: $BRANCH"
 
+BUILD_BRANCH=$BRANCH
+
 if [[ "$BRANCH" == "master" ]]; then
     DOCKER_TAG="latest"
 elif [[ -n "${RELEASE_VERSION-}" ]]; then
@@ -82,19 +84,17 @@ else
     DOCKER_TAG="stable"
 fi
 
+if [[ -n "${COMMIT_ID-}" && -n "${RELEASE_VERSION-}" ]]; then
+    DOCKER_TAG=$RELEASE_VERSION
+    BUILD_BRANCH=$COMMIT_ID
+fi
+
 # Start the build
 echo "Building docker image: $DOCKER_REPO_NAME:$DOCKER_TAG"
 echo "--------------------------------------------------------"
 echo
-if [[ $DOCKER_REPO_NAME == *"dovetail"* ]]; then
-    if [[ -n "${RELEASE_VERSION-}" ]]; then
-        DOCKER_TAG=${RELEASE_VERSION}
-    fi
-    cmd="docker build --no-cache -t $DOCKER_REPO_NAME:$DOCKER_TAG -f $DOCKERFILE ."
-else
-    cmd="docker build --no-cache -t $DOCKER_REPO_NAME:$DOCKER_TAG --build-arg BRANCH=$BRANCH
-        -f $DOCKERFILE ."
-fi
+cmd="docker build --no-cache -t $DOCKER_REPO_NAME:$DOCKER_TAG --build-arg BRANCH=$BUILD_BRANCH
+    -f $DOCKERFILE ."
 
 echo ${cmd}
 ${cmd}
index 5fe0eb9..095ba41 100644 (file)
             name: DOCKER_REPO_NAME
             default: "opnfv/{project}"
             description: "Dockerhub repo to be pushed to."
+        - string:
+            name: COMMIT_ID
+            default: ""
+            description: "commit id to make a snapshot docker image"
         - string:
             name: RELEASE_VERSION
             default: ""
diff --git a/jjb/releng/testapi-docker-update.sh b/jjb/releng/testapi-docker-update.sh
deleted file mode 100644 (file)
index 84f5c32..0000000
+++ /dev/null
@@ -1,18 +0,0 @@
-#!/bin/bash
-
-set -o errexit
-set -o nounset
-
-cd $WORKSPACE/utils/test/testapi/docker/
-
-# Remove previous containers
-docker ps -a | grep "opnfv/testapi" | awk '{ print $1 }' | xargs -r docker rm -f
-
-# Remove previous images
-docker images | grep "opnfv/testapi" | awk '{ print $3 }' | xargs -r docker rmi -f
-
-# Start build
-docker build --no-cache -t opnfv/testapi:$DOCKER_TAG .
-
-# Push Image
-docker push opnfv/testapi:$DOCKER_TAG
index cd40050..13186a1 100644 (file)
             name: DOCKER_TAG
             default: '{docker-tag}'
             description: 'Tag to pull docker image'
+        - choice:
+            name: DISK_TYPE
+            choices:
+                - 'SSD'
+                - 'HDD'
+            default: 'HDD'
+            description: 'The type of hard disk that Cinder uses'
+        - string:
+            name: AGENT_COUNT
+            description: 'The number of slave agents to start. Defaults to the cinder node count'
+        - string:
+            name: VOLUME_SIZE
+            default: '4'
+            description: 'Size of Cinder volume (in GB)'
+        - string:
+            name: WORKLOADS
+            default: 'wr,rr,rw'
+            description: 'Workloads to run'
+        - string:
+            name: BLOCK_SIZES
+            default: '2048,16384'
+            description: 'Block sizes for VM I/O operations'
+        - string:
+            name: QUEUE_DEPTHS
+            default: '1,4'
+            description: 'Number of simultaneous I/O operations to keep active'
+        - string:
+            name: STEADY_STATE_SAMPLES
+            default: '10'
+            description: 'Number of samples to use (1 per minute) to measure steady state'
+        - string:
+            name: DEADLINE
+            description: 'Maximum run time in minutes if steady state cannot be found. Defaults to 3 times steady state samples'
+        - choice:
+            name: TEST_CASE
+            choices:
+                - 'snia_steady_state'
+            description: 'The test case to run'
 
     scm:
         - git-scm
 
     triggers:
-        - timed: 'H H * * *'
+        - timed: '0 22 * * *'
 
     builders:
         - shell: |
index 3e9ff67..9773cfd 100644 (file)
     # trigger is disabled until we know which jobs we will have
     # and adjust stuff accordingly
     triggers:
-        - timed: '#@midnight'
+        - timed: ''  # '@midnight'
 
     builders:
         - description-setter:
index c6da9f4..319f8eb 100644 (file)
@@ -20,9 +20,9 @@
 # distros
 #--------------------------------
     distro:
-        - 'trusty':
+        - 'xenial':
             disabled: false
-            dib-os-release: 'trusty'
+            dib-os-release: 'xenial'
             dib-os-element: 'ubuntu-minimal'
             dib-os-packages: 'vlan,vim,less,bridge-utils,language-pack-en,iputils-ping,rsyslog,curl'
             extra-dib-elements: 'openssh-server'
index 2e6f227..b522b89 100755 (executable)
@@ -89,7 +89,7 @@ function cleanup_and_upload() {
 }
 
 # check distro to see if we support it
-if [[ ! "$DISTRO" =~ (trusty|centos7|suse) ]]; then
+if [[ ! "$DISTRO" =~ (xenial|centos7|suse) ]]; then
     echo "Distro $DISTRO is not supported!"
     exit 1
 fi
index 56a4b18..722b077 100644 (file)
     # trigger is disabled until we know which jobs we will have
     # and adjust stuff accordingly
     triggers:
-        - timed: '#@midnight'
+        - timed: ''  # '@midnight'
 
     builders:
         - description-setter:
similarity index 99%
rename from jjb/yardstick/yardstick-ci-jobs.yml
rename to jjb/yardstick/yardstick-daily-jobs.yml
index 5ff36f8..ff1d47e 100644 (file)
 
     publishers:
         - email:
-            recipients: jean.gaoliang@huawei.com limingjiang@huawei.com
+            recipients: jean.gaoliang@huawei.com limingjiang@huawei.com ross.b.brattain@intel.com
 
 ########################
 # builder macros
index 973f83a..1c2abad 100755 (executable)
@@ -31,7 +31,8 @@ fi
 opts="--privileged=true --rm"
 envs="-e INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP} \
     -e NODE_NAME=${NODE_NAME} -e EXTERNAL_NETWORK=${EXTERNAL_NETWORK} \
-    -e YARDSTICK_BRANCH=${BRANCH} -e DEPLOY_SCENARIO=${DEPLOY_SCENARIO}"
+    -e YARDSTICK_BRANCH=${BRANCH} -e BRANCH=${BRANCH} \
+    -e DEPLOY_SCENARIO=${DEPLOY_SCENARIO}"
 
 # Pull the image with correct tag
 echo "Yardstick: Pulling image opnfv/yardstick:${DOCKER_TAG}"
index bae55dc..2793dd4 100644 (file)
@@ -1,3 +1,3 @@
-paramiko==2.1.2
-mock==1.3.0
-requests==2.9.1
+paramiko>=2.0 # LGPLv2.1+
+mock>=2.0 # BSD
+requests!=2.12.2,>=2.10.0 # Apache-2.0
index 8ac5cea..0dd635f 100644 (file)
@@ -17,9 +17,9 @@ setup(
     package_data={
     },
     url="https://www.opnfv.org",
-    install_requires=["paramiko>=2.0.1",
-                      "mock==1.3.0",
-                      "nose==1.3.7",
-                      "coverage==4.1",
-                      "requests==2.9.1"]
+    install_requires=["paramiko>=2.0",
+                      "mock>=2.0",
+                      "requests!=2.12.2,>=2.10.0"],
+    test_requires=["nose",
+                   "coverage>=4.0"]
 )
index 99d7f13..c264540 100644 (file)
@@ -1,6 +1,2 @@
-# The order of packages is significant, because pip processes them in the order
-# of appearance. Changing the order has an impact on the overall integration
-# process, which may cause wedges in the gate later.
-
-nose
-coverage
+nose # LGPL
+coverage>=4.0 # Apache-2.0
index 0d93665..ecb8e19 100644 (file)
@@ -185,6 +185,25 @@ continuously chasing the HEAD of corresponding branches.
 Once a working version is identified, the versions of the upstream components
 are then bumped in releng repo.
 
+==================
+XCI developer tips
+==================
+
+It is possible to run XCI in development mode, in order to test the
+latest changes. When deploying in this mode, the script will use the working
+directories for releng/bifrost/OSA, instead of cloning the whole repositories
+on each run.
+To enable it, you need to export the different DEV_PATH vars:
+
+- export OPNFV_RELENG_DEV_PATH=/opt/releng/
+- export OPENSTACK_BIFROST_DEV_PATH=/opt/bifrost/
+- export OPENSTACK_OSA_DEV_PATH=/opt/openstack-ansible/
+
+Please note the trailing slashes.
+
+This will cause the deployment to pick the development copies stored at the
+specified directories, and use them instead of cloning those on every run.
+
 ===========================================
 Limitations, Known Issues, and Improvements
 ===========================================
index e3b49c7..5817860 100755 (executable)
@@ -21,7 +21,7 @@
 #-------------------------------------------------------------------------------
 # use releng from master until the development work with the sandbox is complete
 export OPNFV_RELENG_VERSION="master"
-# HEAD of "master" as of 04.04.2017
-export OPENSTACK_BIFROST_VERSION=${OPENSTACK_BIFROST_VERSION:-"6109f824e5510e794dbf1968c3859e8b6356d280"}
-# HEAD of "master" as of 04.04.2017
-export OPENSTACK_OSA_VERSION=${OPENSTACK_OSA_VERSION:-"d9e1330c7ff9d72a604b6b4f3af765f66a01b30e"}
+# HEAD of bifrost "master" as of 29.06.2017
+export OPENSTACK_BIFROST_VERSION=${OPENSTACK_BIFROST_VERSION:-"7c9bb5e07c6bc3b42c9a9e8457e5eef511075b38"}
+# HEAD of osa "master" as of 29.06.2017
+export OPENSTACK_OSA_VERSION=${OPENSTACK_OSA_VERSION:-"0648818c64239b534d00db381c4609f28e40bda9"}
index 5ed5396..fd11a58 100755 (executable)
@@ -56,3 +56,6 @@ export LOG_PATH=${LOG_PATH:-${XCI_DEVEL_ROOT}/opnfv/logs}
 export RUN_TEMPEST=${RUN_TEMPEST:-false}
 # Set this to to true to force XCI to re-create the target OS images
 export CLEAN_DIB_IMAGES=${CLEAN_DIB_IMAGES:-false}
+# Set this to a full path pointing to extra config files (containing
+# group_vars/all)
+export XCI_EXTRA_VARS_PATH=${XCI_EXTRA_VARS_PATH:-""}
index 842bcc4..57e0bb8 100644 (file)
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
-# these versions are extracted based on the osa commit d9e1330c7ff9d72a604b6b4f3af765f66a01b30e on 04.04.2017
-# https://review.openstack.org/gitweb?p=openstack/openstack-ansible.git;a=commit;h=d9e1330c7ff9d72a604b6b4f3af765f66a01b30e
+# these versions are extracted based on the osa commit 0648818c64239b534d00db381c4609f28e40bda9 on 2017-06-27T22:02:17+00:00
+# https://review.openstack.org/gitweb?p=openstack/openstack-ansible.git;a=commit;h=0648818c64239b534d00db381c4609f28e40bda9
+- name: ansible-hardening
+  scm: git
+  src: https://git.openstack.org/openstack/ansible-hardening
+  version: f422da8599c6d8f64ebfefbf0a0aa711ea1f9569
 - name: apt_package_pinning
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-apt_package_pinning
-  version: 364fc9fcd8ff652546c13d9c20ac808bc0e35f66
+  version: 4afe664efb5a2385a1d7071f68bc9001f16c0f41
 - name: pip_install
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-pip_install
-  version: 793ae4d01397bd91ebe18e9670e8e27d1ae91960
+  version: 348995b85f91f796b28656459474fb3935be737c
 - name: galera_client
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-galera_client
-  version: c093c13e01826da545bf9a0259e0be441bc1b5e1
+  version: 2055ebf1582a15c2b2a73985485be15884c9b2d3
 - name: galera_server
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-galera_server
-  version: fd0a6b104a32badbe7e7594e2c829261a53bfb11
+  version: 78a1259a10a5be95ab7d6ba3e8f2961805ae3a5b
 - name: ceph_client
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-ceph_client
-  version: 9149bfa8e3c4284b656834ba7765ea3aa48bec2e
+  version: aa3b0d959464f9362aaf29d6cf6225e1d4e302be
 - name: haproxy_server
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-haproxy_server
-  version: 32415ab81c61083ac5a83b65274703e4a5470e5e
+  version: f8bc5c6129c0d50ac3355c82560fbf22ee32479b
 - name: keepalived
   scm: git
   src: https://github.com/evrardjp/ansible-keepalived
-  version: 4f7c8eb16e3cbd8c8748f126c1eea73db5c8efe9
+  version: 3.0.1
 - name: lxc_container_create
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-lxc_container_create
-  version: 097da38126d90cfca36cdc3955aaf658a00db599
+  version: 1eab03452885f0a34cb78f54e3bcf5824abc012e
 - name: lxc_hosts
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-lxc_hosts
-  version: 2931d0c87a1c592ad7f1f2f83cdcf468e8dea932
+  version: f0b8782c03dde4dd65e70d8b03afc26a30c74f37
 - name: memcached_server
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-memcached_server
-  version: 58e17aa13ebe7b0aa5da7c00afc75d6716d2720d
-- name: openstack-ansible-security
-  scm: git
-  src: https://git.openstack.org/openstack/openstack-ansible-security
-  version: 9d745ec4fe8ac3e6d6cbb2412abe5196a9d2dad7
+  version: 6924e7d44d6e36cbe45507e43ef82af6ac0ae125
 - name: openstack_hosts
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-openstack_hosts
-  version: 2076dfddf418b1bdd64d3782346823902aa996bc
+  version: d68b1dd8fc5ef18c78172d4e9fa3ca01d7473dcf
 - name: os_keystone
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-os_keystone
-  version: cee7a02143a1826479e6444c6fb5f1c2b6074ab7
+  version: 0cafcc150da10a01ee0b4543167fdc88b9b91a85
 - name: openstack_openrc
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-openstack_openrc
-  version: fb98ad8d7bfe7fba0c964cb061313f1b8767c4b0
+  version: 18b7f31a19c4c9bc95abc07a83c9ba866eff538d
 - name: os_aodh
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-os_aodh
-  version: 9dcacb8fd6feef02e485f99c83535707ae67876b
+  version: 5bebd5a18aa7469803f26fb41df62495730afde3
 - name: os_barbican
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-os_barbican
-  version: bb3f39cb2f3c31c6980aa65c8953ff6293b992c0
+  version: fc95936f9375c3e9eab708b356e760e3eeb785d7
 - name: os_ceilometer
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-os_ceilometer
-  version: 178ad8245fa019f0610c628c58c377997b011e8a
+  version: daf94c5d1a009abb111b5ff7dea8b4f50473b227
 - name: os_cinder
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-os_cinder
-  version: 1321fd39d8f55d1dc3baf91b4194469b349d7dc4
+  version: d0c46f29d7bb02139a14ad46869ce411e80874d9
 - name: os_glance
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-os_glance
-  version: f39ef212bfa2edff8334bfb632cc463001c77c11
+  version: a1e3588769e6d17b074398f0ef2675f34438b73b
 - name: os_gnocchi
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-os_gnocchi
-  version: 318bd76e5e72402e8ff5b372b469c27a9395341b
+  version: f79b0f6e1db40b59390b7e40a90792e72afe55e6
 - name: os_heat
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-os_heat
-  version: 07d59ddb757b2d2557fba52ac537803e646e65b4
+  version: 7a5b703b35f36a5a63ce9934ef585c8967e9de5a
 - name: os_horizon
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-os_horizon
-  version: 69ef49c4f7a42f082f4bcff824d13f57145e2b83
+  version: cb4a27da79ad67b2826f637927514e0829c23c0f
 - name: os_ironic
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-os_ironic
-  version: 57e8a0eaaa2159f33e64a1b037180383196919d1
+  version: e5c24e40b0d08d8bc7b4641679a8731c2b2aca29
 - name: os_magnum
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-os_magnum
-  version: 8329c257dff25686827bd1cc904506d76ad1d12f
+  version: 3eeb33db25db48f04e496a3ee47323fffe2af864
 - name: os_trove
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-os_trove
-  version: b948402c76d6188caa7be376098354cdb850d638
+  version: 0cf74c1a917b07e557411ca1c1376491c97aa0a9
 - name: os_neutron
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-os_neutron
-  version: 2a92a4e1857e7457683aefd87ee5a4e751fc701a
+  version: 280788b20099532c13042966defcbcbf5d5dd994
 - name: os_nova
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-os_nova
-  version: 511963b7921ec7c2db24e8ee1d71a940b0aafae4
+  version: 031b386bdd29f895203a3d053c1dabba66cfeeb0
 - name: os_rally
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-os_rally
-  version: 96153c5b3285d11d00611a03135c9d8f267e0f52
+  version: 9125458265088eb8622f28df57f640509546a6d4
 - name: os_sahara
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-os_sahara
-  version: 012d3f3530f878e5143d58380f94d1f514baad04
+  version: 433d624b0ddb0d2778f014a175064572e15ea462
 - name: os_swift
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-os_swift
-  version: d62d6a23ac0b01d0320dbcb6c710dfd5f3cecfdf
+  version: 3b91c62e1de6e0d852476e3b74e39b7a55d77ec9
 - name: os_tempest
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-os_tempest
-  version: 9d2bfb09d1ebbc9102329b0d42de33aa321e57b1
+  version: 692209da1fdab6014e13e65be27ffb9b8c8578bb
 - name: plugins
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-plugins
-  version: 3d2e23bb7e1d6775789d7f65ce8a878a7ee1d3c7
+  version: 8685a0ba38b7f534dd4db971da6d54b495c79169
 - name: rabbitmq_server
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-rabbitmq_server
-  version: 9b0ce64fe235705e237bc4b476ecc0ad602d67a8
+  version: 50bffbf8f114c8100ec5e86ebac9baba5c4f233d
 - name: repo_build
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-repo_build
-  version: fe3ae20f74a912925d5c78040984957a6d55f9de
+  version: 9ce713e9762650e1041ba7d9ad3c207a0c65d0c4
 - name: repo_server
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-repo_server
-  version: 7ea0820e0941282cd5c5cc263e939ffbee54ba52
+  version: 275124b643d6e6a9c92d65be7a7f309fe6f0c6dc
 - name: rsyslog_client
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-rsyslog_client
-  version: 19615e47137eee46ee92c0308532fe1d2212333c
+  version: da0090d48b166e0ffe83c35483572e358a29d523
 - name: rsyslog_server
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-rsyslog_server
-  version: efd7b21798da49802012e390a0ddf7cc38636eeb
+  version: 0f4b5ac0e7a170bd9811875965b781d447a5517a
 - name: sshd
   scm: git
   src: https://github.com/willshersystems/ansible-sshd
-  version: 426e11c4dffeca09fcc4d16103a91e5e65180040
+  version: 0.5.1
 - name: bird
   scm: git
   src: https://github.com/logan2211/ansible-bird
-  version: 2c4d29560d3617abddf0e63e0c95536364dedd92
+  version: '1.2'
 - name: etcd
   scm: git
   src: https://github.com/logan2211/ansible-etcd
-  version: ef63b0c5fd352b61084fd5aca286ee7f3fea932b
+  version: '1.2'
 - name: unbound
   scm: git
   src: https://github.com/logan2211/ansible-unbound
-  version: 5329d03eb9c15373d648a801563087c576bbfcde
+  version: '1.4'
 - name: resolvconf
   scm: git
   src: https://github.com/logan2211/ansible-resolvconf
-  version: 3b2b7cf2e900b194829565b351bf32bb63954548
+  version: '1.2'
 - name: os_designate
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-os_designate
-  version: b7098a6bdea73c869f45a86e0cc78d21b032161e
+  version: cc9760d0a08083c1168999422ccefa0d56ead093
 - name: ceph.ceph-common
   scm: git
   src: https://github.com/ceph/ansible-ceph-common
-  version: ef149767fa9565ec887f0bdb007ff752bd61e5d5
+  version: v2.2.9
 - name: ceph.ceph-docker-common
   scm: git
   src: https://github.com/ceph/ansible-ceph-docker-common
 - name: ceph-mon
   scm: git
   src: https://github.com/ceph/ansible-ceph-mon
-  version: c5be4d6056dfe6a482ca3fcc483a6050cc8929a1
+  version: v2.2.9
 - name: ceph-osd
   scm: git
   src: https://github.com/ceph/ansible-ceph-osd
-  version: 7bc5a61ceb96e487b7a9fe9643f6dafa6492f2b5
+  version: v2.2.9
+- name: os_octavia
+  scm: git
+  src: https://git.openstack.org/openstack/openstack-ansible-os_octavia
+  version: 48ff9a634a3ea34c6811ebc10057708dc23ed76e
+- name: os_molteniron
+  scm: git
+  src: https://git.openstack.org/openstack/openstack-ansible-os_molteniron
+  version: 0de6fe5251b54881ab3eb8bf0a8d694dd4362430
index 8be36c7..92b5c55 100644 (file)
         delete: yes
       when:
         - OPNFV_RELENG_DEV_PATH != ""
+    - name: Copy extra vars to releng and bifrost
+      synchronize:
+        src: "{{ XCI_EXTRA_VARS_PATH }}"
+        dest: "{{ item }}"
+      with_items:
+        - "{{ OPNFV_RELENG_PATH }}/prototypes/xci/playbooks"
+        - "{{ OPENSTACK_BIFROST_PATH }}/playbooks/inventory"
+      when:
+        - XCI_EXTRA_VARS_PATH != ""
 
 - hosts: localhost
   connection: local
diff --git a/prototypes/xci/scripts/update-osa-version-files.sh b/prototypes/xci/scripts/update-osa-version-files.sh
new file mode 100755 (executable)
index 0000000..d822d25
--- /dev/null
@@ -0,0 +1,91 @@
+#!/bin/bash
+# SPDX-license-identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2017 SUSE LINUX GmbH and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# This script is used to pin the SHAs for the various roles in the
+# ansible-role-requirements file. It will also update the SHAs for
+# OSA and bifrost.
+
+set -e
+
+# NOTE(hwoarang) This could break if files are re-arranged in the future
+releng_xci_base="$(dirname $(readlink -f $0))/.."
+
+usage() {
+    echo """
+    ${0} <openstack-ansible commit SHA> [<bifrost commit SHA>]
+    """
+    exit 0
+}
+
+cleanup() {
+    [[ -d $tempdir ]] && rm -rf $tempdir
+}
+
+printme() {
+    echo "===> $1"
+}
+
+# Only need a single argument
+[[ $# -lt 1 || $# -gt 2 ]] && echo "Invalid number of arguments!" && usage
+
+tempdir="$(mktemp -d)"
+
+trap cleanup EXIT
+
+pushd $tempdir &> /dev/null
+
+printme "Downloading the sources-branch-updater-lib.sh library"
+
+printme "Cloning the openstack-ansible repository"
+(
+    git clone -q git://git.openstack.org/openstack/openstack-ansible && cd openstack-ansible && git checkout -q $1
+)
+
+popd &> /dev/null
+
+pushd $tempdir/openstack-ansible &> /dev/null
+source scripts/sources-branch-updater-lib.sh
+printme "Synchronize roles and packages"
+update_ansible_role_requirements "master" "true" "true"
+
+# Construct the ansible-role-requirements-file
+echo """---
+# SPDX-license-identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2017 Ericsson AB and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+# these versions are extracted based on the osa commit ${1} on $(git --no-pager log -1 --format=%cI $1)
+# https://review.openstack.org/gitweb?p=openstack/openstack-ansible.git;a=commit;h=$1""" > $releng_xci_base/file/ansible-role-requirements.yml
+cat $tempdir/openstack-ansible/ansible-role-requirements.yml >> $releng_xci_base/file/ansible-role-requirements.yml
+
+# Update the pinned OSA version
+sed -i -e "/^export OPENSTACK_OSA_VERSION/s@:-\"[a-z0-9]*@:-\"${1}@" \
+    -e "s/\(^# HEAD of osa.*of \).*/\1$(date +%d\.%m\.%Y)/" $releng_xci_base/config/pinned-versions
+
+# Update the pinned bifrost version
+[[ -n ${2:-} ]] && \
+    sed -i -e "/^export OPENSTACK_BIFROST_VERSION/s@:-\"[a-z0-9]*@:-\"${2}@" \
+    -e "s/\(^# HEAD of bifrost.*of \).*/\1$(date +%d\.%m\.%Y)/" $releng_xci_base/config/pinned-versions
+
+popd &> /dev/null
+
+printme ""
+printme "======================= Report ============================"
+printme ""
+printme "The $releng_xci_base/file/ansible-role-requirements.yml and"
+printme "$releng_xci_base/config/pinned-versions files have been"
+printme "updated. Please make sure you test the end result before"
+printme "committing it!"
+printme ""
+printme "==========================================================="
index 85f532a..aeaface 100644 (file)
@@ -27,3 +27,4 @@ XCI_LOOP: "{{ lookup('env','XCI_LOOP') }}"
 LOG_PATH: "{{ lookup('env','LOG_PATH') }}"
 OPNFV_HOST_IP: "{{ lookup('env','OPNFV_HOST_IP') }}"
 OPNFV_SSH_HOST_KEYS_PATH: "{{ lookup('env', 'OPNFV_SSH_HOST_KEYS_PATH') }}"
+XCI_EXTRA_VARS_PATH: "{{ lookup('env', 'XCI_EXTRA_VARS_PATH') }}"
index 3a65983..d711256 100755 (executable)
@@ -37,6 +37,15 @@ source "$XCI_PATH/config/${XCI_FLAVOR}-vars"
 # source xci configuration
 source $XCI_PATH/config/env-vars
 
+#-------------------------------------------------------------------------------
+# Sanitize local development environment variables
+#-------------------------------------------------------------------------------
+user_local_dev_vars=(OPNFV_RELENG_DEV_PATH OPNFV_OSA_DEV_PATH OPNFV_BIFROST_DEV_PATH)
+for local_user_var in ${user_local_dev_vars[@]}; do
+    [[ -n ${!local_user_var:-} ]] && export $local_user_var=${!local_user_var%/}/
+done
+unset user_local_dev_vars local_user_var
+
 #-------------------------------------------------------------------------------
 # Log info to console
 #-------------------------------------------------------------------------------
index 458bbda..285f838 100755 (executable)
@@ -12,8 +12,9 @@ set -o nounset
 set -o pipefail
 
 usage() {
-    echo "usage: $0 [-v] -d <destination> -i <installer_type> -a <installer_ip>" >&2
+    echo "usage: $0 [-v] -d <destination> -i <installer_type> -a <installer_ip> [-s <ssh_key>]" >&2
     echo "[-v] Virtualized deployment" >&2
+    echo "[-s <ssh_key>] Path to ssh key. For MCP deployments only" >&2
 }
 
 info ()  {
@@ -53,11 +54,12 @@ swap_to_public() {
 : ${DEPLOY_TYPE:=''}
 
 #Get options
-while getopts ":d:i:a:h:v" optchar; do
+while getopts ":d:i:a:h:s:v" optchar; do
     case "${optchar}" in
         d) dest_path=${OPTARG} ;;
         i) installer_type=${OPTARG} ;;
         a) installer_ip=${OPTARG} ;;
+        s) ssh_key=${OPTARG} ;;
         v) DEPLOY_TYPE="virt" ;;
         *) echo "Non-option argument: '-${OPTARG}'" >&2
            usage
@@ -70,6 +72,9 @@ done
 dest_path=${dest_path:-$HOME/opnfv-openrc.sh}
 installer_type=${installer_type:-$INSTALLER_TYPE}
 installer_ip=${installer_ip:-$INSTALLER_IP}
+if [ "${installer_type}" == "fuel" ] && [ "${BRANCH}" == "master" ]; then
+    installer_ip=${SALT_MASTER_IP}
+fi
 
 if [ -z $dest_path ] || [ -z $installer_type ] || [ -z $installer_ip ]; then
     usage
@@ -89,40 +94,45 @@ ssh_options="-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
 
 # Start fetching the files
 if [ "$installer_type" == "fuel" ]; then
-    #ip_fuel="10.20.0.2"
     verify_connectivity $installer_ip
+    if [ "${BRANCH}" == "master" ]; then
+        ssh_key=${ssh_key:-$SSH_KEY}
+        if [ -z $ssh_key ] || [ ! -f $ssh_key ]; then
+            error "Please provide path to existing ssh key for mcp deployment."
+            exit 2
+        fi
+        ssh_options+=" -i ${ssh_key}"
 
-    env=$(sshpass -p r00tme ssh 2>/dev/null $ssh_options root@${installer_ip} \
-        'fuel env'|grep operational|head -1|awk '{print $1}') &> /dev/null
-    if [ -z $env ]; then
-        error "No operational environment detected in Fuel"
-    fi
-    env_id="${FUEL_ENV:-$env}"
-
-    # Check if controller is alive (online='True')
-    controller_ip=$(sshpass -p r00tme ssh 2>/dev/null $ssh_options root@${installer_ip} \
-        "fuel node --env ${env_id} | grep controller | grep 'True\|  1' | awk -F\| '{print \$5}' | head -1" | \
-        sed 's/ //g') &> /dev/null
+        # retrieving controller vip
+        controller_ip=$(ssh 2>/dev/null ${ssh_options} ubuntu@${installer_ip} \
+            "sudo salt --out txt 'ctl01*' pillar.get _param:openstack_control_address | awk '{print \$2}'" | \
+            sed 's/ //g') &> /dev/null
 
-    if [ -z $controller_ip ]; then
-        error "The controller $controller_ip is not up. Please check that the POD is correctly deployed."
-    fi
+        info "Fetching rc file from controller $controller_ip..."
+        ssh ${ssh_options} ubuntu@${controller_ip} "sudo cat /root/keystonercv3" > $dest_path
+    else
+        #ip_fuel="10.20.0.2"
+        env=$(sshpass -p r00tme ssh 2>/dev/null ${ssh_options} root@${installer_ip} \
+            'fuel env'|grep operational|head -1|awk '{print $1}') &> /dev/null
+        if [ -z $env ]; then
+            error "No operational environment detected in Fuel"
+        fi
+        env_id="${FUEL_ENV:-$env}"
 
-    info "Fetching rc file from controller $controller_ip..."
-    sshpass -p r00tme ssh 2>/dev/null $ssh_options root@${installer_ip} \
-        "scp $ssh_options ${controller_ip}:/root/openrc ." &> /dev/null
-    sshpass -p r00tme scp 2>/dev/null $ssh_options root@${installer_ip}:~/openrc $dest_path &> /dev/null
+        # Check if controller is alive (online='True')
+        controller_ip=$(sshpass -p r00tme ssh 2>/dev/null ${ssh_options} root@${installer_ip} \
+            "fuel node --env ${env_id} | grep controller | grep 'True\|  1' | awk -F\| '{print \$5}' | head -1" | \
+            sed 's/ //g') &> /dev/null
 
-    #This file contains the mgmt keystone API, we need the public one for our rc file
-    admin_ip=$(cat $dest_path | grep "OS_AUTH_URL" | sed 's/^.*\=//' | sed "s/^\([\"']\)\(.*\)\1\$/\2/g" | sed s'/\/$//')
-    public_ip=$(sshpass -p r00tme ssh $ssh_options root@${installer_ip} \
-        "ssh ${controller_ip} 'source openrc; openstack endpoint list'" \
-        | grep keystone | grep public | sed 's/ /\n/g' | grep ^http | head -1) &> /dev/null
-        #| grep http | head -1 | cut -d '|' -f 4 | sed 's/v1\/.*/v1\//' | sed 's/ //g') &> /dev/null
-    #NOTE: this is super ugly sed 's/v1\/.*/v1\//'OS_AUTH_URL
-    # but sometimes the output of endpoint-list is like this: http://172.30.9.70:8004/v1/%(tenant_id)s
-    # Fuel virtual need a fix
+        if [ -z $controller_ip ]; then
+            error "The controller $controller_ip is not up. Please check that the POD is correctly deployed."
+        fi
 
+        info "Fetching rc file from controller $controller_ip..."
+        sshpass -p r00tme ssh 2>/dev/null ${ssh_options} root@${installer_ip} \
+            "scp ${ssh_options} ${controller_ip}:/root/openrc ." &> /dev/null
+        sshpass -p r00tme scp 2>/dev/null ${ssh_options} root@${installer_ip}:~/openrc $dest_path &> /dev/null
+    fi
     #convert to v3 URL
     auth_url=$(cat $dest_path|grep AUTH_URL)
     if [[ -z `echo $auth_url |grep v3` ]]; then
@@ -143,36 +153,40 @@ elif [ "$installer_type" == "apex" ]; then
     sudo scp $ssh_options root@$installer_ip:/home/stack/overcloudrc.v3 $dest_path
 
 elif [ "$installer_type" == "compass" ]; then
-    verify_connectivity $installer_ip
-    controller_ip=$(sshpass -p'root' ssh 2>/dev/null $ssh_options root@${installer_ip} \
-        'mysql -ucompass -pcompass -Dcompass -e"select *  from cluster;"' \
-        | awk -F"," '{for(i=1;i<NF;i++)if($i~/\"127.0.0.1\"/) {print $(i+2);break;}}'  \
-        | grep -oP "\d+.\d+.\d+.\d+")
-
-    if [ -z $controller_ip ]; then
-        error "The controller $controller_ip is not up. Please check that the POD is correctly deployed."
-    fi
-
-    info "Fetching rc file from controller $controller_ip..."
-    sshpass -p root ssh 2>/dev/null $ssh_options root@${installer_ip} \
-        "scp $ssh_options ${controller_ip}:/opt/admin-openrc.sh ." &> /dev/null
-    sshpass -p root scp 2>/dev/null $ssh_options root@${installer_ip}:~/admin-openrc.sh $dest_path &> /dev/null
-
-    info "This file contains the mgmt keystone API, we need the public one for our rc file"
-
-    if grep "OS_AUTH_URL.*v2" $dest_path > /dev/null 2>&1 ; then
-        public_ip=$(sshpass -p root ssh $ssh_options root@${installer_ip} \
-            "ssh ${controller_ip} 'source /opt/admin-openrc.sh; openstack endpoint show identity '" \
-            | grep publicurl | awk '{print $4}')
+    if [ "${BRANCH}" == "master" ]; then
+        sudo docker cp compass-tasks:/opt/openrc $dest_path &> /dev/null
+        sudo chown $(whoami):$(whoami) $dest_path
     else
-        public_ip=$(sshpass -p root ssh $ssh_options root@${installer_ip} \
-            "ssh ${controller_ip} 'source /opt/admin-openrc.sh; \
-                 openstack endpoint list --interface public --service identity '" \
-            | grep identity | awk '{print $14}')
-    fi
-    info "public_ip: $public_ip"
-    swap_to_public $public_ip
+        verify_connectivity $installer_ip
+        controller_ip=$(sshpass -p'root' ssh 2>/dev/null $ssh_options root@${installer_ip} \
+            'mysql -ucompass -pcompass -Dcompass -e"select *  from cluster;"' \
+            | awk -F"," '{for(i=1;i<NF;i++)if($i~/\"127.0.0.1\"/) {print $(i+2);break;}}'  \
+            | grep -oP "\d+.\d+.\d+.\d+")
+
+        if [ -z $controller_ip ]; then
+            error "The controller $controller_ip is not up. Please check that the POD is correctly deployed."
+        fi
 
+        info "Fetching rc file from controller $controller_ip..."
+        sshpass -p root ssh 2>/dev/null $ssh_options root@${installer_ip} \
+            "scp $ssh_options ${controller_ip}:/opt/admin-openrc.sh ." &> /dev/null
+        sshpass -p root scp 2>/dev/null $ssh_options root@${installer_ip}:~/admin-openrc.sh $dest_path &> /dev/null
+
+        info "This file contains the mgmt keystone API, we need the public one for our rc file"
+
+        if grep "OS_AUTH_URL.*v2" $dest_path > /dev/null 2>&1 ; then
+            public_ip=$(sshpass -p root ssh $ssh_options root@${installer_ip} \
+                "ssh ${controller_ip} 'source /opt/admin-openrc.sh; openstack endpoint show identity '" \
+                | grep publicurl | awk '{print $4}')
+        else
+            public_ip=$(sshpass -p root ssh $ssh_options root@${installer_ip} \
+                "ssh ${controller_ip} 'source /opt/admin-openrc.sh; \
+                     openstack endpoint list --interface public --service identity '" \
+                | grep identity | awk '{print $14}')
+        fi
+        info "public_ip: $public_ip"
+        swap_to_public $public_ip
+    fi
 
 elif [ "$installer_type" == "joid" ]; then
     # do nothing...for the moment
index 8fce2e0..c46ca89 100755 (executable)
@@ -61,8 +61,8 @@ main () {
     #make pid dir
     pidfile="/var/run/$jenkinsuser/jenkins_jnlp_pid"
     if ! [ -d /var/run/$jenkinsuser/ ]; then
-        mkdir /var/run/$jenkinsuser/
-        chown $jenkinsuser:$jenkinsuser /var/run/$jenkinsuser/
+        sudo mkdir /var/run/$jenkinsuser/
+        sudo chown $jenkinsuser:$jenkinsuser /var/run/$jenkinsuser/
     fi
 
     if [[ $skip_monit != true ]]; then
index f0c488a..5021b78 100644 (file)
@@ -30,7 +30,8 @@ node_list=(\
 'arm-pod1' 'arm-pod3' \
 'huawei-pod1' 'huawei-pod2' 'huawei-pod3' 'huawei-pod4' 'huawei-pod5' \
 'huawei-pod6' 'huawei-pod7' 'huawei-pod12' \
-'huawei-virtual1' 'huawei-virtual2' 'huawei-virtual3' 'huawei-virtual4')
+'huawei-virtual1' 'huawei-virtual2' 'huawei-virtual3' 'huawei-virtual4'\
+'zte-virtual1')
 
 
 if [[ ! " ${node_list[@]} " =~ " ${testbed} " ]]; then
index 110ac4c..2b91186 100644 (file)
@@ -20,7 +20,7 @@ class TestCases(BaseHandler):
 
         url = '{}/projects/{}/cases'.format(conf.base_url, project)
         cases = requests.get(url).json().get('testcases', [])
-        data = [t['name'] for t in cases]
+        data = [{t['name']: t['catalog_description']} for t in cases]
         self.write(json_encode(data))
 
 
index 49f4517..7fe97a8 100755 (executable)
@@ -98,3 +98,5 @@ echo "daemon off;" >> /etc/nginx/nginx.conf
 cp /home/opnfv/utils/test/reporting/docker/supervisor.conf /etc/supervisor/conf.d/
 
 ln -s /usr/bin/nodejs /usr/bin/node
+
+cd pages && /bin/bash angular.sh
index 6de856e..aeee3ba 100644 (file)
@@ -12,3 +12,4 @@ PyYAML==3.11
 simplejson==3.8.1
 jinja2==2.8
 tornado==4.4.2
+requests==2.12.5
index 5e315ba..b323dd0 100644 (file)
@@ -1,22 +1,19 @@
 [supervisord]
 nodaemon = true
 
-[program:reporting_tornado]
+[program:tornado]
 user = root
 directory = /home/opnfv/utils/test/reporting/api/api
 command = python server.py --port=800%(process_num)d
 process_name=%(program_name)s%(process_num)d
 numprocs=4
 numprocs_start=1
-autorestart = true
 
-[program:reporting_nginx]
+[program:nginx]
 user = root
 command = service nginx restart
-autorestart = true
 
-[program:reporting_angular]
+[program:configuration]
 user = root
 directory = /home/opnfv/utils/test/reporting/pages
-command = bash angular.sh
-autorestart = true
+command = bash config.sh
index e700e04..77ab784 100755 (executable)
@@ -107,7 +107,6 @@ for version in versions:
         scenario_results = rp_utils.getScenarios(healthcheck,
                                                  installer,
                                                  version)
-
         # get nb of supported architecture (x86, aarch64)
         architectures = rp_utils.getArchitectures(scenario_results)
         logger.info("Supported architectures: {}".format(architectures))
index 6e6585a..0304298 100755 (executable)
@@ -1,4 +1,15 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2017 Orange and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+# SPDX-license-identifier: Apache-2.0
+
 from urllib2 import Request, urlopen, URLError
+from datetime import datetime
 import json
 import jinja2
 import os
@@ -97,7 +108,13 @@ for version in rp_utils.get_config('general.versions'):
                     crit_rate = True
 
                 # Expect that the suite duration is inferior to 30m
-                if result['details']['duration'] < criteria_duration:
+                stop_date = datetime.strptime(result['stop_date'],
+                                              '%Y-%m-%d %H:%M:%S')
+                start_date = datetime.strptime(result['start_date'],
+                                               '%Y-%m-%d %H:%M:%S')
+
+                delta = stop_date - start_date
+                if (delta.total_seconds() < criteria_duration):
                     crit_time = True
 
                 result['criteria'] = {'tests': crit_tests,
index 080f27b..0e00ea6 100755 (executable)
@@ -1,8 +1,3 @@
-: ${SERVER_URL:='http://testresults.opnfv.org/reporting/api'}
-
-echo "var BASE_URL = 'http://${SERVER_URL}/landing-page'" >> app/scripts/app.config.js
-echo "var PROJECT_URL = 'http://${SERVER_URL}'" >> app/scripts/app.config.js
-
 apt-get install -y nodejs
 apt-get install -y npm
 npm install
index 14dbbff..f1688cf 100644 (file)
Binary files a/utils/test/reporting/pages/app/images/overview.png and b/utils/test/reporting/pages/app/images/overview.png differ
index f4eb65a..843a623 100644 (file)
     <script src="scripts/controllers/auth.controller.js"></script>
     <script src="scripts/controllers/admin.controller.js"></script>
     <script src="scripts/controllers/main.controller.js"></script>
-    <script src="scripts/app.config.js"></script>
     <script src="scripts/controllers/testvisual.controller.js"></script>
 
     <!-- endbuild -->
 </body>
 
-</html>
\ No newline at end of file
+</html>
diff --git a/utils/test/reporting/pages/app/scripts/app.config.js b/utils/test/reporting/pages/app/scripts/app.config.js
deleted file mode 100644 (file)
index e69de29..0000000
index 0f3a17a..44d9441 100644 (file)
  * Controller of the opnfvdashBoardAngularApp
  */
 angular.module('opnfvApp')
-    .controller('TableController', ['$scope', '$state', '$stateParams', '$http', 'TableFactory', function($scope, $state, $stateParams, $http, TableFactory) {
-
-        $scope.filterlist = [];
-        $scope.selection = [];
-        $scope.statusList = [];
-        $scope.projectList = [];
-        $scope.installerList = [];
-        $scope.versionlist = [];
-        $scope.loopci = [];
-        $scope.time = [];
-        $scope.tableDataAll = {};
-        $scope.tableInfoAll = {};
-        $scope.scenario = {};
-
-        $scope.VersionConfig = {
-            create: true,
-            valueField: 'title',
-            labelField: 'title',
-            delimiter: '|',
-            maxItems: 1,
-            placeholder: 'Version',
-            onChange: function(value) {
-                checkElementArrayValue($scope.selection, $scope.VersionOption);
-                $scope.selection.push(value);
-                // console.log($scope.selection);
-                getScenarioData();
+    .controller('TableController', ['$scope', '$state', '$stateParams', '$http', 'TableFactory', '$timeout',
+        function($scope, $state, $stateParams, $http, TableFactory, $timeout) {
+
+            $scope.filterlist = [];
+            $scope.selection = [];
+            $scope.statusList = [];
+            $scope.projectList = [];
+            $scope.installerList = [];
+            $scope.versionlist = [];
+            $scope.loopci = [];
+            $scope.time = [];
+            $scope.tableDataAll = {};
+            $scope.tableInfoAll = {};
+            $scope.scenario = {};
+            // $scope.selectProjects = [];
+
+
+            $scope.VersionConfig = {
+                create: true,
+                valueField: 'title',
+                labelField: 'title',
+                delimiter: '|',
+                maxItems: 1,
+                placeholder: 'Version',
+                onChange: function(value) {
+                    checkElementArrayValue($scope.selection, $scope.VersionOption);
+                    $scope.selection.push(value);
+                    // console.log($scope.selection);
+                    getScenarioData();
 
+                }
             }
-        }
 
-        $scope.LoopConfig = {
-            create: true,
-            valueField: 'title',
-            labelField: 'title',
-            delimiter: '|',
-            maxItems: 1,
-            placeholder: 'Loop',
-            onChange: function(value) {
-                checkElementArrayValue($scope.selection, $scope.LoopOption);
-                $scope.selection.push(value);
-                // console.log($scope.selection);
-                getScenarioData();
+            $scope.LoopConfig = {
+                create: true,
+                valueField: 'title',
+                labelField: 'title',
+                delimiter: '|',
+                maxItems: 1,
+                placeholder: 'Loop',
+                onChange: function(value) {
+                    checkElementArrayValue($scope.selection, $scope.LoopOption);
+                    $scope.selection.push(value);
+                    // console.log($scope.selection);
+                    getScenarioData();
 
+                }
             }
-        }
 
-        $scope.TimeConfig = {
-            create: true,
-            valueField: 'title',
-            labelField: 'title',
-            delimiter: '|',
-            maxItems: 1,
-            placeholder: 'Time',
-            onChange: function(value) {
-                checkElementArrayValue($scope.selection, $scope.TimeOption);
-                $scope.selection.push(value);
-                // console.log($scope.selection)
-                getScenarioData();
+            $scope.TimeConfig = {
+                create: true,
+                valueField: 'title',
+                labelField: 'title',
+                delimiter: '|',
+                maxItems: 1,
+                placeholder: 'Time',
+                onChange: function(value) {
+                    checkElementArrayValue($scope.selection, $scope.TimeOption);
+                    $scope.selection.push(value);
+                    // console.log($scope.selection)
+                    getScenarioData();
 
 
+                }
             }
-        }
 
 
-        init();
+            init();
 
-        function init() {
-            $scope.toggleSelection = toggleSelection;
-            getScenarioData();
-            // radioSetting();
-            getFilters();
-        }
+            function init() {
+                $scope.toggleSelection = toggleSelection;
+                getScenarioData();
+                getFilters();
+            }
 
-        function getFilters() {
-            TableFactory.getFilter().get({
+            function getFilters() {
+                TableFactory.getFilter().get({
+
+                }).$promise.then(function(response) {
+                    if (response != null) {
+                        $scope.statusList = response.filters.status;
+                        $scope.projectList = response.filters.projects;
+                        $scope.installerList = response.filters.installers;
+                        $scope.versionlist = response.filters.version;
+                        $scope.loopci = response.filters.loops;
+                        $scope.time = response.filters.time;
+
+                        $scope.statusListString = $scope.statusList.toString();
+                        $scope.projectListString = $scope.projectList.toString();
+                        $scope.installerListString = $scope.installerList.toString();
+                        $scope.VersionSelected = $scope.versionlist[1];
+                        $scope.LoopCiSelected = $scope.loopci[0];
+                        $scope.TimeSelected = $scope.time[0];
+                        radioSetting($scope.versionlist, $scope.loopci, $scope.time);
+
+                    } else {
+                        alert("网络错误");
+                    }
+                })
+            }
 
+            function getScenarioData() {
 
-            }).$promise.then(function(response) {
-                if (response != null) {
-                    $scope.statusList = response.filters.status;
-                    $scope.projectList = response.filters.projects;
-                    $scope.installerList = response.filters.installers;
-                    $scope.versionlist = response.filters.version;
-                    $scope.loopci = response.filters.loops;
-                    $scope.time = response.filters.time;
+                // var utl = BASE_URL + '/scenarios';
+                var data = {
+                    'status': ['success', 'danger', 'warning'],
+                    'projects': ['functest', 'yardstick'],
+                    'installers': ['apex', 'compass', 'fuel', 'joid'],
+                    'version': $scope.VersionSelected,
+                    'loops': $scope.LoopCiSelected,
+                    'time': $scope.TimeSelected
+                };
 
-                    $scope.statusListString = $scope.statusList.toString();
-                    $scope.projectListString = $scope.projectList.toString();
-                    $scope.installerListString = $scope.installerList.toString();
-                    $scope.VersionSelected = $scope.versionlist[1];
-                    $scope.LoopCiSelected = $scope.loopci[0];
-                    $scope.TimeSelected = $scope.time[0];
-                    radioSetting($scope.versionlist, $scope.loopci, $scope.time);
+                TableFactory.getScenario(data).then(function(response) {
+                    if (response.status == 200) {
+                        $scope.scenario = response.data;
 
-                } else {
-                    alert("网络错误");
-                }
-            })
-        }
+                        reSettingcolspan();
+                    }
+
+                }, function(error) {
+
+                })
 
-        function getScenarioData() {
-
-            var utl = BASE_URL + '/scenarios';
-            var data = {
-                'status': ['success', 'danger', 'warning'],
-                'projects': ['functest', 'yardstick'],
-                'installers': ['apex', 'compass', 'fuel', 'joid'],
-                'version': $scope.VersionSelected,
-                'loops': $scope.LoopCiSelected,
-                'time': $scope.TimeSelected
-            };
-            var config = {
-                headers: {
-                    'Content-Type': 'application/x-www-form-urlencoded;charset=utf-8;'
-                }
             }
-            $http.post(utl, data, config).then(function(response) {
-                if (response.status == 200) {
-                    $scope.scenario = response.data;
+
+            function reSettingcolspan() {
+                if ($scope.selectProjects == undefined || $scope.selectProjects == null) {
                     constructJson();
-                }
-            })
-        }
+                    $scope.colspan = $scope.tableDataAll.colspan;
 
-        //construct json 
-        function constructJson() {
+                } else {
+                    constructJson();
+                    $scope.colspan = $scope.tempColspan;
+                }
+                // console.log("test")
+            }
 
-            var colspan;
-            var InstallerData;
-            var projectsInfo;
-            $scope.tableDataAll["scenario"] = [];
+            //construct json 
+            function constructJson(selectProject) {
 
+                var colspan;
+                var InstallerData;
+                var projectsInfo;
+                $scope.tableDataAll["scenario"] = [];
 
-            for (var item in $scope.scenario.scenarios) {
 
-                var headData = Object.keys($scope.scenario.scenarios[item].installers).sort();
-                var scenarioStatus = $scope.scenario.scenarios[item].status;
-                var scenarioStatusDisplay;
-                if (scenarioStatus == "success") {
-                    scenarioStatusDisplay = "navy";
-                } else if (scenarioStatus == "danger") {
-                    scenarioStatusDisplay = "danger";
-                } else if (scenarioStatus == "warning") {
-                    scenarioStatusDisplay = "warning";
-                }
+                for (var item in $scope.scenario.scenarios) {
 
-                InstallerData = headData;
-                var projectData = [];
-                var datadisplay = [];
-                var projects = [];
+                    var headData = Object.keys($scope.scenario.scenarios[item].installers).sort();
+                    var scenarioStatus = $scope.scenario.scenarios[item].status;
+                    var scenarioStatusDisplay;
+                    if (scenarioStatus == "success") {
+                        scenarioStatusDisplay = "navy";
+                    } else if (scenarioStatus == "danger") {
+                        scenarioStatusDisplay = "danger";
+                    } else if (scenarioStatus == "warning") {
+                        scenarioStatusDisplay = "warning";
+                    }
 
-                for (var j = 0; j < headData.length; j++) {
+                    InstallerData = headData;
+                    var projectData = [];
+                    var datadisplay = [];
+                    var projects = [];
 
-                    projectData.push($scope.scenario.scenarios[item].installers[headData[j]]);
-                }
-                for (var j = 0; j < projectData.length; j++) {
-
-                    for (var k = 0; k < projectData[j].length; k++) {
-                        projects.push(projectData[j][k].project);
-                        var temArray = [];
-                        if (projectData[j][k].score == null) {
-                            temArray.push("null");
-                            temArray.push(projectData[j][k].project);
-                            temArray.push(headData[j]);
-                        } else {
-                            temArray.push(projectData[j][k].score);
-                            temArray.push(projectData[j][k].project);
-                            temArray.push(headData[j]);
-                        }
+                    for (var j = 0; j < headData.length; j++) {
 
+                        projectData.push($scope.scenario.scenarios[item].installers[headData[j]]);
+                    }
+                    for (var j = 0; j < projectData.length; j++) {
+
+                        for (var k = 0; k < projectData[j].length; k++) {
+                            projects.push(projectData[j][k].project);
+                            var temArray = [];
+                            if (projectData[j][k].score == null) {
+                                temArray.push("null");
+                                temArray.push(projectData[j][k].project);
+                                temArray.push(headData[j]);
+                            } else {
+                                temArray.push(projectData[j][k].score);
+                                temArray.push(projectData[j][k].project);
+                                temArray.push(headData[j]);
+                            }
+
+
+                            if (projectData[j][k].status == "platinium") {
+                                temArray.push("primary");
+                                temArray.push("P");
+                            } else if (projectData[j][k].status == "gold") {
+                                temArray.push("danger");
+                                temArray.push("G");
+                            } else if (projectData[j][k].status == "silver") {
+                                temArray.push("warning");
+                                temArray.push("S");
+                            } else if (projectData[j][k].status == null) {
+                                temArray.push("null");
+                            }
+
+                            datadisplay.push(temArray);
 
-                        if (projectData[j][k].status == "platinium") {
-                            temArray.push("primary");
-                            temArray.push("P");
-                        } else if (projectData[j][k].status == "gold") {
-                            temArray.push("danger");
-                            temArray.push("G");
-                        } else if (projectData[j][k].status == "silver") {
-                            temArray.push("warning");
-                            temArray.push("S");
-                        } else if (projectData[j][k].status == null) {
-                            temArray.push("null");
                         }
 
-                        datadisplay.push(temArray);
-
                     }
 
-                }
+                    colspan = projects.length / headData.length;
 
-                colspan = projects.length / headData.length;
-
-                var tabledata = {
-                    scenarioName: item,
-                    Installer: InstallerData,
-                    projectData: projectData,
-                    projects: projects,
-                    datadisplay: datadisplay,
-                    colspan: colspan,
-                    status: scenarioStatus,
-                    statusDisplay: scenarioStatusDisplay
-                };
+                    var tabledata = {
+                        scenarioName: item,
+                        Installer: InstallerData,
+                        projectData: projectData,
+                        projects: projects,
+                        datadisplay: datadisplay,
+                        colspan: colspan,
+                        status: scenarioStatus,
+                        statusDisplay: scenarioStatusDisplay
+                    };
 
-                JSON.stringify(tabledata);
-                $scope.tableDataAll.scenario.push(tabledata);
+                    JSON.stringify(tabledata);
+                    $scope.tableDataAll.scenario.push(tabledata);
 
-                // console.log(tabledata);
 
-            }
+                    // console.log(tabledata);
+
+                }
 
 
-            projectsInfo = $scope.tableDataAll.scenario[0].projects;
+                projectsInfo = $scope.tableDataAll.scenario[0].projects;
 
-            var tempHeadData = [];
+                var tempHeadData = [];
 
-            for (var i = 0; i < InstallerData.length; i++) {
-                for (var j = 0; j < colspan; j++) {
-                    tempHeadData.push(InstallerData[i]);
+                for (var i = 0; i < InstallerData.length; i++) {
+                    for (var j = 0; j < colspan; j++) {
+                        tempHeadData.push(InstallerData[i]);
+                    }
                 }
-            }
 
-            //console.log(tempHeadData);
+                //console.log(tempHeadData);
 
-            var projectsInfoAll = [];
+                var projectsInfoAll = [];
 
-            for (var i = 0; i < projectsInfo.length; i++) {
-                var tempA = [];
-                tempA.push(projectsInfo[i]);
-                tempA.push(tempHeadData[i]);
-                projectsInfoAll.push(tempA);
+                for (var i = 0; i < projectsInfo.length; i++) {
+                    var tempA = [];
+                    tempA.push(projectsInfo[i]);
+                    tempA.push(tempHeadData[i]);
+                    projectsInfoAll.push(tempA);
 
-            }
-            //console.log(projectsInfoAll);
+                }
+                //console.log(projectsInfoAll);
 
-            $scope.tableDataAll["colspan"] = colspan;
-            $scope.tableDataAll["Installer"] = InstallerData;
-            $scope.tableDataAll["Projects"] = projectsInfoAll;
+                $scope.tableDataAll["colspan"] = colspan;
+                $scope.tableDataAll["Installer"] = InstallerData;
+                $scope.tableDataAll["Projects"] = projectsInfoAll;
 
-            // console.log($scope.tableDataAll);
-            $scope.colspan = $scope.tableDataAll.colspan;
+                // console.log($scope.tableDataAll);
+                $scope.colspan = $scope.tableDataAll.colspan;
+                console.log($scope.tableDataAll);
 
-        }
+            }
 
-        //get json element size
-        function getSize(jsondata) {
-            var size = 0;
-            for (var item in jsondata) {
-                size++;
+            //get json element size
+            function getSize(jsondata) {
+                var size = 0;
+                for (var item in jsondata) {
+                    size++;
+                }
+                return size;
             }
-            return size;
-        }
 
-        $scope.colspan = $scope.tableDataAll.colspan;
-        // console.log($scope.colspan);
 
+            // console.log($scope.colspan);
 
-        //find all same element index 
-        function getSameElementIndex(array, element) {
-            var indices = [];
-            var idx = array.indexOf(element);
-            while (idx != -1) {
-                indices.push(idx);
-                idx = array.indexOf(element, idx + 1);
-            }
-            //return indices;
-            var result = { element: element, index: indices };
-            JSON.stringify(result);
-            return result;
-        }
 
-        //delete element in array
-        function deletElement(array, index) {
-            array.splice(index, 1);
+            //find all same element index 
+            function getSameElementIndex(array, element) {
+                var indices = [];
+                var idx = array.indexOf(element);
+                while (idx != -1) {
+                    indices.push(idx);
+                    idx = array.indexOf(element, idx + 1);
+                }
+                //return indices;
+                var result = { element: element, index: indices };
+                JSON.stringify(result);
+                return result;
+            }
 
-        }
+            //delete element in array
+            function deletElement(array, index) {
+                array.splice(index, 1);
 
-        function radioSetting(array1, array2, array3) {
-            var tempVersion = [];
-            var tempLoop = [];
-            var tempTime = [];
-            for (var i = 0; i < array1.length; i++) {
-                var temp = {
-                    title: array1[i]
-                };
-                tempVersion.push(temp);
-            }
-            for (var i = 0; i < array2.length; i++) {
-                var temp = {
-                    title: array2[i]
-                };
-                tempLoop.push(temp);
             }
-            for (var i = 0; i < array3.length; i++) {
-                var temp = {
-                    title: array3[i]
-                };
-                tempTime.push(temp);
+
+            function radioSetting(array1, array2, array3) {
+                var tempVersion = [];
+                var tempLoop = [];
+                var tempTime = [];
+                for (var i = 0; i < array1.length; i++) {
+                    var temp = {
+                        title: array1[i]
+                    };
+                    tempVersion.push(temp);
+                }
+                for (var i = 0; i < array2.length; i++) {
+                    var temp = {
+                        title: array2[i]
+                    };
+                    tempLoop.push(temp);
+                }
+                for (var i = 0; i < array3.length; i++) {
+                    var temp = {
+                        title: array3[i]
+                    };
+                    tempTime.push(temp);
+                }
+                $scope.VersionOption = tempVersion;
+                $scope.LoopOption = tempLoop;
+                $scope.TimeOption = tempTime;
             }
-            $scope.VersionOption = tempVersion;
-            $scope.LoopOption = tempLoop;
-            $scope.TimeOption = tempTime;
-        }
 
-        //remove element in the array
-        function removeArrayValue(arr, value) {
-            for (var i = 0; i < arr.length; i++) {
-                if (arr[i] == value) {
-                    arr.splice(i, 1);
-                    break;
+            //remove element in the array
+            function removeArrayValue(arr, value) {
+                for (var i = 0; i < arr.length; i++) {
+                    if (arr[i] == value) {
+                        arr.splice(i, 1);
+                        break;
+                    }
                 }
             }
-        }
 
-        //check if exist element
-        function checkElementArrayValue(arrayA, arrayB) {
-            for (var i = 0; i < arrayB.length; i++) {
-                if (arrayA.indexOf(arrayB[i].title) > -1) {
-                    removeArrayValue(arrayA, arrayB[i].title);
+            //check if exist element
+            function checkElementArrayValue(arrayA, arrayB) {
+                for (var i = 0; i < arrayB.length; i++) {
+                    if (arrayA.indexOf(arrayB[i].title) > -1) {
+                        removeArrayValue(arrayA, arrayB[i].title);
+                    }
                 }
             }
-        }
 
-        function toggleSelection(status) {
-            var idx = $scope.selection.indexOf(status);
+            function toggleSelection(status) {
+                var idx = $scope.selection.indexOf(status);
+
+                if (idx > -1) {
+                    $scope.selection.splice(idx, 1);
+                    filterData($scope.selection)
+                } else {
+                    $scope.selection.push(status);
+                    filterData($scope.selection)
+                }
+                // console.log($scope.selection);
 
-            if (idx > -1) {
-                $scope.selection.splice(idx, 1);
-                filterData($scope.selection)
-            } else {
-                $scope.selection.push(status);
-                filterData($scope.selection)
             }
-            // console.log($scope.selection);
 
-        }
+            //filter function
+            function filterData(selection) {
 
-        //filter function
-        function filterData(selection) {
+                $scope.selectInstallers = [];
+                $scope.selectProjects = [];
+                $scope.selectStatus = [];
+                for (var i = 0; i < selection.length; i++) {
+                    if ($scope.statusListString.indexOf(selection[i]) > -1) {
+                        $scope.selectStatus.push(selection[i]);
+                    }
+                    if ($scope.projectListString.indexOf(selection[i]) > -1) {
+                        $scope.selectProjects.push(selection[i]);
+                    }
+                    if ($scope.installerListString.indexOf(selection[i]) > -1) {
+                        $scope.selectInstallers.push(selection[i]);
+                    }
+                }
+
+
+                // $scope.colspan = $scope.selectProjects.length;
+                //when some selection is empty, we set it full
+                if ($scope.selectInstallers.length == 0) {
+                    $scope.selectInstallers = $scope.installerList;
 
-            $scope.selectInstallers = [];
-            $scope.selectProjects = [];
-            $scope.selectStatus = [];
-            for (var i = 0; i < selection.length; i++) {
-                if ($scope.statusListString.indexOf(selection[i]) > -1) {
-                    $scope.selectStatus.push(selection[i]);
                 }
-                if ($scope.projectListString.indexOf(selection[i]) > -1) {
-                    $scope.selectProjects.push(selection[i]);
+                if ($scope.selectProjects.length == 0) {
+                    $scope.selectProjects = $scope.projectList;
+                    $scope.colspan = $scope.tableDataAll.colspan;
+                } else {
+                    $scope.colspan = $scope.selectProjects.length;
+                    $scope.tempColspan = $scope.colspan;
                 }
-                if ($scope.installerListString.indexOf(selection[i]) > -1) {
-                    $scope.selectInstallers.push(selection[i]);
+                if ($scope.selectStatus.length == 0) {
+                    $scope.selectStatus = $scope.statusList
                 }
-            }
 
-            $scope.colspan = $scope.selectProjects.length;
-            //when some selection is empty, we set it full
-            if ($scope.selectInstallers.length == 0) {
-                $scope.selectInstallers = $scope.installerList;
+                // console.log($scope.selectStatus);
+                // console.log($scope.selectProjects);
 
             }
-            if ($scope.selectProjects.length == 0) {
-                $scope.selectProjects = $scope.projectList;
-                $scope.colspan = $scope.tableDataAll.colspan;
-            }
-            if ($scope.selectStatus.length == 0) {
-                $scope.selectStatus = $scope.statusList
-            }
 
-            // console.log($scope.selectStatus);
-            // console.log($scope.selectProjects);
 
         }
-
-
-    }]);
\ No newline at end of file
+    ]);
\ No newline at end of file
index 7082aed..894e10f 100644 (file)
 angular.module('opnfvApp')
     .controller('testVisualController', ['$scope', '$state', '$stateParams', 'TableFactory', 'ngDialog', '$http', '$loading',
         function($scope, $state, $stateParams, TableFactory, ngDialog, $http, $loading) {
-            $scope.dovet = "59,222,156,317";
-            $scope.functest = "203,163,334,365";
-            $scope.yardstick = "398,161,513,384";
-            $scope.vsperf = "567,163,673,350";
-            $scope.stor = "686,165,789,341";
-            $scope.qtip = "802,164,905,341";
-            $scope.bootleneck = "917,161,1022,338";
-            $scope.noPopArea1 = "30,11,1243,146";
-            $scope.noPopArea2 = "1041,157,1250,561";
-            $scope.noPopArea3 = "15,392,1027,561";
+            $scope.dovet = "50,168,177,443";
+            $scope.functest = "194,173,356,442";
+            $scope.yardstick = "377,183,521,412";
+            $scope.vsperf = "542,185,640,414";
+            $scope.stor = "658,187,750,410";
+            $scope.qtip = "769,190,852,416";
+            $scope.bottlenecks = "870,192,983,419";
+            $scope.noPopArea1 = "26,8,1190,180";
+            $scope.noPopArea2 = "1018,193,1190,590";
+            $scope.noPopArea3 = "37,455,1003,584";
 
             init();
             $scope.showSelectValue = 0;
@@ -41,21 +41,35 @@ angular.module('opnfvApp')
                 $scope.tableData = null;
                 $scope.modalName = name;
 
-                var url = PROJECT_URL + '/projects/' + name + '/cases';
-
-                var config = {
-                    headers: {
-                        'Content-Type': 'application/x-www-form-urlencoded;charset=utf-8;'
-                    }
-                }
-                $http.get(url, config).then(function(response) {
+                TableFactory.getProjectTestCases(name).then(function(response) {
                     if (response.status == 200) {
                         $scope.tableData = response.data;
-                        $loading.finish('Key');
-
 
+                        $scope.tableData = constructObjectArray($scope.tableData);
+                        console.log($scope.tableData);
+                        $loading.finish('Key');
                     }
+                }, function(error) {
+
                 })
+
+            }
+
+            //construct key value for tableData
+            function constructObjectArray(array) {
+                var templateArray = [];
+                for (var i = 0; i < array.length; i++) {
+                    var key = Object.keys(array[i])[0];
+                    var value = array[i][key];
+                    var temp = {
+                        'key': key,
+                        'value': value
+                    };
+                    templateArray.push(temp);
+
+                }
+
+                return templateArray;
             }
 
             function getDetail(casename) {
@@ -64,7 +78,7 @@ angular.module('opnfvApp')
                     'testcase': casename
                 }).$promise.then(function(response) {
                     if (response != null) {
-                        $scope.project_name_modal = response.project_name;
+                        $scope.name_modal = response.name;
                         $scope.description_modal = response.description;
                         openTestDetail();
                     }
@@ -108,4 +122,4 @@ angular.module('opnfvApp')
 
 
         }
-    ]);
\ No newline at end of file
+    ]);
index 2a8cbd0..f0af34f 100644 (file)
@@ -4,11 +4,24 @@
  * get data factory
  */
 angular.module('opnfvApp')
-    .factory('TableFactory', function($resource, $rootScope) {
+    .factory('TableFactory', function($resource, $rootScope, $http) {
+
+        var BASE_URL = 'http://testresults.opnfv.org/reporting2';
+        $.ajax({
+          url: 'config.json',
+          async: false,
+          dataType: 'json',
+          success: function (response) {
+              BASE_URL = response.url;
+          },
+          error: function (response){
+              alert('fail to get api url, using default: http://testresults.opnfv.org/reporting2')
+          }
+        });
 
         return {
             getFilter: function() {
-                return $resource(BASE_URL + '/filters', {}, {
+                return $resource(BASE_URL + '/landing-page/filters', {}, {
                     'get': {
                         method: 'GET',
 
@@ -16,33 +29,42 @@ angular.module('opnfvApp')
                 });
             },
             getScenario: function() {
-                return $resource(BASE_URL + '/scenarios', {}, {
-                    'post': {
-                        method: 'POST',
+
+                var config = {
+                    headers: {
+                        'Content-Type': 'application/x-www-form-urlencoded;charset=utf-8;'
                     }
-                })
+                }
+
+                return $http.post(BASE_URL + '/landing-page/scenarios', {}, config);
             },
+
+
             getProjectUrl: function() {
-                return $resource(PROJECT_URL + '/projects-page/projects', {}, {
+                return $resource(BASE_URL + '/projects-page/projects', {}, {
                     'get': {
                         method: 'GET'
                     }
                 })
             },
-            getProjectTestCases: function() {
-                return $resource(PROJECT_URL + '/projects/:project/cases', { project: '@project' }, {
-                    'get': {
-                        method: 'GET'
+            getProjectTestCases: function(name) {
+                var config = {
+                    headers: {
+                        'Content-Type': 'application/x-www-form-urlencoded;charset=utf-8;'
                     }
-                })
+                };
+                return $http.get(BASE_URL + '/projects/' + name + '/cases', {}, config)
+
+
             },
             getProjectTestCaseDetail: function() {
-                return $resource(PROJECT_URL + '/projects/:project/cases/:testcase', { project: '@project', testcase: '@testcase' }, {
+                return $resource(BASE_URL + '/projects/:project/cases/:testcase', { project: '@project', testcase: '@testcase' }, {
                     'get': {
 
                         method: 'GET'
                     }
                 })
             }
+
         };
-    });
\ No newline at end of file
+    });
index 7ab869b..8e567ca 100644 (file)
@@ -71,6 +71,7 @@ html {
     border-radius: 5px 5px 5px 5px;
     background-color: #f3f3f4;
     opacity: 0.9;
+    width: 200px;
 }
 
 .ngdialog.ngdialog.ngdialog-theme-default .ngdialog-content {
index 9d146ba..4de4e18 100644 (file)
@@ -4,7 +4,7 @@
 <div class="row">
 
 
-    <div class="row  border-bottom white-bg dashboard-header" style="border-radius: 5px 5px 5px 5px ">
+    <div class="row  border-bottom white-bg dashboard-header" style="border-radius: 5px 5px 5px 5px;width:90%;margin-left:30px; ">
 
         <h3>OPNFV Test ecosystem
             <small> *mouse over display test case list</small>
@@ -20,7 +20,7 @@
                  <area shape="rect" coords={{vsperf}} alt="test" href="{{vsperfurl}}" onmouseover="pop(event)" ng-mouseover="myTrigger('vsperf')" />
                   <area shape="rect" coords={{stor}} alt="test" href="{{storperfurl}}" onmouseover="pop(event)" ng-mouseover="myTrigger('storperf')"/>
                    <area shape="rect" coords={{qtip}} alt="test" href="{{qtipurl}}"  onmouseover="pop(event)" ng-mouseover="myTrigger('qtip')" />
-                    <area shape="rect" coords={{bootleneck}} alt="test"  href="{{bottlenecksurl}}" onmouseover="pop(event)" ng-mouseover="myTrigger('bootlenecks')" />
+                    <area shape="rect" coords={{bottlenecks}} alt="test"  href="{{bottlenecksurl}}" onmouseover="pop(event)" ng-mouseover="myTrigger('bottlenecks')" />
                      <area shape="rect" coords={{noPopArea1}} alt="test" onmouseover="pophide(event)"  />
                       <area shape="rect" coords={{noPopArea2}} alt="test"  onmouseover="pophide(event)"  />
                        <area shape="rect" coords={{noPopArea3}} alt="test"  onmouseover="pophide(event)"  />
@@ -70,7 +70,7 @@
     </div>
 
 
-    <div id="popup" class="popup" style="width: 20%;height: 35%" dw-loading="Key">
+    <div id="popup" class="popup" style="width: 40%;height: 35%" dw-loading="Key">
 
         <div ng-show="tableData.length==0">
             <center>
@@ -90,7 +90,8 @@
 
                 <tbody>
                     <tr dir-paginate="data in tableData | itemsPerPage: 8  track by $index ">
-                        <td><a ng-click="getDetail(data)"> {{data}}</a></td>
+                        <td ng-if="data.value!=null"><a ng-click="getDetail(data.key)"> {{data.value}}</a></td>
+                        <td ng-if="data.value==null"><a ng-click="getDetail(data.key)"> null</a></td>
                         <tr>
 
                 </tbody>
             $('#popup').hide();
             return true;
         }
-    </script>
\ No newline at end of file
+    </script>
index 8918b3f..db6f712 100644 (file)
@@ -2,6 +2,6 @@
 <div class="hr-line-dashed"></div>
 
 
-<strong> name</strong>: {{project_name_modal}}<br>
+<strong> name</strong>: {{name_modal}}<br>
 
-<strong>description</strong>: {{description_modal}}<br>
\ No newline at end of file
+<strong>description</strong>: {{description_modal}}<br>
diff --git a/utils/test/reporting/pages/config.sh b/utils/test/reporting/pages/config.sh
new file mode 100755 (executable)
index 0000000..f9bb89a
--- /dev/null
@@ -0,0 +1,3 @@
+: ${SERVER_URL:='testresults.opnfv.org/reporting2'}
+
+echo "{\"url\": \"http://${SERVER_URL}\"}" > dist/config.json
diff --git a/utils/test/reporting/run_test.sh b/utils/test/reporting/run_test.sh
new file mode 100755 (executable)
index 0000000..8c674ce
--- /dev/null
@@ -0,0 +1,44 @@
+#!/bin/bash
+set -o errexit
+set -o pipefail
+
+
+# Get script directory
+SCRIPTDIR=`dirname $0`
+
+# Creating virtual environment
+if [ ! -z $VIRTUAL_ENV ]; then
+    venv=$VIRTUAL_ENV
+else
+    venv=$SCRIPTDIR/.venv
+    virtualenv $venv
+fi
+
+source $venv/bin/activate
+
+export CONFIG_REPORTING_YAML=$SCRIPTDIR/reporting.yaml
+
+# ***************
+# Run unit tests
+# ***************
+echo "Running unit tests..."
+
+# install python packages
+easy_install -U setuptools
+easy_install -U pip
+pip install -r $SCRIPTDIR/docker/requirements.pip
+pip install -e $SCRIPTDIR
+
+python $SCRIPTDIR/setup.py develop
+
+# unit tests
+# TODO: remove cover-erase
+# To be deleted when all functest packages will be listed
+nosetests --with-xunit \
+         --cover-package=$SCRIPTDIR/utils \
+         --with-coverage \
+         --cover-xml \
+         $SCRIPTDIR/tests/unit
+rc=$?
+
+deactivate
diff --git a/utils/test/reporting/run_unit_tests.sh b/utils/test/reporting/run_unit_tests.sh
deleted file mode 100755 (executable)
index 6b0e3b2..0000000
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o pipefail
-
-# ******************************
-# prepare the env for the tests
-# ******************************
-# Either Workspace is set (CI)
-if [ -z $WORKSPACE ]
-then
-    WORKSPACE="."
-fi
-
-export CONFIG_REPORTING_YAML=./reporting.yaml
-
-# ***************
-# Run unit tests
-# ***************
-echo "Running unit tests..."
-
-# start vitual env
-virtualenv $WORKSPACE/reporting_venv
-source $WORKSPACE/reporting_venv/bin/activate
-
-# install python packages
-easy_install -U setuptools
-easy_install -U pip
-pip install -r $WORKSPACE/docker/requirements.pip
-pip install -e $WORKSPACE
-
-python $WORKSPACE/setup.py develop
-
-# unit tests
-# TODO: remove cover-erase
-# To be deleted when all functest packages will be listed
-nosetests --with-xunit \
-         --cover-package=utils \
-         --with-coverage \
-         --cover-xml \
-         tests/unit
-rc=$?
-
-deactivate
index 599a938..0a178ba 100644 (file)
@@ -117,19 +117,29 @@ def getScenarios(case, installer, version):
     url = ("http://" + url_base + "?case=" + case +
            "&period=" + str(period) + "&installer=" + installer +
            "&version=" + version)
-    request = Request(url)
 
     try:
+        request = Request(url)
         response = urlopen(request)
         k = response.read()
         results = json.loads(k)
         test_results = results['results']
-    except URLError as e:
-        print('Got an error code:', e)
+
+        page = results['pagination']['total_pages']
+        if page > 1:
+            test_results = []
+            for i in range(1, page + 1):
+                url_page = url + "&page=" + str(i)
+                request = Request(url_page)
+                response = urlopen(request)
+                k = response.read()
+                results = json.loads(k)
+                test_results += results['results']
+    except URLError as err:
+        print('Got an error code:', err)
 
     if test_results is not None:
         test_results.reverse()
-
         scenario_results = {}
 
         for r in test_results:
@@ -157,7 +167,6 @@ def getScenarioStats(scenario_results):
     return scenario_stats
 
 
-# TODO convergence with above function getScenarios
 def getScenarioStatus(installer, version):
     period = get_config('general.period')
     url_base = get_config('testapi.url')
@@ -213,8 +222,8 @@ def getQtipResults(version, installer):
         k = response.read()
         response.close()
         results = json.loads(k)['results']
-    except URLError as e:
-        print('Got an error code:', e)
+    except URLError as err:
+        print('Got an error code:', err)
 
     result_dict = {}
     if results:
@@ -427,9 +436,9 @@ def export_csv(scenario_file_name, installer, version):
                                     "/functest/scenario_history_" +
                                     installer + ".csv")
     scenario_installer_file = open(scenario_installer_file_name, "a")
-    with open(scenario_file_name, "r") as f:
+    with open(scenario_file_name, "r") as scenario_file:
         scenario_installer_file.write("date,scenario,installer,detail,score\n")
-        for line in f:
+        for line in scenario_file:
             if installer in line:
                 scenario_installer_file.write(line)
         scenario_installer_file.close
index 4a2f23a..8c701c3 100644 (file)
         $stateProvider.
             state('home', {
                 url: '/',
-                templateUrl: '/testapi-ui/components/home/home.html'
+                templateUrl: 'testapi-ui/components/home/home.html'
             }).
             state('about', {
                 url: '/about',
-                templateUrl: '/testapi-ui/components/about/about.html'
+                templateUrl: 'testapi-ui/components/about/about.html'
             }).
             state('guidelines', {
                 url: '/guidelines',
-                templateUrl: '/testapi-ui/components/guidelines/guidelines.html',
+                templateUrl: 'testapi-ui/components/guidelines/guidelines.html',
                 controller: 'GuidelinesController as ctrl'
             }).
             state('communityResults', {
                 url: '/community_results',
-                templateUrl: '/testapi-ui/components/results/results.html',
+                templateUrl: 'testapi-ui/components/results/results.html',
                 controller: 'ResultsController as ctrl'
             }).
             state('userResults', {
-                url: '/user_results',
+                url: 'user_results',
                 templateUrl: '/testapi-ui/components/results/results.html',
                 controller: 'ResultsController as ctrl'
             }).
             state('resultsDetail', {
                 url: '/results/:testID',
-                templateUrl: '/testapi-ui/components/results-report' +
+                templateUrl: 'testapi-ui/components/results-report' +
                              '/resultsReport.html',
                 controller: 'ResultsReportController as ctrl'
             }).
             }).
             state('authFailure', {
                 url: '/auth_failure',
-                templateUrl: '/testapi-ui/components/home/home.html',
+                templateUrl: 'testapi-ui/components/home/home.html',
                 controller: 'AuthFailureController as ctrl'
             }).
             state('logout', {
                 url: '/logout',
-                templateUrl: '/testapi-ui/components/logout/logout.html',
+                templateUrl: 'testapi-ui/components/logout/logout.html',
                 controller: 'LogoutController as ctrl'
             }).
             state('userVendors', {
diff --git a/utils/test/testapi/3rd_party/static/testapi-ui/assets/img/OpenStack_Project_Refstack_mascot_90x90.png b/utils/test/testapi/3rd_party/static/testapi-ui/assets/img/OpenStack_Project_Refstack_mascot_90x90.png
deleted file mode 100755 (executable)
index 4695090..0000000
Binary files a/utils/test/testapi/3rd_party/static/testapi-ui/assets/img/OpenStack_Project_Refstack_mascot_90x90.png and /dev/null differ
diff --git a/utils/test/testapi/3rd_party/static/testapi-ui/assets/img/openstack-logo.png b/utils/test/testapi/3rd_party/static/testapi-ui/assets/img/openstack-logo.png
deleted file mode 100644 (file)
index 826bf2e..0000000
Binary files a/utils/test/testapi/3rd_party/static/testapi-ui/assets/img/openstack-logo.png and /dev/null differ
diff --git a/utils/test/testapi/3rd_party/static/testapi-ui/assets/img/refstack-logo.png b/utils/test/testapi/3rd_party/static/testapi-ui/assets/img/refstack-logo.png
deleted file mode 100755 (executable)
index fc45f3e..0000000
Binary files a/utils/test/testapi/3rd_party/static/testapi-ui/assets/img/refstack-logo.png and /dev/null differ
diff --git a/utils/test/testapi/3rd_party/static/testapi-ui/assets/img/testapi-logo.png b/utils/test/testapi/3rd_party/static/testapi-ui/assets/img/testapi-logo.png
new file mode 100644 (file)
index 0000000..ff78eb1
Binary files /dev/null and b/utils/test/testapi/3rd_party/static/testapi-ui/assets/img/testapi-logo.png differ
index 2a43cd1..3056e1d 100644 (file)
 <div cg-busy="{promise:ctrl.resultsRequest,message:'Loading'}"></div>
 
 <div ng-show="ctrl.data" class="results-table">
-    <table ng-show="ctrl.data" class="table table-striped table-hover">
+    <table ng-data="ctrl.data.result" ng-show="ctrl.data" class="table table-striped table-hover">
         <thead>
             <tr>
-                <th ng-if="ctrl.isUserResults"></th>
-                <th>Upload Date</th>
-                <th>Test Run ID</th>
-                <th ng-if="ctrl.isUserResults">Vendor</th>
-                <th ng-if="ctrl.isUserResults">Product (version)</th>
-                <th ng-if="ctrl.isUserResults">Target Program</th>
-                <th ng-if="ctrl.isUserResults">Guideline</th>
-                <th ng-if="ctrl.isUserResults">Verified</th>
-                <th ng-if="ctrl.isUserResults">Shared</th>
+                <th>ID</th>
+                <th>Pod</th>
+                <th>Project</th>
+                <th>Test Case</th>
+                <th>Installer</th>
+                <th>Version</th>
+                <th>Scenario</th>
+                <th>Criteria</th>
+                <th>Start Date</th>
+                <th>Stop Date</th>
             </tr>
         </thead>
 
         <tbody>
             <tr ng-repeat-start="(index, result) in ctrl.data.results">
-                <td ng-if="ctrl.isUserResults">
-                    <a ng-if="!result.expanded"
-                       class="glyphicon glyphicon-plus"
-                       ng-click="result.expanded = true">
-                    </a>
-                    <a ng-if="result.expanded"
-                       class="glyphicon glyphicon-minus"
-                       ng-click="result.expanded = false">
-                    </a>
-                </td>
-                <td>{{result.created_at}}</td>
-                <td><a ui-sref="resultsDetail({testID: result.id})">
-                        {{result.id.slice(0, 8)}}...{{result.id.slice(-8)}}
-                    </a>
-                </td>
-                <td ng-if="ctrl.isUserResults">
-                    {{ctrl.vendors[result.product_version.product_info.organization_id].name || '-'}}
-                </td>
-                <td ng-if="ctrl.isUserResults">{{result.product_version.product_info.name || '-'}}
-                    <span ng-if="result.product_version.version">
-                        ({{result.product_version.version}})
-                    </span>
-                </td>
-                <td ng-if="ctrl.isUserResults">{{ctrl.targetMappings[result.meta.target] || '-'}}</td>
-                <td ng-if="ctrl.isUserResults">{{result.meta.guideline.slice(0, -5) || '-'}}</td>
-                <td ng-if="ctrl.isUserResults">
-                    <span ng-if="result.verification_status" class="glyphicon glyphicon-ok"></span>
-                    <span ng-if="!result.verification_status">-</span>
-
-                </td>
-                <td ng-if="ctrl.isUserResults">
-                    <span ng-show="result.meta.shared" class="glyphicon glyphicon-share"></span>
-                </td>
+                <td>{{ result._id }}</td>
+                <td>{{ result.pod_name }}</td>
+                <td>{{ result.project_name }}</td>
+                <td>{{ result.case_name }}</td>
+                <td>{{ result.installer }}</td>
+                <td>{{ result.version }}</td>
+                <td>{{ result.scenario }}</td>
+                <td>{{ result.criteria }}</td>
+                <td>{{ result.start_date }}</td>
+                <td>{{ result.stop_date }}</td>
             </tr>
-            <tr ng-if="result.expanded" ng-repeat-end>
-                <td></td>
-                <td colspan="3">
-                    <strong>Publicly Shared:</strong>
-                    <span ng-if="result.meta.shared == 'true' && !result.sharedEdit">Yes</span>
-                    <span ng-if="!result.meta.shared && !result.sharedEdit">
-                        <em>No</em>
-                    </span>
-                    <select ng-if="result.sharedEdit"
-                            ng-model="result.meta.shared"
-                            class="form-inline">
-                            <option value="true">Yes</option>
-                            <option value="">No</option>
-                    </select>
-                    <a ng-if="!result.sharedEdit"
-                       ng-click="result.sharedEdit = true"
-                       title="Edit"
-                       class="glyphicon glyphicon-pencil"></a>
-                    <a ng-if="result.sharedEdit"
-                       ng-click="ctrl.associateMeta(index,'shared',result.meta.shared)"
-                       title="Save"
-                       class="glyphicon glyphicon-floppy-disk"></a>
-                    <br />
-
-                    <strong>Associated Guideline:</strong>
-                    <span ng-if="!result.meta.guideline && !result.guidelineEdit">
-                        <em>None</em>
-                    </span>
-                    <span ng-if="result.meta.guideline && !result.guidelineEdit">
-                        {{result.meta.guideline.slice(0, -5)}}
-                    </span>
-                    <select ng-if="result.guidelineEdit"
-                            ng-model="result.meta.guideline"
-                            ng-options="o as o.slice(0, -5) for o in ctrl.versionList"
-                            class="form-inline">
-                        <option value="">None</option>
-                    </select>
-                    <a ng-if="!result.guidelineEdit"
-                       ng-click="ctrl.getVersionList();result.guidelineEdit = true"
-                       title="Edit"
-                       class="glyphicon glyphicon-pencil"></a>
-                    <a ng-if="result.guidelineEdit"
-                       ng-click="ctrl.associateMeta(index, 'guideline', result.meta.guideline)"
-                       title="Save"
-                       class="glyphicon glyphicon-floppy-disk">
-                    </a>
-                    <br />
-
-                    <strong>Associated Target Program:</strong>
-                    <span ng-if="!result.meta.target && !result.targetEdit">
-                        <em>None</em>
-                    </span>
-                    <span ng-if="result.meta.target && !result.targetEdit">
-                        {{ctrl.targetMappings[result.meta.target]}}</span>
-                    <select ng-if="result.targetEdit"
-                            ng-model="result.meta.target"
-                            class="form-inline">
-                        <option value="">None</option>
-                        <option value="platform">OpenStack Powered Platform</option>
-                        <option value="compute">OpenStack Powered Compute</option>
-                        <option value="object">OpenStack Powered Object Storage</option>
-                    </select>
-                    <a ng-if="!result.targetEdit"
-                       ng-click="result.targetEdit = true;"
-                       title="Edit"
-                       class="glyphicon glyphicon-pencil">
-                    </a>
-                    <a ng-if="result.targetEdit"
-                       ng-click="ctrl.associateMeta(index, 'target', result.meta.target)"
-                       title="Save"
-                       class="glyphicon glyphicon-floppy-disk">
-                    </a>
-                    <br />
-
-                    <strong>Associated Product:</strong>
-                    <span ng-if="!result.product_version && !result.productEdit">
-                        <em>None</em>
-                    </span>
-                    <span ng-if="result.product_version && !result.productEdit">
-                        <span ng-if="ctrl.products[result.product_version.product_info.id].product_type == 0">
-                            <a ui-sref="distro({id: result.product_version.product_info.id})">
-                                {{ctrl.products[result.product_version.product_info.id].name}}
-                                <small ng-if="result.product_version.version">
-                                    ({{result.product_version.version}})
-                                </small>
-                            </a>
-                        </span>
-                        <span ng-if="ctrl.products[result.product_version.product_info.id].product_type != 0">
-                            <a ui-sref="cloud({id: result.product_version.product_info.id})">
-                                {{ctrl.products[result.product_version.product_info.id].name}}
-                                <small ng-if="result.product_version.version">
-                                    ({{result.product_version.version}})
-                                </small>
-                            </a>
-                        </span>
-                    </span>
-
-                    <select ng-if="result.productEdit"
-                            ng-options="product as product.name for product in ctrl.products | arrayConverter | orderBy: 'name' track by product.id"
-                            ng-model="result.selectedProduct"
-                            ng-change="ctrl.getProductVersions(result)">
-                        <option value="">-- No Product --</option>
-                    </select>
-
-                    <span ng-if="result.productVersions.length && result.productEdit">
-                        <span class="glyphicon glyphicon-arrow-right" style="padding-right:3px;color:#303030;"></span>
-                        Version:
-                        <select ng-options="version as version.version for version in result.productVersions | orderBy: 'version' track by version.id"
-                                ng-model="result.selectedVersion">
-                        </select>
-
-                    </span>
-                    <a ng-if="!result.productEdit"
-                       ng-click="ctrl.prepVersionEdit(result)"
-                       title="Edit"
-                       class="glyphicon glyphicon-pencil">
-                    </a>
-                    <a ng-if="result.productEdit"
-                       ng-click="ctrl.associateProductVersion(result)"
-                       confirm="Once you associate this test to this product, ownership
-                                will be transferred to the product's vendor admins.
-                                Continue?"
-                       title="Save"
-                       class="glyphicon glyphicon-floppy-disk">
-                    </a>
-                    <br />
-                </td>
+            <tr ng-repeat-end=>
             </tr>
         </tbody>
     </table>
index 2b0338c..9e3540d 100644 (file)
@@ -38,7 +38,6 @@
         ctrl.associateMeta = associateMeta;
         ctrl.getVersionList = getVersionList;
         ctrl.getUserProducts = getUserProducts;
-        ctrl.getVendors = getVendors;
         ctrl.associateProductVersion = associateProductVersion;
         ctrl.getProductVersions = getProductVersions;
         ctrl.prepVersionEdit = prepVersionEdit;
             ctrl.update();
         }
 
-        ctrl.getVendors();
-
         /**
          * This will contact the TestAPI API to get a listing of test run
          * results.
             var start = $filter('date')(ctrl.startDate, 'yyyy-MM-dd');
             if (start) {
                 content_url =
-                    content_url + '&start_date=' + start + ' 00:00:00';
+                    content_url + '&from=' + start + ' 00:00:00';
             }
             var end = $filter('date')(ctrl.endDate, 'yyyy-MM-dd');
             if (end) {
-                content_url = content_url + '&end_date=' + end + ' 23:59:59';
+                content_url = content_url + '&to=' + end + ' 23:59:59';
             }
             if (ctrl.isUserResults) {
                 content_url = content_url + '&signed';
             ctrl.resultsRequest =
                 $http.get(content_url).success(function (data) {
                     ctrl.data = data;
-                    ctrl.totalItems = ctrl.data.pagination.total_pages *
-                        ctrl.itemsPerPage;
+                    ctrl.totalItems = ctrl.data.pagination.total_pages * ctrl.itemsPerPage;
                     ctrl.currentPage = ctrl.data.pagination.current_page;
                 }).error(function (error) {
                     ctrl.data = null;
                 });
         }
 
-        /**
-         * This will contact the TestAPI API to get a listing of
-         * vendors.
-         */
-        function getVendors() {
-            var contentUrl = testapiApiUrl + '/vendors';
-            ctrl.vendorsRequest =
-                $http.get(contentUrl).success(function (data) {
-                    ctrl.vendors = {};
-                    data.vendors.forEach(function(vendor) {
-                        ctrl.vendors[vendor.id] = vendor;
-                    });
-                }).error(function (error) {
-                    ctrl.vendors = null;
-                    ctrl.showError = true;
-                    ctrl.error =
-                        'Error retrieving vendor listing from server: ' +
-                        angular.toJson(error);
-                });
-        }
-
         /**
          * Send a PUT request to the API server to associate a product with
          * a test result.
index 748bd34..6433fa6 100644 (file)
@@ -8,10 +8,10 @@ docker_compose_yml = './docker-compose.yml'
 docker_compose_template = './docker-compose.yml.template'
 
 
-def render_docker_compose(port, swagger_url):
+def render_docker_compose(port, base_url):
     vars = {
         "expose_port": port,
-        "swagger_url": swagger_url,
+        "base_url": base_url,
     }
     template = env.get_template(docker_compose_template)
     yml = template.render(vars=vars)
@@ -22,7 +22,7 @@ def render_docker_compose(port, swagger_url):
 
 
 def main(args):
-    render_docker_compose(args.expose_port, args.swagger_url)
+    render_docker_compose(args.expose_port, args.base_url)
     os.system('docker-compose -f {} up -d'.format(docker_compose_yml))
 
 
@@ -33,8 +33,8 @@ if __name__ == '__main__':
                         required=False,
                         default=8000,
                         help='testapi exposed port')
-    parser.add_argument('-su', '--swagger-url',
+    parser.add_argument('-l', '--base-url',
                         type=str,
                         required=True,
-                        help='testapi exposed swagger-url')
+                        help='testapi exposed base-url')
     main(parser.parse_args())
index 5b131f7..cd68404 100644 (file)
@@ -8,7 +8,7 @@ services:
     container_name: opnfv-testapi
     environment:
       - mongodb_url=mongodb://mongo:27017/
-      - swagger_url={{ vars.swagger_url }}
+      - base_url={{ vars.base_url }}
     ports:
       - "{{ vars.expose_port }}:8000"
     links:
index e031e19..5311f35 100644 (file)
@@ -9,7 +9,7 @@
 #
 # Execution:
 #    $ docker run -dti -p 8001:8000 \
-#      -e "swagger_url=http://10.63.243.17:8001" \
+#      -e "base_url=http://10.63.243.17:8001" \
 #      -e "mongodb_url=mongodb://10.63.243.17:27017/" \
 #      opnfv/testapi:tag
 #
index 9f07efb..4f1be7d 100755 (executable)
@@ -6,6 +6,10 @@ if [ "$mongodb_url" != "" ]; then
     sudo crudini --set --existing $FILE mongo url $mongodb_url
 fi
 
-if [ "$swagger_url" != "" ]; then
-    sudo crudini --set --existing $FILE swagger base_url $swagger_url
+if [ "$base_url" != "" ]; then
+    sudo crudini --set --existing $FILE api url $base_url/api/v1
+    sudo crudini --set --existing $FILE swagger base_url $base_url
+    sudo crudini --set --existing $FILE ui url $base_url
+    sudo echo "{\"testapiApiUrl\": \"$base_url/api/v1\"}" > \
+        /usr/local/lib/python2.7/dist-packages/opnfv_testapi/static/testapi-ui/config.json
 fi
index 692e488..9ae2520 100644 (file)
@@ -10,6 +10,10 @@ dbname = test_results_collection
 # Listening port
 url = http://localhost:8000/api/v1
 port = 8000
+
+# Number of results for one page (integer value)
+#results_per_page = 20
+
 # With debug_on set to true, error traces will be shown in HTTP responses
 debug = True
 authenticate = False
@@ -41,7 +45,7 @@ openid_ns = http://specs.openid.net/auth/2.0
 # Return endpoint in Refstack's API. Value indicating the endpoint
 # where the user should be returned to after signing in. Openstack Id
 # Idp only supports HTTPS address types. (string value)
-openid_return_to = /api/v1/auth/signin_return
+openid_return_to = v1/auth/signin_return
 
 # Claimed identifier. This value must be set to
 # "http://specs.openid.net/auth/2.0/identifier_select". or to user
index b8c4fb4..4576d9b 100644 (file)
@@ -40,13 +40,13 @@ if __name__ == '__main__':
                         type=str,
                         required=False,
                         default=('http://testresults.opnfv.org'
-                                 '/test/swagger/spec.json'),
+                                 '/test/swagger/resources.json'),
                         help='Resource Listing Spec File')
     parser.add_argument('-au', '--api-declaration-url',
                         type=str,
                         required=False,
                         default=('http://testresults.opnfv.org'
-                                 '/test/swagger/spec'),
+                                 '/test/swagger/APIs'),
                         help='API Declaration Spec File')
     parser.add_argument('-o', '--output-directory',
                         required=True,
index 46765ff..f73c0ab 100644 (file)
@@ -17,6 +17,7 @@ class Config(object):
     def __init__(self):
         self.file = self.CONFIG if self.CONFIG else self._default_config()
         self._parse()
+        self._parse_per_page()
         self.static_path = os.path.join(
             os.path.dirname(os.path.normpath(__file__)),
             os.pardir,
@@ -37,6 +38,10 @@ class Config(object):
         [setattr(self, '{}_{}'.format(section, k), self._parse_value(v))
          for k, v in config.items(section)]
 
+    def _parse_per_page(self):
+        if not hasattr(self, 'api_results_per_page'):
+            self.api_results_per_page = 20
+
     @staticmethod
     def _parse_value(value):
         try:
index 2fc31ca..c7fed8f 100644 (file)
@@ -101,22 +101,71 @@ class GenericApiHandler(web.RequestHandler):
     @web.asynchronous
     @gen.coroutine
     def _list(self, query=None, res_op=None, *args, **kwargs):
+        sort = kwargs.get('sort')
+        page = kwargs.get('page', 0)
+        last = kwargs.get('last', 0)
+        per_page = kwargs.get('per_page', 0)
         if query is None:
             query = {}
-        data = []
         cursor = self._eval_db(self.table, 'find', query)
-        if 'sort' in kwargs:
-            cursor = cursor.sort(kwargs.get('sort'))
-        if 'last' in kwargs:
-            cursor = cursor.limit(kwargs.get('last'))
+        records_count = yield cursor.count()
+        total_pages = self._calc_total_pages(records_count,
+                                             last,
+                                             page,
+                                             per_page)
+        pipelines = self._set_pipelines(query, sort, last, page, per_page)
+        cursor = self._eval_db(self.table,
+                               'aggregate',
+                               pipelines,
+                               allowDiskUse=True)
+        data = list()
         while (yield cursor.fetch_next):
             data.append(self.format_data(cursor.next_object()))
         if res_op is None:
             res = {self.table: data}
         else:
             res = res_op(data, *args)
+        if total_pages > 0:
+            res.update({
+                'pagination': {
+                    'current_page': kwargs.get('page'),
+                    'total_pages': total_pages
+                }
+            })
         self.finish_request(res)
 
+    @staticmethod
+    def _calc_total_pages(records_count, last, page, per_page):
+        records_nr = records_count
+        if (records_count > last) and (last > 0):
+            records_nr = last
+
+        total_pages = 0
+        if page > 0:
+            total_pages, remainder = divmod(records_nr, per_page)
+            if remainder > 0:
+                total_pages += 1
+        if page > total_pages:
+            raises.BadRequest(
+                'Request page > total_pages [{}]'.format(total_pages))
+        return total_pages
+
+    @staticmethod
+    def _set_pipelines(query, sort, last, page, per_page):
+        pipelines = list()
+        if query:
+            pipelines.append({'$match': query})
+        if sort:
+            pipelines.append({'$sort': sort})
+
+        if page > 0:
+            pipelines.append({'$skip': (page - 1) * per_page})
+            pipelines.append({'$limit': per_page})
+        elif last > 0:
+            pipelines.append({'$limit': last})
+
+        return pipelines
+
     @web.asynchronous
     @gen.coroutine
     @check.not_exist
index 214706f..1773216 100644 (file)
@@ -11,12 +11,15 @@ from datetime import timedelta
 
 from bson import objectid
 
+from opnfv_testapi.common import config
 from opnfv_testapi.common import message
 from opnfv_testapi.common import raises
 from opnfv_testapi.resources import handlers
 from opnfv_testapi.resources import result_models
 from opnfv_testapi.tornado_swagger import swagger
 
+CONF = config.Config()
+
 
 class GenericResultHandler(handlers.GenericApiHandler):
     def __init__(self, application, request, **kwargs):
@@ -35,6 +38,8 @@ class GenericResultHandler(handlers.GenericApiHandler):
 
     def set_query(self):
         query = dict()
+        date_range = dict()
+
         for k in self.request.query_arguments.keys():
             v = self.get_query_argument(k)
             if k == 'project' or k == 'pod' or k == 'case':
@@ -47,8 +52,14 @@ class GenericResultHandler(handlers.GenericApiHandler):
                     query['start_date'] = obj
             elif k == 'trust_indicator':
                 query[k + '.current'] = float(v)
-            elif k != 'last':
+            elif k == 'from':
+                date_range.update({'$gte': str(v)})
+            elif k == 'to':
+                date_range.update({'$lt': str(v)})
+            elif k != 'last' and k != 'page':
                 query[k] = v
+        if date_range:
+            query['start_date'] = date_range
         return query
 
 
@@ -64,9 +75,11 @@ class ResultsCLHandler(GenericResultHandler):
                  - case : case name
                  - pod : pod name
                  - version : platform version (Arno-R1, ...)
-                 - installer (fuel, ...)
+                 - installer : fuel/apex/compass/joid/daisy
                  - build_tag : Jenkins build tag name
-                 - period : x (x last days)
+                 - period : x last days, incompatible with from/to
+                 - from : starting time in 2016-01-01 or 2016-01-01 00:01:23
+                 - to : ending time in 2016-01-01 or 2016-01-01 00:01:23
                  - scenario : the test scenario (previously version)
                  - criteria : the global criteria status passed or failed
                  - trust_indicator : evaluate the stability of the test case
@@ -113,22 +126,40 @@ class ResultsCLHandler(GenericResultHandler):
             @type period: L{string}
             @in period: query
             @required period: False
+            @param from: i.e. 2016-01-01 or 2016-01-01 00:01:23
+            @type from: L{string}
+            @in from: query
+            @required from: False
+            @param to: i.e. 2016-01-01 or 2016-01-01 00:01:23
+            @type to: L{string}
+            @in to: query
+            @required to: False
             @param last: last records stored until now
             @type last: L{string}
             @in last: query
             @required last: False
+            @param page: which page to list
+            @type page: L{int}
+            @in page: query
+            @required page: False
             @param trust_indicator: must be float
             @type trust_indicator: L{float}
             @in trust_indicator: query
             @required trust_indicator: False
         """
+        limitations = {'sort': {'start_date': -1}}
         last = self.get_query_argument('last', 0)
         if last is not None:
             last = self.get_int('last', last)
+            limitations.update({'last': last})
+
+        page = self.get_query_argument('page', None)
+        if page is not None:
+            page = self.get_int('page', page)
+            limitations.update({'page': page,
+                                'per_page': CONF.api_results_per_page})
 
-        self._list(query=self.set_query(),
-                   sort=[('start_date', -1)],
-                   last=last)
+        self._list(query=self.set_query(), **limitations)
 
     @swagger.operation(nickname="createTestResult")
     def post(self):
index ef74a08..adaf6f7 100644 (file)
@@ -20,38 +20,52 @@ def thread_execute(method, *args, **kwargs):
 class MemCursor(object):
     def __init__(self, collection):
         self.collection = collection
-        self.count = len(self.collection)
+        self.length = len(self.collection)
         self.sorted = []
 
     def _is_next_exist(self):
-        return self.count != 0
+        return self.length != 0
 
     @property
     def fetch_next(self):
         return thread_execute(self._is_next_exist)
 
     def next_object(self):
-        self.count -= 1
+        self.length -= 1
         return self.collection.pop()
 
     def sort(self, key_or_list):
-        key = key_or_list[0][0]
-        if key_or_list[0][1] == -1:
-            reverse = True
-        else:
-            reverse = False
+        for k, v in key_or_list.iteritems():
+            if v == -1:
+                reverse = True
+            else:
+                reverse = False
 
-        if key_or_list is not None:
             self.collection = sorted(self.collection,
-                                     key=itemgetter(key), reverse=reverse)
+                                     key=itemgetter(k), reverse=reverse)
         return self
 
     def limit(self, limit):
         if limit != 0 and limit < len(self.collection):
-            self.collection = self.collection[0:limit]
-            self.count = limit
+            self.collection = self.collection[0: limit]
+            self.length = limit
+        return self
+
+    def skip(self, skip):
+        if skip < self.length and (skip > 0):
+            # Drop the first 'skip' items, mirroring Mongo's $skip stage;
+            # limit() below likewise treats the list head as pipeline order.
+            self.collection = self.collection[skip:]
+            self.length -= skip
+        elif skip >= self.length:
+            self.collection = []
+            self.length = 0
         return self
 
+    def _count(self):
+        return self.length
+
+    def count(self):
+        return thread_execute(self._count)
+
 
 class MemDb(object):
 
@@ -187,6 +201,29 @@
     def find(self, *args):
         return MemCursor(self._find(*args))
 
+    def _aggregate(self, *args, **kwargs):
+        # In-memory stand-in for Mongo's aggregate(): the $match stage
+        # narrows the data set first; the remaining stages ($sort, $skip,
+        # $limit) are then applied to the cursor in pipeline order.
+        res = self.contents
+        for arg in args[0]:
+            for k, v in arg.iteritems():
+                if k == '$match':
+                    res = self._find(v)
+        cursor = MemCursor(res)
+        for arg in args[0]:
+            for k, v in arg.iteritems():
+                if k == '$sort':
+                    cursor = cursor.sort(v)
+                elif k == '$skip':
+                    cursor = cursor.skip(v)
+                elif k == '$limit':
+                    cursor = cursor.limit(v)
+        return cursor
+
+    def aggregate(self, *args, **kwargs):
+        return self._aggregate(*args, **kwargs)
+
     def _update(self, spec, document, check_keys=True):
         updated = False
 
diff --git a/utils/upload-artifact.sh b/utils/upload-artifact.sh
new file mode 100644 (file)
index 0000000..b66cdb7
--- /dev/null
@@ -0,0 +1,48 @@
+#!/bin/bash
+# SPDX-license-identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2016 Orange and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+set -e
+set -o pipefail
+
+export PATH=$PATH:/usr/local/bin/
+
+# 2 parameters
+# - $1: the source directory where the files to be uploaded are located
+# - $2: the target on artifact http://artifact.opnfv.org/<project>/$2
+#   if not set, default value is <project>/docs
+project=$PROJECT
+if [ -z "$2" ]
+  then
+      artifact_dir="$project/docs"
+  else
+      artifact_dir="$project/$2"
+fi
+DIRECTORY="$1"
+
+
+# check that the API doc directory does exist before pushing it to artifact
+if [ ! -d "$DIRECTORY" ]; then
+    echo "Directory to be uploaded $DIRECTORY does not exist"
+    exit 1
+fi
+set +e
+command -v gsutil &>/dev/null
+if [ $? != 0 ]; then
+    echo "Not possible to push results to artifact: gsutil not installed"
+    exit 1
+else
+    gsutil ls gs://artifacts.opnfv.org/"$project"/ &>/dev/null
+    if [ $? != 0 ]; then
+        echo "Not possible to push results to artifact: cannot access gs://artifacts.opnfv.org/$project"
+        exit 1
+    else
+        echo "Uploading file(s) to artifact $artifact_dir"
+        gsutil -m cp -r "$DIRECTORY"/* gs://artifacts.opnfv.org/"$artifact_dir"/ >/dev/null 2>&1
+    fi
+fi