Merge "Initiate packetization of Testing reporting"
author Morgan Richomme <morgan.richomme@orange.com>
Fri, 11 Aug 2017 12:14:01 +0000 (12:14 +0000)
committer Gerrit Code Review <gerrit@opnfv.org>
Fri, 11 Aug 2017 12:14:01 +0000 (12:14 +0000)
134 files changed:
jjb-sandbox/releng/releng-sandbox-jobs.yml
jjb/apex/apex.yml
jjb/apex/apex.yml.j2
jjb/apex/scenarios.yaml.hidden
jjb/apex/update-build-result.groovy [new file with mode: 0644]
jjb/armband/armband-ci-jobs.yml
jjb/armband/armband-deploy.sh
jjb/armband/armband-download-artifact.sh
jjb/armband/armband-project-jobs.yml
jjb/armband/armband-verify-jobs.yml
jjb/armband/build.sh
jjb/bottlenecks/bottlenecks-ci-jobs.yml
jjb/bottlenecks/bottlenecks-cleanup.sh
jjb/bottlenecks/bottlenecks-run-suite.sh
jjb/ci_gate_security/anteater-report-to-gerrit.sh
jjb/ci_gate_security/anteater-security-audit-weekly.sh
jjb/ci_gate_security/anteater-security-audit.sh
jjb/ci_gate_security/opnfv-ci-gate-security.yml
jjb/compass4nfv/compass-ci-jobs.yml
jjb/compass4nfv/compass-deploy.sh
jjb/compass4nfv/compass-verify-jobs.yml
jjb/daisy4nfv/daisy-daily-jobs.yml
jjb/daisy4nfv/daisy-project-jobs.yml
jjb/doctor/doctor.yml
jjb/dovetail/dovetail-ci-jobs.yml
jjb/dovetail/dovetail-run.sh
jjb/dovetail/dovetail-weekly-jobs.yml
jjb/fuel/fuel-daily-jobs.yml
jjb/fuel/fuel-deploy.sh
jjb/fuel/fuel-project-jobs.yml
jjb/fuel/fuel-verify-jobs.yml
jjb/fuel/fuel-weekly-jobs.yml
jjb/functest/functest-alpine.sh [new file with mode: 0644]
jjb/functest/functest-daily-jobs.yml
jjb/functest/functest-project-jobs.yml
jjb/functest/set-functest-env.sh
jjb/global/installer-params.yml
jjb/global/releng-defaults.yml
jjb/global/releng-macros.yml
jjb/global/slave-params.yml
jjb/joid/joid-daily-jobs.yml
jjb/joid/joid-deploy.sh
jjb/kvmfornfv/kvmfornfv.yml
jjb/multisite/fuel-deploy-for-multisite.sh
jjb/multisite/multisite-daily-jobs.yml
jjb/orchestra/orchestra-daily-jobs.yml [new file with mode: 0644]
jjb/orchestra/orchestra-project-jobs.yml [new file with mode: 0644]
jjb/ovn4nfv/ovn4nfv-daily-jobs.yml [new file with mode: 0644]
jjb/ovn4nfv/ovn4nfv-project-jobs.yml [new file with mode: 0644]
jjb/ovsnfv/ovsnfv.yml
jjb/qtip/qtip-integration-jobs.yml [new file with mode: 0644]
jjb/qtip/qtip-validate-jobs.yml
jjb/qtip/qtip-verify-jobs.yml
jjb/qtip/qtip-weekly-jobs.yml [new file with mode: 0644]
jjb/releng/automate.yml
jjb/releng/docker-deploy.sh
jjb/releng/opnfv-docker-arm.yml
jjb/releng/opnfv-docker.sh
jjb/releng/opnfv-docker.yml
jjb/releng/opnfv-lint.yml
jjb/releng/releng-ci-jobs.yml
jjb/storperf/storperf.yml
jjb/xci/bifrost-cleanup-job.yml
jjb/xci/bifrost-verify-jobs.yml
jjb/xci/xci-daily-jobs.yml
jjb/yardstick/yardstick-daily-jobs.yml
jjb/yardstick/yardstick-daily.sh
prototypes/bifrost/playbooks/opnfv-virtual.yaml
prototypes/xci/playbooks/configure-opnfvhost.yml
prototypes/xci/var/Debian.yml
prototypes/xci/var/RedHat.yml
prototypes/xci/var/Suse.yml
prototypes/xci/xci-deploy.sh
utils/create_pod_file.py
utils/fetch_os_creds.sh
utils/push-test-logs.sh
utils/test/testapi/.gitignore [new file with mode: 0644]
utils/test/testapi/3rd_party/static/testapi-ui/app.js
utils/test/testapi/3rd_party/static/testapi-ui/components/guidelines/guidelines.html [deleted file]
utils/test/testapi/3rd_party/static/testapi-ui/components/guidelines/guidelinesController.js [deleted file]
utils/test/testapi/3rd_party/static/testapi-ui/components/guidelines/partials/guidelineDetails.html [deleted file]
utils/test/testapi/3rd_party/static/testapi-ui/components/guidelines/partials/testListModal.html [deleted file]
utils/test/testapi/3rd_party/static/testapi-ui/components/pods/pods.html [new file with mode: 0644]
utils/test/testapi/3rd_party/static/testapi-ui/components/pods/podsController.js [new file with mode: 0644]
utils/test/testapi/3rd_party/static/testapi-ui/components/results/results.html
utils/test/testapi/3rd_party/static/testapi-ui/components/results/resultsController.js
utils/test/testapi/3rd_party/static/testapi-ui/index.html
utils/test/testapi/3rd_party/static/testapi-ui/shared/header/header.html
utils/test/testapi/docker/prepare-env.sh
utils/test/testapi/etc/config.ini
utils/test/testapi/htmlize/htmlize.py
utils/test/testapi/opnfv_testapi/cmd/server.py
utils/test/testapi/opnfv_testapi/common/check.py
utils/test/testapi/opnfv_testapi/common/config.py
utils/test/testapi/opnfv_testapi/common/message.py
utils/test/testapi/opnfv_testapi/db/__init__.py [new file with mode: 0644]
utils/test/testapi/opnfv_testapi/db/api.py [new file with mode: 0644]
utils/test/testapi/opnfv_testapi/resources/handlers.py
utils/test/testapi/opnfv_testapi/resources/result_handlers.py
utils/test/testapi/opnfv_testapi/resources/result_models.py
utils/test/testapi/opnfv_testapi/router/url_mappings.py
utils/test/testapi/opnfv_testapi/tests/unit/common/noparam.ini
utils/test/testapi/opnfv_testapi/tests/unit/common/normal.ini
utils/test/testapi/opnfv_testapi/tests/unit/common/nosection.ini
utils/test/testapi/opnfv_testapi/tests/unit/common/notboolean.ini
utils/test/testapi/opnfv_testapi/tests/unit/common/notint.ini
utils/test/testapi/opnfv_testapi/tests/unit/common/test_config.py
utils/test/testapi/opnfv_testapi/tests/unit/conftest.py [new file with mode: 0644]
utils/test/testapi/opnfv_testapi/tests/unit/executor.py
utils/test/testapi/opnfv_testapi/tests/unit/fake_pymongo.py
utils/test/testapi/opnfv_testapi/tests/unit/resources/__init__.py [new file with mode: 0644]
utils/test/testapi/opnfv_testapi/tests/unit/resources/scenario-c1.json [moved from utils/test/testapi/opnfv_testapi/tests/unit/scenario-c1.json with 100% similarity]
utils/test/testapi/opnfv_testapi/tests/unit/resources/scenario-c2.json [moved from utils/test/testapi/opnfv_testapi/tests/unit/scenario-c2.json with 100% similarity]
utils/test/testapi/opnfv_testapi/tests/unit/resources/test_base.py [moved from utils/test/testapi/opnfv_testapi/tests/unit/test_base.py with 89% similarity]
utils/test/testapi/opnfv_testapi/tests/unit/resources/test_fake_pymongo.py [moved from utils/test/testapi/opnfv_testapi/tests/unit/test_fake_pymongo.py with 100% similarity]
utils/test/testapi/opnfv_testapi/tests/unit/resources/test_pod.py [moved from utils/test/testapi/opnfv_testapi/tests/unit/test_pod.py with 97% similarity]
utils/test/testapi/opnfv_testapi/tests/unit/resources/test_project.py [moved from utils/test/testapi/opnfv_testapi/tests/unit/test_project.py with 98% similarity]
utils/test/testapi/opnfv_testapi/tests/unit/resources/test_result.py [moved from utils/test/testapi/opnfv_testapi/tests/unit/test_result.py with 83% similarity]
utils/test/testapi/opnfv_testapi/tests/unit/resources/test_scenario.py [moved from utils/test/testapi/opnfv_testapi/tests/unit/test_scenario.py with 99% similarity]
utils/test/testapi/opnfv_testapi/tests/unit/resources/test_testcase.py [moved from utils/test/testapi/opnfv_testapi/tests/unit/test_testcase.py with 99% similarity]
utils/test/testapi/opnfv_testapi/tests/unit/resources/test_token.py [moved from utils/test/testapi/opnfv_testapi/tests/unit/test_token.py with 96% similarity]
utils/test/testapi/opnfv_testapi/tests/unit/resources/test_version.py [moved from utils/test/testapi/opnfv_testapi/tests/unit/test_version.py with 94% similarity]
utils/test/testapi/opnfv_testapi/ui/auth/constants.py
utils/test/testapi/opnfv_testapi/ui/auth/sign.py
utils/test/testapi/opnfv_testapi/ui/auth/user.py
utils/test/testapi/opnfv_testapi/ui/root.py
utils/test/testapi/requirements.txt
utils/test/testapi/run_test.sh [deleted file]
utils/test/testapi/setup.py
utils/test/testapi/test-requirements.txt
utils/test/testapi/tox.ini
utils/test/testapi/update/templates/backup_mongodb.py
utils/test/testapi/update/templates/update_mongodb.py
utils/upload-artifact.sh [new file with mode: 0644]

index 97fea89..f5aab1a 100644 (file)
@@ -56,3 +56,4 @@
     publishers:
         - archive-artifacts:
             artifacts: 'job_output/*'
+        - email-jenkins-admins-on-failure
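
The new `email-jenkins-admins-on-failure` entry is a JJB publisher macro (its definition presumably lives in jjb/global/releng-macros.yml, which this change also touches). A hypothetical expansion — assuming it wraps the standard email-ext publisher, with an invented recipient address — might look like:

    - publisher:
        name: email-jenkins-admins-on-failure
        publishers:
            - email-ext:
                recipients: 'jenkins-admins@opnfv.org'  # hypothetical address
                failure: true                           # mail only when the build fails
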
index a395cf2..26eddb5 100644 (file)
         - 'apex-csit-promote-daily-{stream}'
         - 'apex-fdio-promote-daily-{stream}'
         - 'apex-verify-iso-{stream}'
-        - 'apex-run-deploy-test-baremetal-{stream}'
+        - 'apex-{scenario}-baremetal-{scenario_stream}'
+        - 'apex-testsuite-{scenario}-baremetal-{scenario_stream}'
         - 'apex-upload-snapshot'
         - 'apex-create-snapshot'
     # stream:    branch with - in place of / (eg. stable-arno)
     # branch:    branch (eg. stable/arno)
     stream:
-        - master:
+        - master: &master
             branch: 'master'
             gs-pathname: ''
             build-slave: 'apex-build-master'
@@ -27,8 +28,9 @@
             baremetal-slave: 'apex-baremetal-master'
             verify-scenario: 'os-odl-nofeature-ha'
             concurrent-builds: 3
+            scenario_stream: 'master'
 
-        - danube:
+        - danube: &danube
             branch: 'stable/danube'
             gs-pathname: '/danube'
             build-slave: 'apex-build-danube'
             baremetal-slave: 'apex-baremetal-danube'
             verify-scenario: 'os-odl_l3-nofeature-ha'
             concurrent-builds: 1
-            disabled: false
+            scenario_stream: 'danube'
+            disabled: true
+
+    scenario:
+        - 'os-nosdn-nofeature-noha':
+              <<: *danube
+        - 'os-nosdn-nofeature-ha':
+              <<: *danube
+        - 'os-nosdn-nofeature-ha-ipv6':
+              <<: *danube
+        - 'os-nosdn-ovs-noha':
+              <<: *danube
+        - 'os-nosdn-ovs-ha':
+              <<: *danube
+        - 'os-nosdn-fdio-noha':
+              <<: *danube
+        - 'os-nosdn-fdio-ha':
+              <<: *danube
+        - 'os-nosdn-kvm-ha':
+              <<: *danube
+        - 'os-nosdn-kvm-noha':
+              <<: *danube
+        - 'os-odl_l2-fdio-noha':
+              <<: *danube
+        - 'os-odl_l2-fdio-ha':
+              <<: *danube
+        - 'os-odl_netvirt-fdio-noha':
+              <<: *danube
+        - 'os-odl_l2-sfc-noha':
+              <<: *danube
+        - 'os-odl_l3-nofeature-noha':
+              <<: *danube
+        - 'os-odl_l3-nofeature-ha':
+              <<: *danube
+        - 'os-odl_l3-ovs-noha':
+              <<: *danube
+        - 'os-odl_l3-ovs-ha':
+              <<: *danube
+        - 'os-odl-bgpvpn-ha':
+              <<: *danube
+        - 'os-odl-gluon-noha':
+              <<: *danube
+        - 'os-odl_l3-fdio-noha':
+              <<: *danube
+        - 'os-odl_l3-fdio-ha':
+              <<: *danube
+        - 'os-odl_l3-fdio_dvr-noha':
+              <<: *danube
+        - 'os-odl_l3-fdio_dvr-ha':
+              <<: *danube
+        - 'os-odl_l3-csit-noha':
+              <<: *danube
+        - 'os-onos-nofeature-ha':
+              <<: *danube
+        - 'os-ovn-nofeature-noha':
+              <<: *danube
+        - 'os-nosdn-nofeature-noha':
+              <<: *master
+        - 'os-nosdn-nofeature-ha':
+              <<: *master
+        - 'os-odl-nofeature-ha':
+              <<: *master
+        - 'os-odl-nofeature-noha':
+              <<: *master
+        - 'os-odl-bgpvpn-ha':
+              <<: *master
+        - 'os-ovn-nofeature-noha':
+              <<: *master
+        - 'os-nosdn-fdio-noha':
+              <<: *master
+        - 'os-nosdn-fdio-ha':
+              <<: *master
+        - 'os-odl-fdio-noha':
+              <<: *master
+        - 'os-odl-fdio-ha':
+              <<: *master
+        - 'os-nosdn-bar-ha':
+              <<: *master
+        - 'os-nosdn-bar-noha':
+              <<: *master
+        - 'os-nosdn-nofeature-ha-ipv6':
+              <<: *master
+        - 'os-nosdn-ovs_dpdk-noha':
+              <<: *master
+        - 'os-nosdn-ovs_dpdk-ha':
+              <<: *master
+        - 'os-nosdn-kvm_ovs_dpdk-noha':
+              <<: *master
+        - 'os-nosdn-kvm_ovs_dpdk-ha':
+              <<: *master
+        - 'os-odl-sfc-noha':
+              <<: *master
+        - 'os-odl-sfc-ha':
+              <<: *master
 
     platform:
          - 'baremetal'
 - job-template:
     name: 'apex-deploy-{platform}-{stream}'
 
-    # Job template for virtual deployment
-    #
-    # Required Variables:
-    #     stream:    branch with - in place of / (eg. stable)
-    #     branch:    branch (eg. stable)
     node: 'apex-{platform}-{stream}'
 
     concurrent: true
             fail: true
 
     parameters:
+        - '{project}-{platform}-{stream}-defaults'
         - project-parameter:
             project: '{project}'
             branch: '{branch}'
                 - 'apex-deploy.*'
                 - 'functest.*'
                 - 'yardstick.*'
+                - 'dovetail.*'
         - throttle:
             max-per-node: 1
             max-total: 10
 
 # Baremetal Deploy and Test
 - job-template:
-    name: 'apex-run-deploy-test-baremetal-{stream}'
+    name: 'apex-{scenario}-baremetal-{scenario_stream}'
 
-    # Job template for daily build
-    #
-    # Required Variables:
-    #     stream:    branch with - in place of / (eg. stable)
-    #     branch:    branch (eg. stable)
     project-type: 'multijob'
 
     disabled: false
 
     parameters:
         - '{project}-defaults'
-        - '{project}-baremetal-{stream}-defaults'
+        - '{project}-baremetal-{scenario_stream}-defaults'
         - project-parameter:
             project: '{project}'
             branch: '{branch}'
             gs-pathname: '{gs-pathname}'
         - string:
             name: DEPLOY_SCENARIO
-            default: '{verify-scenario}'
+            default: '{scenario}'
             description: "Scenario to deploy with."
     properties:
         - logrotate-default
                 - 'apex-runner.*'
                 - 'apex-.*-promote.*'
                 - 'apex-run.*'
+                - 'apex-.+-baremetal-.+'
+        - throttle:
+            max-per-node: 1
+            max-total: 10
+            option: 'project'
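+            # Throttle Concurrent Builds plugin: at most one such build per node and
+            # ten in total; option 'project' throttles this job by itself, not by category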
     builders:
         - description-setter:
             description: "Deployed on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
             name: 'Baremetal Deploy'
             condition: ALWAYS
             projects:
-                - name: 'apex-deploy-baremetal-{stream}'
+                - name: 'apex-deploy-baremetal-{scenario_stream}'
                   node-parameters: true
                   current-parameters: true
                   predefined-parameters: |
                   kill-phase-on: FAILURE
                   abort-all-job: true
                   git-revision: false
+        - multijob:
+            name: 'OPNFV Test Suite'
+            condition: SUCCESSFUL
+            projects:
+                - name: 'apex-testsuite-{scenario}-baremetal-{scenario_stream}'
+                  node-parameters: true
+                  current-parameters: false
+                  predefined-parameters:
+                    DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+                  kill-phase-on: NEVER
+                  abort-all-job: true
+                  git-revision: false
+
+    publishers:
+        - groovy-postbuild:
+            script:
+                !include-raw-escape: ./update-build-result.groovy
+
+# Baremetal test job
+- job-template:
+    name: 'apex-testsuite-{scenario}-baremetal-{scenario_stream}'
+
+    project-type: 'multijob'
+
+    disabled: false
+
+    parameters:
+        - '{project}-defaults'
+        - '{project}-baremetal-{scenario_stream}-defaults'
+        - project-parameter:
+            project: '{project}'
+            branch: '{branch}'
+        - apex-parameter:
+            gs-pathname: '{gs-pathname}'
+        - string:
+            name: DEPLOY_SCENARIO
+            default: '{scenario}'
+            description: "Scenario to deploy with."
+    properties:
+        - logrotate-default
+        - build-blocker:
+            use-build-blocker: true
+            block-level: 'NODE'
+            blocking-jobs:
+                - 'apex-verify.*'
+                - 'apex-runner.*'
+                - 'apex-.*-promote.*'
+                - 'apex-run.*'
+                - 'apex-testsuite-.+-baremetal-.+'
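+                # Build Blocker plugin: with block-level NODE, this job queues while any
+                # job matching one of the regexes above is running on the same node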
+        - throttle:
+            max-per-node: 1
+            max-total: 10
+            option: 'project'
+    builders:
+        - description-setter:
+            description: "Testing on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
         - multijob:
             name: Functest
             condition: ALWAYS
             projects:
-                - name: 'functest-apex-baremetal-daily-{stream}'
+                - name: 'functest-apex-baremetal-daily-{scenario_stream}'
                   node-parameters: true
                   current-parameters: false
                   predefined-parameters:
             name: Yardstick
             condition: ALWAYS
             projects:
-                - name: 'yardstick-apex-baremetal-daily-{stream}'
+                - name: 'yardstick-apex-baremetal-daily-{scenario_stream}'
                   node-parameters: true
                   current-parameters: false
                   predefined-parameters:
                   kill-phase-on: NEVER
                   abort-all-job: false
                   git-revision: false
+        - multijob:
+            name: Dovetail
+            condition: ALWAYS
+            projects:
+                - name: 'dovetail-apex-baremetal-proposed_tests-{scenario_stream}'
+                  node-parameters: true
+                  current-parameters: false
+                  predefined-parameters:
+                    DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+                  kill-phase-on: NEVER
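+                  # Multijob enable-condition (Groovy, exact-match regex): Dovetail runs
+                  # only for os-nosdn-nofeature-ha, os-nosdn-kvm-ha or os-odl_l3-fdio-ha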
+                  enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-(nosdn-nofeature|nosdn-kvm|odl_l3-fdio)-ha/"
+                  abort-all-job: false
+                  git-revision: false
+# Build status is always SUCCESS because the conditional-step plugin prefetches
+# the build status before the multijob phases execute
+#        - conditional-step:
+#            condition-kind: current-status
+#            condition-worst: SUCCESS
+#            condition-best: SUCCESS
+#            on-evaluation-failure: mark-unstable
+#            steps:
+#                - shell: 'echo "Tests Passed"'
 
 
 # danube Daily
             condition: SUCCESSFUL
             projects:
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-nosdn-nofeature-noha-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-nosdn-nofeature-noha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-nosdn-nofeature-ha-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-nosdn-nofeature-ha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-nosdn-nofeature-ha-ipv6-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-nosdn-nofeature-ha-ipv6
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-nosdn-ovs-noha-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-nosdn-ovs-noha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-nosdn-ovs-ha-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-nosdn-ovs-ha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-nosdn-fdio-noha-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-nosdn-fdio-noha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-nosdn-fdio-ha-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-nosdn-fdio-ha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-nosdn-kvm-ha-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-nosdn-kvm-ha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-nosdn-kvm-noha-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-nosdn-kvm-noha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-odl_l2-fdio-noha-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-odl_l2-fdio-noha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-odl_l2-fdio-ha-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-odl_l2-fdio-ha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-odl_netvirt-fdio-noha-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-odl_netvirt-fdio-noha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-odl_l2-sfc-noha-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-odl_l2-sfc-noha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-odl_l3-nofeature-noha-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-odl_l3-nofeature-noha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-odl_l3-nofeature-ha-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-odl_l3-nofeature-ha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-odl_l3-ovs-noha-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-odl_l3-ovs-noha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-odl_l3-ovs-ha-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-odl_l3-ovs-ha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-odl-bgpvpn-ha-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-odl-bgpvpn-ha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-odl-gluon-noha-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-odl-gluon-noha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-odl_l3-fdio-noha-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-odl_l3-fdio-noha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-odl_l3-fdio-ha-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-odl_l3-fdio-ha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-odl_l3-fdio_dvr-noha-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-odl_l3-fdio_dvr-noha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-odl_l3-fdio_dvr-ha-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-odl_l3-fdio_dvr-ha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-odl_l3-csit-noha-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-odl_l3-csit-noha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-onos-nofeature-ha-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-onos-nofeature-ha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-ovn-nofeature-noha-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-ovn-nofeature-noha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
             condition: SUCCESSFUL
             projects:
 
-                - name: 'apex-run-deploy-test-baremetal-master'
+                - name: 'apex-os-nosdn-nofeature-noha-baremetal-master'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-nosdn-nofeature-noha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-master'
+                - name: 'apex-os-nosdn-nofeature-ha-baremetal-master'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-nosdn-nofeature-ha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-master'
+                - name: 'apex-os-odl-nofeature-ha-baremetal-master'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-odl-nofeature-ha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-master'
+                - name: 'apex-os-odl-nofeature-noha-baremetal-master'
+                  node-parameters: false
+                  current-parameters: false
+                  predefined-parameters: |
+                    OPNFV_CLEAN=yes
+                  kill-phase-on: NEVER
+                  abort-all-job: true
+                  git-revision: false
+
+                - name: 'apex-os-odl-bgpvpn-ha-baremetal-master'
+                  node-parameters: false
+                  current-parameters: false
+                  predefined-parameters: |
+                    OPNFV_CLEAN=yes
+                  kill-phase-on: NEVER
+                  abort-all-job: true
+                  git-revision: false
+
+                - name: 'apex-os-ovn-nofeature-noha-baremetal-master'
+                  node-parameters: false
+                  current-parameters: false
+                  predefined-parameters: |
+                    OPNFV_CLEAN=yes
+                  kill-phase-on: NEVER
+                  abort-all-job: true
+                  git-revision: false
+
+                - name: 'apex-os-nosdn-fdio-noha-baremetal-master'
+                  node-parameters: false
+                  current-parameters: false
+                  predefined-parameters: |
+                    OPNFV_CLEAN=yes
+                  kill-phase-on: NEVER
+                  abort-all-job: true
+                  git-revision: false
+
+                - name: 'apex-os-nosdn-fdio-ha-baremetal-master'
+                  node-parameters: false
+                  current-parameters: false
+                  predefined-parameters: |
+                    OPNFV_CLEAN=yes
+                  kill-phase-on: NEVER
+                  abort-all-job: true
+                  git-revision: false
+
+                - name: 'apex-os-odl-fdio-noha-baremetal-master'
+                  node-parameters: false
+                  current-parameters: false
+                  predefined-parameters: |
+                    OPNFV_CLEAN=yes
+                  kill-phase-on: NEVER
+                  abort-all-job: true
+                  git-revision: false
+
+                - name: 'apex-os-odl-fdio-ha-baremetal-master'
+                  node-parameters: false
+                  current-parameters: false
+                  predefined-parameters: |
+                    OPNFV_CLEAN=yes
+                  kill-phase-on: NEVER
+                  abort-all-job: true
+                  git-revision: false
+
+                - name: 'apex-os-nosdn-bar-ha-baremetal-master'
+                  node-parameters: false
+                  current-parameters: false
+                  predefined-parameters: |
+                    OPNFV_CLEAN=yes
+                  kill-phase-on: NEVER
+                  abort-all-job: true
+                  git-revision: false
+
+                - name: 'apex-os-nosdn-bar-noha-baremetal-master'
+                  node-parameters: false
+                  current-parameters: false
+                  predefined-parameters: |
+                    OPNFV_CLEAN=yes
+                  kill-phase-on: NEVER
+                  abort-all-job: true
+                  git-revision: false
+
+                - name: 'apex-os-nosdn-nofeature-ha-ipv6-baremetal-master'
+                  node-parameters: false
+                  current-parameters: false
+                  predefined-parameters: |
+                    OPNFV_CLEAN=yes
+                  kill-phase-on: NEVER
+                  abort-all-job: true
+                  git-revision: false
+
+                - name: 'apex-os-nosdn-ovs_dpdk-noha-baremetal-master'
+                  node-parameters: false
+                  current-parameters: false
+                  predefined-parameters: |
+                    OPNFV_CLEAN=yes
+                  kill-phase-on: NEVER
+                  abort-all-job: true
+                  git-revision: false
+
+                - name: 'apex-os-nosdn-ovs_dpdk-ha-baremetal-master'
+                  node-parameters: false
+                  current-parameters: false
+                  predefined-parameters: |
+                    OPNFV_CLEAN=yes
+                  kill-phase-on: NEVER
+                  abort-all-job: true
+                  git-revision: false
+
+                - name: 'apex-os-nosdn-kvm_ovs_dpdk-noha-baremetal-master'
+                  node-parameters: false
+                  current-parameters: false
+                  predefined-parameters: |
+                    OPNFV_CLEAN=yes
+                  kill-phase-on: NEVER
+                  abort-all-job: true
+                  git-revision: false
+
+                - name: 'apex-os-nosdn-kvm_ovs_dpdk-ha-baremetal-master'
+                  node-parameters: false
+                  current-parameters: false
+                  predefined-parameters: |
+                    OPNFV_CLEAN=yes
+                  kill-phase-on: NEVER
+                  abort-all-job: true
+                  git-revision: false
+
+                - name: 'apex-os-odl-sfc-noha-baremetal-master'
+                  node-parameters: false
+                  current-parameters: false
+                  predefined-parameters: |
+                    OPNFV_CLEAN=yes
+                  kill-phase-on: NEVER
+                  abort-all-job: true
+                  git-revision: false
+
+                - name: 'apex-os-odl-sfc-ha-baremetal-master'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-odl-nofeature-noha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
 - trigger:
     name: 'apex-master'
     triggers:
-        - timed: '0 3 1 1 7'
+        - timed: '0 12 * * *'
 - trigger:
     name: 'apex-danube'
     triggers:
-        - timed: '0 12 * * *'
+        - timed: '0 3 1 1 7'
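
The per-scenario entries above rely on standard YAML anchors and merge keys: `&master`/`&danube` name the full stream mappings, and `<<: *master` copies every key of the anchored mapping into the scenario entry. A minimal sketch of the mechanics:

    stream:
        - master: &master          # anchor the whole stream mapping
            branch: 'master'
            scenario_stream: 'master'
    scenario:
        - 'os-odl-sfc-ha':
              <<: *master          # merge key: inherits branch and scenario_stream

The trigger swap above also makes master the daily run: '0 12 * * *' fires at 12:00 every day, while '0 3 1 1 7' — under the usual cron rule that a restricted day-of-month and day-of-week are ORed — fires only at 03:00 on 1 January and on Sundays in January, effectively parking the danube daily alongside its disabled: true flag.
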
index 752cf28..5d7f150 100644 (file)
         - 'apex-csit-promote-daily-{stream}'
         - 'apex-fdio-promote-daily-{stream}'
         - 'apex-verify-iso-{stream}'
-        - 'apex-run-deploy-test-baremetal-{stream}'
+        - 'apex-{scenario}-baremetal-{scenario_stream}'
+        - 'apex-testsuite-{scenario}-baremetal-{scenario_stream}'
         - 'apex-upload-snapshot'
         - 'apex-create-snapshot'
     # stream:    branch with - in place of / (eg. stable-arno)
     # branch:    branch (eg. stable/arno)
     stream:
-        - master:
+        - master: &master
             branch: 'master'
             gs-pathname: ''
             build-slave: 'apex-build-master'
@@ -27,8 +28,9 @@
             baremetal-slave: 'apex-baremetal-master'
             verify-scenario: 'os-odl-nofeature-ha'
             concurrent-builds: 3
+            scenario_stream: 'master'
 
-        - danube:
+        - danube: &danube
             branch: 'stable/danube'
             gs-pathname: '/danube'
             build-slave: 'apex-build-danube'
             baremetal-slave: 'apex-baremetal-danube'
             verify-scenario: 'os-odl_l3-nofeature-ha'
             concurrent-builds: 1
-            disabled: false
+            scenario_stream: 'danube'
+            disabled: true
+
+    scenario:
+        {%- for stream in scenarios %}
+        {%- for scenario in scenarios[stream] %}
+        - '{{scenario}}':
+              <<: *{{stream}}
+        {%- endfor %}
+        {%- endfor %}
 
     platform:
          - 'baremetal'
 - job-template:
     name: 'apex-deploy-{platform}-{stream}'
 
-    # Job template for virtual deployment
-    #
-    # Required Variables:
-    #     stream:    branch with - in place of / (eg. stable)
-    #     branch:    branch (eg. stable)
     node: 'apex-{platform}-{stream}'
 
     concurrent: true
             fail: true
 
     parameters:
+        - '{project}-{platform}-{stream}-defaults'
         - project-parameter:
             project: '{project}'
             branch: '{branch}'
                 - 'apex-deploy.*'
                 - 'functest.*'
                 - 'yardstick.*'
+                - 'dovetail.*'
         - throttle:
             max-per-node: 1
             max-total: 10
 
 # Baremetal Deploy and Test
 - job-template:
-    name: 'apex-run-deploy-test-baremetal-{stream}'
+    name: 'apex-{scenario}-baremetal-{scenario_stream}'
 
-    # Job template for daily build
-    #
-    # Required Variables:
-    #     stream:    branch with - in place of / (eg. stable)
-    #     branch:    branch (eg. stable)
     project-type: 'multijob'
 
     disabled: false
 
     parameters:
         - '{project}-defaults'
-        - '{project}-baremetal-{stream}-defaults'
+        - '{project}-baremetal-{scenario_stream}-defaults'
         - project-parameter:
             project: '{project}'
             branch: '{branch}'
             gs-pathname: '{gs-pathname}'
         - string:
             name: DEPLOY_SCENARIO
-            default: '{verify-scenario}'
+            default: '{scenario}'
             description: "Scenario to deploy with."
     properties:
         - logrotate-default
                 - 'apex-runner.*'
                 - 'apex-.*-promote.*'
                 - 'apex-run.*'
+                - 'apex-.+-baremetal-.+'
+        - throttle:
+            max-per-node: 1
+            max-total: 10
+            option: 'project'
     builders:
         - description-setter:
             description: "Deployed on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
             name: 'Baremetal Deploy'
             condition: ALWAYS
             projects:
-                - name: 'apex-deploy-baremetal-{stream}'
+                - name: 'apex-deploy-baremetal-{scenario_stream}'
                   node-parameters: true
                   current-parameters: true
                   predefined-parameters: |
                   kill-phase-on: FAILURE
                   abort-all-job: true
                   git-revision: false
+        - multijob:
+            name: 'OPNFV Test Suite'
+            condition: SUCCESSFUL
+            projects:
+                - name: 'apex-testsuite-{scenario}-baremetal-{scenario_stream}'
+                  node-parameters: true
+                  current-parameters: false
+                  predefined-parameters:
+                    DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+                  kill-phase-on: NEVER
+                  abort-all-job: true
+                  git-revision: false
+
+    publishers:
+        - groovy-postbuild:
+            script:
+                !include-raw-escape: ./update-build-result.groovy
+
+# Baremetal test job
+- job-template:
+    name: 'apex-testsuite-{scenario}-baremetal-{scenario_stream}'
+
+    project-type: 'multijob'
+
+    disabled: false
+
+    parameters:
+        - '{project}-defaults'
+        - '{project}-baremetal-{scenario_stream}-defaults'
+        - project-parameter:
+            project: '{project}'
+            branch: '{branch}'
+        - apex-parameter:
+            gs-pathname: '{gs-pathname}'
+        - string:
+            name: DEPLOY_SCENARIO
+            default: '{scenario}'
+            description: "Scenario to deploy with."
+    properties:
+        - logrotate-default
+        - build-blocker:
+            use-build-blocker: true
+            block-level: 'NODE'
+            blocking-jobs:
+                - 'apex-verify.*'
+                - 'apex-runner.*'
+                - 'apex-.*-promote.*'
+                - 'apex-run.*'
+                - 'apex-testsuite-.+-baremetal-.+'
+        - throttle:
+            max-per-node: 1
+            max-total: 10
+            option: 'project'
+    builders:
+        - description-setter:
+            description: "Testing on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
         - multijob:
             name: Functest
             condition: ALWAYS
             projects:
-                - name: 'functest-apex-baremetal-daily-{stream}'
+                - name: 'functest-apex-baremetal-daily-{scenario_stream}'
                   node-parameters: true
                   current-parameters: false
                   predefined-parameters:
             name: Yardstick
             condition: ALWAYS
             projects:
-                - name: 'yardstick-apex-baremetal-daily-{stream}'
+                - name: 'yardstick-apex-baremetal-daily-{scenario_stream}'
+                  node-parameters: true
+                  current-parameters: false
+                  predefined-parameters:
+                    DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+                  kill-phase-on: NEVER
+                  abort-all-job: false
+                  git-revision: false
+        - multijob:
+            name: Dovetail
+            condition: ALWAYS
+            projects:
+                - name: 'dovetail-apex-baremetal-proposed_tests-{scenario_stream}'
                   node-parameters: true
                   current-parameters: false
                   predefined-parameters:
                     DEPLOY_SCENARIO=$DEPLOY_SCENARIO
                   kill-phase-on: NEVER
+                  enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-(nosdn-nofeature|nosdn-kvm|odl_l3-fdio)-ha/"
                   abort-all-job: false
                   git-revision: false
+# Build status is always SUCCESS because the conditional-step plugin prefetches
+# the build status before the multijob phases execute
+#        - conditional-step:
+#            condition-kind: current-status
+#            condition-worst: SUCCESS
+#            condition-best: SUCCESS
+#            on-evaluation-failure: mark-unstable
+#            steps:
+#                - shell: 'echo "Tests Passed"'
 
 {% for stream in scenarios %}
 # {{ stream }} Daily
             condition: SUCCESSFUL
             projects:
 {% for scenario in scenarios[stream] %}
-                - name: 'apex-run-deploy-test-baremetal-{{ stream }}'
+                - name: 'apex-{{ scenario }}-baremetal-{{ stream }}'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO={{scenario}}
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
 - trigger:
     name: 'apex-master'
     triggers:
-        - timed: '0 3 1 1 7'
+        - timed: '0 12 * * *'
 - trigger:
     name: 'apex-danube'
     triggers:
-        - timed: '0 12 * * *'
+        - timed: '0 3 1 1 7'
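
Rendered with the mapping in jjb/apex/scenarios.yaml.hidden, the `{%- for %}` loop above expands into the same literal scenario list that the plain apex.yml carries; for example, the first master entries come out as:

    scenario:
        - 'os-nosdn-nofeature-noha':
              <<: *master
        - 'os-nosdn-nofeature-ha':
              <<: *master

and so on for every scenario under each stream key.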
 
index dc9107a..f49fa83 100644 (file)
@@ -3,6 +3,21 @@ master:
   - 'os-nosdn-nofeature-ha'
   - 'os-odl-nofeature-ha'
   - 'os-odl-nofeature-noha'
+  - 'os-odl-bgpvpn-ha'
+  - 'os-ovn-nofeature-noha'
+  - 'os-nosdn-fdio-noha'
+  - 'os-nosdn-fdio-ha'
+  - 'os-odl-fdio-noha'
+  - 'os-odl-fdio-ha'
+  - 'os-nosdn-bar-ha'
+  - 'os-nosdn-bar-noha'
+  - 'os-nosdn-nofeature-ha-ipv6'
+  - 'os-nosdn-ovs_dpdk-noha'
+  - 'os-nosdn-ovs_dpdk-ha'
+  - 'os-nosdn-kvm_ovs_dpdk-noha'
+  - 'os-nosdn-kvm_ovs_dpdk-ha'
+  - 'os-odl-sfc-noha'
+  - 'os-odl-sfc-ha'
 danube:
   - 'os-nosdn-nofeature-noha'
   - 'os-nosdn-nofeature-ha'
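
Each scenario listed here, combined with its stream, names two concrete Jenkins jobs via the templates above; for os-odl-sfc-ha on master:

    apex-os-odl-sfc-ha-baremetal-master            # deploy + test-suite multijob
    apex-testsuite-os-odl-sfc-ha-baremetal-master  # Functest/Yardstick/Dovetail phases
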
diff --git a/jjb/apex/update-build-result.groovy b/jjb/apex/update-build-result.groovy
new file mode 100644 (file)
index 0000000..9edca6b
--- /dev/null
@@ -0,0 +1,5 @@
+import hudson.model.*
+if (manager.logContains('^.*apex-deploy-baremetal.*SUCCESS$')
+      && manager.build.@result == hudson.model.Result.FAILURE) {
+    manager.build.@result = hudson.model.Result.UNSTABLE
+}
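
This postbuild script complements the multijob layout above: the deploy phase uses kill-phase-on: FAILURE, so a failed deploy stays a hard FAILURE, while a deploy that logged SUCCESS followed by failing test phases is downgraded to UNSTABLE. It is wired in through the groovy-postbuild publisher shown earlier:

    publishers:
        - groovy-postbuild:
            script:
                !include-raw-escape: ./update-build-result.groovy  # FAILURE -> UNSTABLE when deploy succeeded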
index 55d8ff9..a5d75bd 100644 (file)
         branch: '{stream}'
         gs-pathname: ''
         disabled: false
-    danube: &danube
-        stream: danube
+    euphrates: &euphrates
+        stream: euphrates
         branch: 'stable/{stream}'
         gs-pathname: '/{stream}'
-        disabled: false
+        disabled: true
 #--------------------------------
 # POD, INSTALLER, AND BRANCH MAPPING
 #--------------------------------
 # CI POD's
 #--------------------------------
-#        danube
+#        euphrates
 #--------------------------------
     pod:
         - armband-baremetal:
             slave-label: armband-baremetal
             installer: fuel
-            <<: *danube
+            <<: *euphrates
         - armband-virtual:
             slave-label: armband-virtual
             installer: fuel
-            <<: *danube
+            <<: *euphrates
 #--------------------------------
 #        master
 #--------------------------------
 #--------------------------------
 # NONE-CI POD's
 #--------------------------------
-#        danube
+#        euphrates
 #--------------------------------
         - arm-pod2:
             slave-label: arm-pod2
             installer: fuel
-            <<: *danube
+            <<: *euphrates
         - arm-pod3:
             slave-label: arm-pod3
             installer: fuel
-            <<: *danube
+            <<: *euphrates
         - arm-pod4:
             slave-label: arm-pod4
             installer: fuel
-            <<: *danube
+            <<: *euphrates
         - arm-virtual1:
             slave-label: arm-virtual1
             installer: fuel
-            <<: *danube
+            <<: *euphrates
 #--------------------------------
 #        master
 #--------------------------------
     publishers:
         - email:
             recipients: armband@enea.com
+        - email-jenkins-admins-on-failure
 
 ########################
 # parameter macros
             name: GS_URL
             default: artifacts.opnfv.org/$PROJECT{gs-pathname}
             description: "URL to Google Storage."
+        - string:
+            name: SSH_KEY
+            default: "/tmp/mcp.rsa"
+            description: "Path to private SSH key to access environment nodes. For MCP deployments only."
 
 ########################
 # trigger macros
         - timed: ''
 
 #----------------------------------------------------------------------
-# Enea Armband CI Baremetal Triggers running against danube branch
+# Enea Armband CI Baremetal Triggers running against euphrates branch
 #----------------------------------------------------------------------
 - trigger:
-    name: 'fuel-os-odl_l2-nofeature-ha-armband-baremetal-danube-trigger'
+    name: 'fuel-os-odl_l2-nofeature-ha-armband-baremetal-euphrates-trigger'
     triggers:
-        - timed: '0 0,16 * * 2,4'
+        - timed: ''
 - trigger:
-    name: 'fuel-os-nosdn-nofeature-ha-armband-baremetal-danube-trigger'
+    name: 'fuel-os-nosdn-nofeature-ha-armband-baremetal-euphrates-trigger'
     triggers:
-        - timed: '0 0 * * 1,5,7'
+        - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l2-bgpvpn-ha-armband-baremetal-danube-trigger'
+    name: 'fuel-os-odl_l2-bgpvpn-ha-armband-baremetal-euphrates-trigger'
     triggers:
-        - timed: '0 16 * * 1,5,7'
+        - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l3-nofeature-ha-armband-baremetal-danube-trigger'
+    name: 'fuel-os-odl_l3-nofeature-ha-armband-baremetal-euphrates-trigger'
     triggers:
-        - timed: '0 8 * * 2,4,6'
+        - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l2-nofeature-noha-armband-baremetal-danube-trigger'
+    name: 'fuel-os-odl_l2-nofeature-noha-armband-baremetal-euphrates-trigger'
     triggers:
-        - timed: '0 8 * * 1,3,5,7'
+        - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l2-sfc-ha-armband-baremetal-danube-trigger'
+    name: 'fuel-os-odl_l2-sfc-ha-armband-baremetal-euphrates-trigger'
     triggers:
-        - timed: '0 0 * * 3,6'
+        - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l2-sfc-noha-armband-baremetal-danube-trigger'
+    name: 'fuel-os-odl_l2-sfc-noha-armband-baremetal-euphrates-trigger'
     triggers:
-        - timed: '0 16 * * 3,6'
+        - timed: ''
 #---------------------------------------------------------------
 # Enea Armband CI Virtual Triggers running against master branch
 #---------------------------------------------------------------
     triggers:
         - timed: ''
 #--------------------------------------------------------------------
-# Enea Armband CI Virtual Triggers running against danube branch
+# Enea Armband CI Virtual Triggers running against euphrates branch
 #--------------------------------------------------------------------
 - trigger:
-    name: 'fuel-os-odl_l2-nofeature-ha-armband-virtual-danube-trigger'
+    name: 'fuel-os-odl_l2-nofeature-ha-armband-virtual-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-nosdn-nofeature-ha-armband-virtual-danube-trigger'
+    name: 'fuel-os-nosdn-nofeature-ha-armband-virtual-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l3-nofeature-ha-armband-virtual-danube-trigger'
+    name: 'fuel-os-odl_l3-nofeature-ha-armband-virtual-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l2-bgpvpn-ha-armband-virtual-danube-trigger'
+    name: 'fuel-os-odl_l2-bgpvpn-ha-armband-virtual-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l2-nofeature-noha-armband-virtual-danube-trigger'
+    name: 'fuel-os-odl_l2-nofeature-noha-armband-virtual-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l2-sfc-ha-armband-virtual-danube-trigger'
+    name: 'fuel-os-odl_l2-sfc-ha-armband-virtual-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l2-sfc-noha-armband-virtual-danube-trigger'
+    name: 'fuel-os-odl_l2-sfc-noha-armband-virtual-euphrates-trigger'
     triggers:
         - timed: ''
 
 #--------------------------------------------------------------------
-# Enea Armband Non CI Virtual Triggers running against danube branch
+# Enea Armband Non CI Virtual Triggers running against euphrates branch
 #--------------------------------------------------------------------
 - trigger:
-    name: 'fuel-os-odl_l2-nofeature-ha-arm-virtual1-danube-trigger'
+    name: 'fuel-os-odl_l2-nofeature-ha-arm-virtual1-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-nosdn-nofeature-ha-arm-virtual1-danube-trigger'
+    name: 'fuel-os-nosdn-nofeature-ha-arm-virtual1-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l3-nofeature-ha-arm-virtual1-danube-trigger'
+    name: 'fuel-os-odl_l3-nofeature-ha-arm-virtual1-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l2-bgpvpn-ha-arm-virtual1-danube-trigger'
+    name: 'fuel-os-odl_l2-bgpvpn-ha-arm-virtual1-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l2-nofeature-noha-arm-virtual1-danube-trigger'
+    name: 'fuel-os-odl_l2-nofeature-noha-arm-virtual1-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l2-sfc-ha-arm-virtual1-danube-trigger'
+    name: 'fuel-os-odl_l2-sfc-ha-arm-virtual1-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l2-sfc-noha-arm-virtual1-danube-trigger'
+    name: 'fuel-os-odl_l2-sfc-noha-arm-virtual1-euphrates-trigger'
     triggers:
         - timed: ''
 
     triggers:
         - timed: ''
 #---------------------------------------------------------------
-# Enea Armband POD 2 Triggers running against danube branch
+# Enea Armband POD 2 Triggers running against euphrates branch
 #---------------------------------------------------------------
 - trigger:
-    name: 'fuel-os-odl_l2-nofeature-ha-arm-pod2-danube-trigger'
+    name: 'fuel-os-odl_l2-nofeature-ha-arm-pod2-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-nosdn-nofeature-ha-arm-pod2-danube-trigger'
+    name: 'fuel-os-nosdn-nofeature-ha-arm-pod2-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l3-nofeature-ha-arm-pod2-danube-trigger'
+    name: 'fuel-os-odl_l3-nofeature-ha-arm-pod2-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l2-bgpvpn-ha-arm-pod2-danube-trigger'
+    name: 'fuel-os-odl_l2-bgpvpn-ha-arm-pod2-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l2-nofeature-noha-arm-pod2-danube-trigger'
+    name: 'fuel-os-odl_l2-nofeature-noha-arm-pod2-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l2-sfc-ha-arm-pod2-danube-trigger'
+    name: 'fuel-os-odl_l2-sfc-ha-arm-pod2-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l2-sfc-noha-arm-pod2-danube-trigger'
+    name: 'fuel-os-odl_l2-sfc-noha-arm-pod2-euphrates-trigger'
     triggers:
         - timed: ''
 #----------------------------------------------------------
     triggers:
         - timed: ''
 #---------------------------------------------------------------
-# Enea Armband POD 3 Triggers running against danube branch
+# Enea Armband POD 3 Triggers running against euphrates branch
 #---------------------------------------------------------------
 - trigger:
-    name: 'fuel-os-odl_l2-nofeature-ha-arm-pod3-danube-trigger'
+    name: 'fuel-os-odl_l2-nofeature-ha-arm-pod3-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-nosdn-nofeature-ha-arm-pod3-danube-trigger'
+    name: 'fuel-os-nosdn-nofeature-ha-arm-pod3-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l3-nofeature-ha-arm-pod3-danube-trigger'
+    name: 'fuel-os-odl_l3-nofeature-ha-arm-pod3-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l2-bgpvpn-ha-arm-pod3-danube-trigger'
+    name: 'fuel-os-odl_l2-bgpvpn-ha-arm-pod3-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l2-nofeature-noha-arm-pod3-danube-trigger'
+    name: 'fuel-os-odl_l2-nofeature-noha-arm-pod3-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l2-sfc-ha-arm-pod3-danube-trigger'
+    name: 'fuel-os-odl_l2-sfc-ha-arm-pod3-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l2-sfc-noha-arm-pod3-danube-trigger'
+    name: 'fuel-os-odl_l2-sfc-noha-arm-pod3-euphrates-trigger'
     triggers:
         - timed: ''
 #--------------------------------------------------------------------------
     triggers:
         - timed: ''
 #--------------------------------------------------------------------------
-# Enea Armband POD 3 Triggers running against danube branch (aarch64 slave)
+# Enea Armband POD 3 Triggers running against euphrates branch (aarch64 slave)
 #--------------------------------------------------------------------------
 - trigger:
-    name: 'fuel-os-odl_l2-nofeature-ha-arm-pod4-danube-trigger'
+    name: 'fuel-os-odl_l2-nofeature-ha-arm-pod4-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-nosdn-nofeature-ha-arm-pod4-danube-trigger'
+    name: 'fuel-os-nosdn-nofeature-ha-arm-pod4-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l3-nofeature-ha-arm-pod4-danube-trigger'
+    name: 'fuel-os-odl_l3-nofeature-ha-arm-pod4-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l2-bgpvpn-ha-arm-pod4-danube-trigger'
+    name: 'fuel-os-odl_l2-bgpvpn-ha-arm-pod4-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l2-nofeature-noha-arm-pod4-danube-trigger'
+    name: 'fuel-os-odl_l2-nofeature-noha-arm-pod4-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l2-sfc-ha-arm-pod4-danube-trigger'
+    name: 'fuel-os-odl_l2-sfc-ha-arm-pod4-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l2-sfc-noha-arm-pod4-danube-trigger'
+    name: 'fuel-os-odl_l2-sfc-noha-arm-pod4-euphrates-trigger'
     triggers:
         - timed: ''
index e445e08..08d3233 100755 (executable)
@@ -2,7 +2,7 @@
 # SPDX-license-identifier: Apache-2.0
 ##############################################################################
 # Copyright (c) 2016 Ericsson AB and others.
-#           (c) 2016 Enea Software AB
+#           (c) 2017 Enea Software AB
 # All rights reserved. This program and the accompanying materials
 # are made available under the terms of the Apache License, Version 2.0
 # which accompanies this distribution, and is available at
@@ -13,16 +13,18 @@ set -o pipefail
 
 export TERM="vt220"
 
-# source the file so we get OPNFV vars
-source latest.properties
+if [[ "$BRANCH" != 'master' ]]; then
+    # source the file so we get OPNFV vars
+    source latest.properties
 
-# echo the info about artifact that is used during the deployment
-echo "Using ${OPNFV_ARTIFACT_URL/*\/} for deployment"
+    # echo the info about artifact that is used during the deployment
+    echo "Using ${OPNFV_ARTIFACT_URL/*\/} for deployment"
+fi
 
 if [[ "$JOB_NAME" =~ "merge" ]]; then
     # set simplest scenario for virtual deploys to run for merges
     DEPLOY_SCENARIO="os-nosdn-nofeature-ha"
-else
+elif [[ "$BRANCH" != 'master' ]]; then
     # for non-merge deployments
     # checkout the commit that was used for building the downloaded artifact
     # to make sure the ISO and deployment mechanism uses same versions
@@ -68,10 +70,6 @@ if [[ $LAB_CONFIG_URL =~ ^(git|ssh):// ]]; then
     fi
 fi
 
-if [[ "$NODE_NAME" =~ "virtual" ]]; then
-    POD_NAME="virtual_kvm"
-fi
-
 # releng wants us to use nothing else but opnfv.iso for now. We comply.
 ISO_FILE=$WORKSPACE/opnfv.iso
 
@@ -93,7 +91,7 @@ fi
 # construct the command
 DEPLOY_COMMAND="$WORKSPACE/ci/deploy.sh -b ${LAB_CONFIG_URL} \
     -l $LAB_NAME -p $POD_NAME -s $DEPLOY_SCENARIO -i file://${ISO_FILE} \
-    -H -B ${DEFAULT_BRIDGE:-pxebr} -S $TMPDIR -L $WORKSPACE/$FUEL_LOG_FILENAME \
+    -B ${DEFAULT_BRIDGE:-,,,} -S $TMPDIR -L $WORKSPACE/$FUEL_LOG_FILENAME \
     ${DEPLOY_CACHE}"
 
 # log info to console
@@ -102,7 +100,7 @@ echo "--------------------------------------------------------"
 echo "Scenario: $DEPLOY_SCENARIO"
 echo "Lab: $LAB_NAME"
 echo "POD: $POD_NAME"
-echo "ISO: ${OPNFV_ARTIFACT_URL/*\/}"
+[[ "$BRANCH" != 'master' ]] && echo "ISO: ${OPNFV_ARTIFACT_URL/*\/}"
 echo
 echo "Starting the deployment using $INSTALLER_TYPE. This could take some time..."
 echo "--------------------------------------------------------"
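
Two bash parameter expansions carry this hunk: ${OPNFV_ARTIFACT_URL/*\/} deletes the longest match of */, i.e. everything up to and including the last slash, leaving only the artifact file name, while ${DEFAULT_BRIDGE:-,,,} substitutes the literal default ,,, (empty comma-separated bridge slots) when DEFAULT_BRIDGE is unset or empty. A minimal sketch with illustrative values:

    OPNFV_ARTIFACT_URL="http://artifacts.opnfv.org/armband/opnfv-2017.iso"
    # '*\/' greedily matches through the final '/', so only the file name remains
    echo "Using ${OPNFV_ARTIFACT_URL/*\/} for deployment"   # -> Using opnfv-2017.iso for deployment

    unset DEFAULT_BRIDGE
    echo "-B ${DEFAULT_BRIDGE:-,,,}"    # -> -B ,,,   (fallback used)
    DEFAULT_BRIDGE=pxebr
    echo "-B ${DEFAULT_BRIDGE:-,,,}"    # -> -B pxebr (set value wins)
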
index e2dd097..4f83305 100755 (executable)
@@ -2,6 +2,7 @@
 # SPDX-license-identifier: Apache-2.0
 ##############################################################################
 # Copyright (c) 2016 Ericsson AB and others.
+#           (c) 2017 Enea AB
 # All rights reserved. This program and the accompanying materials
 # are made available under the terms of the Apache License, Version 2.0
 # which accompanies this distribution, and is available at
@@ -10,6 +11,9 @@
 set -o errexit
 set -o pipefail
 
+# disable Fuel ISO download for master branch
+[[ "$BRANCH" == 'master' ]] && exit 0
+
 echo "Host info: $(hostname) $(hostname -I)"
 
 # Configurable environment variables:
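
The two-line guard added above is the usual short-circuit skip: with &&, exit 0 runs only when the branch test succeeds, so every non-master branch falls through to the download logic. The idiom in isolation (BRANCH value illustrative):

    BRANCH=master
    [[ "$BRANCH" == 'master' ]] && exit 0    # master: stop here, nothing to download
    echo "downloading Fuel ISO..."           # reached only on stable/* branches
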
index f6840a0..0623b55 100644 (file)
             branch: '{stream}'
             gs-pathname: ''
             disabled: false
-        - danube:
+        - euphrates:
             branch: 'stable/{stream}'
             gs-pathname: '/{stream}'
-            disabled: false
+            disabled: true
 
 - job-template:
     name: 'armband-{installer}-build-daily-{stream}'
@@ -67,6 +67,7 @@
     publishers:
         - email:
             recipients: armband@enea.com
+        - email-jenkins-admins-on-failure
 
 ########################
 # parameter macros
index 567456d..c9476b1 100644 (file)
             branch: '{stream}'
             gs-pathname: ''
             disabled: false
-        - danube:
+        - euphrates:
             branch: 'stable/{stream}'
             gs-pathname: '/{stream}'
-            disabled: false
+            disabled: true
 #####################################
 # patch verification phases
 #####################################
index a71cf11..29c01bb 100755 (executable)
@@ -2,12 +2,21 @@
 # SPDX-license-identifier: Apache-2.0
 ##############################################################################
 # Copyright (c) 2016 Ericsson AB and others.
-# Copyright (c) 2016 Enea AB.
+# Copyright (c) 2017 Enea AB.
 # All rights reserved. This program and the accompanying materials
 # are made available under the terms of the Apache License, Version 2.0
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+
+# disable Armband iso build for master branch
+if [[ "$BRANCH" == 'master' ]]; then
+    touch $WORKSPACE/.noupload
+    echo "--------------------------------------------------------"
+    echo "Done!"
+    exit 0
+fi
+
 set -o errexit
 set -o nounset
 set -o pipefail
index c56ca19..71601cf 100644 (file)
     pod:
 #compass CI PODs
         - baremetal:
-            slave-label: compass-baremetal
+            slave-label: compass-baremetal-master
             installer: compass
             auto-trigger-name: 'daily-trigger-disabled'
             <<: *master
         - virtual:
-            slave-label: compass-virtual
+            slave-label: compass-virtual-master
             installer: compass
             auto-trigger-name: 'daily-trigger-disabled'
             <<: *master
         - baremetal:
-            slave-label: compass-baremetal
+            slave-label: compass-baremetal-branch
             installer: compass
             auto-trigger-name: 'daily-trigger-disabled'
             <<: *danube
         - virtual:
-            slave-label: compass-virtual
+            slave-label: compass-virtual-branch
             installer: compass
             auto-trigger-name: 'daily-trigger-disabled'
             <<: *danube
@@ -70,8 +70,6 @@
        #     <<: *master
 #--------------------------------------------
     suite:
-        - 'rubbos'
-        - 'vstf'
         - 'posca_stress_traffic'
         - 'posca_stress_ping'
 
 
     publishers:
         - email:
-            recipients: hongbo.tianhongbo@huawei.com matthew.lijun@huawei.com liangqi1@huawei.com sunshine.wang@huawei.com
+            recipients: gabriel.yuyang@huawei.com, liyin11@huawei.com
+        - email-jenkins-admins-on-failure
 
 ########################
 # builder macros
 ####################
 
 - parameter:
-    name: 'bottlenecks-params-compass-baremetal'
+    name: 'bottlenecks-params-compass-baremetal-master'
     parameters:
         - string:
             name: BOTTLENECKS_DB_TARGET
-            default: '104.197.68.199:8086'
+            default: 'http://testresults.opnfv.org/test/api/v1/results'
             description: 'Arguments to use in order to choose the backend DB'
 
 - parameter:
-    name: 'bottlenecks-params-compass-virtual'
+    name: 'bottlenecks-params-compass-virtual-master'
     parameters:
         - string:
             name: BOTTLENECKS_DB_TARGET
-            default: ''
+            default: 'http://testresults.opnfv.org/test/api/v1/results'
             description: 'Arguments to use in order to choose the backend DB'
 
 - parameter:
-    name: 'bottlenecks-params-orange-pod2'
+    name: 'bottlenecks-params-compass-baremetal-branch'
     parameters:
         - string:
             name: BOTTLENECKS_DB_TARGET
-            default: '104.197.68.199:8086'
+            default: 'http://testresults.opnfv.org/test/api/v1/results'
+            description: 'Arguments to use in order to choose the backend DB'
+
+- parameter:
+    name: 'bottlenecks-params-compass-virtual-branch'
+    parameters:
+        - string:
+            name: BOTTLENECKS_DB_TARGET
+            default: 'http://testresults.opnfv.org/test/api/v1/results'
             description: 'Arguments to use in order to choose the backend DB'
index 04e620c..d0e2088 100644 (file)
@@ -10,6 +10,7 @@
 
 #clean up correlated dockers and their images
 bash $WORKSPACE/docker/docker_cleanup.sh -d bottlenecks --debug
+bash $WORKSPACE/docker/docker_cleanup.sh -d Bottlenecks --debug
 bash $WORKSPACE/docker/docker_cleanup.sh -d yardstick --debug
 bash $WORKSPACE/docker/docker_cleanup.sh -d kibana --debug
 bash $WORKSPACE/docker/docker_cleanup.sh -d elasticsearch --debug
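
docker_cleanup.sh is a project-local script whose internals are not shown here; calling it twice simply covers both the lower-case and capitalized image names. A hypothetical plain-docker equivalent that matches either case in one pass:

    # remove every container whose image name contains "bottlenecks", any case
    docker ps -a --format '{{.ID}} {{.Image}}' \
        | grep -i bottlenecks \
        | awk '{print $1}' \
        | xargs -r docker rm -f
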
index e6f8d1b..6bab0e4 100644 (file)
@@ -1,66 +1,79 @@
 #!/bin/bash
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
 #set -e
 [[ $GERRIT_REFSPEC_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
 BOTTLENECKS_IMAGE=opnfv/bottlenecks
 REPORT="True"
 
-if [[ $SUITE_NAME == rubbos || $SUITE_NAME == vstf ]]; then
-    echo "Bottlenecks: to pull image $BOTTLENECKS_IMAGE:${DOCKER_TAG}"
-    docker pull $BOTTLENECKS_IMAGE:$DOCKER_TAG >${redirect}
+RELENG_REPO=${WORKSPACE}/releng
+[ -d ${RELENG_REPO} ] && rm -rf ${RELENG_REPO}
+git clone https://gerrit.opnfv.org/gerrit/releng ${RELENG_REPO} >${redirect}
 
-    echo "Bottlenecks: docker start running"
-    opts="--privileged=true -id"
-    envs="-e INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP} \
-          -e NODE_NAME=${NODE_NAME} -e EXTERNAL_NET=${EXTERNAL_NETWORK} \
-          -e BOTTLENECKS_BRANCH=${BOTTLENECKS_BRANCH} -e GERRIT_REFSPEC_DEBUG=${GERRIT_REFSPEC_DEBUG} \
-          -e BOTTLENECKS_DB_TARGET=${BOTTLENECKS_DB_TARGET} -e PACKAGE_URL=${PACKAGE_URL}"
-    cmd="sudo docker run ${opts} ${envs} $BOTTLENECKS_IMAGE:${DOCKER_TAG} /bin/bash"
-    echo "Bottlenecks: docker cmd running ${cmd}"
-    ${cmd} >${redirect}
+OPENRC=/tmp/admin_rc.sh
+OS_CACERT=/tmp/os_cacert
+
+if [[ $SUITE_NAME == *posca* ]]; then
+    POSCA_SCRIPT=/home/opnfv/bottlenecks/testsuites/posca
+
+    echo "BOTTLENECKS INFO: fetching os credentials from $INSTALLER_TYPE"
+    if [[ $INSTALLER_TYPE == 'compass' ]]; then
+        if [[ ${BRANCH} == 'master' ]]; then
+            ${RELENG_REPO}/utils/fetch_os_creds.sh -d ${OPENRC} -i ${INSTALLER_TYPE} -a ${INSTALLER_IP} -o ${OS_CACERT} >${redirect}
+            if [[ -f ${OS_CACERT} ]]; then
+                echo "BOTTLENECKS INFO: successfully fetched os_cacert for openstack: ${OS_CACERT}"
+            else
+                echo "BOTTLENECKS ERROR: couldn't find os_cacert file: ${OS_CACERT}, please check if it has been properly provided."
+                exit 1
+            fi
+        else
+            ${RELENG_REPO}/utils/fetch_os_creds.sh -d ${OPENRC} -i ${INSTALLER_TYPE} -a ${INSTALLER_IP}  >${redirect}
+        fi
+    fi
 
-    echo "Bottlenecks: obtain docker id"
-    container_id=$(docker ps | grep "$BOTTLENECKS_IMAGE:${DOCKER_TAG}" | awk '{print $1}' | head -1)
-    if [ -z ${container_id} ]; then
-        echo "Cannot find $BOTTLENECKS_IMAGE container ID ${container_id}. Please check if it exists."
-        docker ps -a
+    if [[ -f ${OPENRC} ]]; then
+        echo "BOTTLENECKS INFO: openstack credentials path is ${OPENRC}"
+        if [[ $INSTALLER_TYPE == 'compass' && ${BRANCH} == 'master' ]]; then
+            echo "BOTTLENECKS INFO: writing ${OS_CACERT} to ${OPENRC}"
+            echo "export OS_CACERT=${OS_CACERT}" >> ${OPENRC}
+        fi
+        cat ${OPENRC}
+    else
+        echo "BOTTLENECKS ERROR: couldn't find openstack rc file: ${OPENRC}, please check if it has been properly provided."
         exit 1
     fi
 
-    echo "Bottlenecks: to prepare openstack environment"
-    prepare_env="${REPO_DIR}/ci/prepare_env.sh"
-    echo "Bottlenecks: docker cmd running: ${prepare_env}"
-    sudo docker exec ${container_id} ${prepare_env}
+    echo "BOTTLENECKS INFO: pulling Bottlenecks docker image ${DOCKER_TAG}"
+    docker pull opnfv/bottlenecks:${DOCKER_TAG} >$redirect
 
-    echo "Bottlenecks: to run testsuite ${SUITE_NAME}"
-    run_testsuite="${REPO_DIR}/run_tests.sh -s ${SUITE_NAME}"
-    echo "Bottlenecks: docker cmd running: ${run_testsuite}"
-    sudo docker exec ${container_id} ${run_testsuite}
-else
-    echo "Bottlenecks: installing POSCA docker-compose"
-    if [ -d usr/local/bin/docker-compose ]; then
-        rm -rf usr/local/bin/docker-compose
-    fi
-    curl -L https://github.com/docker/compose/releases/download/1.11.0/docker-compose-`uname -s`-`uname -m` > /usr/local/bin/docker-compose
-    chmod +x /usr/local/bin/docker-compose
+    opts="--privileged=true -id"
+    envs="-e INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP} \
+          -e NODE_NAME=${NODE_NAME} -e EXTERNAL_NET=${EXTERNAL_NETWORK} \
+          -e BRANCH=${BRANCH} -e GERRIT_REFSPEC_DEBUG=${GERRIT_REFSPEC_DEBUG} \
+          -e BOTTLENECKS_DB_TARGET=${BOTTLENECKS_DB_TARGET} -e PACKAGE_URL=${PACKAGE_URL} \
+          -e DEPLOY_SCENARIO=${DEPLOY_SCENARIO}"
+    docker_volume="-v /var/run/docker.sock:/var/run/docker.sock -v /tmp:/tmp"
 
-    echo "Bottlenecks: composing up dockers"
-    cd $WORKSPACE
-    docker-compose -f $WORKSPACE/docker/bottleneck-compose/docker-compose.yml up -d
+    cmd="docker run ${opts} ${envs} --name bottlenecks-load-master ${docker_volume} opnfv/bottlenecks:${DOCKER_TAG} /bin/bash"
+    echo "BOTTLENECKS INFO: running docker run command: ${cmd}"
+    ${cmd} >$redirect
+    sleep 5
 
-    echo "Bottlenecks: running traffic stress/factor testing in posca testsuite "
-    POSCA_SCRIPT=/home/opnfv/bottlenecks/testsuites/posca
     if [[ $SUITE_NAME == posca_stress_traffic ]]; then
         TEST_CASE=posca_factor_system_bandwidth
-        echo "Bottlenecks: pulling tutum/influxdb for yardstick"
-        docker pull tutum/influxdb:0.13
-        sleep 5
-        docker exec bottleneckcompose_bottlenecks_1 python ${POSCA_SCRIPT}/run_posca.py testcase $TEST_CASE $REPORT
+        testcase_cmd="docker exec bottlenecks-load-master python ${POSCA_SCRIPT}/../run_testsuite.py testcase $TEST_CASE $REPORT"
+        echo "BOTTLENECKS INFO: running test case ${TEST_CASE} with report indicator: ${testcase_cmd}"
+        ${testcase_cmd} >$redirect
     elif [[ $SUITE_NAME == posca_stress_ping ]]; then
         TEST_CASE=posca_factor_ping
-        sleep 5
-        docker exec bottleneckcompose_bottlenecks_1 python ${POSCA_SCRIPT}/run_posca.py testcase $TEST_CASE $REPORT
+        testcase_cmd="docker exec bottlenecks-load-master python ${POSCA_SCRIPT}/../run_testsuite.py testcase $TEST_CASE $REPORT"
+        echo "BOTTLENECKS INFO: running test case ${TEST_CASE} with report indicator: ${testcase_cmd}"
+        ${testcase_cmd} >$redirect
     fi
-
-    echo "Bottlenecks: cleaning up docker-compose images and dockers"
-    docker-compose -f $WORKSPACE/docker/bottleneck-compose/docker-compose.yml down --rmi all
-fi
\ No newline at end of file
+fi
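
The rewritten runner uses a run-detached-then-exec pattern: one long-lived privileged container is started with -id, and each POSCA test case is then executed inside it with docker exec, so all cases share the same environment. The pattern in isolation (image name and command are illustrative):

    # start a detached container idling on a shell
    docker run -id --name bottlenecks-load-master opnfv/bottlenecks:latest /bin/bash
    # run any number of commands in that same environment
    docker exec bottlenecks-load-master python --version
    # tear down once the suite is finished
    docker rm -f bottlenecks-load-master
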
index fc3018f..00a78ce 100644 (file)
@@ -12,14 +12,14 @@ if [[ -e securityaudit.log ]] ; then
     if grep ERROR securityaudit.log; then
         EXITSTATUS=1
     fi
-    
-    cat securityaudit.log  | awk -F"ERROR - " '{print $2}' > shortlog
-    
+
+    cat securityaudit.log  | awk -F"ERROR - " '{print $2}' | sed -e "s/\"/\\\\\"/g;s/\'/\\\\\'/g"> shortlog
+
     ssh -p 29418 gerrit.opnfv.org \
         "gerrit review -p $GERRIT_PROJECT \
         -m \"$(cat shortlog)\" \
         $GERRIT_PATCHSET_REVISION \
         --notify NONE"
-    
+
     exit $EXITSTATUS
 fi
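
The new sed stage backslash-escapes double and single quotes in the audit output so the text survives being interpolated into the quoted -m "..." argument of gerrit review. The double-quote half of the idiom on canned input (sample log line is made up):

    echo 'ERROR - found "secret" in config' \
        | awk -F"ERROR - " '{print $2}' \
        | sed -e 's/"/\\"/g'
    # prints: found \"secret\" in config
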
index 436a173..1190963 100644 (file)
@@ -15,7 +15,7 @@ source $WORKSPACE/opnfv-projects.sh
 for project in "${PROJECT_LIST[@]}"
 
 do
-  cmd="anteater --project testproj --path /home/opnfv/anteater/allrepos/$project"
+  cmd="/home/opnfv/venv/bin/anteater --project testproj --path /home/opnfv/anteater/allrepos/$project"
   echo "Executing command inside container"
   echo "$cmd"
   echo "--------------------------------------------------------"
index 9bd3cc3..35f9354 100644 (file)
@@ -1,5 +1,7 @@
 #!/bin/bash
 cd $WORKSPACE
+REPORTDIR='.reports'
+mkdir -p $REPORTDIR
 echo "Generating patchset file to list changed files"
 git diff HEAD^1 --name-only | sed "s#^#/home/opnfv/anteater/$PROJECT/#" > $WORKSPACE/patchset
 echo "Changed files are"
@@ -7,7 +9,7 @@ echo "--------------------------------------------------------"
 cat $WORKSPACE/patchset
 echo "--------------------------------------------------------"
 
-vols="-v $WORKSPACE:/home/opnfv/anteater/$PROJECT"
+vols="-v $WORKSPACE:/home/opnfv/anteater/$PROJECT -v $WORKSPACE/$REPORTDIR:/home/opnfv/anteater/$REPORTDIR"
 envs="-e PROJECT=$PROJECT"
 
 echo "Pulling releng-anteater docker image"
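
git diff HEAD^1 --name-only prints paths relative to the repository root; the sed expression prepends the container-side base directory, using # as the delimiter so the slashes in the prefix need no escaping. On canned input (paths and project name are illustrative):

    printf 'docs/index.rst\nsrc/main.py\n' \
        | sed "s#^#/home/opnfv/anteater/myproject/#"
    # /home/opnfv/anteater/myproject/docs/index.rst
    # /home/opnfv/anteater/myproject/src/main.py
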
index e2f6ceb..55d629c 100644 (file)
@@ -77,7 +77,7 @@
                     comment-contains-value: 'reverify'
             projects:
               - project-compare-type: 'REG_EXP'
-                project-pattern: 'apex|armband|bamboo|barometer|bottlenecks|calipso|compass4nfv|conductor|cooper|functest|octopus|pharos|releng|sandbox'
+                project-pattern: 'apex|armband|bamboo|barometer|bottlenecks|calipso|compass4nfv|conductor|cooper|cperf|daisy|doctor|dovetail|dpacc|enfv|escalator|fds|functest|octopus|pharos|releng|sandbox|yardstick'
                 branches:
                   - branch-compare-type: 'ANT'
                     branch-pattern: '**/{branch}'
     builders:
         - anteater-security-audit
         - report-security-audit-result-to-gerrit
+    publishers:
+      - archive-artifacts:
+          artifacts: ".reports/*"
+
 ########################
 # builder macros
 ########################
index 09ef441..4c12f19 100644 (file)
         stream: master
         branch: '{stream}'
         gs-pathname: ''
+        ppa-pathname: '/{stream}'
         disabled: false
         openstack-version: ocata
     danube: &danube
         stream: danube
         branch: 'stable/{stream}'
         gs-pathname: '/{stream}'
+        ppa-pathname: '/{stream}'
         disabled: false
         openstack-version: newton
 #--------------------------------
         - 'os-nosdn-openo-ha':
             disabled: false
             auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
+        - 'os-odl-sfc-ha':
+            disabled: false
+            auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
+        - 'os-nosdn-dpdk-ha':
+            disabled: false
+            auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
+        - 'k8-nosdn-nofeature-ha':
+            disabled: false
+            auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
 
 
     jobs:
         - compass-ci-parameter:
             installer: '{installer}'
             gs-pathname: '{gs-pathname}'
+            ppa-pathname: '{ppa-pathname}'
         - string:
             name: DEPLOY_SCENARIO
             default: '{scenario}'
                   use-build-time: true
             steps:
                 - trigger-builds:
-                    - project: 'dovetail-compass-{pod}-proposed_tests-master'
+                    - project: 'dovetail-compass-{pod}-proposed_tests-{stream}'
                       current-parameters: false
-                      predefined-parameters:
+                      predefined-parameters: |
+                        DOCKER_TAG=latest
                         DEPLOY_SCENARIO={scenario}
                       block: true
                       same-node: true
                   use-build-time: true
             steps:
                 - trigger-builds:
-                    - project: 'dovetail-compass-{pod}-proposed_tests-danube'
+                    - project: 'dovetail-compass-{pod}-proposed_tests-{stream}'
                       current-parameters: false
                       predefined-parameters:
                         DEPLOY_SCENARIO={scenario}
                   label: '{stream}'
             steps:
                 - trigger-builds:
-                    - project: 'dovetail-compass-{pod}-proposed_tests-master'
+                    - project: 'dovetail-compass-{pod}-proposed_tests-{stream}'
+                      current-parameters: false
+                      predefined-parameters:
+                        DEPLOY_SCENARIO={scenario}
+                      block: true
+                      same-node: true
+                      block-thresholds:
+                        build-step-failure-threshold: 'never'
+                        failure-threshold: 'never'
+                        unstable-threshold: 'FAILURE'
 
 - job-template:
     name: 'compass-deploy-{pod}-daily-{stream}'
         - compass-ci-parameter:
             installer: '{installer}'
             gs-pathname: '{gs-pathname}'
+            ppa-pathname: '{ppa-pathname}'
         - '{slave-label}-defaults'
         - '{installer}-defaults'
 
     builders:
         - description-setter:
             description: "POD: $NODE_NAME"
-        - shell:
-            !include-raw-escape: ./compass-download-artifact.sh
-        - shell:
-            !include-raw-escape: ./compass-deploy.sh
+        - conditional-step:
+            condition-kind: regex-match
+            regex: master
+            label: '{stream}'
+            steps:
+                - shell:
+                    !include-raw-escape: ./compass-build.sh
+                - shell:
+                    !include-raw-escape: ./compass-deploy.sh
+        - conditional-step:
+            condition-kind: regex-match
+            regex: danube
+            label: '{stream}'
+            steps:
+                - shell:
+                    !include-raw-escape: ./compass-download-artifact.sh
+                - shell:
+                    !include-raw-escape: ./compass-deploy.sh
+
 
 ########################
 # parameter macros
             name: GS_URL
             default: '$GS_BASE{gs-pathname}'
             description: "URL to Google Storage."
+        - string:
+            name: CACHE_DIRECTORY
+            default: "$HOME/opnfv/cache/$PROJECT{gs-pathname}"
+            description: "Directory where the cache to be used during the build is located."
+        - string:
+            name: PPA_REPO
+            default: "http://artifacts.opnfv.org/compass4nfv/package{ppa-pathname}"
+        - string:
+            name: PPA_CACHE
+            default: "$WORKSPACE/work/repo/"
 
 ########################
 # trigger macros
 - trigger:
     name: 'compass-os-odl_l2-nofeature-ha-baremetal-centos-master-trigger'
     triggers:
-        - timed: '0 23 * * *'
+        - timed: ''
 - trigger:
     name: 'compass-os-odl_l3-nofeature-ha-baremetal-centos-master-trigger'
     triggers:
     name: 'compass-os-nosdn-kvm-ha-baremetal-centos-master-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'compass-os-nosdn-dpdk-ha-baremetal-centos-master-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'compass-os-odl-sfc-ha-baremetal-centos-master-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'compass-k8-nosdn-nofeature-ha-baremetal-centos-master-trigger'
+    triggers:
+        - timed: ''
 
 - trigger:
     name: 'compass-os-nosdn-nofeature-ha-baremetal-master-trigger'
 - trigger:
     name: 'compass-os-odl_l2-nofeature-ha-baremetal-master-trigger'
     triggers:
-        - timed: '0 22 * * *'
+        - timed: ''
 - trigger:
     name: 'compass-os-odl_l3-nofeature-ha-baremetal-master-trigger'
     triggers:
 - trigger:
     name: 'compass-os-odl_l2-moon-ha-baremetal-master-trigger'
     triggers:
-        - timed: ''
+        - timed: '0 12 * * *'
 - trigger:
     name: 'compass-os-nosdn-kvm-ha-baremetal-master-trigger'
+    triggers:
+        - timed: '0 14 * * *'
+- trigger:
+    name: 'compass-os-nosdn-dpdk-ha-baremetal-master-trigger'
+    triggers:
+        - timed: '0 16 * * *'
+- trigger:
+    name: 'compass-k8-nosdn-nofeature-ha-baremetal-master-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'compass-os-odl-sfc-ha-baremetal-master-trigger'
+    triggers:
+        - timed: '0 4 * * *'
+
 
 - trigger:
     name: 'compass-os-nosdn-nofeature-ha-baremetal-danube-trigger'
     name: 'compass-os-nosdn-kvm-ha-baremetal-danube-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'compass-os-nosdn-dpdk-ha-baremetal-danube-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'compass-k8-nosdn-nofeature-ha-baremetal-danube-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'compass-os-odl-sfc-ha-baremetal-danube-trigger'
+    triggers:
+        - timed: ''
+
 
 - trigger:
     name: 'compass-os-nosdn-nofeature-ha-virtual-master-trigger'
 - trigger:
     name: 'compass-os-odl_l2-nofeature-ha-virtual-master-trigger'
     triggers:
-        - timed: '0 20 * * *'
+        - timed: ''
 - trigger:
     name: 'compass-os-odl_l3-nofeature-ha-virtual-master-trigger'
     triggers:
 - trigger:
     name: 'compass-os-odl_l2-moon-ha-virtual-master-trigger'
     triggers:
-        - timed: ''
+        - timed: '0 22 * * *'
 - trigger:
     name: 'compass-os-nosdn-kvm-ha-virtual-master-trigger'
+    triggers:
+        - timed: '0 23 * * *'
+- trigger:
+    name: 'compass-os-nosdn-dpdk-ha-virtual-master-trigger'
+    triggers:
+        - timed: '0 17 * * *'
+- trigger:
+    name: 'compass-k8-nosdn-nofeature-ha-virtual-master-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'compass-os-odl-sfc-ha-virtual-master-trigger'
+    triggers:
+        - timed: '0 16 * * *'
 
 - trigger:
     name: 'compass-os-nosdn-nofeature-ha-virtual-danube-trigger'
     name: 'compass-os-nosdn-kvm-ha-virtual-danube-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'compass-os-nosdn-dpdk-ha-virtual-danube-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'compass-os-odl-sfc-ha-virtual-danube-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'compass-k8-nosdn-nofeature-ha-virtual-danube-trigger'
+    triggers:
+        - timed: ''
index 2668ccd..7a5af5f 100644 (file)
@@ -6,24 +6,23 @@ echo "Starting the deployment on baremetal environment using $INSTALLER_TYPE. Th
 echo "--------------------------------------------------------"
 echo
 
-# source the properties file so we get OPNFV vars
-source $BUILD_DIRECTORY/latest.properties
-
-# echo the info about artifact that is used during the deployment
-echo "Using ${OPNFV_ARTIFACT_URL/*\/} for deployment"
-
-if [[ ! "$JOB_NAME" =~ (verify|merge) ]]; then
-    # for none-merge deployments
-    # checkout the commit that was used for building the downloaded artifact
-    # to make sure the ISO and deployment mechanism uses same versions
-    echo "Checking out $OPNFV_GIT_SHA1"
-    git checkout $OPNFV_GIT_SHA1 --quiet
-fi
-
 echo 1 > /proc/sys/vm/drop_caches
 
 export CONFDIR=$WORKSPACE/deploy/conf
 if [[ "$BRANCH" = 'stable/danube' ]]; then
+    # source the properties file so we get OPNFV vars
+    source $BUILD_DIRECTORY/latest.properties
+    # echo the info about artifact that is used during the deployment
+    echo "Using ${OPNFV_ARTIFACT_URL/*\/} for deployment"
+
+    if [[ ! "$JOB_NAME" =~ (verify|merge) ]]; then
+        # for non-merge deployments
+        # checkout the commit that was used for building the downloaded artifact
+        # to make sure the ISO and deployment mechanism uses same versions
+        echo "Checking out $OPNFV_GIT_SHA1"
+        git checkout $OPNFV_GIT_SHA1 --quiet
+    fi
+
     export ISO_URL=file://$BUILD_DIRECTORY/compass.iso
 else
     export ISO_URL=file://$BUILD_DIRECTORY/compass.tar.gz
@@ -40,6 +39,8 @@ elif [[ "${DEPLOY_SCENARIO}" =~ "-onos" ]]; then
     export NETWORK_CONF_FILE=network_onos.yml
 elif [[ "${DEPLOY_SCENARIO}" =~ "-openo" ]]; then
     export NETWORK_CONF_FILE=network_openo.yml
+elif [[ "${DEPLOY_SCENARIO}" =~ "-dpdk" ]]; then
+    export NETWORK_CONF_FILE=network_dpdk.yml
 else
     export NETWORK_CONF_FILE=network.yml
 fi
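
The elif chain maps a substring of DEPLOY_SCENARIO onto a network config file. The same mapping reads more declaratively as a case statement; this sketch covers only the branches visible in the hunk above, and the full script may handle more scenarios:

    case "${DEPLOY_SCENARIO}" in
        *-onos*)  NETWORK_CONF_FILE=network_onos.yml ;;
        *-openo*) NETWORK_CONF_FILE=network_openo.yml ;;
        *-dpdk*)  NETWORK_CONF_FILE=network_dpdk.yml ;;
        *)        NETWORK_CONF_FILE=network.yml ;;
    esac
    export NETWORK_CONF_FILE
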
index e43f976..ee91e02 100644 (file)
@@ -29,7 +29,7 @@
             os-version: 'xenial'
             openstack-os-version: ''
         - 'centos7':
-            disabled: false
+            disabled: true
             os-version: 'centos7'
             openstack-os-version: ''
 #####################################
             condition: SUCCESSFUL
             projects:
                 - name: 'functest-compass-virtual-suite-{stream}'
-                  current-parameters: true
-                  predefined-parameters:
+                  current-parameters: false
+                  predefined-parameters: |
                     FUNCTEST_SUITE_NAME=healthcheck
+                    DEPLOY_SCENARIO=os-nosdn-nofeature-ha
                   node-parameters: true
                   kill-phase-on: NEVER
                   abort-all-job: true
                 - name: 'functest-compass-virtual-suite-{stream}'
-                  current-parameters: true
-                  predefined-parameters:
+                  current-parameters: false
+                  predefined-parameters: |
                     FUNCTEST_SUITE_NAME=vping_ssh
+                    DEPLOY_SCENARIO=os-nosdn-nofeature-ha
                   node-parameters: true
                   kill-phase-on: NEVER
                   abort-all-job: true
index 592e54d..6524d20 100644 (file)
         - 'os-nosdn-nofeature-noha':
             auto-trigger-name: 'daisy-{scenario}-{pod}-daily-{stream}-trigger'
         # ODL_L3 scenarios
-        - 'os-odl_l3-nofeature-noha':
+        - 'os-odl_l3-nofeature-ha':
             auto-trigger-name: 'daisy-{scenario}-{pod}-daily-{stream}-trigger'
         # ODL_L2 scenarios
-        - 'os-odl_l2-nofeature-noha':
+        - 'os-odl_l2-nofeature-ha':
             auto-trigger-name: 'daisy-{scenario}-{pod}-daily-{stream}-trigger'
 
     jobs:
             installer: '{installer}'
         - string:
             name: DEPLOY_SCENARIO
-            default: 'os-nosdn-nofeature-noha'
+            default: 'os-nosdn-nofeature-ha'
         - 'daisy-project-parameter':
             gs-pathname: '{gs-pathname}'
         - string:
 - trigger:
     name: 'daisy-os-nosdn-nofeature-ha-baremetal-daily-master-trigger'
     triggers:
-        - timed: ''
+        - timed: '0 12 * * *'
 # NOHA Scenarios
 - trigger:
     name: 'daisy-os-nosdn-nofeature-noha-baremetal-daily-master-trigger'
     triggers:
-        - timed: 'H 12 * * *'
+        - timed: ''
 # ODL_L3 Scenarios
 - trigger:
-    name: 'daisy-os-odl_l3-nofeature-noha-baremetal-daily-master-trigger'
+    name: 'daisy-os-odl_l3-nofeature-ha-baremetal-daily-master-trigger'
     triggers:
-        - timed: 'H 16 * * *'
+        - timed: '0 16 * * *'
 # ODL_L2 Scenarios
 - trigger:
-    name: 'daisy-os-odl_l2-nofeature-noha-baremetal-daily-master-trigger'
+    name: 'daisy-os-odl_l2-nofeature-ha-baremetal-daily-master-trigger'
     triggers:
         - timed: ''
 #-----------------------------------------------
 - trigger:
     name: 'daisy-os-nosdn-nofeature-ha-virtual-daily-master-trigger'
     triggers:
-        - timed: ''
+        - timed: '0 12 * * *'
 # NOHA Scenarios
 - trigger:
     name: 'daisy-os-nosdn-nofeature-noha-virtual-daily-master-trigger'
     triggers:
-        - timed: 'H 12 * * *'
+        - timed: ''
 # ODL_L3 Scenarios
 - trigger:
-    name: 'daisy-os-odl_l3-nofeature-noha-virtual-daily-master-trigger'
+    name: 'daisy-os-odl_l3-nofeature-ha-virtual-daily-master-trigger'
     triggers:
-        - timed: 'H 16 * * *'
+        - timed: '0 16 * * *'
 # ODL_L3 Scenarios
 - trigger:
-    name: 'daisy-os-odl_l2-nofeature-noha-virtual-daily-master-trigger'
+    name: 'daisy-os-odl_l2-nofeature-ha-virtual-daily-master-trigger'
     triggers:
         - timed: ''
 
index 57e44e3..fd0da79 100644 (file)
 
     publishers:
         - '{installer}-recipients'
+        - email-jenkins-admins-on-failure
 
 - job-template:
     name: '{installer}-{phase}-daily-{stream}'
             description: 'Git URL to use on this Jenkins Slave'
         - string:
             name: DEPLOY_SCENARIO
-            default: 'os-nosdn-nofeature-noha'
+            default: 'os-nosdn-nofeature-ha'
         - '{installer}-project-parameter':
             gs-pathname: '{gs-pathname}'
 
     publishers:
         - email:
             recipients: hu.zhijiang@zte.com.cn lu.yao135@zte.com.cn zhou.ya@zte.com.cn yangyang1@zte.com.cn julienjut@gmail.com
+        - email-jenkins-admins-on-failure
 
 - parameter:
     name: 'daisy-project-parameter'
index c5454c7..afde9ef 100644 (file)
         - '{auto-trigger-name}':
             project: '{project}'
             branch: '{branch}'
+            files: 'tests/**'
 
     builders:
         - shell: "[ -e tests/run.sh ] && bash -n ./tests/run.sh"
         - '{auto-trigger-name}':
             project: '{project}'
             branch: '{branch}'
+            files: 'tests/**'
 
     builders:
         - 'clean-workspace-log'
             #       so this symbolic link should not be in 'tests/'. Otherwise,
             #       we'll have the same log twice in jenkins console log.
             ln -sfn $HOME/opnfv/functest/results/{stream} functest_results
+            # NOTE: Fetch the functest script into $WORKSPACE. This script is
+            #       needed by set-functest-env.sh to perform the VM image
+            #       download from the E release cycle onward.
+            mkdir -p functest/ci
+            wget https://git.opnfv.org/functest/plain/functest/ci/download_images.sh -O functest/ci/download_images.sh
         - 'functest-suite-builder'
         - shell: |
-            functest_log="$HOME/opnfv/functest/results/{stream}/{project}.log"
+            functest_log="$HOME/opnfv/functest/results/{stream}/$FUNCTEST_SUITE_NAME.log"
             # NOTE: checking the test result, as the previous job could return
             #       0 regardless the result of doctor test scenario.
             grep -e ' OK$' $functest_log || exit 1
         - archive:
             artifacts: 'tests/*.log'
         - archive:
-            artifacts: 'functest_results/{project}.log'
+            artifacts: 'functest_results/$FUNCTEST_SUITE_NAME.log'
+        - email-jenkins-admins-on-failure
 
 
 #####################################
                     branch-pattern: '**/{branch}'
                 file-paths:
                   - compare-type: ANT
-                    pattern: 'tests/**'
+                    pattern: '{files}'
             skip-vote:
                 successful: true
                 failed: true
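
The result check above relies on grep's exit status: grep returns non-zero when no line ends in " OK", and the || exit 1 then fails the Jenkins shell step even if the preceding job reported success. The gate in isolation (log content is made up):

    printf 'doctor scenario ... OK\n' > /tmp/doctor.log
    grep -e ' OK$' /tmp/doctor.log || exit 1   # non-zero grep => build failure
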
index 43978f6..3f130c9 100644 (file)
@@ -25,7 +25,7 @@
         branch: 'stable/{stream}'
         dovetail-branch: master
         gs-pathname: '/{stream}'
-        docker-tag: 'cvp.0.2.0'
+        docker-tag: 'cvp.0.4.0'
 
 #-----------------------------------
 # POD, PLATFORM, AND BRANCH MAPPING
 # that have not been switched using labels for slaves
 #--------------------------------
 #apex PODs
-        - lf-pod1:
-            slave-label: '{pod}'
+        - virtual:
+            slave-label: apex-virtual-master
             SUT: apex
             auto-trigger-name: 'daily-trigger-disabled'
             <<: *master
-        - lf-pod1:
-            slave-label: '{pod}'
+        - baremetal:
+            slave-label: apex-baremetal-master
+            SUT: apex
+            auto-trigger-name: 'daily-trigger-disabled'
+            <<: *master
+        - virtual:
+            slave-label: apex-virtual-danube
+            SUT: apex
+            auto-trigger-name: 'daily-trigger-disabled'
+            <<: *danube
+        - baremetal:
+            slave-label: apex-baremetal-danube
             SUT: apex
             auto-trigger-name: 'daily-trigger-disabled'
             <<: *danube
             <<: *danube
 #--------------------------------
     testsuite:
-        - 'debug'
         - 'compliance_set'
         - 'proposed_tests'
 
             artifacts: 'results/**/*'
             allow-empty: true
             fingerprint: true
+        - email-jenkins-admins-on-failure
 
 #--------------------------
 # builder macros
index 85bc54d..a078c8f 100755 (executable)
@@ -69,6 +69,8 @@ else
     exit 1
 fi
 
+set +e
+
 sudo pip install virtualenv
 
 cd ${releng_repo}/modules
@@ -81,6 +83,8 @@ if [[ ${INSTALLER_TYPE} == compass ]]; then
     options="-u root -p root"
 elif [[ ${INSTALLER_TYPE} == fuel ]]; then
     options="-u root -p r00tme"
+elif [[ ${INSTALLER_TYPE} == apex ]]; then
+    options="-u stack -k /root/.ssh/id_rsa"
 else
     echo "Generating pod.yaml on ${INSTALLER_TYPE} is not supported currently."
     echo "HA test cases may not run properly."
@@ -93,6 +97,8 @@ ${cmd}
 
 deactivate
 
+set -e
+
 cd ${WORKSPACE}
 
 if [ -f ${DOVETAIL_CONFIG}/pod.yaml ]; then
@@ -111,6 +117,11 @@ if [ "$INSTALLER_TYPE" == "fuel" ]; then
     sshpass -p r00tme sudo scp $ssh_options root@${INSTALLER_IP}:~/.ssh/id_rsa ${DOVETAIL_CONFIG}/id_rsa
 fi
 
+if [ "$INSTALLER_TYPE" == "apex" ]; then
+    echo "Fetching id_rsa file from jump_server $INSTALLER_IP..."
+    sudo scp $ssh_options stack@${INSTALLER_IP}:~/.ssh/id_rsa ${DOVETAIL_CONFIG}/id_rsa
+fi
+
 # sdnvpn test case needs to download this image first before running
 echo "Download image ubuntu-16.04-server-cloudimg-amd64-disk1.img ..."
 wget -q -nc http://artifacts.opnfv.org/sdnvpn/ubuntu-16.04-server-cloudimg-amd64-disk1.img -P ${DOVETAIL_CONFIG}
@@ -159,5 +170,8 @@ sudo cp -r ${DOVETAIL_HOME}/results ./
 # PRIMARY_GROUP=$(id -gn $CURRENT_USER)
 # sudo chown -R ${CURRENT_USER}:${PRIMARY_GROUP} ${WORKSPACE}/results
 
+# remove unneeded yardstick workspace from results to save disk space
+sudo rm -rf ./results/workspace
+
 echo "Dovetail: done!"
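
The set +e / set -e pair added around the pod.yaml generation suspends errexit for a best-effort section, then re-arms it for the steps that must succeed. The idiom in isolation (the failing command is illustrative):

    set -e                      # job default: abort on any error
    set +e                      # tolerate failures temporarily
    false                       # would normally kill the script; now ignored
    echo "still running after a failure"
    set -e                      # strict mode back on for the remaining steps
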
 
index 700657d..11904cb 100644 (file)
             artifacts: 'results/**/*'
             allow-empty: true
             fingerprint: true
+        - email-jenkins-admins-on-failure
 
 ########################
 # builder macros
index 6867708..4b2f8e9 100644 (file)
         branch: '{stream}'
         disabled: false
         gs-pathname: ''
-    danube: &danube
-        stream: danube
+    euphrates: &euphrates
+        stream: euphrates
         branch: 'stable/{stream}'
-        disabled: false
+        disabled: true
         gs-pathname: '/{stream}'
 #--------------------------------
 # POD, INSTALLER, AND BRANCH MAPPING
             <<: *master
         - baremetal:
             slave-label: fuel-baremetal
-            <<: *danube
+            <<: *euphrates
         - virtual:
             slave-label: fuel-virtual
-            <<: *danube
+            <<: *euphrates
 #--------------------------------
 #        None-CI PODs
 #--------------------------------
         - zte-pod1:
             slave-label: zte-pod1
             <<: *master
-        - zte-pod2:
-            slave-label: zte-pod2
-            <<: *master
         - zte-pod3:
             slave-label: zte-pod3
             <<: *master
         - zte-pod1:
             slave-label: zte-pod1
-            <<: *danube
+            <<: *euphrates
         - zte-pod3:
             slave-label: zte-pod3
-            <<: *danube
+            <<: *euphrates
 #--------------------------------
 #       scenarios
 #--------------------------------
                         build-step-failure-threshold: 'never'
                         failure-threshold: 'never'
                         unstable-threshold: 'FAILURE'
+        # ZTE pod1 weekly(Saturday), os-odl_l2-nofeature-ha, run against master and euphrates
+        - conditional-step:
+            condition-kind: and
+            condition-operands:
+                - condition-kind: regex-match
+                  regex: os-odl_l2-nofeature-ha
+                  label: '{scenario}'
+                - condition-kind: regex-match
+                  regex: zte-pod1
+                  label: '{pod}'
+                - condition-kind: day-of-week
+                  day-selector: select-days
+                  days:
+                      SAT: true
+                  use-build-time: true
+            steps:
+                - trigger-builds:
+                    - project: 'dovetail-fuel-zte-pod1-proposed_tests-{stream}'
+                      current-parameters: false
+                      predefined-parameters:
+                        DEPLOY_SCENARIO={scenario}
+                      block: true
+                      same-node: true
+                      block-thresholds:
+                        build-step-failure-threshold: 'never'
+                        failure-threshold: 'never'
+                        unstable-threshold: 'FAILURE'
 
     publishers:
         - email:
             recipients: peter.barabas@ericsson.com fzhadaev@mirantis.com
+        - email-jenkins-admins-on-failure
 
 - job-template:
     name: 'fuel-deploy-{pod}-daily-{stream}'
     publishers:
         - email:
             recipients: peter.barabas@ericsson.com fzhadaev@mirantis.com
+        - email-jenkins-admins-on-failure
 
 ########################
 # parameter macros
     triggers:
         - timed: ''
 #-----------------------------------------------
-# Triggers for job running on fuel-baremetal against danube branch
+# Triggers for job running on fuel-baremetal against euphrates branch
 #-----------------------------------------------
 # HA Scenarios
 - trigger:
-    name: 'fuel-os-nosdn-nofeature-ha-baremetal-daily-danube-trigger'
+    name: 'fuel-os-nosdn-nofeature-ha-baremetal-daily-euphrates-trigger'
     triggers:
-        - timed: '0 20 * * *'
+        - timed: '' # '0 20 * * *'
 - trigger:
-    name: 'fuel-os-odl_l2-nofeature-ha-baremetal-daily-danube-trigger'
+    name: 'fuel-os-odl_l2-nofeature-ha-baremetal-daily-euphrates-trigger'
     triggers:
-        - timed: '0 23 * * *'
+        - timed: '' # '0 23 * * *'
 - trigger:
-    name: 'fuel-os-odl_l3-nofeature-ha-baremetal-daily-danube-trigger'
+    name: 'fuel-os-odl_l3-nofeature-ha-baremetal-daily-euphrates-trigger'
     triggers:
-        - timed: '0 2 * * *'
+        - timed: '' # '0 2 * * *'
 - trigger:
-    name: 'fuel-os-onos-sfc-ha-baremetal-daily-danube-trigger'
+    name: 'fuel-os-onos-sfc-ha-baremetal-daily-euphrates-trigger'
     triggers:
         - timed: '' # '0 5 * * *'
 - trigger:
-    name: 'fuel-os-onos-nofeature-ha-baremetal-daily-danube-trigger'
+    name: 'fuel-os-onos-nofeature-ha-baremetal-daily-euphrates-trigger'
     triggers:
         - timed: '' # '0 8 * * *'
 - trigger:
-    name: 'fuel-os-odl_l2-sfc-ha-baremetal-daily-danube-trigger'
+    name: 'fuel-os-odl_l2-sfc-ha-baremetal-daily-euphrates-trigger'
     triggers:
-        - timed: '0 11 * * *'
+        - timed: '' # '0 11 * * *'
 - trigger:
-    name: 'fuel-os-odl_l2-bgpvpn-ha-baremetal-daily-danube-trigger'
+    name: 'fuel-os-odl_l2-bgpvpn-ha-baremetal-daily-euphrates-trigger'
     triggers:
-        - timed: '0 14 * * *'
+        - timed: '' # '0 14 * * *'
 - trigger:
-    name: 'fuel-os-nosdn-kvm-ha-baremetal-daily-danube-trigger'
+    name: 'fuel-os-nosdn-kvm-ha-baremetal-daily-euphrates-trigger'
     triggers:
-        - timed: '0 17 * * *'
+        - timed: '' # '0 17 * * *'
 - trigger:
-    name: 'fuel-os-nosdn-ovs-ha-baremetal-daily-danube-trigger'
+    name: 'fuel-os-nosdn-ovs-ha-baremetal-daily-euphrates-trigger'
     triggers:
-        - timed: '0 20 * * *'
+        - timed: '' # '0 20 * * *'
 - trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-baremetal-daily-danube-trigger'
+    name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-baremetal-daily-euphrates-trigger'
     triggers:
-        - timed: '0 12 * * *'
+        - timed: '' # '0 12 * * *'
 - trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-baremetal-daily-danube-trigger'
+    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-baremetal-daily-euphrates-trigger'
     triggers:
-        - timed: '0 8 * * *'
+        - timed: '' # '0 8 * * *'
 # NOHA Scenarios
 - trigger:
-    name: 'fuel-os-nosdn-nofeature-noha-baremetal-daily-danube-trigger'
+    name: 'fuel-os-nosdn-nofeature-noha-baremetal-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l2-nofeature-noha-baremetal-daily-danube-trigger'
+    name: 'fuel-os-odl_l2-nofeature-noha-baremetal-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l3-nofeature-noha-baremetal-daily-danube-trigger'
+    name: 'fuel-os-odl_l3-nofeature-noha-baremetal-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-onos-sfc-noha-baremetal-daily-danube-trigger'
+    name: 'fuel-os-onos-sfc-noha-baremetal-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-onos-nofeature-noha-baremetal-daily-danube-trigger'
+    name: 'fuel-os-onos-nofeature-noha-baremetal-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l2-sfc-noha-baremetal-daily-danube-trigger'
+    name: 'fuel-os-odl_l2-sfc-noha-baremetal-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l2-bgpvpn-noha-baremetal-daily-danube-trigger'
+    name: 'fuel-os-odl_l2-bgpvpn-noha-baremetal-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-nosdn-kvm-noha-baremetal-daily-danube-trigger'
+    name: 'fuel-os-nosdn-kvm-noha-baremetal-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-nosdn-ovs-noha-baremetal-daily-danube-trigger'
+    name: 'fuel-os-nosdn-ovs-noha-baremetal-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-baremetal-daily-danube-trigger'
+    name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-baremetal-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-baremetal-daily-danube-trigger'
+    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-baremetal-daily-euphrates-trigger'
     triggers:
         - timed: ''
 #-----------------------------------------------
 - trigger:
     name: 'fuel-os-odl_l2-nofeature-noha-virtual-daily-master-trigger'
     triggers:
-        - timed: '' # '35 15 * * *'
+        - timed: '35 15 * * *'
 - trigger:
     name: 'fuel-os-odl_l3-nofeature-noha-virtual-daily-master-trigger'
     triggers:
-        - timed: '' # '5 18 * * *'
+        - timed: '5 18 * * *'
 - trigger:
     name: 'fuel-os-onos-sfc-noha-virtual-daily-master-trigger'
     triggers:
 - trigger:
     name: 'fuel-os-nosdn-ovs-noha-virtual-daily-master-trigger'
     triggers:
-        - timed: '' # '5 9 * * *'
+        - timed: '5 9 * * *'
 - trigger:
     name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-virtual-daily-master-trigger'
     triggers:
     triggers:
         - timed: '' # '30 20 * * *'
 #-----------------------------------------------
-# Triggers for job running on fuel-virtual against danube branch
+# Triggers for job running on fuel-virtual against euphrates branch
 #-----------------------------------------------
 - trigger:
-    name: 'fuel-os-nosdn-nofeature-ha-virtual-daily-danube-trigger'
+    name: 'fuel-os-nosdn-nofeature-ha-virtual-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l2-nofeature-ha-virtual-daily-danube-trigger'
+    name: 'fuel-os-odl_l2-nofeature-ha-virtual-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l3-nofeature-ha-virtual-daily-danube-trigger'
+    name: 'fuel-os-odl_l3-nofeature-ha-virtual-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-onos-sfc-ha-virtual-daily-danube-trigger'
+    name: 'fuel-os-onos-sfc-ha-virtual-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-onos-nofeature-ha-virtual-daily-danube-trigger'
+    name: 'fuel-os-onos-nofeature-ha-virtual-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l2-bgpvpn-ha-virtual-daily-danube-trigger'
+    name: 'fuel-os-odl_l2-bgpvpn-ha-virtual-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l2-sfc-ha-virtual-daily-danube-trigger'
+    name: 'fuel-os-odl_l2-sfc-ha-virtual-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-nosdn-kvm-ha-virtual-daily-danube-trigger'
+    name: 'fuel-os-nosdn-kvm-ha-virtual-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-nosdn-ovs-ha-virtual-daily-danube-trigger'
+    name: 'fuel-os-nosdn-ovs-ha-virtual-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-virtual-daily-danube-trigger'
+    name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-virtual-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-virtual-daily-danube-trigger'
+    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-virtual-daily-euphrates-trigger'
     triggers:
         - timed: ''
 # NOHA Scenarios
 - trigger:
-    name: 'fuel-os-nosdn-nofeature-noha-virtual-daily-danube-trigger'
+    name: 'fuel-os-nosdn-nofeature-noha-virtual-daily-euphrates-trigger'
     triggers:
-        - timed: '0 13 * * *'
+        - timed: '' # '0 13 * * *'
 - trigger:
-    name: 'fuel-os-odl_l2-nofeature-noha-virtual-daily-danube-trigger'
+    name: 'fuel-os-odl_l2-nofeature-noha-virtual-daily-euphrates-trigger'
     triggers:
-        - timed: '30 15 * * *'
+        - timed: '' # '30 15 * * *'
 - trigger:
-    name: 'fuel-os-odl_l3-nofeature-noha-virtual-daily-danube-trigger'
+    name: 'fuel-os-odl_l3-nofeature-noha-virtual-daily-euphrates-trigger'
     triggers:
-        - timed: '0 18 * * *'
+        - timed: '' # '0 18 * * *'
 - trigger:
-    name: 'fuel-os-onos-sfc-noha-virtual-daily-danube-trigger'
+    name: 'fuel-os-onos-sfc-noha-virtual-daily-euphrates-trigger'
     triggers:
         - timed: '' # '30 20 * * *'
 - trigger:
-    name: 'fuel-os-onos-nofeature-noha-virtual-daily-danube-trigger'
+    name: 'fuel-os-onos-nofeature-noha-virtual-daily-euphrates-trigger'
     triggers:
         - timed: '' # '0 23 * * *'
 - trigger:
-    name: 'fuel-os-odl_l2-sfc-noha-virtual-daily-danube-trigger'
+    name: 'fuel-os-odl_l2-sfc-noha-virtual-daily-euphrates-trigger'
     triggers:
-        - timed: '30 1 * * *'
+        - timed: '' # '30 1 * * *'
 - trigger:
-    name: 'fuel-os-odl_l2-bgpvpn-noha-virtual-daily-danube-trigger'
+    name: 'fuel-os-odl_l2-bgpvpn-noha-virtual-daily-euphrates-trigger'
     triggers:
-        - timed: '0 4 * * *'
+        - timed: '' # '0 4 * * *'
 - trigger:
-    name: 'fuel-os-nosdn-kvm-noha-virtual-daily-danube-trigger'
+    name: 'fuel-os-nosdn-kvm-noha-virtual-daily-euphrates-trigger'
     triggers:
-        - timed: '30 6 * * *'
+        - timed: '' # '30 6 * * *'
 - trigger:
-    name: 'fuel-os-nosdn-ovs-noha-virtual-daily-danube-trigger'
+    name: 'fuel-os-nosdn-ovs-noha-virtual-daily-euphrates-trigger'
     triggers:
-        - timed: '0 9 * * *'
+        - timed: '' # '0 9 * * *'
 - trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-virtual-daily-danube-trigger'
+    name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-virtual-daily-euphrates-trigger'
     triggers:
-        - timed: '0 16 * * *'
+        - timed: '' # '0 16 * * *'
 - trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-virtual-daily-danube-trigger'
+    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-virtual-daily-euphrates-trigger'
     triggers:
-        - timed: '0 20 * * *'
+        - timed: '' # '0 20 * * *'
 #-----------------------------------------------
 # ZTE POD1 Triggers running against master branch
 #-----------------------------------------------
 - trigger:
     name: 'fuel-os-nosdn-ovs-ha-zte-pod1-daily-master-trigger'
     triggers:
-        - timed: ''
+        - timed: '0 18 * * *'
 - trigger:
     name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-zte-pod1-daily-master-trigger'
     triggers:
     name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-zte-pod1-daily-master-trigger'
     triggers:
         - timed: ''
-
-#-----------------------------------------------
-# ZTE POD2 Triggers running against master branch
-#-----------------------------------------------
-- trigger:
-    name: 'fuel-os-nosdn-nofeature-ha-zte-pod2-daily-master-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'fuel-os-odl_l2-nofeature-ha-zte-pod2-daily-master-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'fuel-os-odl_l3-nofeature-ha-zte-pod2-daily-master-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'fuel-os-onos-sfc-ha-zte-pod2-daily-master-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'fuel-os-onos-nofeature-ha-zte-pod2-daily-master-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'fuel-os-odl_l2-bgpvpn-ha-zte-pod2-daily-master-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'fuel-os-odl_l2-sfc-ha-zte-pod2-daily-master-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-kvm-ha-zte-pod2-daily-master-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-ovs-ha-zte-pod2-daily-master-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-zte-pod2-daily-master-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-zte-pod2-daily-master-trigger'
-    triggers:
-        - timed: ''
-# NOHA Scenarios
-- trigger:
-    name: 'fuel-os-nosdn-nofeature-noha-zte-pod2-daily-master-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'fuel-os-odl_l2-nofeature-noha-zte-pod2-daily-master-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'fuel-os-odl_l3-nofeature-noha-zte-pod2-daily-master-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'fuel-os-onos-sfc-noha-zte-pod2-daily-master-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'fuel-os-onos-nofeature-noha-zte-pod2-daily-master-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'fuel-os-odl_l2-sfc-noha-zte-pod2-daily-master-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'fuel-os-odl_l2-bgpvpn-noha-zte-pod2-daily-master-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-kvm-noha-zte-pod2-daily-master-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-ovs-noha-zte-pod2-daily-master-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-zte-pod2-daily-master-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-zte-pod2-daily-master-trigger'
-    triggers:
-        - timed: ''
 #-----------------------------------------------
 # ZTE POD3 Triggers running against master branch
 #-----------------------------------------------
 - trigger:
     name: 'fuel-os-nosdn-nofeature-ha-zte-pod3-daily-master-trigger'
     triggers:
-        - timed: ''
+        - timed: '0 10 * * *'
 - trigger:
     name: 'fuel-os-odl_l2-nofeature-ha-zte-pod3-daily-master-trigger'
     triggers:
 - trigger:
     name: 'fuel-os-nosdn-kvm-ha-zte-pod3-daily-master-trigger'
     triggers:
-        - timed: '0 10 * * *'
+        - timed: ''
 - trigger:
     name: 'fuel-os-nosdn-ovs-ha-zte-pod3-daily-master-trigger'
     triggers:
     triggers:
         - timed: ''
 #-----------------------------------------------
-# ZTE POD1 Triggers running against danube branch
-#-----------------------------------------------
-- trigger:
-    name: 'fuel-os-nosdn-nofeature-ha-zte-pod1-daily-danube-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'fuel-os-odl_l2-nofeature-ha-zte-pod1-daily-danube-trigger'
-    triggers:
-        - timed: '0 2 * * *'
-- trigger:
-    name: 'fuel-os-odl_l3-nofeature-ha-zte-pod1-daily-danube-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'fuel-os-onos-sfc-ha-zte-pod1-daily-danube-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'fuel-os-onos-nofeature-ha-zte-pod1-daily-danube-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'fuel-os-odl_l2-bgpvpn-ha-zte-pod1-daily-danube-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'fuel-os-odl_l2-sfc-ha-zte-pod1-daily-danube-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-kvm-ha-zte-pod1-daily-danube-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-ovs-ha-zte-pod1-daily-danube-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-zte-pod1-daily-danube-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-zte-pod1-daily-danube-trigger'
-    triggers:
-        - timed: ''
-# NOHA Scenarios
-- trigger:
-    name: 'fuel-os-nosdn-nofeature-noha-zte-pod1-daily-danube-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'fuel-os-odl_l2-nofeature-noha-zte-pod1-daily-danube-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'fuel-os-odl_l3-nofeature-noha-zte-pod1-daily-danube-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'fuel-os-onos-sfc-noha-zte-pod1-daily-danube-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'fuel-os-onos-nofeature-noha-zte-pod1-daily-danube-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'fuel-os-odl_l2-sfc-noha-zte-pod1-daily-danube-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'fuel-os-odl_l2-bgpvpn-noha-zte-pod1-daily-danube-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-kvm-noha-zte-pod1-daily-danube-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-ovs-noha-zte-pod1-daily-danube-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-zte-pod1-daily-danube-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-zte-pod1-daily-danube-trigger'
-    triggers:
-        - timed: ''
-
-#-----------------------------------------------
-# ZTE POD2 Triggers running against danube branch
+# ZTE POD1 Triggers running against euphrates branch
 #-----------------------------------------------
 - trigger:
-    name: 'fuel-os-nosdn-nofeature-ha-zte-pod2-daily-danube-trigger'
+    name: 'fuel-os-nosdn-nofeature-ha-zte-pod1-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l2-nofeature-ha-zte-pod2-daily-danube-trigger'
+    name: 'fuel-os-odl_l2-nofeature-ha-zte-pod1-daily-euphrates-trigger'
     triggers:
-        - timed: ''
+        - timed: '' # '0 2 * * *'
 - trigger:
-    name: 'fuel-os-odl_l3-nofeature-ha-zte-pod2-daily-danube-trigger'
+    name: 'fuel-os-odl_l3-nofeature-ha-zte-pod1-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-onos-sfc-ha-zte-pod2-daily-danube-trigger'
+    name: 'fuel-os-onos-sfc-ha-zte-pod1-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-onos-nofeature-ha-zte-pod2-daily-danube-trigger'
+    name: 'fuel-os-onos-nofeature-ha-zte-pod1-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l2-bgpvpn-ha-zte-pod2-daily-danube-trigger'
+    name: 'fuel-os-odl_l2-bgpvpn-ha-zte-pod1-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l2-sfc-ha-zte-pod2-daily-danube-trigger'
+    name: 'fuel-os-odl_l2-sfc-ha-zte-pod1-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-nosdn-kvm-ha-zte-pod2-daily-danube-trigger'
+    name: 'fuel-os-nosdn-kvm-ha-zte-pod1-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-nosdn-ovs-ha-zte-pod2-daily-danube-trigger'
+    name: 'fuel-os-nosdn-ovs-ha-zte-pod1-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-zte-pod2-daily-danube-trigger'
+    name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-zte-pod1-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-zte-pod2-daily-danube-trigger'
+    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-zte-pod1-daily-euphrates-trigger'
     triggers:
         - timed: ''
 # NOHA Scenarios
 - trigger:
-    name: 'fuel-os-nosdn-nofeature-noha-zte-pod2-daily-danube-trigger'
+    name: 'fuel-os-nosdn-nofeature-noha-zte-pod1-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l2-nofeature-noha-zte-pod2-daily-danube-trigger'
+    name: 'fuel-os-odl_l2-nofeature-noha-zte-pod1-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l3-nofeature-noha-zte-pod2-daily-danube-trigger'
+    name: 'fuel-os-odl_l3-nofeature-noha-zte-pod1-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-onos-sfc-noha-zte-pod2-daily-danube-trigger'
+    name: 'fuel-os-onos-sfc-noha-zte-pod1-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-onos-nofeature-noha-zte-pod2-daily-danube-trigger'
+    name: 'fuel-os-onos-nofeature-noha-zte-pod1-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l2-sfc-noha-zte-pod2-daily-danube-trigger'
+    name: 'fuel-os-odl_l2-sfc-noha-zte-pod1-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l2-bgpvpn-noha-zte-pod2-daily-danube-trigger'
+    name: 'fuel-os-odl_l2-bgpvpn-noha-zte-pod1-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-nosdn-kvm-noha-zte-pod2-daily-danube-trigger'
+    name: 'fuel-os-nosdn-kvm-noha-zte-pod1-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-nosdn-ovs-noha-zte-pod2-daily-danube-trigger'
+    name: 'fuel-os-nosdn-ovs-noha-zte-pod1-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-zte-pod2-daily-danube-trigger'
+    name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-zte-pod1-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-zte-pod2-daily-danube-trigger'
+    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-zte-pod1-daily-euphrates-trigger'
     triggers:
         - timed: ''
 #-----------------------------------------------
-# ZTE POD3 Triggers running against danube branch
+# ZTE POD3 Triggers running against euphrates branch
 #-----------------------------------------------
 - trigger:
-    name: 'fuel-os-nosdn-nofeature-ha-zte-pod3-daily-danube-trigger'
+    name: 'fuel-os-nosdn-nofeature-ha-zte-pod3-daily-euphrates-trigger'
     triggers:
-        - timed: '0 18 * * *'
+        - timed: '' # '0 18 * * *'
 - trigger:
-    name: 'fuel-os-odl_l2-nofeature-ha-zte-pod3-daily-danube-trigger'
+    name: 'fuel-os-odl_l2-nofeature-ha-zte-pod3-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l3-nofeature-ha-zte-pod3-daily-danube-trigger'
+    name: 'fuel-os-odl_l3-nofeature-ha-zte-pod3-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-onos-sfc-ha-zte-pod3-daily-danube-trigger'
+    name: 'fuel-os-onos-sfc-ha-zte-pod3-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-onos-nofeature-ha-zte-pod3-daily-danube-trigger'
+    name: 'fuel-os-onos-nofeature-ha-zte-pod3-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l2-bgpvpn-ha-zte-pod3-daily-danube-trigger'
+    name: 'fuel-os-odl_l2-bgpvpn-ha-zte-pod3-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l2-sfc-ha-zte-pod3-daily-danube-trigger'
+    name: 'fuel-os-odl_l2-sfc-ha-zte-pod3-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-nosdn-kvm-ha-zte-pod3-daily-danube-trigger'
+    name: 'fuel-os-nosdn-kvm-ha-zte-pod3-daily-euphrates-trigger'
     triggers:
-        - timed: '0 2 * * *'
+        - timed: '' # '0 2 * * *'
 - trigger:
-    name: 'fuel-os-nosdn-ovs-ha-zte-pod3-daily-danube-trigger'
+    name: 'fuel-os-nosdn-ovs-ha-zte-pod3-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-zte-pod3-daily-danube-trigger'
+    name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-zte-pod3-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-zte-pod3-daily-danube-trigger'
+    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-zte-pod3-daily-euphrates-trigger'
     triggers:
         - timed: ''
 # NOHA Scenarios
 - trigger:
-    name: 'fuel-os-nosdn-nofeature-noha-zte-pod3-daily-danube-trigger'
+    name: 'fuel-os-nosdn-nofeature-noha-zte-pod3-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l2-nofeature-noha-zte-pod3-daily-danube-trigger'
+    name: 'fuel-os-odl_l2-nofeature-noha-zte-pod3-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l3-nofeature-noha-zte-pod3-daily-danube-trigger'
+    name: 'fuel-os-odl_l3-nofeature-noha-zte-pod3-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-onos-sfc-noha-zte-pod3-daily-danube-trigger'
+    name: 'fuel-os-onos-sfc-noha-zte-pod3-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-onos-nofeature-noha-zte-pod3-daily-danube-trigger'
+    name: 'fuel-os-onos-nofeature-noha-zte-pod3-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l2-sfc-noha-zte-pod3-daily-danube-trigger'
+    name: 'fuel-os-odl_l2-sfc-noha-zte-pod3-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-odl_l2-bgpvpn-noha-zte-pod3-daily-danube-trigger'
+    name: 'fuel-os-odl_l2-bgpvpn-noha-zte-pod3-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-nosdn-kvm-noha-zte-pod3-daily-danube-trigger'
+    name: 'fuel-os-nosdn-kvm-noha-zte-pod3-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-nosdn-ovs-noha-zte-pod3-daily-danube-trigger'
+    name: 'fuel-os-nosdn-ovs-noha-zte-pod3-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-zte-pod3-daily-danube-trigger'
+    name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-zte-pod3-daily-euphrates-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-zte-pod3-daily-danube-trigger'
+    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-zte-pod3-daily-euphrates-trigger'
     triggers:
         - timed: ''
index 2fb5c71..29b173a 100755 (executable)
@@ -33,7 +33,7 @@ fi
 
 # set deployment parameters
 export TMPDIR=$HOME/tmpdir
-BRIDGE=${BRIDGE:-pxebr}
+BRIDGE=${BRIDGE:-,,,}
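+# ',,,' leaves every bridge slot empty so deploy.sh falls back to its own defaults (assumed behaviour)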
 LAB_NAME=${NODE_NAME/-*}
 POD_NAME=${NODE_NAME/*-}
 
@@ -69,7 +69,7 @@ FUEL_LOG_FILENAME="${JOB_NAME}_${BUILD_NUMBER}.log.tar.gz"
 # construct the command
 DEPLOY_COMMAND="sudo $WORKSPACE/ci/deploy.sh -b file://$WORKSPACE/securedlab \
     -l $LAB_NAME -p $POD_NAME -s $DEPLOY_SCENARIO -i file://$WORKSPACE/opnfv.iso \
-    -H -B $BRIDGE -S $TMPDIR -L $WORKSPACE/$FUEL_LOG_FILENAME"
+    -B $BRIDGE -S $TMPDIR -L $WORKSPACE/$FUEL_LOG_FILENAME"
 
 # log info to console
 echo "Deployment parameters"
index 1f0ddd3..6bb7e51 100644 (file)
             branch: '{stream}'
             gs-pathname: ''
             disabled: false
-        - danube:
+        - euphrates:
             branch: 'stable/{stream}'
             gs-pathname: '/{stream}'
-            disabled: false
+            disabled: true
 
     jobs:
         - 'fuel-build-daily-{stream}'
@@ -79,6 +79,7 @@
     publishers:
         - email:
             recipients: fzhadaev@mirantis.com
+        - email-jenkins-admins-on-failure
 
 - job-template:
     name: 'fuel-merge-build-{stream}'
     publishers:
         - email:
             recipients: fzhadaev@mirantis.com
+        - email-jenkins-admins-on-failure
 
 - job-template:
     name: 'fuel-deploy-generic-daily-{stream}'
index 549f7da..469ca92 100644 (file)
             branch: '{stream}'
             gs-pathname: ''
             disabled: false
-        - danube:
+        - euphrates:
             branch: 'stable/{stream}'
             gs-pathname: '/{stream}'
-            disabled: false
+            disabled: true
 #####################################
 # patch verification phases
 #####################################
index bd42ed8..57e36e1 100644 (file)
         branch: '{stream}'
         disabled: false
         gs-pathname: ''
-    danube: &danube
-        stream: danube
+    euphrates: &euphrates
+        stream: euphrates
         branch: 'stable/{stream}'
-        disabled: false
+        disabled: true
         gs-pathname: '/{stream}'
 #--------------------------------
 # POD, INSTALLER, AND BRANCH MAPPING
             <<: *master
         - baremetal:
             slave-label: fuel-baremetal
-            <<: *danube
+            <<: *euphrates
         - virtual:
             slave-label: fuel-virtual
-            <<: *danube
+            <<: *euphrates
 #--------------------------------
 #       scenarios
 #--------------------------------
     publishers:
         - email:
             recipients: peter.barabas@ericsson.com fzhadaev@mirantis.com
+        - email-jenkins-admins-on-failure
 
 - job-template:
     name: 'fuel-deploy-{pod}-weekly-{stream}'
     publishers:
         - email:
             recipients: peter.barabas@ericsson.com fzhadaev@mirantis.com
+        - email-jenkins-admins-on-failure
 
 ########################
 # parameter macros
diff --git a/jjb/functest/functest-alpine.sh b/jjb/functest/functest-alpine.sh
new file mode 100644 (file)
index 0000000..9084cca
--- /dev/null
@@ -0,0 +1,78 @@
+#!/bin/bash
+
+set -e
+set +u
+set +o pipefail
+
+[[ $CI_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
+FUNCTEST_DIR=/home/opnfv/functest
+
+# Prepare OpenStack credentials volume
+if [[ ${INSTALLER_TYPE} == 'joid' ]]; then
+    rc_file=$LAB_CONFIG/admin-openrc
+elif [[ ${INSTALLER_TYPE} == 'compass' && ${BRANCH} == 'master' ]]; then
+    cacert_file_vol="-v ${HOME}/os_cacert:${FUNCTEST_DIR}/conf/os_cacert"
+    echo "export OS_CACERT=${FUNCTEST_DIR}/conf/os_cacert" >> ${HOME}/opnfv-openrc.sh
+    rc_file=${HOME}/opnfv-openrc.sh
+else
+    rc_file=${HOME}/opnfv-openrc.sh
+fi
+rc_file_vol="-v ${rc_file}:${FUNCTEST_DIR}/conf/openstack.creds"
+
+
+# Set iptables rule to allow forwarding return traffic for container
+if ! sudo iptables -C FORWARD -j RETURN 2> ${redirect} || ! sudo iptables -L FORWARD | awk 'NR==3' | grep RETURN 2> ${redirect}; then
+    sudo iptables -I FORWARD -j RETURN
+fi
+
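+# Derive the deployment type and host architecture from Jenkins metadata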
+DEPLOY_TYPE=baremetal
+[[ $BUILD_TAG =~ "virtual" ]] && DEPLOY_TYPE=virt
+HOST_ARCH=$(uname -m)
+
+echo "Functest: Start Docker and prepare environment"
+
+echo "Functest: Download images that will be used by test cases"
+images_dir="${HOME}/opnfv/functest/images"
+download_script=${WORKSPACE}/functest/ci/download_images.sh
+if [[ ! -f ${download_script} ]]; then
+    # stable/danube does not ship this script; fetch it from the master branch
+    wget https://git.opnfv.org/functest/plain/functest/ci/download_images.sh -O ${download_script} 2> ${redirect}
+fi
+chmod +x ${download_script}
+${download_script} ${images_dir} ${DEPLOY_SCENARIO} ${HOST_ARCH} 2> ${redirect}
+
+images_vol="-v ${images_dir}:${FUNCTEST_DIR}/images"
+
+dir_result="${HOME}/opnfv/functest/results/${BRANCH##*/}"
+mkdir -p ${dir_result}
+sudo rm -rf ${dir_result}/*
+results_vol="-v ${dir_result}:${FUNCTEST_DIR}/results"
+custom_params=
+test -f ${HOME}/opnfv/functest/custom/params_${DOCKER_TAG} && custom_params=$(cat ${HOME}/opnfv/functest/custom/params_${DOCKER_TAG})
+
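+# Environment variables forwarded into every Functest container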
+envs="-e INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP} \
+    -e NODE_NAME=${NODE_NAME} -e DEPLOY_SCENARIO=${DEPLOY_SCENARIO} \
+    -e BUILD_TAG=${BUILD_TAG} -e DEPLOY_TYPE=${DEPLOY_TYPE}"
+
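+# For Compass open-o scenarios, look up the Open-O MSB endpoint in the installer's database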
+if [[ ${INSTALLER_TYPE} == 'compass' && ${DEPLOY_SCENARIO} == *'os-nosdn-openo-ha'* ]]; then
+    ssh_options="-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
+    openo_msb_port=${openo_msb_port:-80}
+    openo_msb_endpoint="$(sshpass -p'root' ssh 2>/dev/null $ssh_options root@${INSTALLER_IP} \
+    'mysql -ucompass -pcompass -Dcompass -e "select package_config from cluster;" \
+    | sed s/,/\\n/g | grep openo_ip | cut -d \" -f 4'):$openo_msb_port"
+
+    envs=${envs}" -e OPENO_MSB_ENDPOINT=${openo_msb_endpoint}"
+fi
+
+volumes="${images_vol} ${results_vol} ${sshkey_vol} ${rc_file_vol} ${cacert_file_vol}"
+
+
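+# Each tier now ships as its own Alpine-based image; run the tiers in sequence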
+tiers=(healthcheck smoke)
+for tier in "${tiers[@]}"; do
+    FUNCTEST_IMAGE=opnfv/functest-${tier}
+    echo "Functest: Pulling Functest Docker image ${FUNCTEST_IMAGE} ..."
+    docker pull ${FUNCTEST_IMAGE} > /dev/null
+    cmd="docker run ${envs} ${volumes} ${FUNCTEST_IMAGE}"
+    echo "Running Functest tier '${tier}'. CMD: ${cmd}"
+    ${cmd}
+done
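For local debugging outside Jenkins, the new script can be driven by exporting the variables it reads; all values below are illustrative placeholders, not defaults defined by this change:

    # minimal sketch of a manual invocation (placeholder lab values)
    export INSTALLER_TYPE=fuel INSTALLER_IP=10.20.0.2
    export DEPLOY_SCENARIO=os-nosdn-nofeature-noha
    export BUILD_TAG=local-debug-virtual NODE_NAME=local-pod1
    export WORKSPACE=$PWD BRANCH=master CI_DEBUG=true
    bash jjb/functest/functest-alpine.sh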
index fdef6f4..f14ca75 100644 (file)
 #            <<: *master
 #--------------------------------
 
+    alpine-pod:
+        - ericsson-virtual-pod1bl01:
+            slave-label: '{alpine-pod}'
+            installer: fuel
+            <<: *master
+        - huawei-virtual5:
+            slave-label: '{alpine-pod}'
+            installer: compass
+            <<: *master
+
     testsuite:
         - 'suite':
             job-timeout: 60
 
     jobs:
         - 'functest-{installer}-{pod}-{testsuite}-{stream}'
+        - 'functest-alpine-{installer}-{alpine-pod}-{testsuite}-{stream}'
 
 ################################
 # job template
             description: "Built on $NODE_NAME"
         - 'functest-{testsuite}-builder'
 
+- job-template:
+    name: 'functest-alpine-{installer}-{alpine-pod}-{testsuite}-{stream}'
+
+    concurrent: true
+
+    properties:
+        - logrotate-default
+        - throttle:
+            enabled: true
+            max-per-node: 1
+            option: 'project'
+
+    wrappers:
+        - build-name:
+            name: '$BUILD_NUMBER Suite: $FUNCTEST_SUITE_NAME Scenario: $DEPLOY_SCENARIO'
+        - timeout:
+            timeout: '{job-timeout}'
+            abort: true
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+            branch: '{branch}'
+        - '{installer}-defaults'
+        - '{slave-label}-defaults'
+        - 'functest-{testsuite}-parameter'
+        - string:
+            name: DEPLOY_SCENARIO
+            default: 'os-nosdn-nofeature-noha'
+        - functest-parameter:
+            gs-pathname: '{gs-pathname}'
+
+    scm:
+        - git-scm
+
+    builders:
+        - description-setter:
+            description: "Built on $NODE_NAME"
+        - 'functest-alpine-daily-builder'
+
 ########################
 # parameter macros
 ########################
         - 'functest-store-results'
         - 'functest-exit'
 
+- builder:
+    name: functest-alpine-daily-builder
+    builders:
+        - shell:
+            !include-raw:
+                - ./functest-env-presetup.sh
+                - ../../utils/fetch_os_creds.sh
+                - ./functest-alpine.sh
+
 - builder:
     name: functest-daily
     builders:
index 1c1e8e1..1966e52 100644 (file)
@@ -9,6 +9,7 @@
 
     jobs:
         - 'functest-verify-{stream}'
+        - 'functest-docs-upload-{stream}'
 
     stream:
         - master:
                     healthy: 50
                     unhealthy: 40
                     failing: 30
+        - email-jenkins-admins-on-failure
+
+- job-template:
+    name: 'functest-docs-upload-{stream}'
+
+    disabled: '{obj:disabled}'
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+            branch: '{branch}'
+        - 'opnfv-build-ubuntu-defaults'
+
+    scm:
+        - git-scm
+
+    triggers:
+        - gerrit:
+            server-name: 'gerrit.opnfv.org'
+            trigger-on:
+                - change-merged-event
+                - comment-added-contains-event:
+                    comment-contains-value: 'remerge'
+            projects:
+              - project-compare-type: 'ANT'
+                project-pattern: '{project}'
+                branches:
+                  - branch-compare-type: 'ANT'
+                    branch-pattern: '**/{branch}'
+                disable-strict-forbidden-file-verification: 'true'
+                forbidden-file-paths:
+                  - compare-type: ANT
+                    pattern: 'docs/**|.gitignore'
+
+    builders:
+        - functest-upload-doc-artifact
 
 ################################
 # job builders
     builders:
         - shell: |
             cd $WORKSPACE && tox
+
+- builder:
+    name: functest-upload-doc-artifact
+    builders:
+        - shell: |
+            cd $WORKSPACE && tox -edocs
+            wget -O - https://git.opnfv.org/releng/plain/utils/upload-artifact.sh | bash -s "functest/api/_build" "docs"
index 5f936f5..f6071e3 100755 (executable)
@@ -9,6 +9,10 @@ set +o pipefail
 # Prepare OpenStack credentials volume
 if [[ ${INSTALLER_TYPE} == 'joid' ]]; then
     rc_file_vol="-v $LAB_CONFIG/admin-openrc:/home/opnfv/functest/conf/openstack.creds"
+elif [[ ${INSTALLER_TYPE} == 'compass' && ${BRANCH} == 'master' ]]; then
+    cacert_file_vol="-v ${HOME}/os_cacert:/home/opnfv/functest/conf/os_cacert"
+    echo "export OS_CACERT=/home/opnfv/functest/conf/os_cacert" >> ${HOME}/opnfv-openrc.sh
+    rc_file_vol="-v ${HOME}/opnfv-openrc.sh:/home/opnfv/functest/conf/openstack.creds"
 else
     rc_file_vol="-v ${HOME}/opnfv-openrc.sh:/home/opnfv/functest/conf/openstack.creds"
 fi
@@ -21,6 +25,7 @@ fi
 
 DEPLOY_TYPE=baremetal
 [[ $BUILD_TAG =~ "virtual" ]] && DEPLOY_TYPE=virt
+HOST_ARCH=$(uname -m)
 
 echo "Functest: Start Docker and prepare environment"
 
@@ -28,7 +33,7 @@ if [ "$BRANCH" != 'stable/danube' ]; then
   echo "Functest: Download images that will be used by test cases"
   images_dir="${HOME}/opnfv/functest/images"
   chmod +x ${WORKSPACE}/functest/ci/download_images.sh
-  ${WORKSPACE}/functest/ci/download_images.sh ${images_dir} 2> ${redirect}
+  ${WORKSPACE}/functest/ci/download_images.sh ${images_dir} ${DEPLOY_SCENARIO} ${HOST_ARCH} 2> ${redirect}
   images_vol="-v ${images_dir}:/home/opnfv/functest/images"
 fi
 
@@ -54,12 +59,11 @@ if [[ ${INSTALLER_TYPE} == 'compass' && ${DEPLOY_SCENARIO} == *'os-nosdn-openo-h
 fi
 
 if [ "$BRANCH" != 'stable/danube' ]; then
-  volumes="${images_vol} ${results_vol} ${sshkey_vol} ${stackrc_vol} ${rc_file_vol}"
+  volumes="${images_vol} ${results_vol} ${sshkey_vol} ${stackrc_vol} ${rc_file_vol} ${cacert_file_vol}"
 else
   volumes="${results_vol} ${sshkey_vol} ${stackrc_vol} ${rc_file_vol}"
 fi
 
-HOST_ARCH=$(uname -m)
 FUNCTEST_IMAGE="opnfv/functest"
 if [ "$HOST_ARCH" = "aarch64" ]; then
     FUNCTEST_IMAGE="${FUNCTEST_IMAGE}_${HOST_ARCH}"
index ee154af..c4e715d 100644 (file)
@@ -55,8 +55,8 @@
             description: 'external network for test'
         - string:
             name: BRIDGE
-            default: 'pxebr'
-            description: 'pxe bridge for booting of Fuel master'
+            default: ',,,'
+            description: 'Bridge(s) to be used by salt master'
 
 - parameter:
     name: 'joid-defaults'
@@ -75,8 +75,8 @@
             description: 'Model to deploy (os|k8)'
         - string:
             name: OS_RELEASE
-            default: 'newton'
-            description: 'OpenStack release (mitaka|newton)'
+            default: 'ocata'
+            description: 'OpenStack release (mitaka|newton|ocata)'
         - string:
             name: EXTERNAL_NETWORK
             default: ext-net
         - string:
             name: BRIDGE
             default: 'br7'
-            description: 'pxe bridge for booting of Fuel master'
+            description: 'pxe bridge for booting of Daisy master'
 
 - parameter:
     name: 'infra-defaults'
index 2838886..75e00f9 100644 (file)
 
     properties:
         - logrotate-default
+
+    publishers:
+      # Because of how JJB merges defaults, any project that defines
+      # its own publishers will not have this macro included
+      # automatically. Such projects must explicitly add it to their
+      # publishers list for these emails to be sent.
+      - email-jenkins-admins-on-failure
index e4dfa8d..89a3802 100644 (file)
                         branch-pattern: '**/{branch}'
                   file-paths:
                       - compare-type: 'ANT'
-                        pattern: 'tests/**'
+                        pattern: '{files}'
             skip-vote:
                 successful: true
                 failed: true
                 unhealthy: 40
                 failing: 30
 
+# The majority of the email-ext plugin options are set to the default
+# for this macro so they can be managed through Jenkins' global
+# settings.
+- publisher:
+    name: email-jenkins-admins-on-failure
+    publishers:
+      - email-ext:
+          content-type: text
+          attach-build-log: true
+          compress-log: true
+          always: false
+          failure: true
+          send-to:
+            - recipients
index 50859c4..0e23380 100644 (file)
         - string:
             name: BRIDGE
             default: 'daisy1'
-            description: 'pxe bridge for booting of Fuel master'
+            description: 'pxe bridge for booting of Daisy master'
         - string:
             name: GIT_BASE
             default: https://gerrit.opnfv.org/gerrit/$PROJECT
         - string:
             name: GIT_BASE
             default: https://gerrit.opnfv.org/gerrit/$PROJECT
+- parameter:
+    name: 'huawei-virtual5-defaults'
+    parameters:
+        - label:
+            name: SLAVE_LABEL
+            default: 'huawei-virtual5'
+        - string:
+            name: GIT_BASE
+            default: https://gerrit.opnfv.org/gerrit/$PROJECT
 - parameter:
     name: 'huawei-virtual7-defaults'
     parameters:
             name: BRIDGE
             default: 'br0'
             description: 'pxe bridge for booting of Fuel master'
+- parameter:
+    name: zte-pod4-defaults
+    parameters:
+        - node:
+            name: SLAVE_NAME
+            description: 'Slave name on Jenkins'
+            allowed-slaves:
+                - zte-pod4
+            default-slaves:
+                - zte-pod4
+        - string:
+            name: GIT_BASE
+            default: https://gerrit.opnfv.org/gerrit/$PROJECT
+            description: 'Git URL to use on this Jenkins Slave'
 - parameter:
     name: 'juniper-pod1-defaults'
     parameters:
             name: GIT_BASE
             default: https://gerrit.opnfv.org/gerrit/$PROJECT
             description: 'Git URL to use on this Jenkins Slave'
+- parameter:
+    name: 'ericsson-virtual-pod1bl01-defaults'
+    parameters:
+        - label:
+            name: SLAVE_LABEL
+            default: 'ericsson-virtual-pod1bl01'
+        - string:
+            name: GIT_BASE
+            default: https://gerrit.opnfv.org/gerrit/$PROJECT
+            description: 'Git URL to use on this Jenkins Slave'
 - parameter:
     name: 'odl-netvirt-virtual-defaults'
     parameters:
index 13ea9b3..1c7b8cd 100644 (file)
         branch: '{stream}'
         disabled: false
         gs-pathname: ''
-    danube: &danube
-        stream: danube
-        branch: 'stable/{stream}'
-        disabled: false
-        gs-pathname: '/{stream}'
 #--------------------------------
 # POD, INSTALLER, AND BRANCH MAPPING
 #--------------------------------
         - virtual:
             slave-label: joid-virtual
             <<: *master
-        - baremetal:
-            slave-label: joid-baremetal
-            <<: *danube
-        - virtual:
-            slave-label: joid-virtual
-            <<: *danube
 #--------------------------------
 #        None-CI PODs
 #--------------------------------
@@ -62,7 +51,7 @@
         - 'os-nosdn-lxd-noha':
             auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
         - 'os-odl_l2-nofeature-ha':
-            auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
+            auto-trigger-name: 'daily-trigger-disabled'
         - 'os-onos-nofeature-ha':
             auto-trigger-name: 'daily-trigger-disabled'
         - 'os-odl_l2-nofeature-noha':
         - 'os-ocl-nofeature-noha':
             auto-trigger-name: 'daily-trigger-disabled'
         - 'k8-nosdn-nofeature-noha':
-            auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
+            auto-trigger-name: 'daily-trigger-disabled'
         - 'k8-nosdn-lb-noha':
             auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
+        - 'k8-ovn-lb-noha':
+            auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
+        - 'os-nosdn-openbaton-ha':
+            auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
 
     jobs:
         - 'joid-{scenario}-{pod}-daily-{stream}'
     name: 'joid-os-nosdn-nofeature-ha-cengn-pod1-master-trigger'
     triggers:
         - timed: ''
-# os-nosdn-nofeature-ha trigger - branch: danube
-- trigger:
-    name: 'joid-os-nosdn-nofeature-ha-baremetal-danube-trigger'
-    triggers:
-        - timed: '0 2 * * *'
-- trigger:
-    name: 'joid-os-nosdn-nofeature-ha-virtual-danube-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'joid-os-nosdn-nofeature-ha-orange-pod1-danube-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'joid-os-nosdn-nofeature-ha-cengn-pod1-danube-trigger'
-    triggers:
-        - timed: ''
 # os-odl_l2-nofeature-ha trigger - branch: master
 - trigger:
     name: 'joid-os-odl_l2-nofeature-ha-baremetal-master-trigger'
     name: 'joid-os-odl_l2-nofeature-ha-cengn-pod1-master-trigger'
     triggers:
         - timed: ''
-# os-odl_l2-nofeature-ha trigger - branch: danube
-- trigger:
-    name: 'joid-os-odl_l2-nofeature-ha-baremetal-danube-trigger'
-    triggers:
-        - timed: '0 7 * * *'
-- trigger:
-    name: 'joid-os-odl_l2-nofeature-ha-virtual-danube-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'joid-os-odl_l2-nofeature-ha-orange-pod1-danube-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'joid-os-odl_l2-nofeature-ha-cengn-pod1-danube-trigger'
-    triggers:
-        - timed: ''
 # os-onos-nofeature-ha trigger - branch: master
 - trigger:
     name: 'joid-os-onos-nofeature-ha-baremetal-master-trigger'
     name: 'joid-os-onos-nofeature-ha-cengn-pod1-master-trigger'
     triggers:
         - timed: ''
-# os-onos-nofeature-ha trigger - branch: danube
-- trigger:
-    name: 'joid-os-onos-nofeature-ha-baremetal-danube-trigger'
-    triggers:
-        - timed: '0 12 * * *'
-- trigger:
-    name: 'joid-os-onos-nofeature-ha-virtual-danube-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'joid-os-onos-nofeature-ha-orange-pod1-danube-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'joid-os-onos-nofeature-ha-cengn-pod1-danube-trigger'
-    triggers:
-        - timed: ''
 # os-onos-sfc-ha trigger - branch: master
 - trigger:
     name: 'joid-os-onos-sfc-ha-baremetal-master-trigger'
     name: 'joid-os-onos-sfc-ha-cengn-pod1-master-trigger'
     triggers:
         - timed: ''
-# os-onos-sfc-ha trigger - branch: danube
-- trigger:
-    name: 'joid-os-onos-sfc-ha-baremetal-danube-trigger'
-    triggers:
-        - timed: '0 17 * * *'
-- trigger:
-    name: 'joid-os-onos-sfc-ha-virtual-danube-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'joid-os-onos-sfc-ha-orange-pod1-danube-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'joid-os-onos-sfc-ha-cengn-pod1-danube-trigger'
-    triggers:
-        - timed: ''
 # os-nosdn-lxd-noha trigger - branch: master
 - trigger:
     name: 'joid-os-nosdn-lxd-noha-baremetal-master-trigger'
     name: 'joid-os-nosdn-lxd-noha-cengn-pod1-master-trigger'
     triggers:
         - timed: ''
-# os-nosdn-lxd-noha trigger - branch: danube
-- trigger:
-    name: 'joid-os-nosdn-lxd-noha-baremetal-danube-trigger'
-    triggers:
-        - timed: '0 22 * * *'
-- trigger:
-    name: 'joid-os-nosdn-lxd-noha-virtual-danube-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'joid-os-nosdn-lxd-noha-orange-pod1-danube-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'joid-os-nosdn-lxd-noha-cengn-pod1-danube-trigger'
-    triggers:
-        - timed: ''
 # os-nosdn-lxd-ha trigger - branch: master
 - trigger:
     name: 'joid-os-nosdn-lxd-ha-baremetal-master-trigger'
     name: 'joid-os-nosdn-lxd-ha-cengn-pod1-master-trigger'
     triggers:
         - timed: ''
-# os-nosdn-lxd-ha trigger - branch: danube
-- trigger:
-    name: 'joid-os-nosdn-lxd-ha-baremetal-danube-trigger'
-    triggers:
-        - timed: '0 10 * * *'
-- trigger:
-    name: 'joid-os-nosdn-lxd-ha-virtual-danube-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'joid-os-nosdn-lxd-ha-orange-pod1-danube-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'joid-os-nosdn-lxd-ha-cengn-pod1-danube-trigger'
-    triggers:
-        - timed: ''
 # os-nosdn-nofeature-noha trigger - branch: master
 - trigger:
     name: 'joid-os-nosdn-nofeature-noha-baremetal-master-trigger'
     name: 'joid-os-nosdn-nofeature-noha-cengn-pod1-master-trigger'
     triggers:
         - timed: ''
-# os-nosdn-nofeature-noha trigger - branch: danube
-- trigger:
-    name: 'joid-os-nosdn-nofeature-noha-baremetal-danube-trigger'
-    triggers:
-        - timed: '0 4 * * *'
-- trigger:
-    name: 'joid-os-nosdn-nofeature-noha-virtual-danube-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'joid-os-nosdn-nofeature-noha-orange-pod1-danube-trigger'
-    triggers:
-        - timed: ''
-- trigger:
-    name: 'joid-os-nosdn-nofeature-noha-cengn-pod1-danube-trigger'
-    triggers:
-        - timed: ''
 # k8-nosdn-nofeature-noha trigger - branch: master
 - trigger:
     name: 'joid-k8-nosdn-nofeature-noha-baremetal-master-trigger'
     name: 'joid-k8-nosdn-nofeature-noha-cengn-pod1-master-trigger'
     triggers:
         - timed: ''
-# k8-nosdn-nofeature-noha trigger - branch: danube
+# k8-nosdn-lb-noha trigger - branch: master
 - trigger:
-    name: 'joid-k8-nosdn-nofeature-noha-baremetal-danube-trigger'
+    name: 'joid-k8-nosdn-lb-noha-baremetal-master-trigger'
     triggers:
-        - timed: '0 15 * * *'
+        - timed: '5 20 * * *'
 - trigger:
-    name: 'joid-k8-nosdn-nofeature-noha-virtual-danube-trigger'
+    name: 'joid-k8-nosdn-lb-noha-virtual-master-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'joid-k8-nosdn-nofeature-noha-orange-pod1-danube-trigger'
+    name: 'joid-k8-nosdn-lb-noha-orange-pod1-master-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'joid-k8-nosdn-nofeature-noha-cengn-pod1-danube-trigger'
+    name: 'joid-k8-nosdn-lb-noha-cengn-pod1-master-trigger'
     triggers:
         - timed: ''
-# k8-nosdn-lb-noha trigger - branch: master
+# k8-ovn-lb-noha trigger - branch: master
 - trigger:
-    name: 'joid-k8-nosdn-lb-noha-baremetal-master-trigger'
+    name: 'joid-k8-ovn-lb-noha-baremetal-master-trigger'
     triggers:
-        - timed: '5 20 * * *'
+        - timed: '5 17 * * *'
 - trigger:
-    name: 'joid-k8-nosdn-lb-noha-virtual-master-trigger'
+    name: 'joid-k8-ovn-lb-noha-virtual-master-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'joid-k8-nosdn-lb-noha-orange-pod1-master-trigger'
+    name: 'joid-k8-ovn-lb-noha-orange-pod1-master-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'joid-k8-nosdn-lb-noha-cengn-pod1-master-trigger'
+    name: 'joid-k8-ovn-lb-noha-cengn-pod1-master-trigger'
     triggers:
         - timed: ''
-# k8-nosdn-lb-noha trigger - branch: danube
+
+# os-nosdn-openbaton-ha trigger - branch: master
 - trigger:
-    name: 'joid-k8-nosdn-lb-noha-baremetal-danube-trigger'
+    name: 'joid-os-nosdn-openbaton-ha-baremetal-master-trigger'
     triggers:
-        - timed: '0 20 * * *'
+        - timed: '5 2 * * *' # assumed hour; '5 25 * * *' is invalid, cron hours run 0-23
 - trigger:
-    name: 'joid-k8-nosdn-lb-noha-virtual-danube-trigger'
+    name: 'joid-os-nosdn-openbaton-ha-virtual-master-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'joid-k8-nosdn-lb-noha-orange-pod1-danube-trigger'
+    name: 'joid-os-nosdn-openbaton-ha-orange-pod1-master-trigger'
     triggers:
         - timed: ''
 - trigger:
-    name: 'joid-k8-nosdn-lb-noha-cengn-pod1-danube-trigger'
+    name: 'joid-os-nosdn-openbaton-ha-cengn-pod1-master-trigger'
     triggers:
         - timed: ''
index e197dbd..9740d38 100644 (file)
@@ -94,9 +94,6 @@ EXTRA=${DEPLOY_OPTIONS[4]}
 if [ "$SDN_CONTROLLER" == 'odl_l2' ] || [ "$SDN_CONTROLLER" == 'odl_l3' ]; then
     SDN_CONTROLLER='odl'
 fi
-if [ "$HA_MODE" == 'noha' ]; then
-    HA_MODE='nonha'
-fi
 
 # Add extra to features
 if [ "$EXTRA" != "" ];then
index e5b56bf..a39249a 100644 (file)
@@ -26,6 +26,7 @@
     testname:
         - 'cyclictest'
         - 'packet_forward'
+        - 'livemigration'
 #####################################
 # patch verification phases
 #####################################
                   git-revision: true
                   kill-phase-on: FAILURE
                   abort-all-job: true
+        - multijob:
+            name: livemigration-test
+            condition: SUCCESSFUL
+            projects:
+                - name: 'kvmfornfv-livemigration-daily-test-{stream}'
+                  current-parameters: false
+                  node-parameters: false
+                  git-revision: true
+                  kill-phase-on: FAILURE
+                  abort-all-job: true
+
 
 - job-template:
     name: 'kvmfornfv-daily-build-{stream}'
             !include-raw: ./kvmfornfv-download-artifact.sh
         - shell:
             !include-raw: ./kvmfornfv-test.sh
+- builder:
+    name: 'kvmfornfv-livemigration-daily-test-macro'
+    builders:
+        - shell:
+            !include-raw: ./kvmfornfv-download-artifact.sh
+        - shell:
+            !include-raw: ./kvmfornfv-test.sh
+
 #####################################
 # parameter macros
 #####################################
index 71c6cc1..827e5c2 100755 (executable)
@@ -46,7 +46,7 @@ curl -L -s -o $WORKSPACE/opnfv.iso http://$OPNFV_ARTIFACT_URL > gsutil.iso.log 2
 # set deployment parameters
 DEPLOY_SCENARIO="os-nosdn-nofeature-noha"
 export TMPDIR=$HOME/tmpdir
-BRIDGE=${BRIDGE:-pxebr}
+BRIDGE=${BRIDGE:-,,,}
 LAB_NAME=${NODE_NAME/-*}
 POD_NAME=${NODE_NAME/*-}
 
@@ -89,7 +89,7 @@ FUEL_LOG_FILENAME="${JOB_NAME}_${BUILD_NUMBER}.log.tar.gz"
 # construct the command
 DEPLOY_COMMAND="sudo $WORKSPACE/fuel/ci/deploy.sh -b file://$WORKSPACE/securedlab \
     -l $LAB_NAME -p $POD_NAME -s $DEPLOY_SCENARIO -i file://$WORKSPACE/opnfv.iso \
-    -H -B $BRIDGE -S $TMPDIR -L $WORKSPACE/$FUEL_LOG_FILENAME"
+    -B $BRIDGE -S $TMPDIR -L $WORKSPACE/$FUEL_LOG_FILENAME"
 
 # log info to console
 echo "Deployment parameters"
index 06cefb6..c5e1866 100644 (file)
 
     publishers:
         - 'multisite-{phase}-publisher'
+        - email-jenkins-admins-on-failure
 
 ########################
 # builder macros
diff --git a/jjb/orchestra/orchestra-daily-jobs.yml b/jjb/orchestra/orchestra-daily-jobs.yml
new file mode 100644 (file)
index 0000000..6baaab8
--- /dev/null
@@ -0,0 +1,98 @@
+###################################
+# job configuration for orchestra
+###################################
+- project:
+    name: 'orchestra-daily-jobs'
+
+    project: 'orchestra'
+
+#--------------------------------
+# BRANCH ANCHORS
+#--------------------------------
+    master: &master
+        stream: master
+        branch: '{stream}'
+        gs-pathname: ''
+        disabled: false
+
+#-------------------------------------------------------
+# POD, INSTALLER, AND BRANCH MAPPING
+#-------------------------------------------------------
+    pod:
+        - virtual:
+            slave-label: 'joid-virtual'
+            os-version: 'xenial'
+            <<: *master
+
+    jobs:
+        - 'orchestra-{pod}-daily-{stream}'
+
+################################
+# job template
+################################
+- job-template:
+    name: 'orchestra-{pod}-daily-{stream}'
+
+    project-type: multijob
+
+    disabled: '{obj:disabled}'
+
+    concurrent: false
+
+    properties:
+        - logrotate-default
+        - throttle:
+            enabled: true
+            max-total: 1
+            max-per-node: 1
+            option: 'project'
+
+    scm:
+        - git-scm
+
+    wrappers:
+        - ssh-agent-wrapper
+
+        - timeout:
+            timeout: 240
+            fail: true
+
+    triggers:
+        - timed: '@daily'
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+            branch: '{branch}'
+        - string:
+            name: DEPLOY_SCENARIO
+            default: os-nosdn-openbaton-ha
+        - '{slave-label}-defaults'
+
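+    # Chain the existing joid deploy job with the orchestra_ims Functest suite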
+    builders:
+        - description-setter:
+            description: "Built on $NODE_NAME"
+        - multijob:
+            name: deploy
+            condition: SUCCESSFUL
+            projects:
+                - name: 'joid-deploy-{pod}-daily-{stream}'
+                  current-parameters: false
+                  predefined-parameters: |
+                    DEPLOY_SCENARIO=os-nosdn-openbaton-ha
+                    COMPASS_OS_VERSION=xenial
+                  node-parameters: true
+                  kill-phase-on: FAILURE
+                  abort-all-job: true
+        - multijob:
+            name: functest
+            condition: SUCCESSFUL
+            projects:
+                - name: 'functest-joid-{pod}-daily-{stream}'
+                  current-parameters: false
+                  predefined-parameters: |
+                    DEPLOY_SCENARIO=os-nosdn-openbaton-ha
+                    FUNCTEST_SUITE_NAME=orchestra_ims
+                  node-parameters: true
+                  kill-phase-on: NEVER
+                  abort-all-job: true
diff --git a/jjb/orchestra/orchestra-project-jobs.yml b/jjb/orchestra/orchestra-project-jobs.yml
new file mode 100644 (file)
index 0000000..0f0c0f6
--- /dev/null
@@ -0,0 +1,50 @@
+- project:
+
+    name: orchestra-project
+
+    project: 'orchestra'
+
+    stream:
+        - master:
+            branch: '{stream}'
+            gs-pathname: ''
+
+    jobs:
+        - 'orchestra-build-{stream}'
+
+- job-template:
+    name: 'orchestra-build-{stream}'
+
+    concurrent: true
+
+    properties:
+        - logrotate-default
+        - throttle:
+            enabled: true
+            max-total: 1
+            max-per-node: 1
+            option: 'project'
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+            branch: '{branch}'
+
+    scm:
+        - git-scm
+
+    triggers:
+        - timed: 'H 23 * * *'
+
+    builders:
+        - 'orchestra-build-macro'
+
+- builder:
+    name: 'orchestra-build-macro'
+    builders:
+        - shell: |
+            #!/bin/bash
+
+            echo "Hello world!"
+
+
diff --git a/jjb/ovn4nfv/ovn4nfv-daily-jobs.yml b/jjb/ovn4nfv/ovn4nfv-daily-jobs.yml
new file mode 100644 (file)
index 0000000..ed6df41
--- /dev/null
@@ -0,0 +1,87 @@
+- project:
+    name: 'ovn4nfv-daily-jobs'
+
+    project: 'ovn4nfv'
+
+    master: &master
+        stream: master
+        branch: '{stream}'
+        gs-pathname: ''
+        disabled: false
+
+    pod:
+        - virtual:
+            slave-label: 'joid-virtual'
+            os-version: 'xenial'
+            <<: *master
+
+    jobs:
+        - 'ovn4nfv-{pod}-daily-{stream}'
+
+- job-template:
+    name: 'ovn4nfv-{pod}-daily-{stream}'
+
+    project-type: multijob
+
+    disabled: '{obj:disabled}'
+
+    concurrent: false
+
+    properties:
+        - logrotate-default
+        - throttle:
+            enabled: true
+            max-total: 1
+            max-per-node: 1
+            option: 'project'
+
+    scm:
+        - git-scm
+
+    wrappers:
+        - ssh-agent-wrapper
+
+        - timeout:
+            timeout: 240
+            fail: true
+
+    triggers:
+        - timed: '@daily'
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+            branch: '{branch}'
+        - string:
+            name: DEPLOY_SCENARIO
+            default: os-ovn-nofeature-noha
+        - '{slave-label}-defaults'
+
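+    # Reuse the joid deploy job, then run the ovn4nfv test suite through Functest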
+    builders:
+        - description-setter:
+            description: "Built on $NODE_NAME"
+        - multijob:
+            name: deploy
+            condition: SUCCESSFUL
+            projects:
+                - name: 'joid-deploy-{pod}-daily-{stream}'
+                  current-parameters: false
+                  predefined-parameters: |
+                    DEPLOY_SCENARIO=os-ovn-nofeature-noha
+                    COMPASS_OS_VERSION=xenial
+                  node-parameters: true
+                  kill-phase-on: FAILURE
+                  abort-all-job: true
+        - multijob:
+            name: functest
+            condition: SUCCESSFUL
+            projects:
+                - name: 'functest-joid-{pod}-daily-{stream}'
+                  current-parameters: false
+                  predefined-parameters: |
+                    DEPLOY_SCENARIO=os-ovn-nofeature-noha
+                    FUNCTEST_SUITE_NAME=ovn4nfv_test_suite
+                  node-parameters: true
+                  kill-phase-on: NEVER
+                  abort-all-job: true
+
diff --git a/jjb/ovn4nfv/ovn4nfv-project-jobs.yml b/jjb/ovn4nfv/ovn4nfv-project-jobs.yml
new file mode 100644 (file)
index 0000000..805aa04
--- /dev/null
@@ -0,0 +1,51 @@
+- project:
+    name: ovn4nfv
+
+    project: '{name}'
+
+
+    stream:
+        - master:
+            branch: '{stream}'
+            gs-pathname: ''
+            disabled: false
+
+    jobs:
+        - 'ovn4nfv-build-{stream}'
+
+- job-template:
+    name: 'ovn4nfv-build-{stream}'
+
+    concurrent: true
+
+    disabled: '{obj:disabled}'
+
+    properties:
+        - logrotate-default
+        - throttle:
+            enabled: true
+            max-total: 1
+            max-per-node: 1
+            option: 'project'
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+            branch: '{branch}'
+
+    scm:
+        - git-scm
+
+    triggers:
+        - timed: 'H 23 * * *'
+
+    builders:
+        - 'ovn4nfv-build-macro'
+
+- builder:
+    name: 'ovn4nfv-build-macro'
+    builders:
+        - shell: |
+            #!/bin/bash
+
+            echo "hello world"
index 0e8c713..62f6de0 100644 (file)
     publishers:
         - email:
             recipients: therbert@redhat.com mark.d.gray@intel.com billy.o.mahony@intel.com
+        - email-jenkins-admins-on-failure
 
 - builder:
     name: build-rpms
diff --git a/jjb/qtip/qtip-integration-jobs.yml b/jjb/qtip/qtip-integration-jobs.yml
new file mode 100644 (file)
index 0000000..46c101d
--- /dev/null
@@ -0,0 +1,48 @@
+######################
+# verify before MERGE
+######################
+
+- project:
+    name: qtip-integration-jobs
+    project: qtip
+    jobs:
+        - 'qtip-storage-{stream}'
+    stream:
+        - master:
+            branch: '{stream}'
+            gs-pathname: ''
+            disabled: false
+
+################################
+## job templates
+#################################
+
+- job-template:
+    name: 'qtip-storage-{stream}'
+
+    disabled: '{obj:disabled}'
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+            branch: '{branch}'
+        # Pin the tests on zte-pod4 with apex deployment
+        - apex-defaults
+        - zte-pod4-defaults
+    scm:
+        - git-scm-gerrit
+
+    triggers:
+        - experimental:
+            project: '{project}'
+            branch: '{branch}'
+            files: '**'
+
+    builders:
+        - shell: |
+            #!/bin/bash
+            set -o errexit
+            set -o pipefail
+            set -o xtrace
+
+            source integration/storperf/storperf.sh
index 8dd97de..73d7993 100644 (file)
         branch: '{stream}'
         gs-pathname: ''
         docker-tag: latest
-    danube: &danube
-        stream: danube
-        branch: 'stable/{stream}'
-        gs-pathname: '/{stream}'
-        docker-tag: 'stable'
 
 #--------------------------------
 # JOB VARIABLES
             installer: fuel
             scenario: os-nosdn-kvm-ha
             <<: *master
-        - zte-pod1:
-            installer: fuel
-            scenario: os-odl_l2-nofeature-ha
-            <<: *danube
-        - zte-pod3:
-            installer: fuel
-            scenario: os-nosdn-nofeature-ha
-            <<: *danube
-        - zte-pod3:
-            installer: fuel
-            scenario: os-nosdn-kvm-ha
-            <<: *danube
 
 #--------------------------------
 # JOB LIST
@@ -83,6 +66,7 @@
         - qtip-validate-deploy
     publishers:
         - qtip-common-publishers
+        - email-jenkins-admins-on-failure
 
 ################
 # MACROS
     name: 'qtip-os-nosdn-kvm-ha-zte-pod3-daily-master-trigger'
     triggers:
         - timed: '0 15 * * *'
-
-- trigger:
-    name: 'qtip-os-odl_l2-nofeature-ha-zte-pod1-daily-danube-trigger'
-    triggers:
-        - timed: '0 7 * * *'
-
-- trigger:
-    name: 'qtip-os-nosdn-kvm-ha-zte-pod3-daily-danube-trigger'
-    triggers:
-        - timed: '0 7 * * *'
-
-- trigger:
-    name: 'qtip-os-nosdn-nofeature-ha-zte-pod3-daily-danube-trigger'
-    triggers:
-        - timed: '30 0 * * *'
index dd444c7..a273c85 100644 (file)
@@ -7,6 +7,8 @@
     project: qtip
     jobs:
         - 'qtip-verify-{stream}'
+        - 'qtip-review-notebook-{stream}'
+        - 'qtip-merge-{stream}'
     stream:
         - master:
             branch: '{stream}'
         - qtip-unit-tests-and-docs-build
     publishers:
         - publish-coverage
+        - email-jenkins-admins-on-failure
+
+# Upload Jupyter notebooks to artifacts for review
+- job-template:
+    name: 'qtip-review-notebook-{stream}'
+
+    disabled: '{obj:disabled}'
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+            branch: '{branch}'
+        - 'opnfv-build-ubuntu-defaults'
+
+    scm:
+        - git-scm-gerrit
+
+    triggers:
+        - gerrit:
+            server-name: 'gerrit.opnfv.org'
+            trigger-on:
+                - patchset-created-event:
+                    exclude-drafts: 'false'
+                    exclude-trivial-rebase: 'false'
+                    exclude-no-code-change: 'false'
+                - draft-published-event
+                - comment-added-contains-event:
+                    comment-contains-value: 'recheck'
+                - comment-added-contains-event:
+                    comment-contains-value: 'reverify'
+            projects:
+              - project-compare-type: 'ANT'
+                project-pattern: '{project}'
+                branches:
+                  - branch-compare-type: 'ANT'
+                    branch-pattern: '**/{branch}'
+                disable-strict-forbidden-file-verification: 'true'
+                file-paths:
+                  - compare-type: ANT
+                    pattern: 'examples/**'
+    builders:
+        - upload-under-review-notebooks-to-opnfv-artifacts
+        - report-build-result-to-gerrit
+
+- job-template:
+    name: 'qtip-merge-{stream}'
+
+    disabled: '{obj:disabled}'
+
+    parameters:
+        - project-parameter:
+            project: $GERRIT_PROJECT
+            branch: '{branch}'
+        - string:
+            name: GS_URL
+            default: '$GS_BASE{gs-pathname}'
+            description: "Directory where the build artifact will be located upon the completion of the build."
+        - string:
+            name: GERRIT_REFSPEC
+            default: 'refs/heads/{branch}'
+            description: "JJB configured GERRIT_REFSPEC parameter"
+
+    scm:
+        - git-scm
+
+    triggers:
+        - gerrit:
+            server-name: 'gerrit.opnfv.org'
+            trigger-on:
+                - change-merged-event
+                - comment-added-contains-event:
+                    comment-contains-value: 'remerge'
+            projects:
+                - project-compare-type: 'ANT'
+                  project-pattern: '*'
+                  branches:
+                      - branch-compare-type: 'ANT'
+                        branch-pattern: '**/{branch}'
+                  file-paths:
+                      - compare-type: ANT
+                        pattern: examples/**
+
+    builders:
+        - remove-old-docs-from-opnfv-artifacts
 
 ################################
 ## job builders
             set -o xtrace
 
             tox
+
+# modified from upload-under-review-docs-to-opnfv-artifacts in global/releng-macros.yml
+- builder:
+    name: upload-under-review-notebooks-to-opnfv-artifacts
+    builders:
+        - shell: |
+            #!/bin/bash
+            set -o errexit
+            set -o pipefail
+            set -o xtrace
+            export PATH=$PATH:/usr/local/bin/
+
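+            # Fail fast without a Gerrit change number; exit quietly when no notebooks exist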
+            [[ $GERRIT_CHANGE_NUMBER =~ .+ ]]
+            [[ -d examples ]] || exit 0
+
+            echo
+            echo "###########################"
+            echo "UPLOADING DOCS UNDER REVIEW"
+            echo "###########################"
+            echo
+
+            gs_base="artifacts.opnfv.org/$PROJECT/review"
+            gs_path="$gs_base/$GERRIT_CHANGE_NUMBER"
+            local_path="upload/$GERRIT_CHANGE_NUMBER"
+
+            mkdir -p upload
+            cp -r examples "$local_path"
+            gsutil -m cp -r "$local_path" "gs://$gs_base/"
+
+            echo "Document link(s):" >> gerrit_comment.txt
+            find "$local_path" | grep -e 'ipynb$' | \
+                sed -e "s|^$local_path|    https://nbviewer.jupyter.org/url/$gs_path|" >> gerrit_comment.txt
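Each notebook found is rewritten into an nbviewer link; for a hypothetical change number 12345, a file examples/foo.ipynb would be reported as:

    https://nbviewer.jupyter.org/url/artifacts.opnfv.org/qtip/review/12345/examples/foo.ipynb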
diff --git a/jjb/qtip/qtip-weekly-jobs.yml b/jjb/qtip/qtip-weekly-jobs.yml
new file mode 100644 (file)
index 0000000..0182488
--- /dev/null
@@ -0,0 +1,108 @@
+#################
+# QTIP weekly job
+#################
+- project:
+    name: qtip-weekly
+    project: qtip
+
+#--------------------------------
+# BRANCH ANCHORS
+#--------------------------------
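+# '&danube' defines a YAML anchor; the pod entries below merge these keys in
+# with '<<: *danube'.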
+    danube: &danube
+        stream: danube
+        branch: 'stable/{stream}'
+        gs-pathname: '/{stream}'
+        docker-tag: 'stable'
+
+#--------------------------------
+# JOB VARIABLES
+#--------------------------------
+    pod:
+        - zte-pod1:
+            installer: fuel
+            scenario: os-odl_l2-nofeature-ha
+            <<: *danube
+        - zte-pod3:
+            installer: fuel
+            scenario: os-nosdn-nofeature-ha
+            <<: *danube
+        - zte-pod3:
+            installer: fuel
+            scenario: os-nosdn-kvm-ha
+            <<: *danube
+
+#--------------------------------
+# JOB LIST
+#--------------------------------
+    jobs:
+        - 'qtip-{scenario}-{pod}-weekly-{stream}'
+
+################################
+# job templates
+################################
+- job-template:
+    name: 'qtip-{scenario}-{pod}-weekly-{stream}'
+    disabled: false
+    parameters:
+        - project-parameter:
+            project: '{project}'
+            branch: '{branch}'
+        - '{installer}-defaults'
+        - '{pod}-defaults'
+        - string:
+            name: DEPLOY_SCENARIO
+            default: '{scenario}'
+        - string:
+            name: DOCKER_TAG
+            default: '{docker-tag}'
+            description: 'Tag to pull docker image'
+        - string:
+            name: CI_DEBUG
+            default: 'false'
+            description: "Show debug output information"
+    scm:
+        - git-scm
+    triggers:
+        - 'qtip-{scenario}-{pod}-weekly-{stream}-trigger'
+    builders:
+        - description-setter:
+            description: "POD: $NODE_NAME"
+        - qtip-validate-deploy-weekly
+    publishers:
+        - email:
+            recipients: wu.zhihui1@zte.com.cn, zhang.yujunz@zte.com.cn
+        - email-jenkins-admins-on-failure
+
+################
+# MACROS
+################
+
+#---------
+# builder
+#---------
+- builder:
+    name: qtip-validate-deploy-weekly
+    builders:
+        - shell:
+            !include-raw: ./helpers/cleanup-deploy.sh
+        - shell:
+            !include-raw: ./helpers/validate-deploy.sh
+
+#---------
+# trigger
+#---------
+
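+# 'timed' takes the standard cron fields (minute hour day-of-month month
+# day-of-week); '0 7 * * 0' and '30 0 * * 0' both fire weekly on Sunday.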
+- trigger:
+    name: 'qtip-os-odl_l2-nofeature-ha-zte-pod1-weekly-danube-trigger'
+    triggers:
+        - timed: '0 7 * * 0'
+
+- trigger:
+    name: 'qtip-os-nosdn-kvm-ha-zte-pod3-weekly-danube-trigger'
+    triggers:
+        - timed: '0 7 * * 0'
+
+- trigger:
+    name: 'qtip-os-nosdn-nofeature-ha-zte-pod3-weekly-danube-trigger'
+    triggers:
+        - timed: '30 0 * * 0'
index 4bea703..d12ee5d 100644 (file)
 
     builders:
         - shell: |
-            bash ./utils/test/{module}/run_test.sh
+            cd ./utils/test/{module}/
+            tox
+            # '[ -e ]' fails when the glob expands to several files; ls handles both cases
+            if ls *.xml > /dev/null 2>&1; then
+                cp *.xml $WORKSPACE
+            fi
 
     publishers:
-        - junit:
-            results: nosetests.xml
-        - cobertura:
-            report-file: "coverage.xml"
-            only-stable: "true"
-            health-auto-update: "false"
-            stability-auto-update: "false"
-            zoom-coverage-chart: "true"
-            targets:
-                - files:
-                    healthy: 10
-                    unhealthy: 20
-                    failing: 30
-                - method:
-                    healthy: 50
-                    unhealthy: 40
-                    failing: 30
+        - publish-coverage
+        - email-jenkins-admins-on-failure
 
 - job-template:
     name: '{module}-automate-{stream}'
 
     publishers:
         - 'email-publisher'
+        - email-jenkins-admins-on-failure
 
 - job-template:
     name: '{module}-automate-{phase}-{stream}'
     name: 'testapi-automate-docker-deploy-macro'
     builders:
         - shell: |
-            bash ./jjb/releng/docker-deploy.sh 'sudo docker run -dti -p 8082:8000 -e mongodb_url=mongodb://172.17.0.1:27017 -e base_url=http://testresults.opnfv.org/test opnfv/testapi' "http://testresults.opnfv.org/test/swagger/APIs"
+            bash ./jjb/releng/docker-deploy.sh "sudo docker run -dti -p 8082:8000
+            -e mongodb_url=mongodb://172.17.0.1:27017
+            -e base_url=http://testresults.opnfv.org/test opnfv/testapi" \
+            "http://testresults.opnfv.org/test/swagger/APIs" "testapi"
+
 - builder:
     name: 'reporting-automate-docker-deploy-macro'
     builders:
         - shell: |
-            bash ./jjb/releng/docker-deploy.sh 'sudo docker run -itd -p 8084:8000 -e SERVER_URL=http://testresults.opnfv.org/reporting2:8084 opnfv/reporting' "http://testresults.opnfv.org/reporting2/reporting/index.html"
+            bash ./jjb/releng/docker-deploy.sh "sudo docker run -itd -p 8084:8000 opnfv/reporting" \
+            "http://testresults.opnfv.org/reporting2/reporting/index.html" "reporting"
 
 - builder:
     name: mongodb-backup
index b3b930f..2a3e078 100644 (file)
@@ -19,6 +19,7 @@
 # Assigning Variables
 command=$1
 url=$2
+module=$3
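+# module selects which opnfv/<module> image this script manages (e.g. testapi, reporting)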
 
 function check() {
 
@@ -38,24 +39,26 @@ function check() {
 }
 
 echo "Getting contianer Id of the currently running one"
-contId=$(sudo docker ps | grep "opnfv/testapi:latest" | awk '{print $1}')
+contId=$(sudo docker ps | grep "opnfv/${module}:latest" | awk '{print $1}')
+
+echo $contId
 
 echo "Pulling the latest image"
-sudo docker pull opnfv/testapi:latest
+sudo docker pull opnfv/${module}:latest
 
-echo "Deleting old containers of opnfv/testapi:old"
-sudo docker ps -a | grep "opnfv/testapi" | grep "old" | awk '{print $1}' | xargs -r sudo docker rm -f
+echo "Deleting old containers of opnfv/${module}:old"
+sudo docker ps -a | grep "opnfv/${module}" | grep "old" | awk '{print $1}' | xargs -r sudo docker rm -f
 
-echo "Deleting old images of opnfv/testapi:latest"
-sudo docker images | grep "opnfv/testapi" | grep "old" | awk '{print $3}' | xargs -r sudo docker rmi -f
+echo "Deleting old images of opnfv/${module}:latest"
+sudo docker images | grep "opnfv/${module}" | grep "old" | awk '{print $3}' | xargs -r sudo docker rmi -f
 
 
 if [[ -z "$contId" ]]
 then
-    echo "No running testapi container"
+    echo "No running ${module} container"
 
-    echo "Removing stopped testapi containers in the previous iterations"
-    sudo docker ps -f status=exited | grep "opnfv_testapi" | awk '{print $1}' | xargs -r sudo docker rm -f
+    echo "Removing stopped ${module} containers in the previous iterations"
+    sudo docker ps -f status=exited | grep "opnfv_${module}" | awk '{print $1}' | xargs -r sudo docker rm -f
 else
     echo $contId
 
@@ -70,13 +73,13 @@ else
     fi
 
     echo "Changing current image tag to old"
-    sudo docker tag "$currImgId" opnfv/testapi:old
+    sudo docker tag "$currImgId" opnfv/${module}:old
 
-    echo "Removing stopped testapi containers in the previous iteration"
-    sudo docker ps -f status=exited | grep "opnfv_testapi" | awk '{print $1}' | xargs -r sudo docker rm -f
+    echo "Removing stopped ${module} containers in the previous iteration"
+    sudo docker ps -f status=exited | grep "opnfv_${module}" | awk '{print $1}' | xargs -r sudo docker rm -f
 
-    echo "Renaming the running container name to opnfv_testapi as to identify it."
-    sudo docker rename $contId opnfv_testapi
+    echo "Renaming the running container name to opnfv_${module} as to identify it."
+    sudo docker rename $contId opnfv_${module}
 
     echo "Stop the currently running container"
     sudo docker stop $contId
@@ -86,10 +89,10 @@ echo "Running a container with the new image"
 $command:latest
 
 if check; then
-    echo "TestResults Hosted."
+    echo "TestResults Module Hosted."
 else
-    echo "TestResults Hosting Failed"
-    if [[ $(sudo docker images | grep "opnfv/testapi" | grep "old" | awk '{print $3}') ]]; then
+    echo "TestResults Module Failed"
+    if [[ $(sudo docker images | grep "opnfv/${module}" | grep "old" | awk '{print $3}') ]]; then
         echo "Running old Image"
         $command:old
         exit 1
index 417fc70..d70640a 100644 (file)
             name: RELEASE_VERSION
             default: ""
             description: "Release version, e.g. 1.0, 2.0, 3.0"
+        - string:
+            name: DOCKER_DIR
+            default: "docker"
+            description: "Directory containing files needed by the Dockerfile"
         - string:
             name: DOCKERFILE
             default: "Dockerfile.aarch64"
@@ -83,3 +87,4 @@
     publishers:
         - email:
             recipients: '{receivers}'
+        - email-jenkins-admins-on-failure
index ebd0c9f..0de3df2 100644 (file)
@@ -54,7 +54,7 @@ if [[ -n "$(docker images | grep $DOCKER_REPO_NAME)" ]]; then
     done
 fi
 
-cd $WORKSPACE/docker
+cd $WORKSPACE/$DOCKER_DIR
 HOST_ARCH=$(uname -m)
 if [ ! -f "${DOCKERFILE}" ]; then
     # If this is expected to be a Dockerfile for other arch than x86
index 095ba41..7e605b9 100644 (file)
     other-receivers: &other-receivers
         receivers: ''
 
-    project:
+    dockerfile: "Dockerfile"
+    dockerdir: "docker"
+
+    # This is the dockerhub repo the image will be pushed to as
+    # 'opnfv/{dockerrepo}'. See: DOCKER_REPO_NAME parameter.
+    # 'project' is the OPNFV repo we expect to contain the Dockerfile
+    dockerrepo:
         # projects with jobs for master
         - 'releng-anteater':
+            project: 'releng-anteater'
             <<: *master
             <<: *other-receivers
         - 'bottlenecks':
+            project: 'bottlenecks'
             <<: *master
             <<: *other-receivers
         - 'cperf':
+            project: 'cperf'
             <<: *master
             <<: *other-receivers
         - 'dovetail':
+            project: 'dovetail'
             <<: *master
             <<: *other-receivers
         - 'functest':
+            project: 'functest'
             <<: *master
             <<: *functest-receivers
         - 'qtip':
+            project: 'qtip'
             <<: *master
             <<: *other-receivers
-        - 'storperf':
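+        # storperf publishes several images from a single repo, each built
+        # from its own subdirectory selected via 'dockerdir'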
+        - 'storperf-master':
+            project: 'storperf'
+            dockerdir: 'docker/storperf-master'
+            <<: *master
+            <<: *other-receivers
+        - 'storperf-httpfrontend':
+            project: 'storperf'
+            dockerdir: 'docker/storperf-httpfrontend'
+            <<: *master
+            <<: *other-receivers
+        - 'storperf-reporting':
+            project: 'storperf'
+            dockerdir: 'docker/storperf-reporting'
             <<: *master
             <<: *other-receivers
         - 'yardstick':
+            project: 'yardstick'
             <<: *master
             <<: *other-receivers
         # projects with jobs for stable
         - 'bottlenecks':
+            project: 'bottlenecks'
             <<: *danube
             <<: *other-receivers
         - 'functest':
+            project: 'functest'
             <<: *danube
             <<: *functest-receivers
         - 'qtip':
+            project: 'qtip'
             <<: *danube
             <<: *other-receivers
         - 'storperf':
+            project: 'storperf'
             <<: *danube
             <<: *other-receivers
         - 'yardstick':
+            project: 'yardstick'
             <<: *danube
             <<: *other-receivers
 
     jobs:
-        - '{project}-docker-build-push-{stream}'
+        - "{dockerrepo}-docker-build-push-{stream}"
 
 
 - project:
 
     name: opnfv-monitor-docker        # projects which only monitor a dedicated file or path
 
+    dockerfile: "Dockerfile"
+    dockerdir: "docker"
+
     project:
         # projects with jobs for master
         - 'daisy':
+            dockerrepo: 'daisy'
             <<: *master
         - 'escalator':
+            dockerrepo: 'escalator'
             <<: *master
 
     jobs:
 # job templates
 ########################
 - job-template:
-    name: '{project}-docker-build-push-{stream}'
+    name: '{dockerrepo}-docker-build-push-{stream}'
 
     disabled: '{obj:disabled}'
 
             description: "To enable/disable pushing the image to Dockerhub."
         - string:
             name: DOCKER_REPO_NAME
-            default: "opnfv/{project}"
+            default: "opnfv/{dockerrepo}"
             description: "Dockerhub repo to be pushed to."
+        - string:
+            name: DOCKER_DIR
+            default: "{dockerdir}"
+            description: "Directory containing files needed by the Dockerfile"
         - string:
             name: COMMIT_ID
             default: ""
             description: "Release version, e.g. 1.0, 2.0, 3.0"
         - string:
             name: DOCKERFILE
-            default: "Dockerfile"
+            default: "{dockerfile}"
             description: "Dockerfile to use for creating the image."
 
     scm:
     publishers:
         - email:
             recipients: '{receivers}'
+        - email-jenkins-admins-on-failure
 
 - job-template:
     name: '{project}-docker-build-push-monitor-{stream}'
index 8c231c3..d253da0 100644 (file)
@@ -53,7 +53,7 @@
                     comment-contains-value: 'reverify'
             projects:
               - project-compare-type: 'REG_EXP'
-                project-pattern: 'functest|sdnvpn|qtip|daisy|sfc|escalator|releng|pharos|octopus|securedlab'
+                project-pattern: 'functest|sdnvpn|qtip|daisy|sfc|escalator|releng'
                 branches:
                   - branch-compare-type: 'ANT'
                     branch-pattern: '**/{branch}'
                     comment-contains-value: 'reverify'
             projects:
               - project-compare-type: 'REG_EXP'
-                project-pattern: ''
+                project-pattern: 'octopus|releng-anteater|pharos'
                 branches:
                   - branch-compare-type: 'ANT'
                     branch-pattern: '**/{branch}'
index ecc8730..dc9bfd5 100644 (file)
@@ -52,6 +52,7 @@
     publishers:
         - archive-artifacts:
             artifacts: 'job_output/*'
+        - email-jenkins-admins-on-failure
 
 - job-template:
     name: 'releng-merge-jjb'
index 13186a1..f675cbb 100644 (file)
@@ -81,6 +81,7 @@
                     healthy: 50
                     unhealthy: 40
                     failing: 30
+        - email-jenkins-admins-on-failure
 
 - job-template:
     name: 'storperf-merge-{stream}'
                     healthy: 50
                     unhealthy: 40
                     failing: 30
+        - email-jenkins-admins-on-failure
 
 - job-template:
     name: 'storperf-daily-{stream}'
index d5a444d..998464a 100644 (file)
@@ -83,6 +83,7 @@
     publishers:
         - email:
             recipients: fatih.degirmenci@ericsson.com yroblamo@redhat.com mchandras@suse.de jack.morgan@intel.com zhang.jun3g@zte.com.cn
+        - email-jenkins-admins-on-failure
 #--------------------------------
 # trigger macros
 #--------------------------------
index 319f8eb..fbe7ecf 100644 (file)
     publishers:
         - email:
             recipients: fatih.degirmenci@ericsson.com yroblamo@redhat.com mchandras@suse.de jack.morgan@intel.com julienjut@gmail.com
+        - email-jenkins-admins-on-failure
 #--------------------------------
 # trigger macros
 #--------------------------------
index 64e13d3..b582772 100644 (file)
     publishers:
         - email:
             recipients: fatih.degirmenci@ericsson.com yroblamo@redhat.com mchandras@suse.de jack.morgan@intel.com julienjut@gmail.com
+        - email-jenkins-admins-on-failure
 
 - job-template:
     name: 'xci-{phase}-{pod}-{distro}-daily-{stream}'
index ff1d47e..007384b 100644 (file)
 # that have been switched using labels for slaves
 #--------------------------------
     pod:
+# apex CI PODs
+        - virtual:
+            slave-label: apex-virtual-master
+            installer: apex
+            auto-trigger-name: 'daily-trigger-disabled'
+            <<: *master
+        - baremetal:
+            slave-label: apex-baremetal-master
+            installer: apex
+            auto-trigger-name: 'daily-trigger-disabled'
+            <<: *master
+        - virtual:
+            slave-label: apex-virtual-danube
+            installer: apex
+            auto-trigger-name: 'daily-trigger-disabled'
+            <<: *danube
+        - baremetal:
+            slave-label: apex-baremetal-danube
+            installer: apex
+            auto-trigger-name: 'daily-trigger-disabled'
+            <<: *danube
 # fuel CI PODs
         - baremetal:
             slave-label: fuel-baremetal
             installer: joid
             auto-trigger-name: 'daily-trigger-disabled'
             <<: *danube
-
 # compass CI PODs
         - baremetal:
             slave-label: compass-baremetal
             auto-trigger-name: 'daily-trigger-disabled'
             <<: *danube
 #--------------------------------
-#    Installers not using labels
-#            CI PODs
-# This section should only contain the installers
-# that have not been switched using labels for slaves
-#--------------------------------
-        - lf-pod1:
-            slave-label: '{pod}'
-            installer: apex
-            auto-trigger-name: 'daily-trigger-disabled'
-            <<: *master
-        - lf-pod1:
-            slave-label: '{pod}'
-            installer: apex
-            auto-trigger-name: 'daily-trigger-disabled'
-            <<: *danube
-#--------------------------------
 #        None-CI PODs
 #--------------------------------
         - orange-pod1:
         - description-setter:
             description: "POD: $NODE_NAME"
         - 'yardstick-cleanup'
-        #- 'yardstick-fetch-os-creds'
+        - 'yardstick-fetch-os-creds'
         - 'yardstick-{testsuite}'
         - 'yardstick-store-results'
 
     publishers:
         - email:
             recipients: jean.gaoliang@huawei.com limingjiang@huawei.com ross.b.brattain@intel.com
+        - email-jenkins-admins-on-failure
 
 ########################
 # builder macros
 # parameter macros
 ########################
 - parameter:
-    name: 'yardstick-params-fuel-baremetal'
+    name: 'yardstick-params-apex-virtual-master'
     parameters:
         - string:
             name: YARDSTICK_DB_BACKEND
             default: '-i 104.197.68.199:8086'
             description: 'Arguments to use in order to choose the backend DB'
 - parameter:
-    name: 'yardstick-params-fuel-virtual'
+    name: 'yardstick-params-apex-baremetal-master'
     parameters:
         - string:
             name: YARDSTICK_DB_BACKEND
             default: '-i 104.197.68.199:8086'
             description: 'Arguments to use in order to choose the backend DB'
 - parameter:
-    name: 'yardstick-params-armband-baremetal'
+    name: 'yardstick-params-apex-virtual-danube'
     parameters:
         - string:
             name: YARDSTICK_DB_BACKEND
             default: '-i 104.197.68.199:8086'
             description: 'Arguments to use in order to choose the backend DB'
 - parameter:
-    name: 'yardstick-params-armband-virtual'
+    name: 'yardstick-params-apex-baremetal-danube'
     parameters:
         - string:
             name: YARDSTICK_DB_BACKEND
             default: '-i 104.197.68.199:8086'
             description: 'Arguments to use in order to choose the backend DB'
 - parameter:
-    name: 'yardstick-params-arm-virtual1'
+    name: 'yardstick-params-fuel-baremetal'
     parameters:
         - string:
             name: YARDSTICK_DB_BACKEND
             default: '-i 104.197.68.199:8086'
             description: 'Arguments to use in order to choose the backend DB'
 - parameter:
-    name: 'yardstick-params-joid-baremetal'
+    name: 'yardstick-params-fuel-virtual'
     parameters:
         - string:
             name: YARDSTICK_DB_BACKEND
             default: '-i 104.197.68.199:8086'
             description: 'Arguments to use in order to choose the backend DB'
 - parameter:
-    name: 'yardstick-params-joid-virtual'
+    name: 'yardstick-params-armband-baremetal'
     parameters:
         - string:
             name: YARDSTICK_DB_BACKEND
             default: '-i 104.197.68.199:8086'
             description: 'Arguments to use in order to choose the backend DB'
 - parameter:
-    name: 'yardstick-params-intel-pod8'
+    name: 'yardstick-params-armband-virtual'
     parameters:
         - string:
             name: YARDSTICK_DB_BACKEND
             default: '-i 104.197.68.199:8086'
             description: 'Arguments to use in order to choose the backend DB'
 - parameter:
-    name: 'yardstick-params-lf-pod1'
+    name: 'yardstick-params-arm-virtual1'
     parameters:
         - string:
             name: YARDSTICK_DB_BACKEND
             default: '-i 104.197.68.199:8086'
             description: 'Arguments to use in order to choose the backend DB'
-
 - parameter:
-    name: 'yardstick-params-lf-pod2'
+    name: 'yardstick-params-joid-baremetal'
+    parameters:
+        - string:
+            name: YARDSTICK_DB_BACKEND
+            default: '-i 104.197.68.199:8086'
+            description: 'Arguments to use in order to choose the backend DB'
+- parameter:
+    name: 'yardstick-params-joid-virtual'
+    parameters:
+        - string:
+            name: YARDSTICK_DB_BACKEND
+            default: '-i 104.197.68.199:8086'
+            description: 'Arguments to use in order to choose the backend DB'
+- parameter:
+    name: 'yardstick-params-intel-pod8'
     parameters:
         - string:
             name: YARDSTICK_DB_BACKEND
             default: '-i 104.197.68.199:8086'
             description: 'Arguments to use in order to choose the backend DB'
-
 - parameter:
     name: 'yardstick-params-compass-baremetal'
     parameters:
index 1c2abad..56d0874 100755 (executable)
@@ -2,9 +2,10 @@
 set -e
 [[ $CI_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
 
-# labconfig is used only for joid
-labconfig=""
+rc_file_vol=""
+cacert_file_vol=""
 sshkey=""
+
 if [[ ${INSTALLER_TYPE} == 'apex' ]]; then
     instack_mac=$(sudo virsh domiflist undercloud | grep default | \
                   grep -Eo "[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+")
@@ -15,12 +16,20 @@ if [[ ${INSTALLER_TYPE} == 'apex' ]]; then
         sudo iptables -D FORWARD -o virbr0 -j REJECT --reject-with icmp-port-unreachable
         sudo iptables -D FORWARD -i virbr0 -j REJECT --reject-with icmp-port-unreachable
     fi
-elif [[ ${INSTALLER_TYPE} == 'joid' ]]; then
+fi
+
+if [[ ${INSTALLER_TYPE} == 'joid' ]]; then
     # If production lab then creds may be retrieved dynamically
     # creds are on the jumphost, always in the same folder
-    labconfig="-v $LAB_CONFIG/admin-openrc:/etc/yardstick/openstack.creds"
+    rc_file_vol="-v $LAB_CONFIG/admin-openrc:/etc/yardstick/openstack.creds"
     # If dev lab, credentials may not be the default ones, just provide a path to put them into docker
     # replace the default one by the customized one provided by jenkins config
+elif [[ ${INSTALLER_TYPE} == 'compass' && ${BRANCH} == 'master' ]]; then
+    cacert_file_vol="-v ${HOME}/os_cacert:/etc/yardstick/os_cacert"
+    echo "export OS_CACERT=/etc/yardstick/os_cacert" >> ${HOME}/opnfv-openrc.sh
+    rc_file_vol="-v ${HOME}/opnfv-openrc.sh:/etc/yardstick/openstack.creds"
+else
+    rc_file_vol="-v ${HOME}/opnfv-openrc.sh:/etc/yardstick/openstack.creds"
 fi
 
 # Set iptables rule to allow forwarding return traffic for container
@@ -46,7 +55,7 @@ sudo rm -rf ${dir_result}/*
 map_log_dir="-v ${dir_result}:/tmp/yardstick"
 
 # Run docker
-cmd="sudo docker run ${opts} ${envs} ${labconfig} ${map_log_dir} ${sshkey} opnfv/yardstick:${DOCKER_TAG} \
+cmd="sudo docker run ${opts} ${envs} ${rc_file_vol} ${cacert_file_vol} ${map_log_dir} ${sshkey} opnfv/yardstick:${DOCKER_TAG} \
     exec_tests.sh ${YARDSTICK_DB_BACKEND} ${YARDSTICK_SCENARIO_SUITE_NAME}"
 echo "Yardstick: Running docker cmd: ${cmd}"
 ${cmd}
index 94de628..3cba98b 100644 (file)
@@ -48,6 +48,7 @@
       dib_os_element: "{{ ipa_dib_os_element|default('debian') }}"
       dib_os_release: "jessie"
       dib_elements: "ironic-agent {{ ipa_extra_dib_elements | default('') }}"
+      dib_notmpfs: true
       when: create_ipa_image | bool == true
     - role: bifrost-create-dib-image
       dib_imagetype: "qcow2"
@@ -57,6 +58,7 @@
       extra_dib_elements: "{{ lookup('env', 'EXTRA_DIB_ELEMENTS') | default('') }}"
       dib_elements: "vm enable-serial-console simple-init devuser growroot {{ extra_dib_elements }}"
       dib_packages: "{{ lookup('env', 'DIB_OS_PACKAGES') }}"
+      dib_notmpfs: true
       when: create_image_via_dib | bool == true and transform_boot_image | bool == false
     - role: bifrost-keystone-client-config
       user: "{{ ansible_env.SUDO_USER }}"
index 8656ff9..a7ce521 100644 (file)
       command: "/bin/bash ./scripts/bootstrap-ansible.sh"
       args:
         chdir: "{{OPENSTACK_OSA_PATH}}"
+    - name: install python Crypto module
+      package:
+        name: "{{ python_crypto_package_name }}"
+    - name: install PyYAML
+      pip:
+        name: pyyaml
+        state: present
     - name: generate password token
       command: "python pw-token-gen.py --file {{OPENSTACK_OSA_ETC_PATH}}/user_secrets.yml"
       args:
index d13d080..33f1105 100644 (file)
@@ -9,3 +9,4 @@
 ##############################################################################
 # this is the interface the VM nodes are connected to libvirt network "default"
 interface: "ens3"
+python_crypto_package_name: python-crypto
index 6d03e0f..eae7d12 100644 (file)
@@ -8,3 +8,4 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 # this is placeholder and left blank intentionally to complete later on
+python_crypto_package_name: python-crypto
index 6d03e0f..9674ed2 100644 (file)
@@ -8,3 +8,4 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 # this is placeholder and left blank intentionally to complete later on
+python_crypto_package_name: python-pycrypto
index a72c927..d711256 100755 (executable)
@@ -42,7 +42,7 @@ source $XCI_PATH/config/env-vars
 #-------------------------------------------------------------------------------
 user_local_dev_vars=(OPNFV_RELENG_DEV_PATH OPNFV_OSA_DEV_PATH OPNFV_BIFROST_DEV_PATH)
 for local_user_var in ${user_local_dev_vars[@]}; do
-    [[ -n ${!local_user_var} ]] && export $local_user_var=${!local_user_var%/}/
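+    # '${!var:-}' keeps the indirect expansion safe when the variable is unset;
+    # set values are normalized to end in exactly one trailing slash.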
+    [[ -n ${!local_user_var:-} ]] && export $local_user_var=${!local_user_var%/}/
 done
 unset user_local_dev_vars local_user_var
 
index 197e493..e2c57d2 100644 (file)
@@ -49,7 +49,7 @@ def get_with_passwd():
                                        args.user, installer_pwd=args.password)
 
 
-def create_file(handler):
+def create_file(handler, installer_type):
     """
     Create the yaml file of nodes info.
     As required by Yardstick, node names must be node1, node2, ... and node1 must
@@ -62,27 +62,30 @@ def create_file(handler):
     nodes = handler.nodes
     node_list = []
     index = 1
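+    # Apex overcloud nodes are reached as 'heat-admin'; other installers use root.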
+    user = 'root'
+    if installer_type == 'apex':
+        user = 'heat-admin'
     for node in nodes:
         try:
             if node.roles[0].lower() == "controller":
                 node_info = {'name': "node%s" % index, 'role': node.roles[0],
-                             'ip': node.ip, 'user': 'root'}
+                             'ip': node.ip, 'user': user}
                 node_list.append(node_info)
                 index += 1
         except Exception:
             node_info = {'name': node.name, 'role': 'unknown', 'ip': node.ip,
-                         'user': 'root'}
+                         'user': user}
             node_list.append(node_info)
     for node in nodes:
         try:
             if node.roles[0].lower() == "compute":
                 node_info = {'name': "node%s" % index, 'role': node.roles[0],
-                             'ip': node.ip, 'user': 'root'}
+                             'ip': node.ip, 'user': user}
                 node_list.append(node_info)
                 index += 1
         except Exception:
             node_info = {'name': node.name, 'role': 'unknown', 'ip': node.ip,
-                         'user': 'root'}
+                         'user': user}
             node_list.append(node_info)
     if args.INSTALLER_TYPE == 'compass':
         for item in node_list:
@@ -105,7 +108,7 @@ def main():
     if not handler:
         print("Error: failed to get the node's handler.")
         return 1
-    create_file(handler)
+    create_file(handler, args.INSTALLER_TYPE)
 
 
 if __name__ == '__main__':
index 993c0b9..8374edb 100755 (executable)
@@ -12,7 +12,7 @@ set -o nounset
 set -o pipefail
 
 usage() {
-    echo "usage: $0 [-v] -d <destination> -i <installer_type> -a <installer_ip> [-s <ssh_key>]" >&2
+    echo "usage: $0 [-v] -d <destination> -i <installer_type> -a <installer_ip> [-o <os_cacert>] [-s <ssh_key>]" >&2
     echo "[-v] Virtualized deployment" >&2
     echo "[-s <ssh_key>] Path to ssh key. For MCP deployments only" >&2
 }
@@ -54,12 +54,13 @@ swap_to_public() {
 : ${DEPLOY_TYPE:=''}
 
 #Get options
-while getopts ":d:i:a:h:s:v" optchar; do
+while getopts ":d:i:a:h:s:o:v" optchar; do
     case "${optchar}" in
         d) dest_path=${OPTARG} ;;
         i) installer_type=${OPTARG} ;;
         a) installer_ip=${OPTARG} ;;
         s) ssh_key=${OPTARG} ;;
+        o) os_cacert=${OPTARG} ;;
         v) DEPLOY_TYPE="virt" ;;
         *) echo "Non-option argument: '-${OPTARG}'" >&2
            usage
@@ -70,6 +71,7 @@ done
 
 # set vars from env if not provided by user as options
 dest_path=${dest_path:-$HOME/opnfv-openrc.sh}
+os_cacert=${os_cacert:-$HOME/os_cacert}
 installer_type=${installer_type:-$INSTALLER_TYPE}
 installer_ip=${installer_ip:-$INSTALLER_IP}
 if [ "${installer_type}" == "fuel" ] && [ "${BRANCH}" == "master" ]; then
@@ -153,36 +155,41 @@ elif [ "$installer_type" == "apex" ]; then
     sudo scp $ssh_options root@$installer_ip:/home/stack/overcloudrc.v3 $dest_path
 
 elif [ "$installer_type" == "compass" ]; then
-    verify_connectivity $installer_ip
-    controller_ip=$(sshpass -p'root' ssh 2>/dev/null $ssh_options root@${installer_ip} \
-        'mysql -ucompass -pcompass -Dcompass -e"select *  from cluster;"' \
-        | awk -F"," '{for(i=1;i<NF;i++)if($i~/\"127.0.0.1\"/) {print $(i+2);break;}}'  \
-        | grep -oP "\d+.\d+.\d+.\d+")
-
-    if [ -z $controller_ip ]; then
-        error "The controller $controller_ip is not up. Please check that the POD is correctly deployed."
-    fi
-
-    info "Fetching rc file from controller $controller_ip..."
-    sshpass -p root ssh 2>/dev/null $ssh_options root@${installer_ip} \
-        "scp $ssh_options ${controller_ip}:/opt/admin-openrc.sh ." &> /dev/null
-    sshpass -p root scp 2>/dev/null $ssh_options root@${installer_ip}:~/admin-openrc.sh $dest_path &> /dev/null
+    if [ "${BRANCH}" == "master" ]; then
+        sudo docker cp compass-tasks:/opt/openrc $dest_path &> /dev/null
+        sudo chown $(whoami):$(whoami) $dest_path
+        sudo docker cp compass-tasks:/opt/os_cacert $os_cacert &> /dev/null
+    else
+        verify_connectivity $installer_ip
+        controller_ip=$(sshpass -p'root' ssh 2>/dev/null $ssh_options root@${installer_ip} \
+            'mysql -ucompass -pcompass -Dcompass -e"select *  from cluster;"' \
+            | awk -F"," '{for(i=1;i<NF;i++)if($i~/\"127.0.0.1\"/) {print $(i+2);break;}}'  \
+            | grep -oP "\d+.\d+.\d+.\d+")
 
-    info "This file contains the mgmt keystone API, we need the public one for our rc file"
+        if [ -z $controller_ip ]; then
+            error "The controller $controller_ip is not up. Please check that the POD is correctly deployed."
+        fi
 
-    if grep "OS_AUTH_URL.*v2" $dest_path > /dev/null 2>&1 ; then
-        public_ip=$(sshpass -p root ssh $ssh_options root@${installer_ip} \
-            "ssh ${controller_ip} 'source /opt/admin-openrc.sh; openstack endpoint show identity '" \
-            | grep publicurl | awk '{print $4}')
-    else
-        public_ip=$(sshpass -p root ssh $ssh_options root@${installer_ip} \
-            "ssh ${controller_ip} 'source /opt/admin-openrc.sh; \
-                 openstack endpoint list --interface public --service identity '" \
-            | grep identity | awk '{print $14}')
+        info "Fetching rc file from controller $controller_ip..."
+        sshpass -p root ssh 2>/dev/null $ssh_options root@${installer_ip} \
+            "scp $ssh_options ${controller_ip}:/opt/admin-openrc.sh ." &> /dev/null
+        sshpass -p root scp 2>/dev/null $ssh_options root@${installer_ip}:~/admin-openrc.sh $dest_path &> /dev/null
+
+        info "This file contains the mgmt keystone API, we need the public one for our rc file"
+
+        if grep "OS_AUTH_URL.*v2" $dest_path > /dev/null 2>&1 ; then
+            public_ip=$(sshpass -p root ssh $ssh_options root@${installer_ip} \
+                "ssh ${controller_ip} 'source /opt/admin-openrc.sh; openstack endpoint show identity '" \
+                | grep publicurl | awk '{print $4}')
+        else
+            public_ip=$(sshpass -p root ssh $ssh_options root@${installer_ip} \
+                "ssh ${controller_ip} 'source /opt/admin-openrc.sh; \
+                     openstack endpoint list --interface public --service identity '" \
+                | grep identity | awk '{print $14}')
+        fi
+        info "public_ip: $public_ip"
+        swap_to_public $public_ip
     fi
-    info "public_ip: $public_ip"
-    swap_to_public $public_ip
-
 
 elif [ "$installer_type" == "joid" ]; then
     # do nothing...for the moment
index 5021b78..eb57deb 100644 (file)
@@ -30,7 +30,8 @@ node_list=(\
 'arm-pod1' 'arm-pod3' \
 'huawei-pod1' 'huawei-pod2' 'huawei-pod3' 'huawei-pod4' 'huawei-pod5' \
 'huawei-pod6' 'huawei-pod7' 'huawei-pod12' \
-'huawei-virtual1' 'huawei-virtual2' 'huawei-virtual3' 'huawei-virtual4'\
+'huawei-virtual1' 'huawei-virtual2' 'huawei-virtual3' 'huawei-virtual4' \
+'zte-pod2' \
 'zte-virtual1')
 
 
diff --git a/utils/test/testapi/.gitignore b/utils/test/testapi/.gitignore
new file mode 100644 (file)
index 0000000..c7b63b5
--- /dev/null
@@ -0,0 +1,4 @@
+AUTHORS
+ChangeLog
+setup.cfg-e
+
index 8c701c3..5f5b861 100644 (file)
         .module('testapiApp')
         .config(configureRoutes);
 
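+    /**
+     * 'dynamic-model' rewrites its attribute into ng-model at link time so a
+     * template can bind inputs whose model name is computed at runtime (used
+     * by the pods page for fields generated from createRequirements).
+     */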
+    angular
+        .module('testapiApp')
+        .directive('dynamicModel', ['$compile', '$parse', function ($compile, $parse) {
+            return {
+                restrict: 'A',
+                terminal: true,
+                priority: 100000,
+                link: function (scope, elem) {
+                    var name = $parse(elem.attr('dynamic-model'))(scope);
+                    elem.removeAttr('dynamic-model');
+                    elem.attr('ng-model', name);
+                    $compile(elem)(scope);
+                }
+            };
+        }]);
+
     configureRoutes.$inject = ['$stateProvider', '$urlRouterProvider'];
 
     /**
                 url: '/about',
                 templateUrl: 'testapi-ui/components/about/about.html'
             }).
-            state('guidelines', {
-                url: '/guidelines',
-                templateUrl: 'testapi-ui/components/guidelines/guidelines.html',
-                controller: 'GuidelinesController as ctrl'
+            state('pods', {
+                url: '/pods',
+                templateUrl: 'testapi-ui/components/pods/pods.html',
+                controller: 'PodsController as ctrl'
             }).
             state('communityResults', {
                 url: '/community_results',
@@ -54,8 +70,8 @@
                 controller: 'ResultsController as ctrl'
             }).
             state('userResults', {
-                url: 'user_results',
-                templateUrl: '/testapi-ui/components/results/results.html',
+                url: '/user_results',
+                templateUrl: 'testapi-ui/components/results/results.html',
                 controller: 'ResultsController as ctrl'
             }).
             state('resultsDetail', {
@@ -66,7 +82,7 @@
             }).
             state('profile', {
                 url: '/profile',
-                templateUrl: '/testapi-ui/components/profile/profile.html',
+                templateUrl: 'testapi-ui/components/profile/profile.html',
                 controller: 'ProfileController as ctrl'
             }).
             state('authFailure', {
diff --git a/utils/test/testapi/3rd_party/static/testapi-ui/components/guidelines/guidelines.html b/utils/test/testapi/3rd_party/static/testapi-ui/components/guidelines/guidelines.html
deleted file mode 100644 (file)
index 1dd39ff..0000000
+++ /dev/null
@@ -1,80 +0,0 @@
-<h3>OpenStack Powered&#8482; Guidelines</h3>
-
-<!-- Guideline Filters -->
-<div class="row">
-    <div class="col-md-3">
-        <strong>Version:</strong>
-        <!-- Slicing the version file name here gets rid of the '.json' file extension -->
-        <select ng-model="ctrl.version"
-                ng-change="ctrl.update()"
-                class="form-control"
-                ng-options="versionFile.slice(0,-5) for versionFile in ctrl.versionList">
-        </select>
-    </div>
-    <div class="col-md-4">
-        <strong>Target Program:</strong>
-        <span class="program-about"><a target="_blank" href="http://www.openstack.org/brand/interop/">About</a></span>
-        <select ng-model="ctrl.target" class="form-control" ng-change="ctrl.updateTargetCapabilities()">
-            <option value="platform">OpenStack Powered Platform</option>
-            <option value="compute">OpenStack Powered Compute</option>
-            <option value="object">OpenStack Powered Object Storage</option>
-        </select>
-    </div>
-</div>
-
-<br />
-<div ng-if="ctrl.guidelines">
-    <strong>Guideline Status:</strong>
-    {{ctrl.guidelines.status | capitalize}}
-</div>
-
-<div ng-show="ctrl.guidelines">
-    <strong>Corresponding OpenStack Releases:</strong>
-    <ul class="list-inline">
-        <li ng-repeat="release in ctrl.guidelines.releases">
-            {{release | capitalize}}
-        </li>
-    </ul>
-</div>
-
-<strong>Capability Status:</strong>
-<div class="checkbox">
-    <label>
-    <input type="checkbox" ng-model="ctrl.status.required">
-    <span class="required">Required</span>
-    </label>
-    <label>
-    <input type="checkbox" ng-model="ctrl.status.advisory">
-    <span class="advisory">Advisory</span>
-    </label>
-    <label>
-    <input type="checkbox" ng-model="ctrl.status.deprecated">
-    <span class="deprecated">Deprecated</span>
-    </label>
-    <label>
-    <input type="checkbox" ng-model="ctrl.status.removed">
-    <span class="removed">Removed</span>
-    </label>
-    <a class="test-list-dl pull-right"
-       title="Get a test list for capabilities matching selected statuses."
-       ng-click="ctrl.openTestListModal()">
-
-        Test List <span class="glyphicon glyphicon-file"></span>
-    </a>
-</div>
-<!-- End Capability Filters -->
-
-<p><small>Tests marked with <span class="glyphicon glyphicon-flag text-warning"></span> are tests flagged by Interop Working Group.</small></p>
-
-<!-- Loading animation divs -->
-<div cg-busy="{promise:ctrl.versionsRequest,message:'Loading versions'}"></div>
-<div cg-busy="{promise:ctrl.capsRequest,message:'Loading capabilities'}"></div>
-
-<!-- Get the version-specific template -->
-<div ng-include src="ctrl.detailsTemplate"></div>
-
-<div ng-show="ctrl.showError" class="alert alert-danger" role="alert">
-    <span class="glyphicon glyphicon-exclamation-sign" aria-hidden="true"></span>
-    <span class="sr-only">Error:</span>
-    {{ctrl.error}}
-</div>
diff --git a/utils/test/testapi/3rd_party/static/testapi-ui/components/guidelines/guidelinesController.js b/utils/test/testapi/3rd_party/static/testapi-ui/components/guidelines/guidelinesController.js
deleted file mode 100644 (file)
index a6f4258..0000000
+++ /dev/null
@@ -1,322 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-(function () {
-    'use strict';
-
-    angular
-        .module('testapiApp')
-        .controller('GuidelinesController', GuidelinesController);
-
-    GuidelinesController.$inject = ['$http', '$uibModal', 'testapiApiUrl'];
-
-    /**
-     * TestAPI Guidelines Controller
-     * This controller is for the '/guidelines' page where a user can browse
-     * through tests belonging to Interop WG defined capabilities.
-     */
-    function GuidelinesController($http, $uibModal, testapiApiUrl) {
-        var ctrl = this;
-
-        ctrl.getVersionList = getVersionList;
-        ctrl.update = update;
-        ctrl.updateTargetCapabilities = updateTargetCapabilities;
-        ctrl.filterStatus = filterStatus;
-        ctrl.getObjectLength = getObjectLength;
-        ctrl.openTestListModal = openTestListModal;
-
-        /** The target OpenStack marketing program to show capabilities for. */
-        ctrl.target = 'platform';
-
-        /** The various possible capability statuses. */
-        ctrl.status = {
-            required: true,
-            advisory: false,
-            deprecated: false,
-            removed: false
-        };
-
-        /**
-         * The template to load for displaying capability details.
-         */
-        ctrl.detailsTemplate = 'components/guidelines/partials/' +
-                               'guidelineDetails.html';
-
-        /**
-         * Retrieve an array of available guideline files from the TestAPI
-         * API server, sort this array reverse-alphabetically, and store it in
-         * a scoped variable. The scope's selected version is initialized to
-         * the latest (i.e. first) version here as well. After a successful API
-         * call, the function to update the capabilities is called.
-         * Sample API return array: ["2015.03.json", "2015.04.json"]
-         */
-        function getVersionList() {
-            var content_url = testapiApiUrl + '/guidelines';
-            ctrl.versionsRequest =
-                $http.get(content_url).success(function (data) {
-                    ctrl.versionList = data.sort().reverse();
-                    // Default to the first approved guideline which is expected
-                    // to be at index 1.
-                    ctrl.version = ctrl.versionList[1];
-                    ctrl.update();
-                }).error(function (error) {
-                    ctrl.showError = true;
-                    ctrl.error = 'Error retrieving version list: ' +
-                        angular.toJson(error);
-                });
-        }
-
-        /**
-         * This will contact the TestAPI API server to retrieve the JSON
-         * content of the guideline file corresponding to the selected
-         * version.
-         */
-        function update() {
-            var content_url = testapiApiUrl + '/guidelines/' + ctrl.version;
-            ctrl.capsRequest =
-                $http.get(content_url).success(function (data) {
-                    ctrl.guidelines = data;
-                    ctrl.updateTargetCapabilities();
-                }).error(function (error) {
-                    ctrl.showError = true;
-                    ctrl.guidelines = null;
-                    ctrl.error = 'Error retrieving guideline content: ' +
-                        angular.toJson(error);
-                });
-        }
-
-        /**
-         * This will update the scope's 'targetCapabilities' object with
-         * capabilities belonging to the selected OpenStack marketing program
-         * (programs typically correspond to 'components' in the Interop WG
-         * schema). Each capability will have its status mapped to it.
-         */
-        function updateTargetCapabilities() {
-            ctrl.targetCapabilities = {};
-            var components = ctrl.guidelines.components;
-            var targetCaps = ctrl.targetCapabilities;
-
-            // The 'platform' target is comprised of multiple components, so
-            // we need to get the capabilities belonging to each of its
-            // components.
-            if (ctrl.target === 'platform') {
-                var platform_components = ctrl.guidelines.platform.required;
-
-                // This will contain status priority values, where lower
-                // values mean higher priorities.
-                var statusMap = {
-                    required: 1,
-                    advisory: 2,
-                    deprecated: 3,
-                    removed: 4
-                };
-
-                // For each component required for the platform program.
-                angular.forEach(platform_components, function (component) {
-                    // Get each capability list belonging to each status.
-                    angular.forEach(components[component],
-                        function (caps, status) {
-                            // For each capability.
-                            angular.forEach(caps, function(cap) {
-                                // If the capability has already been added.
-                                if (cap in targetCaps) {
-                                    // If the status priority value is less
-                                    // than the saved priority value, update
-                                    // the value.
-                                    if (statusMap[status] <
-                                        statusMap[targetCaps[cap]]) {
-                                        targetCaps[cap] = status;
-                                    }
-                                }
-                                else {
-                                    targetCaps[cap] = status;
-                                }
-                            });
-                        });
-                });
-            }
-            else {
-                angular.forEach(components[ctrl.target],
-                    function (caps, status) {
-                        angular.forEach(caps, function(cap) {
-                            targetCaps[cap] = status;
-                        });
-                    });
-            }
-        }
-
-        /**
-         * This filter will check if a capability's status corresponds
-         * to a status that is checked/selected in the UI. This filter
-         * is meant to be used with the ng-repeat directive.
-         * @param {Object} capability
-         * @returns {Boolean} True if capability's status is selected
-         */
-        function filterStatus(capability) {
-            var caps = ctrl.targetCapabilities;
-            return (ctrl.status.required &&
-                caps[capability.id] === 'required') ||
-                (ctrl.status.advisory &&
-                caps[capability.id] === 'advisory') ||
-                (ctrl.status.deprecated &&
-                caps[capability.id] === 'deprecated') ||
-                (ctrl.status.removed &&
-                caps[capability.id] === 'removed');
-        }
-
-        /**
-         * This function will get the length of an Object/dict based on
-         * the number of keys it has.
-         * @param {Object} object
-         * @returns {Number} length of object
-         */
-        function getObjectLength(object) {
-            return Object.keys(object).length;
-        }
-
-        /**
-         * This will open the modal that will show a list of all tests
-         * belonging to capabilities with the selected status(es).
-         */
-        function openTestListModal() {
-            $uibModal.open({
-                templateUrl: '/components/guidelines/partials' +
-                        '/testListModal.html',
-                backdrop: true,
-                windowClass: 'modal',
-                animation: true,
-                controller: 'TestListModalController as modal',
-                size: 'lg',
-                resolve: {
-                    version: function () {
-                        return ctrl.version.slice(0, -5);
-                    },
-                    target: function () {
-                        return ctrl.target;
-                    },
-                    status: function () {
-                        return ctrl.status;
-                    }
-                }
-            });
-        }
-
-        ctrl.getVersionList();
-    }
-
-    angular
-        .module('testapiApp')
-        .controller('TestListModalController', TestListModalController);
-
-    TestListModalController.$inject = [
-        '$uibModalInstance', '$http', 'version',
-        'target', 'status', 'testapiApiUrl'
-    ];
-
-    /**
-     * Test List Modal Controller
-     * This controller is for the modal that appears if a user wants to see the
-     * test list corresponding to Interop WG capabilities with the selected
-     * statuses.
-     */
-    function TestListModalController($uibModalInstance, $http, version,
-        target, status, testapiApiUrl) {
-
-        var ctrl = this;
-
-        ctrl.version = version;
-        ctrl.target = target;
-        ctrl.status = status;
-        ctrl.close = close;
-        ctrl.updateTestListString = updateTestListString;
-
-        ctrl.aliases = true;
-        ctrl.flagged = false;
-
-        // Check if the API URL is absolute or relative.
-        if (testapiApiUrl.indexOf('http') > -1) {
-            ctrl.url = testapiApiUrl;
-        }
-        else {
-            ctrl.url = location.protocol + '//' + location.host +
-                testapiApiUrl;
-        }
-
-        /**
-         * This function will close/dismiss the modal.
-         */
-        function close() {
-            $uibModalInstance.dismiss('exit');
-        }
-
-        /**
-         * This function will return a list of statuses based on which ones
-         * are selected.
-         */
-        function getStatusList() {
-            var statusList = [];
-            angular.forEach(ctrl.status, function(value, key) {
-                if (value) {
-                    statusList.push(key);
-                }
-            });
-            return statusList;
-        }
-
-        /**
-         * This will get the list of tests from the API and update the
-         * controller's test list string variable.
-         */
-        function updateTestListString() {
-            var statuses = getStatusList();
-            if (!statuses.length) {
-                ctrl.error = 'No tests matching selected criteria.';
-                return;
-            }
-            ctrl.testListUrl = [
-                ctrl.url, '/guidelines/', ctrl.version, '/tests?',
-                'target=', ctrl.target, '&',
-                'type=', statuses.join(','), '&',
-                'alias=', ctrl.aliases.toString(), '&',
-                'flag=', ctrl.flagged.toString()
-            ].join('');
-            ctrl.testListRequest =
-                $http.get(ctrl.testListUrl).
-                    then(function successCallback(response) {
-                        ctrl.error = null;
-                        ctrl.testListString = response.data;
-                        if (!ctrl.testListString) {
-                            ctrl.testListCount = 0;
-                        }
-                        else {
-                            ctrl.testListCount =
-                                ctrl.testListString.split('\n').length;
-                        }
-                    }, function errorCallback(response) {
-                        ctrl.testListString = null;
-                        ctrl.testListCount = null;
-                        if (angular.isObject(response.data) &&
-                            response.data.message) {
-                            ctrl.error = 'Error retrieving test list: ' +
-                                response.data.message;
-                        }
-                        else {
-                            ctrl.error = 'Unknown error retrieving test list.';
-                        }
-                    });
-        }
-
-        updateTestListString();
-    }
-})();
diff --git a/utils/test/testapi/3rd_party/static/testapi-ui/components/guidelines/partials/guidelineDetails.html b/utils/test/testapi/3rd_party/static/testapi-ui/components/guidelines/partials/guidelineDetails.html
deleted file mode 100644 (file)
index f020c9a..0000000
+++ /dev/null
@@ -1,50 +0,0 @@
-<!--
-HTML for guidelines page for all OpenStack Powered (TM) guideline schemas
-This expects the JSON data of the guidelines file to be stored in scope
-variable 'guidelines'.
--->
-
-<ol ng-show="ctrl.guidelines" class="capabilities">
-  <li class="capability-list-item" ng-repeat="capability in ctrl.guidelines.capabilities | arrayConverter | filter:ctrl.filterStatus | orderBy:'id'">
-    <span class="capability-name">{{capability.id}}</span><br />
-    <em>{{capability.description}}</em><br />
-    Status: <span class="{{ctrl.targetCapabilities[capability.id]}}">{{ctrl.targetCapabilities[capability.id]}}</span><br />
-    <span ng-if="capability.project">Project: {{capability.project | capitalize}}<br /></span>
-    <a ng-click="showAchievements = !showAchievements">Achievements ({{capability.achievements.length}})</a><br />
-    <ol uib-collapse="!showAchievements" class="list-inline">
-        <li ng-repeat="achievement in capability.achievements">
-            {{achievement}}
-        </li>
-    </ol>
-
-    <a ng-click="showTests = !showTests">Tests ({{ctrl.getObjectLength(capability.tests)}})</a>
-    <ul uib-collapse="!showTests">
-        <li ng-if="ctrl.guidelines.schema === '1.2'" ng-repeat="test in capability.tests">
-           <span ng-class="{'glyphicon glyphicon-flag text-warning': capability.flagged.indexOf(test) > -1}"></span>
-           {{test}}
-        </li>
-        <li ng-if="ctrl.guidelines.schema > '1.2'" ng-repeat="(testName, testDetails) in capability.tests">
-           <span ng-class="{'glyphicon glyphicon-flag text-warning': testDetails.flagged}" title="{{testDetails.flagged.reason}}"></span>
-           {{testName}}
-           <div class="test-detail" ng-if="testDetails.aliases">
-               <strong>Aliases:</strong>
-               <ul><li ng-repeat="alias in testDetails.aliases">{{alias}}</li></ul>
-           </div>
-        </li>
-    </ul>
-  </li>
-</ol>
-
-<div ng-show="ctrl.guidelines" class="criteria">
-    <hr>
-    <h4><a ng-click="showCriteria = !showCriteria">Criteria</a></h4>
-    <div uib-collapse="showCriteria">
-        <ul>
-            <li ng-repeat="(key, criterion) in ctrl.guidelines.criteria">
-                <span class="criterion-name">{{criterion.name}}</span><br />
-                <em>{{criterion.Description}}</em><br />
-                Weight: {{criterion.weight}}
-            </li>
-        </ul>
-    </div>
-</div>
diff --git a/utils/test/testapi/3rd_party/static/testapi-ui/components/guidelines/partials/testListModal.html b/utils/test/testapi/3rd_party/static/testapi-ui/components/guidelines/partials/testListModal.html
deleted file mode 100644 (file)
index 5b1d698..0000000
+++ /dev/null
@@ -1,46 +0,0 @@
-<div class="modal-content">
-    <div class="modal-header">
-        <button type="button" class="close" aria-hidden="true" ng-click="modal.close()">&times;</button>
-        <h4>Test List ({{modal.testListCount}})</h4>
-        <p>Use this test list with <a title="testapi-client" target="_blank"href="https://github.com/openstack/testapi-client">testapi-client</a>
-           to run only tests in the {{modal.version}} OpenStack Powered&#8482; guideline from capabilities with the following statuses:
-        </p>
-        <ul class="list-inline">
-            <li class="required" ng-if="modal.status.required"> Required</li>
-            <li class="advisory" ng-if="modal.status.advisory"> Advisory</li>
-            <li class="deprecated" ng-if="modal.status.deprecated"> Deprecated</li>
-            <li class="removed" ng-if="modal.status.removed"> Removed</li>
-        </ul>
-        <div class="checkbox checkbox-test-list">
-            <label><input type="checkbox" ng-model="modal.aliases" ng-change="modal.updateTestListString()">Aliases</label>
-            <span class="glyphicon glyphicon-info-sign info-hover" aria-hidden="true"
-                  title="Include test aliases as tests may have been renamed over time. It does not hurt to include these."></span>
-            &nbsp;
-            <label><input type="checkbox" ng-model="modal.flagged" ng-change="modal.updateTestListString()">Flagged</label>
-            <span class="glyphicon glyphicon-info-sign info-hover" aria-hidden="true"
-                  title="Include flagged tests.">
-            </span>
-        </div>
-        <p ng-hide="modal.error"> Alternatively, get the test list directly from the API on your CLI:</p>
-        <code ng-hide="modal.error">wget "{{modal.testListUrl}}" -O {{modal.version}}-test-list.txt</code>
-    </div>
-    <div class="modal-body tests-modal-content">
-        <div cg-busy="{promise:modal.testListRequest,message:'Loading'}"></div>
-        <div ng-show="modal.error" class="alert alert-danger" role="alert">
-            <span class="glyphicon glyphicon-exclamation-sign" aria-hidden="true"></span>
-            <span class="sr-only">Error:</span>
-            {{modal.error}}
-        </div>
-        <div class="form-group">
-            <textarea class="form-control" rows="16" id="tests" wrap="off">{{modal.testListString}}</textarea>
-        </div>
-    </div>
-    <div class="modal-footer">
-        <a target="_blank" href="{{modal.testListUrl}}" download="{{modal.version + '-test-list.txt'}}">
-            <button class="btn btn-primary" ng-if="modal.testListCount > 0" type="button">
-                Download
-            </button>
-        </a>
-        <button class="btn btn-primary" type="button" ng-click="modal.close()">Close</button>
-    </div>
-</div>
diff --git a/utils/test/testapi/3rd_party/static/testapi-ui/components/pods/pods.html b/utils/test/testapi/3rd_party/static/testapi-ui/components/pods/pods.html
new file mode 100644 (file)
index 0000000..cdfcfaf
--- /dev/null
@@ -0,0 +1,71 @@
+<h3>{{ctrl.pageHeader}}</h3>
+<p>{{ctrl.pageParagraph}}</p>
+<div class="row" style="margin-bottom:24px;"></div>
+
+<div class="pod-create">
+    <h4>Create</h4>
+    <div class="row">
+        <div ng-repeat="require in ctrl.createRequirements">
+            <div class="create-pod" style="margin-left:24px;">
+                <p class="input-group">
+                    <label for="cpid">{{require.label|capitalize}}: </label>
+                    <a ng-if="require.type == 'select'">
+                        <select dynamic-model="'ctrl.' + require.label" ng-options="option for option in require.selects"></select>
+                    </a>
+                    <a ng-if="require.type == 'text'">
+                        <input type="text" dynamic-model="'ctrl.' + require.label"/>
+                    </a>
+                    <a ng-if="require.type == 'textarea'">
+                        <textarea rows="2" cols="50" dynamic-model="'ctrl.' + require.label">
+                        </textarea>
+                    </a>
+                </p>
+            </div>
+        </div>
+
+        <div class="col-md-3" style="margin-top:12px; margin-left:8px;">
+            <button type="submit" class="btn btn-primary" ng-click="ctrl.create()">Create</button>
+        </div>
+    </div>
+</div>
+
+<div class="pods-filters" style="margin-top:36px;">
+    <h4>Filters</h4>
+    <div class="row">
+        <div class="col-md-3" style="margin-top:12px; margin-left:8px;">
+            <button type="submit" class="btn btn-primary" ng-click="ctrl.update()">Filter</button>
+            <button type="submit" class="btn btn-primary btn-danger" ng-click="ctrl.clearFilters()">Clear</button>
+        </div>
+    </div>
+</div>
+
+<div cg-busy="{promise:ctrl.authRequest,message:'Loading'}"></div>
+<div cg-busy="{promise:ctrl.podsRequest,message:'Loading'}"></div>
+
+<div ng-show="ctrl.data" class="pods-table" style="margin-top:24px; margin-left:8px;">
+    <table ng-data="ctrl.data.pods" ng-show="ctrl.data" class="table table-striped table-hover">
+        <tbody>
+            <tr ng-repeat-start="(index, pod) in ctrl.data.pods">
+                <td>
+                    <a href="#" ng-click="showPod = !showPod">{{pod.name}}</a>
+                    <div class="show-pod" ng-class="{ 'hidden': ! showPod }" style="margin-left:24px;">
+                        <p>
+                            role: {{pod.role}}<br>
+                            mode: {{pod.mode}}<br>
+                            create_date: {{pod.creation_date}}<br>
+                            details: {{pod.details}}
+                        </p>
+                    </div>
+                </td>
+            </tr>
+            <tr ng-repeat-end></tr>
+        </tbody>
+    </table>
+</div>
+
+<div ng-show="ctrl.showError" class="alert alert-danger" role="alert">
+    <span class="glyphicon glyphicon-exclamation-sign" aria-hidden="true"></span>
+    <span class="sr-only">Error:</span>
+    {{ctrl.error}}
+</div>
diff --git a/utils/test/testapi/3rd_party/static/testapi-ui/components/pods/podsController.js b/utils/test/testapi/3rd_party/static/testapi-ui/components/pods/podsController.js
new file mode 100644 (file)
index 0000000..53e8b1e
--- /dev/null
@@ -0,0 +1,119 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+(function () {
+    'use strict';
+
+    angular
+        .module('testapiApp')
+        .controller('PodsController', PodsController);
+
+    PodsController.$inject = [
+        '$scope', '$http', '$filter', '$state', 'testapiApiUrl','raiseAlert'
+    ];
+
+    /**
+     * TestAPI Pods Controller
+     * This controller is for the '/pods' page where a user can browse
+     * through pods declared in TestAPI.
+     */
+    function PodsController($scope, $http, $filter, $state, testapiApiUrl,
+        raiseAlert) {
+        var ctrl = this;
+
+        ctrl.url = testapiApiUrl + '/pods';
+
+        ctrl.create = create;
+        ctrl.update = update;
+        ctrl.open = open;
+        ctrl.clearFilters = clearFilters;
+
+        ctrl.roles = ['community-ci', 'production-ci'];
+        ctrl.modes = ['metal', 'virtual'];
+        ctrl.createRequirements = [
+            {label: 'name', type: 'text', required: true},
+            {label: 'mode', type: 'select', selects: ctrl.modes},
+            {label: 'role', type: 'select', selects: ctrl.roles},
+            {label: 'details', type: 'textarea', required: false}
+        ];
+
+        ctrl.name = '';
+        ctrl.role = 'community-ci';
+        ctrl.mode = 'metal';
+        ctrl.details = '';
+
+        ctrl.pageHeader = 'Pods';
+        ctrl.pageParagraph = 'This page is used to create or query pods.';
+
+        /**
+         * Generic open handler: it stops the triggering event and sets
+         * a controller flag so the UI knows which element was opened.
+         * @param {Object} $event - The Event object
+         * @param {String} openVar - Name of the flag to set
+         */
+        function open($event, openVar) {
+            $event.preventDefault();
+            $event.stopPropagation();
+            ctrl[openVar] = true;
+        }
+
+        /**
+         * This function will clear all filters and update the results
+         * listing.
+         */
+        function clearFilters() {
+            ctrl.update();
+        }
+
+        /**
+         * This will contact the TestAPI to create a new pod.
+         */
+        function create() {
+            ctrl.showError = false;
+            var pods_url = ctrl.url;
+            var body = {
+                name: ctrl.name,
+                mode: ctrl.mode,
+                role: ctrl.role,
+                details: ctrl.details
+            };
+
+            ctrl.podsRequest =
+                $http.post(pods_url, body).error(function (error) {
+                    ctrl.showError = true;
+                    ctrl.error =
+                        'Error creating the new pod from server: ' +
+                        angular.toJson(error);
+                });
+        }
+
+        /**
+         * This will contact the TestAPI to get a listing of declared pods.
+         */
+        function update() {
+            ctrl.showError = false;
+            ctrl.podsRequest =
+                $http.get(ctrl.url).success(function (data) {
+                    ctrl.data = data;
+                }).error(function (error) {
+                    ctrl.data = null;
+                    ctrl.showError = true;
+                    ctrl.error =
+                        'Error retrieving pods from server: ' +
+                        angular.toJson(error);
+                });
+        }
+    }
+})();
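For reference, the controller above maps onto just two HTTP calls. A minimal sketch of the equivalent requests from Python, assuming a local TestAPI at http://localhost:8000 with authentication disabled as in the sample config files further down (pod names and values are illustrative):

    # Sketch only: exercises the same /pods endpoints that ctrl.create()
    # and ctrl.update() call. Base URL and pod values are assumptions.
    import requests

    BASE = 'http://localhost:8000/api/v1'

    # ctrl.create(): POST /pods with name/mode/role/details.
    body = {'name': 'example-pod', 'mode': 'metal',
            'role': 'community-ci', 'details': 'hypothetical pod'}
    print(requests.post(BASE + '/pods', json=body).status_code)

    # ctrl.update(): GET /pods, rendered into the pods table.
    for pod in requests.get(BASE + '/pods').json().get('pods', []):
        print(pod['name'], pod['role'], pod['mode'])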
index 3056e1d..2ae5339 100644 (file)
@@ -1,6 +1,23 @@
 <h3>{{ctrl.pageHeader}}</h3>
 <p>{{ctrl.pageParagraph}}</p>
-
+<form class="form-inline" ng-show="ctrl.isUserResults">
+<h4>Upload Results</h4>
+<div class="form-group col-m-3">
+     <input class="form-contrl btn btn-default" type = "file" file-model = "resultFile"/>
+</div>
+<div class="checkbox col-m-1">
+  <label>
+      <input type="checkbox" ng-model="ctrl.isPublic">public
+  </label>
+</div>
+<div class="form-group col-m-3">
+     <button class="btn btn-primary" ng-click = "ctrl.uploadFile()">upload result</button>
+</div>
+<div>
+<lable>{{ctrl.uploadState}}</label>
+</div>
+</form>
+<div class="row" style="margin-bottom:24px;"></div>
 <div class="result-filters">
     <h4>Filters</h4>
     <div class="row">
@@ -41,7 +58,6 @@
 
 <div cg-busy="{promise:ctrl.authRequest,message:'Loading'}"></div>
 <div cg-busy="{promise:ctrl.resultsRequest,message:'Loading'}"></div>
-
 <div ng-show="ctrl.data" class="results-table">
     <table ng-data="ctrl.data.result" ng-show="ctrl.data" class="table table-striped table-hover">
         <thead>
index 9e3540d..cc6cc0b 100644 (file)
         .module('testapiApp')
         .controller('ResultsController', ResultsController);
 
+    angular
+        .module('testapiApp')
+        .directive('fileModel', ['$parse', function ($parse) {
+            return {
+               restrict: 'A',
+               link: function(scope, element, attrs) {
+                  var model = $parse(attrs.fileModel);
+                  var modelSetter = model.assign;
+
+                  element.bind('change', function(){
+                     scope.$apply(function(){
+                        modelSetter(scope, element[0].files[0]);
+                     });
+                  });
+               }
+            };
+         }]);
+
     ResultsController.$inject = [
         '$scope', '$http', '$filter', '$state', 'testapiApiUrl','raiseAlert'
     ];
@@ -32,6 +50,7 @@
         raiseAlert) {
         var ctrl = this;
 
+        ctrl.uploadFile = uploadFile;
         ctrl.update = update;
         ctrl.open = open;
         ctrl.clearFilters = clearFilters;
@@ -76,6 +95,8 @@
         ctrl.format = 'yyyy-MM-dd';
 
         /** Check to see if this page should display user-specific results. */
+        // Viewing user results requires authentication.
         ctrl.isUserResults = $state.current.name === 'userResults';
 
         // Should only be on user-results-page if authenticated.
             'The most recently uploaded community test results are listed ' +
             'here.';
 
+        ctrl.uploadState = '';
+
+        ctrl.isPublic = false;
+
         if (ctrl.isUserResults) {
             ctrl.authRequest = $scope.auth.doSignCheck()
                 .then(ctrl.update);
-            ctrl.getUserProducts();
+            // ctrl.getUserProducts();
         } else {
             ctrl.update();
         }
 
+
+        function uploadFileToUrl(file, uploadUrl){
+           var fd = new FormData();
+           fd.append('file', file);
+           fd.append('public', ctrl.isPublic);
+
+           $http.post(uploadUrl, fd, {
+              transformRequest: angular.identity,
+              headers: {'Content-Type': undefined}
+           })
+
+           .success(function(data){
+              var id = data.href.substr(data.href.lastIndexOf('/')+1);
+              ctrl.uploadState = "Upload succeed. Result id is " + id;
+              ctrl.update();
+           })
+
+           .error(function(data, status){
+              ctrl.uploadState = "Upload failed. Error code is " + status;
+           });
+        }
+
+        function uploadFile(){
+           var file = $scope.resultFile;
+           console.log('uploading file:', file);
+
+           var uploadUrl = testapiApiUrl + "/results/upload";
+           uploadFileToUrl(file, uploadUrl);
+        }
+
         /**
          * This will contact the TestAPI API to get a listing of test run
          * results.
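uploadFileToUrl() above posts a multipart form with a 'file' part and a 'public' field, leaving the Content-Type for the browser to fill in. A rough Python equivalent of that request, assuming a local instance and a hypothetical result.json payload:

    # Sketch of the multipart POST that uploadFileToUrl() performs; the
    # route matches the /results/upload mapping added further below.
    import requests

    url = 'http://localhost:8000/api/v1/results/upload'
    with open('result.json', 'rb') as f:  # hypothetical results file
        resp = requests.post(url, files={'file': f}, data={'public': 'false'})
    # On success the handler returns an href ending in the new result id,
    # which the UI turns into the "Upload succeeded" message.
    print(resp.json().get('href', resp.text))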
index 46ccc61..2d7399f 100644 (file)
@@ -40,7 +40,7 @@
         <script src="testapi-ui/shared/header/headerController.js"></script>
         <script src="testapi-ui/shared/alerts/alertModalFactory.js"></script>
         <script src="testapi-ui/shared/alerts/confirmModalFactory.js"></script>
-        <script src="testapi-ui/components/guidelines/guidelinesController.js"></script>
+        <script src="testapi-ui/components/pods/podsController.js"></script>
         <script src="testapi-ui/components/results/resultsController.js"></script>
         <script src="testapi-ui/components/results-report/resultsReportController.js"></script>
         <script src="testapi-ui/components/profile/profileController.js"></script>
index f2c49e8..f5b2414 100644 (file)
@@ -17,7 +17,7 @@ TestAPI
           <ul class="nav navbar-nav">
             <li ng-class="{ active: header.isActive('/')}"><a ui-sref="home">Home</a></li>
             <li ng-class="{ active: header.isActive('/about')}"><a ui-sref="about">About</a></li>
-            <li ng-class="{ active: header.isActive('/guidelines')}"><a ui-sref="guidelines">OPNFV Powered&#8482; Guidelines</a></li>
+            <li ng-class="{ active: header.isActive('/pods')}"><a ui-sref="pods">Pods</a></li>
             <li ng-class="{ active: header.isActive('/community_results')}"><a ui-sref="communityResults">Community Results</a></li>
             <!--
             <li ng-class="{ active: header.isCatalogActive('public')}" class="dropdown" uib-dropdown>
@@ -33,6 +33,7 @@ TestAPI
           </ul>
           <ul class="nav navbar-nav navbar-right">
             <li ng-class="{ active: header.isActive('/user_results')}" ng-if="auth.isAuthenticated"><a ui-sref="userResults">My Results</a></li>
+            <!--
             <li ng-if="auth.isAuthenticated" ng-class="{ active: header.isCatalogActive('user')}" class="dropdown" uib-dropdown>
                 <a role="button" class="dropdown-toggle" uib-dropdown-toggle>
                     My Catalog <strong class="caret"></strong>
@@ -42,6 +43,7 @@ TestAPI
                     <li><a ui-sref="userProducts">My Products</a></li>
                 </ul>
             </li>
+            -->
             <li ng-class="{ active: header.isActive('/profile')}" ng-if="auth.isAuthenticated"><a ui-sref="profile">Profile</a></li>
             <li ng-if="auth.isAuthenticated"><a href="" ng-click="auth.doSignOut()">Sign Out</a></li>
             <li ng-if="!auth.isAuthenticated"><a href="" ng-click="auth.doSignIn()">Sign In / Sign Up</a></li>
index 4f1be7d..b14bc24 100755 (executable)
@@ -8,7 +8,6 @@ fi
 
 if [ "$base_url" != "" ]; then
     sudo crudini --set --existing $FILE api url $base_url/api/v1
-    sudo crudini --set --existing $FILE swagger base_url $base_url
     sudo crudini --set --existing $FILE ui url $base_url
     sudo echo "{\"testapiApiUrl\": \"$base_url/api/v1\"}" > \
         /usr/local/lib/python2.7/dist-packages/opnfv_testapi/static/testapi-ui/config.json
index 9ae2520..1ec899f 100644 (file)
@@ -12,15 +12,12 @@ url = http://localhost:8000/api/v1
 port = 8000
 
 # Number of results for one page (integer value)
-#results_per_page = 20
+results_per_page = 20
 
 # With debug_on set to true, error traces will be shown in HTTP responses
 debug = True
 authenticate = False
 
-[swagger]
-base_url = http://localhost:8000
-
 [ui]
 url = http://localhost:8000
 
index 4576d9b..da6a6cf 100644 (file)
@@ -33,6 +33,7 @@ def main(args):
     else:
         exit(1)
 
+
 if __name__ == '__main__':
     parser = argparse.ArgumentParser(description='Create \
                                       Swagger Spec documentation')
index 545d5e3..50ac049 100644 (file)
@@ -29,40 +29,18 @@ TODOs :
 
 """
 
-import argparse
-import sys
-
-import motor
 import tornado.ioloop
 
-from opnfv_testapi.common import config
+from opnfv_testapi.common.config import CONF
 from opnfv_testapi.router import url_mappings
 from opnfv_testapi.tornado_swagger import swagger
 
-CONF = None
-
-
-def parse_config(argv=[]):
-    global CONF
-    parser = argparse.ArgumentParser()
-    parser.add_argument("-c", "--config-file", dest='config_file',
-                        help="Config file location")
-    args = parser.parse_args(argv)
-    if args.config_file:
-        config.Config.CONFIG = args.config_file
-    CONF = config.Config()
-
-
-def get_db():
-    return motor.MotorClient(CONF.mongo_url)[CONF.mongo_dbname]
-
 
 def make_app():
-    swagger.docs(base_url=CONF.swagger_base_url,
+    swagger.docs(base_url=CONF.ui_url,
                  static_path=CONF.static_path)
     return swagger.Application(
         url_mappings.mappings,
-        db=get_db(),
         debug=CONF.api_debug,
         auth=CONF.api_authenticate,
         cookie_secret='opnfv-testapi',
@@ -70,7 +48,6 @@ def make_app():
 
 
 def main():
-    parse_config(sys.argv[1:])
     application = make_app()
     application.listen(CONF.api_port)
     tornado.ioloop.IOLoop.current().start()
index 67e8fbd..24ba876 100644 (file)
@@ -13,6 +13,7 @@ from tornado import web
 
 from opnfv_testapi.common import message
 from opnfv_testapi.common import raises
+from opnfv_testapi.db import api as dbapi
 
 
 def authenticate(method):
@@ -26,7 +27,7 @@ def authenticate(method):
             except KeyError:
                 raises.Unauthorized(message.unauthorized())
             query = {'access_token': token}
-            check = yield self._eval_db_find_one(query, 'tokens')
+            check = yield dbapi.db_find_one('tokens', query)
             if not check:
                 raises.Forbidden(message.invalid_token())
         ret = yield gen.coroutine(method)(self, *args, **kwargs)
@@ -38,7 +39,7 @@ def not_exist(xstep):
     @functools.wraps(xstep)
     def wrap(self, *args, **kwargs):
         query = kwargs.get('query')
-        data = yield self._eval_db_find_one(query)
+        data = yield dbapi.db_find_one(self.table, query)
         if not data:
             raises.NotFound(message.not_found(self.table, query))
         ret = yield gen.coroutine(xstep)(self, data, *args, **kwargs)
@@ -78,7 +79,7 @@ def carriers_exist(xstep):
         carriers = kwargs.pop('carriers', {})
         if carriers:
             for table, query in carriers:
-                exist = yield self._eval_db_find_one(query(), table)
+                exist = yield dbapi.db_find_one(table, query())
                 if not exist:
                     raises.Forbidden(message.not_found(table, query()))
         ret = yield gen.coroutine(xstep)(self, *args, **kwargs)
@@ -91,7 +92,7 @@ def new_not_exists(xstep):
     def wrap(self, *args, **kwargs):
         query = kwargs.get('query')
         if query:
-            to_data = yield self._eval_db_find_one(query())
+            to_data = yield dbapi.db_find_one(self.table, query())
             if to_data:
                 raises.Forbidden(message.exist(self.table, query()))
         ret = yield gen.coroutine(xstep)(self, *args, **kwargs)
@@ -105,7 +106,7 @@ def updated_one_not_exist(xstep):
         db_keys = kwargs.pop('db_keys', [])
         query = self._update_query(db_keys, data)
         if query:
-            to_data = yield self._eval_db_find_one(query)
+            to_data = yield dbapi.db_find_one(self.table, query)
             if to_data:
                 raises.Forbidden(message.exist(self.table, query))
         ret = yield gen.coroutine(xstep)(self, data, *args, **kwargs)
index f73c0ab..4cd53c6 100644 (file)
@@ -8,14 +8,16 @@
 # feng.xiaowei@zte.com.cn remove prepare_put_request            5-30-2016
 ##############################################################################
 import ConfigParser
+import argparse
 import os
+import sys
 
 
 class Config(object):
-    CONFIG = None
 
     def __init__(self):
-        self.file = self.CONFIG if self.CONFIG else self._default_config()
+        self.config_file = None
+        self._set_config_file()
         self._parse()
         self._parse_per_page()
         self.static_path = os.path.join(
@@ -24,11 +26,11 @@ class Config(object):
             'static')
 
     def _parse(self):
-        if not os.path.exists(self.file):
-            raise Exception("%s not found" % self.file)
+        if not os.path.exists(self.config_file):
+            raise Exception("%s not found" % self.config_file)
 
         config = ConfigParser.RawConfigParser()
-        config.read(self.file)
+        config.read(self.config_file)
         self._parse_section(config)
 
     def _parse_section(self, config):
@@ -53,8 +55,24 @@ class Config(object):
                 value = False
         return value
 
-    @staticmethod
-    def _default_config():
+    def _set_config_file(self):
+        if not self._set_sys_config_file():
+            self._set_default_config_file()
+
+    def _set_sys_config_file(self):
+        parser = argparse.ArgumentParser()
+        parser.add_argument("-c", "--config-file", dest='config_file',
+                            help="Config file location", metavar="FILE")
+        args, _ = parser.parse_known_args(sys.argv)
+        try:
+            self.config_file = args.config_file
+        finally:
+            return self.config_file is not None
+
+    def _set_default_config_file(self):
         is_venv = os.getenv('VIRTUAL_ENV')
-        return os.path.join('/' if not is_venv else is_venv,
-                            'etc/opnfv_testapi/config.ini')
+        self.config_file = os.path.join('/' if not is_venv else is_venv,
+                                        'etc/opnfv_testapi/config.ini')
+
+
+CONF = Config()
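Config now resolves its file via parse_known_args() at import time, so building the module-level CONF singleton tolerates argv entries addressed to other consumers. A small standalone illustration of that behaviour (the extra flag is made up):

    # parse_known_args() splits known flags from leftovers instead of
    # exiting on unrecognised input, which is what lets CONF be built
    # unconditionally at import time.
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('-c', '--config-file', dest='config_file')

    argv = ['-c', '/etc/opnfv_testapi/config.ini', '--unrelated-flag']
    args, leftover = parser.parse_known_args(argv)
    print(args.config_file)  # /etc/opnfv_testapi/config.ini
    print(leftover)          # ['--unrelated-flag']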
index 98536ff..951cbaf 100644 (file)
@@ -10,6 +10,10 @@ not_found_base = 'Could Not Found'
 exist_base = 'Already Exists'
 
 
+def key_error(key):
+    return "KeyError: '{}'".format(key)
+
+
 def no_body():
     return 'No Body'
 
diff --git a/utils/test/testapi/opnfv_testapi/db/__init__.py b/utils/test/testapi/opnfv_testapi/db/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/utils/test/testapi/opnfv_testapi/db/api.py b/utils/test/testapi/opnfv_testapi/db/api.py
new file mode 100644 (file)
index 0000000..c057480
--- /dev/null
@@ -0,0 +1,38 @@
+import motor
+
+from opnfv_testapi.common.config import CONF
+
+DB = motor.MotorClient(CONF.mongo_url)[CONF.mongo_dbname]
+
+
+def db_update(collection, query, update_req):
+    return _eval_db(collection, 'update', query, update_req, check_keys=False)
+
+
+def db_delete(collection, query):
+    return _eval_db(collection, 'remove', query)
+
+
+def db_aggregate(collection, pipelines):
+    return _eval_db(collection, 'aggregate', pipelines, allowDiskUse=True)
+
+
+def db_list(collection, query):
+    return _eval_db(collection, 'find', query)
+
+
+def db_save(collection, data):
+    return _eval_db(collection, 'insert', data, check_keys=False)
+
+
+def db_find_one(collection, query):
+    return _eval_db(collection, 'find_one', query)
+
+
+def _eval_db(collection, method, *args, **kwargs):
+    exec_collection = DB.__getattr__(collection)
+    return exec_collection.__getattribute__(method)(*args, **kwargs)
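Callers consume this module through Tornado's coroutine machinery rather than the old self.db handle. A hedged usage sketch (collection and query values are illustrative):

    # Sketch: how a handler coroutine is expected to call into db/api.py.
    from tornado import gen

    from opnfv_testapi.db import api as dbapi


    @gen.coroutine
    def find_pod(name):
        # db_find_one() returns a Motor future; yield resolves it to a
        # dict, or None when nothing matches.
        pod = yield dbapi.db_find_one('pods', {'name': name})
        raise gen.Return(pod)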
index c7fed8f..8a3a2db 100644 (file)
@@ -20,8 +20,8 @@
 # feng.xiaowei@zte.com.cn remove DashboardHandler            5-30-2016
 ##############################################################################
 
-from datetime import datetime
 import json
+from datetime import datetime
 
 from tornado import gen
 from tornado import web
@@ -29,6 +29,7 @@ from tornado import web
 from opnfv_testapi.common import check
 from opnfv_testapi.common import message
 from opnfv_testapi.common import raises
+from opnfv_testapi.db import api as dbapi
 from opnfv_testapi.resources import models
 from opnfv_testapi.tornado_swagger import swagger
 
@@ -38,7 +39,6 @@ DEFAULT_REPRESENTATION = "application/json"
 class GenericApiHandler(web.RequestHandler):
     def __init__(self, application, request, **kwargs):
         super(GenericApiHandler, self).__init__(application, request, **kwargs)
-        self.db = self.settings["db"]
         self.json_args = None
         self.table = None
         self.table_cls = None
@@ -90,8 +90,7 @@ class GenericApiHandler(web.RequestHandler):
 
         if self.table != 'results':
             data.creation_date = datetime.now()
-        _id = yield self._eval_db(self.table, 'insert', data.format(),
-                                  check_keys=False)
+        _id = yield dbapi.db_save(self.table, data.format())
         if 'name' in self.json_args:
             resource = data.name
         else:
@@ -107,17 +106,17 @@ class GenericApiHandler(web.RequestHandler):
         per_page = kwargs.get('per_page', 0)
         if query is None:
             query = {}
-        cursor = self._eval_db(self.table, 'find', query)
-        records_count = yield cursor.count()
-        total_pages = self._calc_total_pages(records_count,
-                                             last,
-                                             page,
-                                             per_page)
+
+        total_pages = 0
+        if page > 0:
+            cursor = dbapi.db_list(self.table, query)
+            records_count = yield cursor.count()
+            total_pages = self._calc_total_pages(records_count,
+                                                 last,
+                                                 page,
+                                                 per_page)
         pipelines = self._set_pipelines(query, sort, last, page, per_page)
-        cursor = self._eval_db(self.table,
-                               'aggregate',
-                               pipelines,
-                               allowDiskUse=True)
+        cursor = dbapi.db_aggregate(self.table, pipelines)
         data = list()
         while (yield cursor.fetch_next):
             data.append(self.format_data(cursor.next_object()))
@@ -125,7 +124,7 @@ class GenericApiHandler(web.RequestHandler):
             res = {self.table: data}
         else:
             res = res_op(data, *args)
-        if total_pages > 0:
+        if page > 0:
             res.update({
                 'pagination': {
                     'current_page': kwargs.get('page'),
@@ -140,12 +139,10 @@ class GenericApiHandler(web.RequestHandler):
         if (records_count > last) and (last > 0):
             records_nr = last
 
-        total_pages = 0
-        if page > 0:
-            total_pages, remainder = divmod(records_nr, per_page)
-            if remainder > 0:
-                total_pages += 1
-        if page > total_pages:
+        total_pages, remainder = divmod(records_nr, per_page)
+        if remainder > 0:
+            total_pages += 1
+        if page > 1 and page > total_pages:
             raises.BadRequest(
                 'Request page > total_pages [{}]'.format(total_pages))
         return total_pages
@@ -175,7 +172,7 @@ class GenericApiHandler(web.RequestHandler):
     @check.authenticate
     @check.not_exist
     def _delete(self, data, query=None):
-        yield self._eval_db(self.table, 'remove', query)
+        yield dbapi.db_delete(self.table, query)
         self.finish_request()
 
     @check.authenticate
@@ -185,8 +182,7 @@ class GenericApiHandler(web.RequestHandler):
     def _update(self, data, query=None, **kwargs):
         data = self.table_cls.from_dict(data)
         update_req = self._update_requests(data)
-        yield self._eval_db(self.table, 'update', query, update_req,
-                            check_keys=False)
+        yield dbapi.db_update(self.table, query, update_req)
         update_req['_id'] = str(data._id)
         self.finish_request(update_req)
 
@@ -229,23 +225,6 @@ class GenericApiHandler(web.RequestHandler):
             query[key] = new
         return query if not equal else dict()
 
-    def _eval_db(self, table, method, *args, **kwargs):
-        exec_collection = self.db.__getattr__(table)
-        return exec_collection.__getattribute__(method)(*args, **kwargs)
-
-    def _eval_db_find_one(self, query, table=None):
-        if table is None:
-            table = self.table
-        return self._eval_db(table, 'find_one', query)
-
-    def db_save(self, collection, data):
-        self._eval_db(collection, 'insert', data, check_keys=False)
-
-    def db_find_one(self, query, collection=None):
-        if not collection:
-            collection = self.table
-        return self._eval_db(collection, 'find_one', query)
-
 
 class VersionHandler(GenericApiHandler):
     @swagger.operation(nickname='listAllVersions')
index 1773216..2bf1792 100644 (file)
@@ -6,19 +6,20 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+import logging
 from datetime import datetime
 from datetime import timedelta
+import json
 
 from bson import objectid
 
-from opnfv_testapi.common import config
+from opnfv_testapi.common.config import CONF
 from opnfv_testapi.common import message
 from opnfv_testapi.common import raises
 from opnfv_testapi.resources import handlers
 from opnfv_testapi.resources import result_models
 from opnfv_testapi.tornado_swagger import swagger
-
-CONF = config.Config()
+from opnfv_testapi.ui.auth import constants as auth_const
 
 
 class GenericResultHandler(handlers.GenericApiHandler):
@@ -40,6 +41,7 @@ class GenericResultHandler(handlers.GenericApiHandler):
         query = dict()
         date_range = dict()
 
+        query['public'] = {'$not': {'$eq': 'false'}}
         for k in self.request.query_arguments.keys():
             v = self.get_query_argument(k)
             if k == 'project' or k == 'pod' or k == 'case':
@@ -56,10 +58,24 @@ class GenericResultHandler(handlers.GenericApiHandler):
                 date_range.update({'$gte': str(v)})
             elif k == 'to':
                 date_range.update({'$lt': str(v)})
-            elif k != 'last' and k != 'page':
+            elif k == 'signed':
+                openid = self.get_secure_cookie(auth_const.OPENID)
+                role = self.get_secure_cookie(auth_const.ROLE)
+                logging.info('role: %s', role)
+                if role:
+                    del query['public']
+                    if role != "reviewer":
+                        query['user'] = openid
+            elif k not in ['last', 'page', 'descend']:
                 query[k] = v
             if date_range:
                 query['start_date'] = date_range
+
+            # If $lt is not provided, results whose start_date is
+            # empty/None/null/'' would also match, so cap the range at now.
+            if 'start_date' in query and '$lt' not in query['start_date']:
+                query['start_date'].update({'$lt': str(datetime.now())})
+
         return query
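The combined effect of the public and signed handling is easiest to read off the queries set_query() ends up building. Roughly, with illustrative values:

    # Anonymous request: private results are filtered out.
    {'public': {'$not': {'$eq': 'false'}}, 'project': 'functest'}

    # ?signed with a plain user cookie: the public filter is dropped and
    # the query is pinned to that user's openid instead.
    {'user': 'some-openid', 'project': 'functest'}

    # ?signed with a reviewer cookie: neither public nor user constraints.
    {'project': 'functest'}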
 
 
@@ -84,9 +100,10 @@ class ResultsCLHandler(GenericResultHandler):
                  - criteria : the global criteria status passed or failed
                  - trust_indicator : evaluate the stability of the test case
                    to avoid running systematically long and stable test case
+                 - signed : get the logged-in user's results
 
                 GET /results/project=functest&case=vPing&version=Arno-R1 \
-                &pod=pod_name&period=15
+                &pod=pod_name&period=15&signed
             @return 200: all test results consist with query,
                          empty list if no result is found
             @rtype: L{TestResults}
@@ -146,18 +163,31 @@ class ResultsCLHandler(GenericResultHandler):
             @type trust_indicator: L{float}
             @in trust_indicator: query
             @required trust_indicator: False
+            @param signed: when provided, return the signed-in user's results
+            @type signed: L{string}
+            @in signed: query
+            @required signed: False
+            @param descend: true for newest first; false for oldest first
+            @type descend: L{string}
+            @in descend: query
+            @required descend: False
         """
-        limitations = {'sort': {'start_date': -1}}
-        last = self.get_query_argument('last', 0)
-        if last is not None:
-            last = self.get_int('last', last)
-            limitations.update({'last': last})
-
-        page = self.get_query_argument('page', None)
-        if page is not None:
-            page = self.get_int('page', page)
-            limitations.update({'page': page,
-                                'per_page': CONF.api_results_per_page})
+        def descend_limit():
+            descend = self.get_query_argument('descend', 'true')
+            return -1 if descend.lower() == 'true' else 1
+
+        def last_limit():
+            return self.get_int('last', self.get_query_argument('last', 0))
+
+        def page_limit():
+            return self.get_int('page', self.get_query_argument('page', 0))
+
+        limitations = {
+            'sort': {'_id': descend_limit()},
+            'last': last_limit(),
+            'page': page_limit(),
+            'per_page': CONF.api_results_per_page
+        }
 
         self._list(query=self.set_query(), **limitations)
 
@@ -173,6 +203,9 @@ class ResultsCLHandler(GenericResultHandler):
             @raise 404: pod/project/testcase not exist
             @raise 400: body/pod_name/project_name/case_name not provided
         """
+        self._post()
+
+    def _post(self):
         def pod_query():
             return {'name': self.json_args.get('pod_name')}
 
@@ -187,9 +220,39 @@ class ResultsCLHandler(GenericResultHandler):
         carriers = [('pods', pod_query),
                     ('projects', project_query),
                     ('testcases', testcase_query)]
+
         self._create(miss_fields=miss_fields, carriers=carriers)
 
 
+class ResultsUploadHandler(ResultsCLHandler):
+    @swagger.operation(nickname="uploadTestResult")
+    def post(self):
+        """
+            @description: upload and create a test result
+            @param body: result to be created
+            @type body: L{ResultCreateRequest}
+            @in body: body
+            @rtype: L{CreateResponse}
+            @return 200: result is created.
+            @raise 404: pod/project/testcase not exist
+            @raise 400: body/pod_name/project_name/case_name not provided
+        """
+        logging.info('file upload')
+        fileinfo = self.request.files['file'][0]
+        is_public = self.get_body_argument('public')
+        logging.info('public: %s', is_public)
+        logging.info('uploaded file name: %s', fileinfo['filename'])
+        logging.info('uploaded file body: %s', fileinfo['body'])
+        self.json_args = json.loads(fileinfo['body']).copy()
+        self.json_args['public'] = is_public
+
+        openid = self.get_secure_cookie(auth_const.OPENID)
+        if openid:
+            self.json_args['user'] = openid
+
+        super(ResultsUploadHandler, self)._post()
+
+
 class ResultsGURHandler(GenericResultHandler):
     @swagger.operation(nickname='getTestResultById')
     def get(self, result_id):
index 62a6dac..890bf82 100644 (file)
@@ -54,6 +54,8 @@ class ResultCreateRequest(models.ModelBase):
                  build_tag=None,
                  scenario=None,
                  criteria=None,
+                 user=None,
+                 public="true",
                  trust_indicator=None):
         self.pod_name = pod_name
         self.project_name = project_name
@@ -66,6 +68,8 @@ class ResultCreateRequest(models.ModelBase):
         self.build_tag = build_tag
         self.scenario = scenario
         self.criteria = criteria
+        self.user = user
+        self.public = public
         self.trust_indicator = trust_indicator if trust_indicator else TI(0)
 
 
@@ -89,7 +93,7 @@ class TestResult(models.ModelBase):
                  pod_name=None, installer=None, version=None,
                  start_date=None, stop_date=None, details=None,
                  build_tag=None, scenario=None, criteria=None,
-                 trust_indicator=None):
+                 user=None, public="true", trust_indicator=None):
         self._id = _id
         self.case_name = case_name
         self.project_name = project_name
@@ -102,6 +106,8 @@ class TestResult(models.ModelBase):
         self.build_tag = build_tag
         self.scenario = scenario
         self.criteria = criteria
+        self.user = user
+        self.public = public
         self.trust_indicator = trust_indicator
 
     @staticmethod
index a2312de..562fa5e 100644 (file)
@@ -8,7 +8,7 @@
 ##############################################################################
 import tornado.web
 
-from opnfv_testapi.common import config
+from opnfv_testapi.common.config import CONF
 from opnfv_testapi.resources import handlers
 from opnfv_testapi.resources import pod_handlers
 from opnfv_testapi.resources import project_handlers
@@ -48,6 +48,7 @@ mappings = [
     # Push results with mandatory request payload parameters
     # (project, case, and pod)
     (r"/api/v1/results", result_handlers.ResultsCLHandler),
+    (r'/api/v1/results/upload', result_handlers.ResultsUploadHandler),
     (r"/api/v1/results/([^/]+)", result_handlers.ResultsGURHandler),
 
     # scenarios
@@ -57,11 +58,12 @@ mappings = [
     # static path
     (r'/(.*\.(css|png|gif|js|html|json|map|woff2|woff|ttf))',
      tornado.web.StaticFileHandler,
-     {'path': config.Config().static_path}),
+     {'path': CONF.static_path}),
 
     (r'/', root.RootHandler),
     (r'/api/v1/auth/signin', sign.SigninHandler),
     (r'/api/v1/auth/signin_return', sign.SigninReturnHandler),
     (r'/api/v1/auth/signout', sign.SignoutHandler),
     (r'/api/v1/profile', user.ProfileHandler),
+
 ]
index fda2a09..be7f2b9 100644 (file)
@@ -12,5 +12,5 @@ port = 8000
 debug = True
 authenticate = False
 
-[swagger]
-base_url = http://localhost:8000
+[ui]
+url = http://localhost:8000
index 77cc6c6..c81c6c5 100644 (file)
@@ -13,5 +13,5 @@ port = 8000
 debug = True
 authenticate = False
 
-[swagger]
-base_url = http://localhost:8000
+[ui]
+url = http://localhost:8000
index 9988fc0..a9ed49c 100644 (file)
@@ -7,5 +7,5 @@ port = 8000
 debug = True
 authenticate = False
 
-[swagger]
-base_url = http://localhost:8000
+[ui]
+url = http://localhost:8000
index b3f3276..3a11f9d 100644 (file)
@@ -13,5 +13,5 @@ port = 8000
 debug = True
 authenticate = notboolean
 
-[swagger]
-base_url = http://localhost:8000
+[ui]
+url = http://localhost:8000
index d1b752a..8180719 100644 (file)
@@ -13,5 +13,5 @@ port = notint
 debug = True
 authenticate = False
 
-[swagger]
-base_url = http://localhost:8000
+[ui]
+url = http://localhost:8000
index 446b944..8cfc513 100644 (file)
@@ -1,16 +1,15 @@
-import os
+import argparse
 
-from opnfv_testapi.common import config
 
-
-def test_config_success():
-    config_file = os.path.join(os.path.dirname(__file__),
-                               '../../../../etc/config.ini')
-    config.Config.CONFIG = config_file
-    conf = config.Config()
-    assert conf.mongo_url == 'mongodb://127.0.0.1:27017/'
-    assert conf.mongo_dbname == 'test_results_collection'
-    assert conf.api_port == 8000
-    assert conf.api_debug is True
-    assert conf.api_authenticate is False
-    assert conf.swagger_base_url == 'http://localhost:8000'
+def test_config_normal(mocker, config_normal):
+    mocker.patch(
+        'argparse.ArgumentParser.parse_known_args',
+        return_value=(argparse.Namespace(config_file=config_normal), None))
+    from opnfv_testapi.common import config
+    CONF = config.Config()
+    assert CONF.mongo_url == 'mongodb://127.0.0.1:27017/'
+    assert CONF.mongo_dbname == 'test_results_collection'
+    assert CONF.api_port == 8000
+    assert CONF.api_debug is True
+    assert CONF.api_authenticate is False
+    assert CONF.ui_url == 'http://localhost:8000'
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/conftest.py b/utils/test/testapi/opnfv_testapi/tests/unit/conftest.py
new file mode 100644 (file)
index 0000000..feff1da
--- /dev/null
@@ -0,0 +1,8 @@
+from os import path
+
+import pytest
+
+
+@pytest.fixture
+def config_normal():
+    return path.join(path.dirname(__file__), 'common/normal.ini')
index b30c325..b8f696c 100644 (file)
@@ -10,6 +10,20 @@ import functools
 import httplib
 
 
+def upload(excepted_status, excepted_response):
+    def _upload(create_request):
+        @functools.wraps(create_request)
+        def wrap(self):
+            request = create_request(self)
+            status, body = self.upload(request)
+            if excepted_status == httplib.OK:
+                getattr(self, excepted_response)(body)
+            else:
+                self.assertIn(excepted_response, body)
+        return wrap
+    return _upload
+
+
 def create(excepted_status, excepted_response):
     def _create(create_request):
         @functools.wraps(create_request)
index adaf6f7..0ca83df 100644 (file)
@@ -6,9 +6,10 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from operator import itemgetter
+
 from bson.objectid import ObjectId
 from concurrent.futures import ThreadPoolExecutor
-from operator import itemgetter
 
 
 def thread_execute(method, *args, **kwargs):
@@ -119,10 +120,14 @@ class MemDb(object):
 
     @staticmethod
     def _compare_date(spec, value):
+        gte = True
+        lt = False
         for k, v in spec.iteritems():
-            if k == '$gte' and value >= v:
-                return True
-        return False
+            if k == '$gte' and value < v:
+                gte = False
+            elif k == '$lt' and value < v:
+                lt = True
+        return gte and lt
 
     def _in(self, content, *args):
         if self.name == 'scenarios':
@@ -185,9 +190,8 @@ class MemDb(object):
                 elif k == 'trust_indicator.current':
                     if content.get('trust_indicator').get('current') != v:
                         return False
-                elif content.get(k, None) != v:
+                elif not isinstance(v, dict) and content.get(k, None) != v:
                     return False
-
         return True
 
     def _find(self, *args):
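The rewritten _compare_date() only matches when a $lt bound is both present and satisfied, mirroring the handler change that always caps start_date at the current time. A standalone mirror of the logic (not an import) with a few probes:

    # Standalone mirror of MemDb._compare_date, to show the new
    # semantics: $gte must hold, and $lt must be present and hold.
    def compare_date(spec, value):
        gte = True
        lt = False
        for k, v in spec.items():
            if k == '$gte' and value < v:
                gte = False
            elif k == '$lt' and value < v:
                lt = True
        return gte and lt

    spec = {'$gte': '2016-05-01', '$lt': '2016-06-01'}
    print(compare_date(spec, '2016-05-15'))  # True: inside the window
    print(compare_date(spec, '2016-04-30'))  # False: before $gte
    print(compare_date(spec, '2016-06-02'))  # False: not below $lt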
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/resources/__init__.py b/utils/test/testapi/opnfv_testapi/tests/unit/resources/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
@@ -12,13 +12,9 @@ from os import path
 import mock
 from tornado import testing
 
-from opnfv_testapi.common import config
 from opnfv_testapi.resources import models
 from opnfv_testapi.tests.unit import fake_pymongo
 
-config.Config.CONFIG = path.join(path.dirname(__file__),
-                                 '../../../etc/config.ini')
-
 
 class TestBase(testing.AsyncHTTPTestCase):
     headers = {'Content-Type': 'application/json; charset=UTF-8'}
@@ -37,20 +33,21 @@ class TestBase(testing.AsyncHTTPTestCase):
 
     def tearDown(self):
         self.db_patcher.stop()
+        self.config_patcher.stop()
 
     def _patch_server(self):
-        from opnfv_testapi.cmd import server
-        server.parse_config([
-            '--config-file',
-            path.join(path.dirname(__file__), 'common/normal.ini')
-        ])
-        self.db_patcher = mock.patch('opnfv_testapi.cmd.server.get_db',
-                                     self._fake_pymongo)
+        import argparse
+        config = path.join(path.dirname(__file__), '../common/normal.ini')
+        self.config_patcher = mock.patch(
+            'argparse.ArgumentParser.parse_known_args',
+            return_value=(argparse.Namespace(config_file=config), None))
+        self.db_patcher = mock.patch('opnfv_testapi.db.api.DB',
+                                     fake_pymongo)
+        self.config_patcher.start()
         self.db_patcher.start()
 
-    @staticmethod
-    def _fake_pymongo():
-        return fake_pymongo
+    def set_config_file(self):
+        self.config_file = 'normal.ini'
 
     def get_app(self):
         from opnfv_testapi.cmd import server
@@ -12,7 +12,7 @@ import unittest
 from opnfv_testapi.common import message
 from opnfv_testapi.resources import pod_models
 from opnfv_testapi.tests.unit import executor
-from opnfv_testapi.tests.unit import test_base as base
+from opnfv_testapi.tests.unit.resources import test_base as base
 
 
 class TestPodBase(base.TestBase):
@@ -85,5 +85,6 @@ class TestPodGet(TestPodBase):
             else:
                 self.assert_get_body(pod, self.req_e)
 
+
 if __name__ == '__main__':
     unittest.main()
@@ -4,7 +4,7 @@ import unittest
 from opnfv_testapi.common import message
 from opnfv_testapi.resources import project_models
 from opnfv_testapi.tests.unit import executor
-from opnfv_testapi.tests.unit import test_base as base
+from opnfv_testapi.tests.unit.resources import test_base as base
 
 
 class TestProjectBase(base.TestBase):
@@ -132,5 +132,6 @@ class TestProjectDelete(TestProjectBase):
         code, body = self.get(self.req_d.name)
         self.assertEqual(code, httplib.NOT_FOUND)
 
+
 if __name__ == '__main__':
     unittest.main()
@@ -7,17 +7,18 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 import copy
-from datetime import datetime, timedelta
 import httplib
 import unittest
+from datetime import datetime, timedelta
+import json
 
 from opnfv_testapi.common import message
 from opnfv_testapi.resources import pod_models
 from opnfv_testapi.resources import project_models
 from opnfv_testapi.resources import result_models
 from opnfv_testapi.resources import testcase_models
-from opnfv_testapi.tests.unit import test_base as base
 from opnfv_testapi.tests.unit import executor
+from opnfv_testapi.tests.unit.resources import test_base as base
 
 
 class Details(object):
@@ -60,9 +61,9 @@ class TestResultBase(base.TestBase):
         self.scenario = 'odl-l2'
         self.criteria = 'passed'
         self.trust_indicator = result_models.TI(0.7)
-        self.start_date = "2016-05-23 07:16:09.477097"
-        self.stop_date = "2016-05-23 07:16:19.477097"
-        self.update_date = "2016-05-24 07:16:19.477097"
+        self.start_date = str(datetime.now())
+        self.stop_date = str(datetime.now() + timedelta(minutes=1))
+        self.update_date = str(datetime.now() + timedelta(days=1))
         self.update_step = -0.05
         super(TestResultBase, self).setUp()
         self.details = Details(timestart='0', duration='9s', status='OK')
@@ -131,6 +132,22 @@ class TestResultBase(base.TestBase):
         _, res = self.create_d()
         return res.href.split('/')[-1]
 
+    def upload(self, req):
+        if req and not isinstance(req, str) and hasattr(req, 'format'):
+            req = req.format()
+        res = self.fetch(self.basePath + '/upload',
+                         method='POST',
+                         body=json.dumps(req),
+                         headers=self.headers)
+
+        return self._get_return(res, self.create_res)
+
+
+class TestResultUpload(TestResultBase):
+    @executor.upload(httplib.BAD_REQUEST, message.key_error('file'))
+    def test_file_not_found(self):
+        return None
+
 
 class TestResultCreate(TestResultBase):
     @executor.create(httplib.BAD_REQUEST, message.no_body())
@@ -208,9 +225,9 @@ class TestResultCreate(TestResultBase):
 class TestResultGet(TestResultBase):
     def setUp(self):
         super(TestResultGet, self).setUp()
+        self.req_10d_before = self._create_changed_date(days=-10)
         self.req_d_id = self._create_d()
         self.req_10d_later = self._create_changed_date(days=10)
-        self.req_10d_before = self._create_changed_date(days=-10)
 
     @executor.get(httplib.OK, 'assert_res')
     def test_getOne(self):
@@ -256,9 +273,9 @@ class TestResultGet(TestResultBase):
     def test_queryPeriodNotInt(self):
         return self._set_query('period=a')
 
-    @executor.query(httplib.OK, '_query_last_one', 1)
+    @executor.query(httplib.OK, '_query_period_one', 1)
     def test_queryPeriodSuccess(self):
-        return self._set_query('period=1')
+        return self._set_query('period=5')
 
     @executor.query(httplib.BAD_REQUEST, message.must_int('last'))
     def test_queryLastNotInt(self):
@@ -268,7 +285,17 @@ class TestResultGet(TestResultBase):
     def test_queryLast(self):
         return self._set_query('last=1')
 
-    @executor.query(httplib.OK, '_query_last_one', 1)
+    @executor.query(httplib.OK, '_query_success', 4)
+    def test_queryPublic(self):
+        self._create_public_data()
+        return self._set_query('')
+
+    @executor.query(httplib.OK, '_query_success', 1)
+    def test_queryPrivate(self):
+        self._create_private_data()
+        return self._set_query('public=false')
+
+    @executor.query(httplib.OK, '_query_period_one', 1)
     def test_combination(self):
         return self._set_query('pod',
                                'project',
@@ -279,7 +306,7 @@ class TestResultGet(TestResultBase):
                                'scenario',
                                'trust_indicator',
                                'criteria',
-                               'period=1')
+                               'period=5')
 
     @executor.query(httplib.OK, '_query_success', 0)
     def test_notFound(self):
@@ -294,6 +321,14 @@ class TestResultGet(TestResultBase):
                                'criteria',
                                'period=1')
 
+    @executor.query(httplib.OK, '_query_success', 1)
+    def test_filterErrorStartdate(self):
+        self._create_error_start_date(None)
+        self._create_error_start_date('None')
+        self._create_error_start_date('null')
+        self._create_error_start_date('')
+        return self._set_query('period=5')
+
     def _query_success(self, body, number):
         self.assertEqual(number, len(body.results))
 
@@ -301,6 +336,16 @@ class TestResultGet(TestResultBase):
         self.assertEqual(number, len(body.results))
         self.assert_res(body.results[0], self.req_10d_later)
 
+    def _query_period_one(self, body, number):
+        self.assertEqual(number, len(body.results))
+        self.assert_res(body.results[0], self.req_d)
+
+    def _create_error_start_date(self, start_date):
+        req = copy.deepcopy(self.req_d)
+        req.start_date = start_date
+        self.create(req)
+        return req
+
     def _create_changed_date(self, **kwargs):
         req = copy.deepcopy(self.req_d)
         req.start_date = datetime.now() + timedelta(**kwargs)
@@ -309,16 +354,29 @@ class TestResultGet(TestResultBase):
         self.create(req)
         return req
 
+    def _create_public_data(self, **kwargs):
+        req = copy.deepcopy(self.req_d)
+        req.public = 'true'
+        self.create(req)
+        return req
+
+    def _create_private_data(self, **kwargs):
+        req = copy.deepcopy(self.req_d)
+        req.public = 'false'
+        self.create(req)
+        return req
+
     def _set_query(self, *args):
         def get_value(arg):
             return self.__getattribute__(arg) \
                 if arg != 'trust_indicator' else self.trust_indicator.current
         uri = ''
         for arg in args:
-            if '=' in arg:
-                uri += arg + '&'
-            else:
-                uri += '{}={}&'.format(arg, get_value(arg))
+            if arg:
+                if '=' in arg:
+                    uri += arg + '&'
+                else:
+                    uri += '{}={}&'.format(arg, get_value(arg))
         return uri[0: -1]
 
 
@@ -1,13 +1,13 @@
-from copy import deepcopy
-from datetime import datetime
 import functools
 import httplib
 import json
 import os
+from copy import deepcopy
+from datetime import datetime
 
-from opnfv_testapi.common import message
 import opnfv_testapi.resources.scenario_models as models
-from opnfv_testapi.tests.unit import test_base as base
+from opnfv_testapi.common import message
+from opnfv_testapi.tests.unit.resources import test_base as base
 
 
 class TestScenarioBase(base.TestBase):
@@ -13,8 +13,8 @@ import unittest
 from opnfv_testapi.common import message
 from opnfv_testapi.resources import project_models
 from opnfv_testapi.resources import testcase_models
-from opnfv_testapi.tests.unit import test_base as base
 from opnfv_testapi.tests.unit import executor
+from opnfv_testapi.tests.unit.resources import test_base as base
 
 
 class TestCaseBase(base.TestBase):
@@ -10,14 +10,14 @@ from tornado import web
 
 from opnfv_testapi.common import message
 from opnfv_testapi.resources import project_models
-from opnfv_testapi.router import url_mappings
 from opnfv_testapi.tests.unit import executor
 from opnfv_testapi.tests.unit import fake_pymongo
-from opnfv_testapi.tests.unit import test_base as base
+from opnfv_testapi.tests.unit.resources import test_base as base
 
 
 class TestToken(base.TestBase):
     def get_app(self):
+        from opnfv_testapi.router import url_mappings
         return web.Application(
             url_mappings.mappings,
             db=fake_pymongo,
@@ -109,5 +109,6 @@ class TestTokenUpdateProject(TestToken):
     def _update_success(self, request, body):
         self.assertIn(request.name, body)
 
+
 if __name__ == '__main__':
     unittest.main()
@@ -11,7 +11,7 @@ import unittest
 
 from opnfv_testapi.resources import models
 from opnfv_testapi.tests.unit import executor
-from opnfv_testapi.tests.unit import test_base as base
+from opnfv_testapi.tests.unit.resources import test_base as base
 
 
 class TestVersionBase(base.TestBase):
index 43f69d7..44ccb46 100644 (file)
@@ -1,4 +1,6 @@
 OPENID = 'openid'
+ROLE = 'role'
+DEFAULT_ROLE = 'user'
 
 # OpenID parameters
 OPENID_MODE = 'openid.mode'
index 6a9d94e..4623952 100644 (file)
@@ -1,11 +1,12 @@
 from six.moves.urllib import parse
+from tornado import gen
+from tornado import web
 
-from opnfv_testapi.common import config
+from opnfv_testapi.common.config import CONF
+from opnfv_testapi.db import api as dbapi
 from opnfv_testapi.ui.auth import base
 from opnfv_testapi.ui.auth import constants as const
 
-CONF = config.Config()
-
 
 class SigninHandler(base.BaseHandler):
     def get(self):
@@ -31,20 +32,30 @@ class SigninHandler(base.BaseHandler):
 
 
 class SigninReturnHandler(base.BaseHandler):
+    @web.asynchronous
+    @gen.coroutine
     def get(self):
         if self.get_query_argument(const.OPENID_MODE) == 'cancel':
             self._auth_failure('Authentication canceled.')
 
         openid = self.get_query_argument(const.OPENID_CLAIMED_ID)
-        user_info = {
+        role = const.DEFAULT_ROLE
+        new_user_info = {
             'openid': openid,
             'email': self.get_query_argument(const.OPENID_NS_SREG_EMAIL),
-            'fullname': self.get_query_argument(const.OPENID_NS_SREG_FULLNAME)
+            'fullname': self.get_query_argument(const.OPENID_NS_SREG_FULLNAME),
+            const.ROLE: role
         }
+        user = yield dbapi.db_find_one(self.table, {'openid': openid})
+        if not user:
+            dbapi.db_save(self.table, new_user_info)
+        else:
+            role = user.get(const.ROLE)
 
-        self.db_save(self.table, user_info)
-        if not self.get_secure_cookie('openid'):
-            self.set_secure_cookie('openid', openid)
+        self.clear_cookie(const.OPENID)
+        self.clear_cookie(const.ROLE)
+        self.set_secure_cookie(const.OPENID, openid)
+        self.set_secure_cookie(const.ROLE, role)
         self.redirect(url=CONF.ui_url)
 
     def _auth_failure(self, message):
@@ -57,9 +68,8 @@ class SigninReturnHandler(base.BaseHandler):
 class SignoutHandler(base.BaseHandler):
     def get(self):
         """Handle signout request."""
-        openid = self.get_secure_cookie(const.OPENID)
-        if openid:
-            self.clear_cookie(const.OPENID)
+        self.clear_cookie(const.OPENID)
+        self.clear_cookie(const.ROLE)
         params = {'openid_logout': CONF.osid_openid_logout_endpoint}
         url = parse.urljoin(CONF.ui_url,
                             '/#/logout?' + parse.urlencode(params))
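
The rewritten SigninReturnHandler is now a coroutine that does find-or-create: a first-time visitor is persisted with the default role, while a returning user keeps whatever role is already stored, so a role granted by an operator survives re-login. Both cookies are cleared and re-set on every sign-in instead of only when absent, which also refreshes stale role cookies left by older sessions.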
index 140bca5..955cdee 100644 (file)
@@ -2,6 +2,7 @@ from tornado import gen
 from tornado import web
 
 from opnfv_testapi.common import raises
+from opnfv_testapi.db import api as dbapi
 from opnfv_testapi.ui.auth import base
 
 
@@ -12,12 +13,12 @@ class ProfileHandler(base.BaseHandler):
         openid = self.get_secure_cookie('openid')
         if openid:
             try:
-                user = yield self.db_find_one({'openid': openid})
+                user = yield dbapi.db_find_one(self.table, {'openid': openid})
                 self.finish_request({
                     "openid": user.get('openid'),
                     "email": user.get('email'),
                     "fullname": user.get('fullname'),
-                    "is_admin": False
+                    "role": user.get('role', 'user')
                 })
             except Exception:
                 pass
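
Both handlers now go through a shared opnfv_testapi.db.api module, which is not shown in this section. A plausible minimal shape, assumed purely for illustration (the real module presumably resolves the Motor client and database name from configuration rather than hard-coding them):

    # assumed sketch of opnfv_testapi/db/api.py; not part of this diff
    import motor

    # connection URL and database name are illustrative assumptions
    db = motor.MotorClient('mongodb://127.0.0.1:27017')['test_results_collection']


    def db_save(collection, document):
        # Motor returns a Future; yield it inside a tornado coroutine
        return db[collection].insert(document)


    def db_find_one(collection, query):
        return db[collection].find_one(query)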
index bba7a86..5b2c922 100644 (file)
@@ -1,10 +1,10 @@
 from opnfv_testapi.resources.handlers import GenericApiHandler
-from opnfv_testapi.common import config
+from opnfv_testapi.common.config import CONF
 
 
 class RootHandler(GenericApiHandler):
     def get_template_path(self):
-        return config.Config().static_path
+        return CONF.static_path
 
     def get(self):
         self.render('testapi-ui/index.html')
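
This hunk, like the one in sign.py above, replaces per-call config.Config() construction with a shared CONF object, so the configuration file is parsed once at import time and tests can patch a single well-known object. A sketch of the assumed shape (the real opnfv_testapi.common.config is not part of this section):

    # assumed sketch: parse the configuration once, share it everywhere
    class Config(object):
        def __init__(self):
            # the real class reads an ini file; values here are placeholders
            self.static_path = '/usr/local/share/opnfv_testapi/static'
            self.ui_url = 'http://localhost:8000'


    CONF = Config()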
index 955ffc8..4b6f75c 100644 (file)
@@ -2,9 +2,9 @@
 # of appearance. Changing the order has an impact on the overall integration
 # process, which may cause wedges in the gate later.
 
-pbr>=1.6
-setuptools>=16.0
-tornado>=3.1,<=4.3
+pbr>=2.0.0,!=2.1.0  # Apache-2.0
+setuptools>=16.0,!=24.0.0,!=34.0.0,!=34.0.1,!=34.0.2,!=34.0.3,!=34.1.0,!=34.1.1,!=34.2.0,!=34.3.0,!=34.3.1,!=34.3.2  # PSF/ZPL
+tornado>=3.1,<=4.3  # Apache-2.0
 epydoc>=0.3.1
-six>=1.9.0
-motor
+six>=1.9.0  # MIT
+motor  # Apache-2.0
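
The requirements now follow the OpenStack global-requirements style: a license comment per line and explicit exclusion of known-broken releases. As an illustration (not part of the patch), the new specifiers can be sanity-checked with the packaging library:

    from packaging.specifiers import SpecifierSet
    from packaging.version import Version

    pbr_spec = SpecifierSet('>=2.0.0,!=2.1.0')
    print(Version('2.0.2') in pbr_spec)  # True
    print(Version('2.1.0') in pbr_spec)  # False, explicitly excluded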
diff --git a/utils/test/testapi/run_test.sh b/utils/test/testapi/run_test.sh
deleted file mode 100755 (executable)
index 1e05dd6..0000000
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/bin/bash
-
-set -o errexit
-
-# Get script directory
-SCRIPTDIR=`dirname $0`
-
-echo "Running unit tests..."
-
-# Creating virtual environment
-if [ ! -z $VIRTUAL_ENV ]; then
-    venv=$VIRTUAL_ENV
-else
-    venv=$SCRIPTDIR/.venv
-    virtualenv $venv
-fi
-source $venv/bin/activate
-
-# Install requirements
-pip install -r $SCRIPTDIR/requirements.txt
-pip install -r $SCRIPTDIR/test-requirements.txt
-
-find . -type f -name "*.pyc" -delete
-
-nosetests --with-xunit \
-    --with-coverage \
-    --cover-erase \
-    --cover-package=$SCRIPTDIR/opnfv_testapi/cmd \
-    --cover-package=$SCRIPTDIR/opnfv_testapi/common \
-    --cover-package=$SCRIPTDIR/opnfv_testapi/resources \
-    --cover-package=$SCRIPTDIR/opnfv_testapi/router \
-    --cover-xml \
-    --cover-html \
-    $SCRIPTDIR/opnfv_testapi/tests
-
-exit_code=$?
-
-deactivate
-
-exit $exit_code
index 15dda96..f689cb3 100644 (file)
@@ -3,7 +3,11 @@ import setuptools
 
 __author__ = 'serena'
 
+try:
+    import multiprocessing  # noqa
+except ImportError:
+    pass
 
 setuptools.setup(
-    setup_requires=['pbr>=1.8'],
+    setup_requires=['pbr==2.0.0'],
     pbr=True)
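
The try/except import of multiprocessing is the long-standing workaround for Python issue 15881: without it, python setup.py test can crash at interpreter exit when tests are run through nose. The # noqa marker keeps flake8 from flagging the unused import. Pinning setup_requires to pbr==2.0.0 keeps the build reproducible and matches the floor declared in requirements.txt above.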
index 645687b..233f465 100644 (file)
@@ -2,7 +2,9 @@
 # of appearance. Changing the order has an impact on the overall integration
 # process, which may cause wedges in the gate later.
 
-mock
-pytest
-coverage
-nose>=1.3.1
+coverage>=4.0,!=4.4  # Apache-2.0
+mock>=2.0  # BSD
+nose  # LGPL
+pytest  # MIT
+pytest-cov  # MIT
+pytest-mock  # MIT
index 81c9dfa..d300f1a 100644 (file)
@@ -4,7 +4,7 @@
 # and then run "tox" from this directory.
 
 [tox]
-envlist = py27,pep8
+envlist = pep8,py27
 skipsdist = True
 sitepackages = True
 
@@ -16,9 +16,11 @@ deps =
   -rtest-requirements.txt
 commands=
   py.test \
-    --basetemp={envtmpdir} \
-    --cov \
-    {posargs}
+  --basetemp={envtmpdir} \
+  --cov \
+  --cov-report term-missing \
+  --cov-report xml \
+  {posargs}
 setenv=
   HOME = {envtmpdir}
   PYTHONPATH = {toxinidir}
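
Two effects of this hunk: listing pep8 before py27 makes tox fail fast on style errors before spending time on the test run, and the added --cov-report options make pytest-cov both print uncovered line numbers in the console (term-missing) and write a coverage.xml that CI jobs can publish. The re-indentation of the continuation lines from four spaces to two is purely cosmetic; tox accepts either.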
index 7e0dd55..9c24377 100644 (file)
@@ -40,5 +40,6 @@ def backup(args):
     cmd = ['mongodump', '-o', '%s' % out]
     execute(cmd, args)
 
+
 if __name__ == '__main__':
     main(backup, parser)
index ba4334a..f759592 100644 (file)
@@ -85,5 +85,6 @@ def update(args):
     rename_fields(fields_old2New)
     rename_collections(collections_old2New)
 
+
 if __name__ == '__main__':
     main(update, parser)
diff --git a/utils/upload-artifact.sh b/utils/upload-artifact.sh
new file mode 100644 (file)
index 0000000..b66cdb7
--- /dev/null
@@ -0,0 +1,48 @@
+#!/bin/bash
+# SPDX-license-identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2016 Orange and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+set -e
+set -o pipefail
+
+export PATH=$PATH:/usr/local/bin/
+
+# Two parameters:
+# - $1: the source directory containing the files to be uploaded
+# - $2: the target directory under http://artifact.opnfv.org/<project>/
+#   if not set, the default value is <project>/docs
+project=$PROJECT
+if [ -z "$2" ]
+  then
+      artifact_dir="$project/docs"
+  else
+      artifact_dir="$project/$2"
+fi
+DIRECTORY="$1"
+
+
+# check that the API doc directory does exist before pushing it to artifact
+if [ ! -d "$DIRECTORY" ]; then
+    echo "Directory to be uploaded "$DIRECTORY" does not exist"
+    exit 1
+fi
+set +e
+gsutil&>/dev/null
+if [ $? != 0 ]; then
+    echo "Not possible to push results to artifact: gsutil not installed"
+    exit 1
+else
+    gsutil ls gs://artifacts.opnfv.org/"$project"/ &>/dev/null
+    if [ $? != 0 ]; then
+        echo "Not possible to push results to artifact: gsutil not installed."
+        exit 1
+    else
+        echo "Uploading file(s) to artifact $artifact_dir"
+        gsutil -m cp -r "$DIRECTORY"/* gs://artifacts.opnfv.org/"$artifact_dir"/ >/dev/null 2>&1
+    fi
+fi
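
Usage note: the script takes the project name from the PROJECT environment variable that Jenkins jobs export, so an invocation such as PROJECT=releng bash upload-artifact.sh api/_build docs (example values, not from the patch) uploads the directory contents to gs://artifacts.opnfv.org/releng/docs/. Since the final gsutil -m cp discards its output, a failed copy is only visible through the script's exit code.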