Merge "vswitchperf: Use POD12 for VERIFY and MERGE jobs"
author Fatih Degirmenci <fdegir@gmail.com>
Mon, 12 Feb 2018 07:56:01 +0000 (07:56 +0000)
committer Gerrit Code Review <gerrit@opnfv.org>
Mon, 12 Feb 2018 07:56:01 +0000 (07:56 +0000)
107 files changed:
.yamllint
INFO
INFO.yaml [new file with mode: 0644]
jjb/apex/apex-project-jobs.yml
jjb/apex/apex-verify-jobs.yml
jjb/apex/apex.yml
jjb/apex/apex.yml.j2
jjb/apex/scenarios.yaml.hidden
jjb/armband/armband-ci-jobs.yml
jjb/auto/auto.yml
jjb/availability/availability.yml
jjb/barometer/barometer-build.sh
jjb/barometer/barometer-upload-artifact.sh
jjb/ci_gate_security/anteater-clone-all-repos.sh [deleted file]
jjb/ci_gate_security/anteater-security-audit-weekly.sh
jjb/ci_gate_security/opnfv-ci-gate-security.yml
jjb/compass4nfv/compass-ci-jobs.yml
jjb/compass4nfv/compass-deploy.sh
jjb/compass4nfv/compass-logs.sh [new file with mode: 0644]
jjb/compass4nfv/compass-verify-jobs.yml
jjb/conductor/conductor.yml
jjb/container4nfv/arm64/compass-build.sh [new file with mode: 0755]
jjb/container4nfv/arm64/compass-deploy.sh [new file with mode: 0755]
jjb/container4nfv/arm64/yardstick-arm64.sh [new file with mode: 0755]
jjb/container4nfv/container4nfv-arm64.yml [new file with mode: 0644]
jjb/container4nfv/container4nfv-project.yml
jjb/container4nfv/yardstick-arm64.yml [new file with mode: 0644]
jjb/copper/copper.yml
jjb/daisy4nfv/daisy-daily-jobs.yml
jjb/daisy4nfv/daisy-deploy.sh
jjb/daisy4nfv/daisy4nfv-build-kolla-image.sh
jjb/daisy4nfv/daisy4nfv-download-artifact.sh
jjb/doctor/doctor.yml
jjb/dovetail/dovetail-ci-jobs.yml
jjb/dovetail/dovetail-project-jobs.yml
jjb/dovetail/dovetail-run.sh
jjb/dpacc/dpacc.yml
jjb/fuel/fuel-daily-jobs.yml
jjb/fuel/fuel-deploy.sh
jjb/functest/functest-alpine.sh
jjb/functest/functest-cleanup.sh
jjb/functest/functest-daily-jobs.yml
jjb/functest/functest-env-presetup.sh
jjb/functest/functest-k8.sh [new file with mode: 0755]
jjb/global/basic-jobs.yml [new file with mode: 0644]
jjb/global/installer-params.yml
jjb/global/installer-report.sh [new file with mode: 0755]
jjb/global/releng-macros.yml
jjb/global/slave-params.yml
jjb/ipv6/ipv6.yml
jjb/joid/joid-daily-jobs.yml
jjb/kvmfornfv/kvmfornfv.yml
jjb/models/models.yml
jjb/netready/netready.yml
jjb/octopus/octopus.yml
jjb/opera/opera-daily-jobs.yml
jjb/opera/opera-verify-jobs.yml
jjb/opnfvdocs/opnfvdocs.yml
jjb/pharos/check-jinja2.yml [new file with mode: 0644]
jjb/pharos/pharos.yml
jjb/prediction/prediction.yml
jjb/promise/promise.yml
jjb/qtip/qtip-experimental-jobs.yml
jjb/qtip/qtip-validate-jobs.yml
jjb/qtip/qtip-verify-jobs.yml
jjb/releng/automate.yml
jjb/releng/compass4nfv-docker.yml [new file with mode: 0644]
jjb/releng/functest-docker.yml
jjb/releng/opnfv-docker-arm.yml
jjb/releng/opnfv-docker.sh
jjb/releng/opnfv-docker.yml
jjb/releng/opnfv-lint.yml
jjb/releng/opnfv-repo-archiver.sh
jjb/releng/opnfv-utils.yml
jjb/releng/releng-ci-jobs.yml
jjb/snaps/snaps-verify-jobs.yml
jjb/stor4nfv/stor4nfv-project.yml
jjb/ves/ves.yml
jjb/vnf_forwarding_graph/vnf_forwarding_graph.yml
jjb/vswitchperf/vswitchperf.yml
jjb/xci/bifrost-verify-jobs.yml
jjb/xci/bifrost-verify.sh
jjb/xci/osa-periodic-jobs.yml
jjb/xci/xci-cleanup.sh [new file with mode: 0755]
jjb/xci/xci-merge-jobs.yml [new file with mode: 0644]
jjb/xci/xci-promote.sh [new file with mode: 0755]
jjb/xci/xci-run-functest.sh [new file with mode: 0755]
jjb/xci/xci-start-deployment.sh [new file with mode: 0755]
jjb/xci/xci-start-new-vm.sh [new file with mode: 0755]
jjb/xci/xci-verify-jobs.yml
jjb/yardstick/yardstick-daily.sh
jjb/yardstick/yardstick-get-k8s-conf.sh
jjb/yardstick/yardstick-project-jobs.yml
modules/opnfv/deployment/daisy/__init__.py [new file with mode: 0644]
modules/opnfv/deployment/daisy/adapter.py [new file with mode: 0644]
modules/opnfv/deployment/factory.py
modules/opnfv/utils/ssh_utils.py
utils/build-server-ansible/inventory.ini [new file with mode: 0644]
utils/build-server-ansible/main.yml [new file with mode: 0644]
utils/build-server-ansible/vars/CentOS.yml [new file with mode: 0644]
utils/build-server-ansible/vars/Ubuntu.yml [new file with mode: 0644]
utils/build-server-ansible/vars/defaults.yml [new file with mode: 0644]
utils/build-server-ansible/vars/docker-compose-Centos.yml [new file with mode: 0644]
utils/build-server-ansible/vars/docker-compose-Ubuntu.yml [new file with mode: 0644]
utils/create_pod_file.py
utils/fetch_k8_conf.sh [new file with mode: 0755]
utils/push-test-logs.sh

index 4402f17..6ac3724 100644 (file)
--- a/.yamllint
+++ b/.yamllint
@@ -2,7 +2,9 @@
 extends: default
 
 rules:
-  # 120 chars should be enough and don't fail if a line is longer
+  # ONLY 'max' length is configure, other parameters are default values.
   line-length:
     max: 120
     level: warning
+    allow-non-breakable-words: true
+    allow-non-breakable-inline-mappings: false
diff --git a/INFO b/INFO
index 18c8cf2..d9051ab 100644 (file)
--- a/INFO
+++ b/INFO
@@ -16,7 +16,6 @@ Fatih Degirmenci (Ericsson, fatih.degirmenci@ericsson.com)
 Aric Gardner (Linux Foundation, agardner@linuxfoundation.org)
 Tim Rozet (Red Hat, trozet@redhat.com)
 Morgan Richomme (Orange, morgan.richomme@orange.com)
-Matthew Lijun (Huawei, matthew.lijun@huawei.com)
 Jose Lausuch (Ericsson, jose.lausuch@ericsson.com)
 Ryota Mibu (NEC, r-mibu@cq.jp.nec.com)
 Mei Mei (Huawei, meimei@huawei.com)
diff --git a/INFO.yaml b/INFO.yaml
new file mode 100644 (file)
index 0000000..3bb3cbe
--- /dev/null
+++ b/INFO.yaml
@@ -0,0 +1,125 @@
+---
+project: 'Release Engineering (Releng)'
+project_creation_date: '2015-06-14'
+project_category: 'Integration & Testing'
+lifecycle_state: 'Incubation'
+project_lead: &opnfv_releng_ptl
+    name: 'Fatih Degirmenci'
+    email: 'fatih.degirmenci@ericsson.com'
+    id: 'fdegir'
+    company: 'Ericsson'
+    timezone: 'Europe/Stockholm'
+primary_contact: *opnfv_releng_ptl
+issue_tracking:
+    type: 'jira'
+    url: 'https://jira.opnfv.org/projects/RELENG'
+    key: 'RELENG'
+mailing_list:
+    type: 'mailman2'
+    url: 'opnfv-tech-discuss@lists.opnfv.org'
+    tag: '[releng]'
+realtime_discussion:
+    type: 'irc'
+    server: 'freenode.net'
+    channel: '#lf-releng'
+meetings:
+    - type: 'gotomeeting+irc'
+      agenda: 'https://wiki.opnfv.org/display/INF/Infra+Working+Group'
+      url: 'https://global.gotomeeting.com/join/819733085'
+      server: 'freenode.net'
+      channel: '#opnfv-meeting'
+      repeats: 'weekly'
+      time: '16:00 UTC'
+repositories:
+    - 'releng'
+    - 'releng-anteater'
+    - 'releng-testresults'
+    - 'releng-utils'
+    - 'releng-xci'
+committers:
+    - <<: *opnfv_releng_ptl
+    - name: 'Aric Gardner'
+      email: 'agardner@linuxfoundation.org'
+      company: 'The Linux Foundation'
+      id: 'agardner'
+      timezone: 'Canada/Atlantic'
+    - name: 'Tim Rozet'
+      email: 'trozet@redhat.com'
+      company: 'Red Hat'
+      id: 'trozet'
+      timezone: 'America/New_York'
+    - name: 'Morgan Richomme'
+      email: 'morgan.richomme@orange.com'
+      company: 'Orange'
+      id: 'mrichomme'
+      timezone: 'Europe/Paris'
+    - name: 'Jose Lausuch'
+      company: 'SUSE'
+      email: 'jose.lausuch@ericsson.com'
+      id: 'jose.lausuch'
+      timezone: 'Europe/Madrid'
+    - name: 'Ryota Mibu'
+      company: 'NEC'
+      email: 'r-mibu@cq.jp.nec.com'
+      id: 'r-mibu'
+      timezone: 'Asia/Tokyo'
+    - name: 'Mei Mei'
+      company: 'Huawei'
+      email: 'meimei@huawei.com'
+      id: 'm00133142'
+      timezone: 'Asia/Shanghai'
+    - name: 'Trevor Bramwell'
+      company: 'The Linux Foundation'
+      email: 'tbramwell@linuxfoundation.org'
+      id: 'bramwelt'
+      timezone: 'America/Los_Angeles'
+    - name: 'Serena Feng'
+      company: 'ZTE'
+      email: 'feng.xiaowei@zte.com.cn'
+      id: 'SerenaFeng'
+      timezone: 'Asia/Shanghai'
+    - name: 'Yolanda Robla Mota'
+      company: 'Red Hat'
+      email: 'yroblamo@redhat.com'
+      id: 'yrobla'
+      timezone: 'America/New_York'
+    - name: 'Markos Chandras'
+      company: 'SUSE'
+      email: 'mchandras@suse.de'
+      id: 'mchandras'
+      timezone: 'Europe/Berlin'
+    - name: 'Luke Hinds'
+      company: 'Red Hat'
+      email: 'lhinds@redhat.com'
+      id: 'lukehinds'
+      timezone: 'Europe/London'
+tsc:
+    approval: 'http://ircbot.wl.linuxfoundation.org/meetings/opnfv-meeting/2015/opnfv-meeting.2015-07-14-14.00.html'
+    changes:
+        - type: 'removal'
+          name: 'Guy Rodrigue Koffi'
+          link: ''
+        - type: 'removal'
+          name: 'Victor Laza'
+          link: 'http://meetbot.opnfv.org/meetings/opnfv-meeting/2016/opnfv-meeting.2016-02-16-14.59.html'
+        - type: 'promotion'
+          name: 'Mei Mei'
+          link: 'http://lists.opnfv.org/pipermail/opnfv-tsc/2016-March/002228.html'
+        - type: 'removal'
+          name: 'Peter Banzi'
+          link: ''
+        - type: 'promotion'
+          name: 'Trevor Bramwell'
+          link: 'http://lists.opnfv.org/pipermail/opnfv-tech-discuss/2016-July/011659.html'
+        - type: 'promotion'
+          name: 'Serena Feng'
+          link: ''
+        - type: 'promotion'
+          name: 'Yolanda Robla Mota'
+          link: ''
+        - type: 'promotion'
+          name: 'Markos'
+          link: ''
+        - type: 'promotion'
+          name: 'Luke Hinds'
+          link: ''
index b12b101..5807286 100644 (file)
           branch: '{branch}'
       - apex-parameter:
           gs-pathname: '{gs-pathname}'
-      - string:
-          name: GERRIT_REFSPEC
-          default: 'refs/heads/{branch}'
-          description: "JJB configured GERRIT_REFSPEC parameter"
 
     scm:
       - git-scm-gerrit
index c57ac1d..421a3fa 100644 (file)
 - job-template:
     name: 'apex-verify-{stream}'
 
-    node: 'apex-virtual-master'
-
     concurrent: true
 
     disabled: '{obj:disabled}'
     project-type: 'multijob'
 
     parameters:
+      - '{project}-virtual-{stream}-defaults'
       - apex-parameter:
           gs-pathname: '{gs-pathname}/dev'
       - project-parameter:
                   pattern: 'apex/*'
                 - compare-type: ANT
                   pattern: 'build/**'
+                - compare-type: ANT
+                  pattern: 'ci/**'
                 - compare-type: ANT
                   pattern: 'lib/**'
                 - compare-type: ANT
                 GERRIT_REFSPEC=$GERRIT_REFSPEC
                 GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
                 GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-              node-parameters: false
+              node-parameters: true
               kill-phase-on: FAILURE
               abort-all-job: true
               git-revision: true
 - job-template:
     name: 'apex-verify-gate-{stream}'
 
-    node: 'apex-build-{stream}'
-
     concurrent: true
 
     disabled: '{obj:disabled}'
     project-type: 'multijob'
 
     parameters:
+      - '{project}-virtual-{stream}-defaults'
       - apex-parameter:
           gs-pathname: '{gs-pathname}/dev'
       - project-parameter:
index 1c04260..b07ccd6 100644 (file)
@@ -26,7 +26,7 @@
           baremetal-slave: 'apex-baremetal-master'
           verify-scenario: 'os-odl-nofeature-ha'
           scenario_stream: 'master'
-          disable_daily: true
+          disable_daily: false
       - euphrates: &euphrates
           branch: 'stable/euphrates'
           gs-pathname: '/euphrates'
@@ -35,7 +35,7 @@
           baremetal-slave: 'apex-baremetal-master'
           verify-scenario: 'os-odl-nofeature-ha'
           scenario_stream: 'euphrates'
-          disable_daily: false
+          disable_daily: true
       - danube: &danube
           branch: 'stable/danube'
           gs-pathname: '/danube'
           <<: *master
       - 'os-odl-bgpvpn-ha':
           <<: *master
+      - 'os-odl-bgpvpn-noha':
+          <<: *master
       - 'os-ovn-nofeature-noha':
           <<: *master
       - 'os-nosdn-fdio-noha':
           <<: *euphrates
       - 'os-nosdn-fdio-ha':
           <<: *euphrates
-      - 'os-odl-fdio-noha':
-          <<: *euphrates
-      - 'os-odl-fdio-ha':
-          <<: *euphrates
       - 'os-nosdn-bar-ha':
           <<: *euphrates
       - 'os-nosdn-bar-noha':
           <<: *euphrates
       - 'os-odl-sfc-ha':
           <<: *euphrates
-      - 'os-odl-fdio_dvr-noha':
-          <<: *euphrates
-      - 'os-odl-fdio_dvr-ha':
-          <<: *euphrates
       - 'os-nosdn-calipso-noha':
           <<: *euphrates
 
 - job-template:
     name: 'apex-deploy-{platform}-{stream}'
 
-    node: 'apex-{platform}-{stream}'
-
     concurrent: true
 
     disabled: false
 - job-template:
     name: 'apex-virtual-{stream}'
 
-    node: 'apex-virtual-master'
-
     project-type: 'multijob'
 
     disabled: false
               predefined-parameters:
                 DEPLOY_SCENARIO=$DEPLOY_SCENARIO
               kill-phase-on: NEVER
-              enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-(nosdn-nofeature|nosdn-kvm|odl_l3-fdio)-ha/"
+              enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-(nosdn-nofeature|odl-bgpvpn)-ha/"
               abort-all-job: false
               git-revision: false
       - multijob:
     #     branch:    branch (eg. stable)
     project-type: 'multijob'
 
-    node: '{baremetal-slave}'
-
     disabled: '{obj:disable_daily}'
 
     scm:
               kill-phase-on: NEVER
               abort-all-job: true
               git-revision: false
+            - name: 'apex-os-odl-bgpvpn-noha-baremetal-master'
+              node-parameters: false
+              current-parameters: false
+              predefined-parameters: |
+                OPNFV_CLEAN=yes
+              kill-phase-on: NEVER
+              abort-all-job: true
+              git-revision: false
             - name: 'apex-os-ovn-nofeature-noha-baremetal-master'
               node-parameters: false
               current-parameters: false
               kill-phase-on: NEVER
               abort-all-job: true
               git-revision: false
-            - name: 'apex-os-odl-fdio-noha-baremetal-euphrates'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-odl-fdio-ha-baremetal-euphrates'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
             - name: 'apex-os-nosdn-bar-ha-baremetal-euphrates'
               node-parameters: false
               current-parameters: false
               kill-phase-on: NEVER
               abort-all-job: true
               git-revision: false
-            - name: 'apex-os-odl-fdio_dvr-noha-baremetal-euphrates'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-odl-fdio_dvr-ha-baremetal-euphrates'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
             - name: 'apex-os-nosdn-calipso-noha-baremetal-euphrates'
               node-parameters: false
               current-parameters: false
 - trigger:
     name: 'apex-master'
     triggers:
-      - timed: '0 3 1 1 7'
+      - timed: '0 0 1-31/2 * *'
 
 - trigger:
     name: 'apex-euphrates'
     triggers:
-      - timed: '0 12 * * *'
+      - timed: '0 0 2-30/2 * *'
 
 - trigger:
     name: 'apex-danube'
index 900002c..b9cbd02 100644 (file)
@@ -26,7 +26,7 @@
           baremetal-slave: 'apex-baremetal-master'
           verify-scenario: 'os-odl-nofeature-ha'
           scenario_stream: 'master'
-          disable_daily: true
+          disable_daily: false
       - euphrates: &euphrates
           branch: 'stable/euphrates'
           gs-pathname: '/euphrates'
@@ -35,7 +35,7 @@
           baremetal-slave: 'apex-baremetal-master'
           verify-scenario: 'os-odl-nofeature-ha'
           scenario_stream: 'euphrates'
-          disable_daily: false
+          disable_daily: true
       - danube: &danube
           branch: 'stable/danube'
           gs-pathname: '/danube'
 - job-template:
     name: 'apex-deploy-{platform}-{stream}'
 
-    node: 'apex-{platform}-{stream}'
-
     concurrent: true
 
     disabled: false
 - job-template:
     name: 'apex-virtual-{stream}'
 
-    node: 'apex-virtual-master'
-
     project-type: 'multijob'
 
     disabled: false
               predefined-parameters:
                 DEPLOY_SCENARIO=$DEPLOY_SCENARIO
               kill-phase-on: NEVER
-              enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-(nosdn-nofeature|nosdn-kvm|odl_l3-fdio)-ha/"
+              enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-(nosdn-nofeature|odl-bgpvpn)-ha/"
               abort-all-job: false
               git-revision: false
       - multijob:
     #     branch:    branch (eg. stable)
     project-type: 'multijob'
 
-    node: '{baremetal-slave}'
-
     disabled: '{obj:disable_daily}'
 
     scm:
 - trigger:
     name: 'apex-master'
     triggers:
-      - timed: '0 3 1 1 7'
+      - timed: '0 0 1-31/2 * *'
 
 - trigger:
     name: 'apex-euphrates'
     triggers:
-      - timed: '0 12 * * *'
+      - timed: '0 0 2-30/2 * *'
 
 - trigger:
     name: 'apex-danube'
index 3b6b485..789ca7f 100644 (file)
@@ -4,6 +4,7 @@ master:
   - 'os-odl-nofeature-ha'
   - 'os-odl-nofeature-noha'
   - 'os-odl-bgpvpn-ha'
+  - 'os-odl-bgpvpn-noha'
   - 'os-ovn-nofeature-noha'
   - 'os-nosdn-fdio-noha'
   - 'os-nosdn-fdio-ha'
@@ -29,8 +30,6 @@ euphrates:
   - 'os-ovn-nofeature-noha'
   - 'os-nosdn-fdio-noha'
   - 'os-nosdn-fdio-ha'
-  - 'os-odl-fdio-noha'
-  - 'os-odl-fdio-ha'
   - 'os-nosdn-bar-ha'
   - 'os-nosdn-bar-noha'
   - 'os-nosdn-nofeature-ha-ipv6'
@@ -40,8 +39,6 @@ euphrates:
   - 'os-nosdn-kvm_ovs_dpdk-ha'
   - 'os-odl-sfc-noha'
   - 'os-odl-sfc-ha'
-  - 'os-odl-fdio_dvr-noha'
-  - 'os-odl-fdio_dvr-ha'
   - 'os-nosdn-calipso-noha'
 danube:
   - 'os-nosdn-nofeature-noha'
index 0202ef0..da3992e 100644 (file)
                     build-step-failure-threshold: 'never'
                     failure-threshold: 'never'
                     unstable-threshold: 'FAILURE'
-      # 1.dovetail only master, based on D release
-      # 2.here the stream means the SUT stream,
-      #   dovetail stream is defined in its own job
-      # 3.only proposed_tests testsuite here(refstack, ha, ipv6, bgpvpn)
-      # 4.not used for release criteria or compliance,
+      # 1.here the stream means the SUT stream, dovetail stream is defined in its own job
+      # 2.only debug testsuite here(refstack, ha, vping, ipv6, tempest, bgpvpn)
+      # 3.not used for release criteria or compliance,
       #   only to debug the dovetail tool bugs with arm pods
-      # 5.only run against scenario os-(nosdn|odl)-(nofeature-bgpvpn)-ha
-      - conditional-step:
-          condition-kind: and
-          condition-operands:
-            - condition-kind: regex-match
-              regex: os-(nosdn|odl)-(nofeature|bgpvpn)-ha
-              label: '{scenario}'
-            - condition-kind: regex-match
-              regex: 'danube'
-              label: '{stream}'
-          steps:
-            - trigger-builds:
-                - project: 'dovetail-{installer}-{pod}-proposed_tests-master'
-                  current-parameters: false
-                  predefined-parameters:
-                    DEPLOY_SCENARIO={scenario}
-                  block: true
-                  same-node: true
-                  block-thresholds:
-                    build-step-failure-threshold: 'never'
-                    failure-threshold: 'never'
-                    unstable-threshold: 'FAILURE'
+      - trigger-builds:
+          - project: 'dovetail-{installer}-{pod}-proposed_tests-{stream}'
+            current-parameters: false
+            predefined-parameters:
+              DEPLOY_SCENARIO={scenario}
+            block: true
+            same-node: true
+            block-thresholds:
+              build-step-failure-threshold: 'never'
+              failure-threshold: 'never'
+              unstable-threshold: 'FAILURE'
       # Armband uses Fuel's log collection project job, no need to duplicate
       - conditional-step:
           condition-kind: not
 - trigger:
     name: 'fuel-os-nosdn-nofeature-ha-armband-baremetal-master-trigger'
     triggers:
-      - timed: '0 16 * * 2,4,6'
+      - timed: '0 1 * * 2,4,6'
 - trigger:
     name: 'fuel-os-nosdn-nofeature-noha-armband-baremetal-master-trigger'
     triggers:
 - trigger:
     name: 'fuel-os-odl-nofeature-ha-armband-baremetal-master-trigger'
     triggers:
-      - timed: '0 16 * * 1,3,5,7'
+      - timed: '0 1 * * 1,3,5,7'
 # ---------------------------------------------------------------------
 # Enea Armband CI Baremetal Triggers running against euphrates branch
 # ---------------------------------------------------------------------
 - trigger:
     name: 'fuel-os-nosdn-nofeature-ha-armband-baremetal-euphrates-trigger'
     triggers:
-      - timed: '0 1 * * 2,4,6'
+      - timed: ''
 - trigger:
     name: 'fuel-os-nosdn-nofeature-noha-armband-baremetal-euphrates-trigger'
     triggers:
 - trigger:
     name: 'fuel-os-odl-nofeature-ha-armband-baremetal-euphrates-trigger'
     triggers:
-      - timed: '0 1 * * 1,3,5,7'
+      - timed: ''
 # --------------------------------------------------------------
 # Enea Armband CI Virtual Triggers running against master branch
 # --------------------------------------------------------------
index fefa376..c28dc56 100644 (file)
@@ -4,53 +4,5 @@
 
     project: '{name}'
 
-    stream:
-      - master:
-          branch: '{stream}'
-          gs-pathname: ''
-          disabled: false
-
     jobs:
-      - 'auto-verify-{stream}'
-
-- job-template:
-    name: 'auto-verify-{stream}'
-
-    disabled: '{obj:disabled}'
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - 'opnfv-build-defaults'
-
-    scm:
-      - git-scm-gerrit
-
-    triggers:
-      - gerrit:
-          server-name: 'gerrit.opnfv.org'
-          trigger-on:
-            - patchset-created-event:
-                exclude-drafts: 'false'
-                exclude-trivial-rebase: 'false'
-                exclude-no-code-change: 'false'
-            - draft-published-event
-            - comment-added-contains-event:
-                comment-contains-value: 'recheck'
-            - comment-added-contains-event:
-                comment-contains-value: 'reverify'
-          projects:
-            - project-compare-type: 'ANT'
-              project-pattern: '{project}'
-              branches:
-                - branch-compare-type: 'ANT'
-                  branch-pattern: '**/{branch}'
-              disable-strict-forbidden-file-verification: 'true'
-              forbidden-file-paths:
-                - compare-type: ANT
-                  pattern: 'docs/**|.gitignore'
-
-    builders:
-      - shell: |
-          echo "Nothing to verify!"
+      - '{project}-verify-basic'
index a8f6297..2d34734 100644 (file)
@@ -1,64 +1,8 @@
 ---
-###################################################
-# All the jobs except verify have been removed!
-# They will only be enabled on request by projects!
-###################################################
 - project:
     name: availability
 
     project: '{name}'
 
     jobs:
-      - 'availability-verify-{stream}'
-
-    stream:
-      - master:
-          branch: '{stream}'
-          gs-pathname: ''
-          disabled: 'false'
-      - euphrates:
-          branch: 'stable/{stream}'
-          gs-pathname: '/{stream}'
-          disabled: 'false'
-
-- job-template:
-    name: 'availability-verify-{stream}'
-
-    disabled: '{obj:disabled}'
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - 'opnfv-build-ubuntu-defaults'
-
-    scm:
-      - git-scm-gerrit
-
-    triggers:
-      - gerrit:
-          server-name: 'gerrit.opnfv.org'
-          trigger-on:
-            - patchset-created-event:
-                exclude-drafts: 'false'
-                exclude-trivial-rebase: 'false'
-                exclude-no-code-change: 'false'
-            - draft-published-event
-            - comment-added-contains-event:
-                comment-contains-value: 'recheck'
-            - comment-added-contains-event:
-                comment-contains-value: 'reverify'
-          projects:
-            - project-compare-type: 'ANT'
-              project-pattern: '{project}'
-              branches:
-                - branch-compare-type: 'ANT'
-                  branch-pattern: '**/{branch}'
-              disable-strict-forbidden-file-verification: 'true'
-              forbidden-file-paths:
-                - compare-type: ANT
-                  pattern: 'docs/**|.gitignore'
-
-    builders:
-      - shell: |
-          echo "Nothing to verify!"
+      - '{project}-verify-basic'
index e40841b..5f78aae 100644 (file)
@@ -11,6 +11,7 @@ echo
 cd ci
 ./install_dependencies.sh
 ./build_rpm.sh
+cp utility/rpms_list $WORKSPACE
 cd $WORKSPACE
 
 # save information regarding artifact into file
index 0f639b5..f05dc2a 100644 (file)
@@ -2,7 +2,7 @@
 set -o nounset
 set -o pipefail
 
-RPM_LIST=$WORKSPACE/ci/utilities/rpms_list
+RPM_LIST=$WORKSPACE/rpms_list
 RPM_WORKDIR=$WORKSPACE/rpmbuild
 RPM_DIR=$RPM_WORKDIR/RPMS/x86_64/
 cd $WORKSPACE/
diff --git a/jjb/ci_gate_security/anteater-clone-all-repos.sh b/jjb/ci_gate_security/anteater-clone-all-repos.sh
deleted file mode 100755 (executable)
index 8a9e73d..0000000
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-set -o errexit
-set -o pipefail
-set -o nounset
-export PATH=$PATH:/usr/local/bin/
-
-
-#WORKSPACE="$(pwd)"
-
-cd $WORKSPACE
-if [ ! -d "$WORKSPACE/allrepos" ]; then
-  mkdir $WORKSPACE/allrepos
-fi
-
-cd $WORKSPACE/allrepos
-
-declare -a PROJECT_LIST
-EXCLUDE_PROJECTS="All-Projects|All-Users|securedlab"
-
-PROJECT_LIST=($(ssh gerrit.opnfv.org -p 29418 gerrit ls-projects | egrep -v $EXCLUDE_PROJECTS))
-echo "PROJECT_LIST=(${PROJECT_LIST[*]})" > $WORKSPACE/opnfv-projects.sh
-
-for PROJECT in ${PROJECT_LIST[@]}; do
-  echo "> Cloning $PROJECT"
-  if [ ! -d "$PROJECT" ]; then
-    git clone "https://gerrit.opnfv.org/gerrit/$PROJECT.git"
-  else
-    pushd "$PROJECT" > /dev/null
-    git pull -f
-    popd > /dev/null
-  fi
-done
index 1190963..6caa131 100644 (file)
@@ -1,37 +1,50 @@
 #!/bin/bash
 # SPDX-license-identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2017 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+ANTEATER_SCAN_PATCHSET="${ANTEATER_SCAN_PATCHSET:-true}"
+
+cd $WORKSPACE
+REPORTDIR='.reports'
+mkdir -p $REPORTDIR
+# Ensure any user can read the reports directory
+chmod 777 $REPORTDIR
+
+ANTEATER_FILES="--patchset /home/opnfv/anteater/$PROJECT/patchset"
+
+if [[ "$ANTEATER_SCAN_PATCHSET" == "true" ]]; then
+    echo "Generating patchset file to list changed files"
+    git diff HEAD^1 --name-only | sed "s#^#/home/opnfv/anteater/$PROJECT/#" > $WORKSPACE/patchset
+    echo "Changed files are"
+    echo "--------------------------------------------------------"
+    cat $WORKSPACE/patchset
+    echo "--------------------------------------------------------"
+else
+    echo "Checking full project $PROJECT"
+    ANTEATER_FILES="--path /home/opnfv/anteater/$PROJECT"
+fi
+
+vols="-v $WORKSPACE:/home/opnfv/anteater/$PROJECT -v $WORKSPACE/$REPORTDIR:/home/opnfv/anteater/$REPORTDIR"
+envs="-e PROJECT=$PROJECT"
 
-echo "--------------------------------------------------------"
-vols="-v $WORKSPACE/allrepos/:/home/opnfv/anteater/allrepos/"
 echo "Pulling releng-anteater docker image"
 echo "--------------------------------------------------------"
 docker pull opnfv/releng-anteater
 echo "--------------------------------------------------------"
-cmd="docker run -id $vols opnfv/releng-anteater /bin/bash"
-echo "Running docker command $cmd"
-container_id=$($cmd)
-echo "Container ID is $container_id"
-source $WORKSPACE/opnfv-projects.sh
-for project in "${PROJECT_LIST[@]}"
-
-do
-  cmd="/home/opnfv/venv/bin/anteater --project testproj --path /home/opnfv/anteater/allrepos/$project"
-  echo "Executing command inside container"
-  echo "$cmd"
-  echo "--------------------------------------------------------"
-  docker exec $container_id $cmd > $WORKSPACE/"$project".securityaudit.log 2>&1
-done
 
+cmd="docker run -i $envs $vols --rm opnfv/releng-anteater \
+/home/opnfv/venv/bin/anteater --project $PROJECT $ANTEATER_FILES"
+echo "Running docker container"
+echo "$cmd"
+$cmd > $WORKSPACE/securityaudit.log 2>&1
 exit_code=$?
 echo "--------------------------------------------------------"
-echo "Stopping docker container with ID $container_id"
-docker stop $container_id
-
-
-#gsutil cp $WORKSPACE/securityaudit.log \
-#    gs://$GS_URL/$PROJECT-securityaudit-weekly.log 2>&1
-#
-#gsutil -m setmeta \
-#    -h "Content-Type:text/html" \
-#    -h "Cache-Control:private, max-age=0, no-transform" \
-#    gs://$GS_URL/$PROJECT-securityaudit-weekly.log > /dev/null 2>&1
+echo "Docker container exited with code: $exit_code"
+echo "--------------------------------------------------------"
+exit 0
index 0a412c2..56aee17 100644 (file)
@@ -9,9 +9,76 @@
 
     project: anteaterfw
 
+    repo:
+      - apex
+      - apex-os-net-config
+      - apex-puppet-tripleo
+      - apex-tripleo-heat-templates
+      - armband
+      - auto
+      - availability
+      - bamboo
+      - barometer
+      - bottlenecks
+      - calipso
+      - clover
+      - compass-containers
+      - compass4nfv
+      - conductor
+      - container4nfv
+      - copper
+      - cperf
+      - daisy
+      - doctor
+      - domino
+      - dovetail
+      - dpacc
+      - enfv
+      - fastpathmetrics
+      - fds
+      - fuel
+      - functest
+      - ipv6
+      - joid
+      - kvmfornfv
+      - models
+      - moon
+      - multisite
+      - netready
+      - nfvbench
+      - octopus
+      - onosfw
+      - openretriever
+      - opera
+      - opnfvdocs
+      - orchestra
+      - ovn4nfv
+      - ovno
+      - ovsnfv
+      - parser
+      - pharos
+      - pharos-tools
+      - promise
+      - qtip
+      - releng
+      - releng-anteater
+      - releng-testresults
+      - releng-utils
+      - releng-xci
+      - samplevnf
+      - sdnvpn
+      - securityscanning
+      - sfc
+      - snaps
+      - stor4nfv
+      - storperf
+      - ves
+      - vswitchperf
+      - yardstick
+
     jobs:
       - 'opnfv-security-audit-verify-{stream}'
-      - 'opnfv-security-audit-weekly-{stream}'
+      - 'opnfv-security-audit-{repo}-weekly-{stream}'
 
     stream:
       - master:
 # job templates
 ########################
 - job-template:
-    name: 'opnfv-security-audit-weekly-{stream}'
+    name: 'opnfv-security-audit-{repo}-weekly-{stream}'
 
     disabled: '{obj:disabled}'
 
     parameters:
-      - label:
-          name: SLAVE_LABEL
-          default: 'ericsson-build3'
-          description: 'Slave label on Jenkins'
+      - ericsson-build3-defaults
+      - string:
+          name: ANTEATER_SCAN_PATCHSET
+          default: "false"
+          description: "Have anteater scan patchsets (true) or full project (false)"
       - project-parameter:
-          project: releng
+          project: '{repo}'
           branch: '{branch}'
 
+    scm:
+      - git-scm-gerrit
+
     triggers:
       - timed: '@weekly'
 
     builders:
       - anteater-security-audit-weekly
 
+    publishers:
+      # defined in jjb/global/releng-macros.yml
+      - 'email-{repo}-ptl':
+          subject: 'OPNFV Security Scan Result: {repo}'
+      - workspace-cleanup:
+          fail-build: false
+
 - job-template:
     name: 'opnfv-security-audit-verify-{stream}'
 
                 comment-contains-value: 'reverify'
           projects:
             - project-compare-type: 'REG_EXP'
-              project-pattern: 'apex|armband|bamboo|barometer|bottlenecks|calipso|compass4nfv|conductor|cooper|cperf|daisy|doctor|dovetail|dpacc|enfv|escalator|fds|fuel|functest|octopus|pharos|releng|sandbox|yardstick|infra|ipv6|kvmfornfv|lsoapi|models|moon|multisite|netready'
+              project-pattern: 'apex|armband|bamboo|barometer|bottlenecks|calipso|compass4nfv|conductor|copper|cperf|daisy|doctor|dovetail|dpacc|enfv|escalator|fds|fuel|functest|octopus|pharos|releng|sandbox|yardstick|infra|ipv6|kvmfornfv|lsoapi|models|moon|multisite|netready'
               branches:
                 - branch-compare-type: 'ANT'
                   branch-pattern: '**/{branch}'
       - shell:
           !include-raw: ./anteater-report-to-gerrit.sh
 
-# yamllint disable rule:indentation
 - builder:
     name: anteater-security-audit-weekly
     builders:
       - shell:
-          !include-raw:
-              - ./anteater-clone-all-repos.sh
-              - ./anteater-security-audit-weekly.sh
-# yamllint enable rule:indentation
+          !include-raw: ./anteater-security-audit-weekly.sh
index 4adfc2a..81d76d5 100644 (file)
@@ -13,7 +13,7 @@
       gs-pathname: ''
       ppa-pathname: '/{stream}'
       disabled: false
-      openstack-version: ocata
+      openstack-version: pike
     euphrates: &euphrates
       stream: euphrates
       branch: 'stable/{stream}'
@@ -54,7 +54,7 @@
       #        master
       # -------------------------------
       - baremetal-centos:
-          slave-label: 'intel-pod8'
+          slave-label: 'intel-pod17'
           os-version: 'centos7'
           <<: *master
       # -------------------------------
       - 'os-nosdn-ovs_dpdk-noha':
           disabled: false
           auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
+      - 'os-nosdn-bar-ha':
+          disabled: false
+          auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
 
     jobs:
       - 'compass-{scenario}-{pod}-daily-{stream}'
       - 'compass-deploy-{pod}-daily-{stream}'
+      - 'compass-collect-logs-{pod}-daily-{stream}'
 
 ########################
 # job templates
           use-build-blocker: true
           blocking-jobs:
             - 'compass-os-.*?-{pod}-daily-.*?'
+            - 'compass-k8-.*?-{pod}-daily-.*?'
             - 'compass-os-.*?-baremetal-daily-.*?'
+            - 'compass-k8-.*?-baremetal-daily-.*?'
             - 'compass-verify-[^-]*-[^-]*'
           block-level: 'NODE'
 
               build-step-failure-threshold: 'never'
               failure-threshold: 'never'
               unstable-threshold: 'FAILURE'
-      # dovetail only master by now, not sync with A/B/C branches
       # here the stream means the SUT stream, dovetail stream is defined in its own job
-      # only run on os-(nosdn|odl_l2|onos|odl_l3)-nofeature-ha scenario
-      # run against SUT master branch, dovetail docker image with latest tag
-      # run against SUT danube branch, dovetail docker image with latest tag(Monday and Sunday)
-      # run against SUT danube branch, dovetail docker image with cvp.X.X.X tag(Tuesday, Thursday, Friday and Saturday)
+      # only run on os-(nosdn|odl_l3)-nofeature-ha scenario
+      # run with testsuite default, dovetail docker image with latest tag(Monday, Tuesday)
+      # run with testsuite proposed_tests, dovetail docker image with latest tag(Thursday, Friday)
       - conditional-step:
           condition-kind: and
           condition-operands:
             - condition-kind: regex-match
-              regex: danube
-              label: '{stream}'
-            - condition-kind: regex-match
-              regex: os-(nosdn|odl_l2|odl_l3)-nofeature-ha
+              regex: os-(nosdn|odl_l3)-nofeature-ha
               label: '{scenario}'
             - condition-kind: day-of-week
               day-selector: select-days
               days:
                 MON: true
-                SUN: true
+                TUES: true
               use-build-time: true
           steps:
             - trigger-builds:
-                - project: 'dovetail-compass-{pod}-proposed_tests-{stream}'
+                - project: 'dovetail-compass-{pod}-default-{stream}'
                   current-parameters: false
                   predefined-parameters: |
                     DOCKER_TAG=latest
           condition-kind: and
           condition-operands:
             - condition-kind: regex-match
-              regex: danube
-              label: '{stream}'
-            - condition-kind: regex-match
-              regex: os-(nosdn|odl_l2|odl_l3)-nofeature-ha
+              regex: os-(nosdn|odl_l3)-nofeature-ha
               label: '{scenario}'
             - condition-kind: day-of-week
               day-selector: select-days
               days:
-                TUES: true
-                WED: true
                 THURS: true
                 FRI: true
-                SAT: true
               use-build-time: true
           steps:
             - trigger-builds:
           condition-kind: and
           condition-operands:
             - condition-kind: regex-match
-              regex: os-(nosdn|odl_l2|odl_l3)-nofeature-ha
+              regex: os-nosdn-nofeature-ha
               label: '{scenario}'
-            - condition-kind: regex-match
-              regex: master
-              label: '{stream}'
           steps:
             - trigger-builds:
-                - project: 'dovetail-compass-{pod}-proposed_tests-{stream}'
+                - project: 'bottlenecks-compass-posca_stress_ping-{pod}-daily-{stream}'
                   current-parameters: false
                   predefined-parameters:
                     DEPLOY_SCENARIO={scenario}
           condition-kind: and
           condition-operands:
             - condition-kind: regex-match
-              regex: os-nosdn-nofeature-ha
-              label: '{scenario}'
+              regex: master
+              label: '{stream}'
           steps:
             - trigger-builds:
-                - project: 'bottlenecks-compass-posca_stress_ping-{pod}-daily-{stream}'
+                - project: 'compass-collect-logs-{pod}-daily-{stream}'
                   current-parameters: false
                   predefined-parameters:
                     DEPLOY_SCENARIO={scenario}
                     failure-threshold: 'never'
                     unstable-threshold: 'FAILURE'
 
+
 - job-template:
     name: 'compass-deploy-{pod}-daily-{stream}'
 
       - build-name:
           name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
       - timeout:
-          timeout: 240
+          timeout: 360
           abort: true
       - fix-workspace-permissions
 
             - shell:
                 !include-raw-escape: ./compass-deploy.sh
 
+- job-template:
+    name: 'compass-collect-logs-{pod}-daily-{stream}'
+
+    disabled: '{obj:disabled}'
+
+    concurrent: true
+
+    properties:
+      - logrotate-default
+
+    parameters:
+      - project-parameter:
+          project: '{project}'
+          branch: '{branch}'
+      - compass-ci-parameter:
+          installer: '{installer}'
+          gs-pathname: '{gs-pathname}'
+          ppa-pathname: '{ppa-pathname}'
+      - '{slave-label}-defaults'
+      - '{installer}-defaults'
+
+    scm:
+      - git-scm
+
+    wrappers:
+      - build-name:
+          name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
+      - fix-workspace-permissions
+
+    builders:
+      - description-setter:
+          description: "Built on $NODE_NAME"
+      - shell:
+          !include-raw-escape: ./compass-logs.sh
+
 ########################
 # parameter macros
 ########################
       - string:
           name: PPA_CACHE
           default: "$WORKSPACE/work/repo/"
+      - string:
+          name: LOG_DIRECTORY
+          default: $WORKSPACE/log_output
+          description: "Directory where the logs will be located upon the completion of the collection."
 
 ########################
 # trigger macros
     name: 'compass-k8-nosdn-nofeature-ha-baremetal-centos-master-trigger'
     triggers:
       - timed: ''
+- trigger:
+    name: 'compass-os-nosdn-bar-ha-baremetal-centos-master-trigger'
+    triggers:
+      - timed: ''  # '0 19 * * *'
 
 # ----------------------------
 # noha-baremetal-centos-master
     name: 'compass-k8-nosdn-nofeature-ha-huawei-pod7-danube-trigger'
     triggers:
       - timed: ''
+- trigger:
+    name: 'compass-os-nosdn-bar-ha-huawei-pod7-danube-trigger'
+    triggers:
+      - timed: ''  # '0 19 * * *'
 
 # ----------------------------
 # noha-huawei-pod7-danube
 - trigger:
     name: 'compass-os-nosdn-nofeature-ha-baremetal-master-trigger'
     triggers:
-      - timed: '0 20 * * *'
+      - timed: '0 20 2-30/2 * *'
 - trigger:
     name: 'compass-os-nosdn-openo-ha-baremetal-master-trigger'
     triggers:
 - trigger:
     name: 'compass-os-odl_l3-nofeature-ha-baremetal-master-trigger'
     triggers:
-      - timed: '0 18 * * *'
+      - timed: '0 18 1-29/2 * *'
 - trigger:
     name: 'compass-os-onos-nofeature-ha-baremetal-master-trigger'
     triggers:
 - trigger:
     name: 'compass-os-odl_l2-moon-ha-baremetal-master-trigger'
     triggers:
-      - timed: '0 12 * * *'
+      - timed: '0 12 2-30/2 * *'
 - trigger:
     name: 'compass-os-nosdn-kvm-ha-baremetal-master-trigger'
     triggers:
-      - timed: '0 14 * * *'
+      - timed: '0 14 1-29/2 * *'
 - trigger:
     name: 'compass-os-nosdn-ovs_dpdk-ha-baremetal-master-trigger'
     triggers:
-      - timed: '0 16 * * *'
+      - timed: '0 16 2-30/2 * *'
 - trigger:
     name: 'compass-k8-nosdn-nofeature-ha-baremetal-master-trigger'
     triggers:
 - trigger:
     name: 'compass-os-odl-sfc-ha-baremetal-master-trigger'
     triggers:
-      - timed: '0 10 * * *'
+      - timed: '0 10 1-29/2 * *'
+- trigger:
+    name: 'compass-os-nosdn-bar-ha-baremetal-master-trigger'
+    triggers:
+      - timed: '0 2 2-30/2 * *'
 
 # ---------------------
 # noha-baremetal-master
 - trigger:
     name: 'compass-os-nosdn-nofeature-ha-baremetal-euphrates-trigger'
     triggers:
-      - timed: '0 1 * * *'
+      - timed: '0 1 1-29/2 * *'
 - trigger:
     name: 'compass-os-nosdn-openo-ha-baremetal-euphrates-trigger'
     triggers:
 - trigger:
     name: 'compass-os-odl_l3-nofeature-ha-baremetal-euphrates-trigger'
     triggers:
-      - timed: '0 21 * * *'
+      - timed: '0 21 2-30/2 * *'
 - trigger:
     name: 'compass-os-onos-nofeature-ha-baremetal-euphrates-trigger'
     triggers:
 - trigger:
     name: 'compass-os-odl_l2-moon-ha-baremetal-euphrates-trigger'
     triggers:
-      - timed: '0 5 * * *'
+      - timed: '0 5 1-29/2 * *'
 - trigger:
     name: 'compass-os-nosdn-kvm-ha-baremetal-euphrates-trigger'
     triggers:
-      - timed: '0 13 * * *'
+      - timed: '0 13 2-30/2 * *'
 - trigger:
     name: 'compass-os-nosdn-ovs_dpdk-ha-baremetal-euphrates-trigger'
     triggers:
-      - timed: '0 9 * * *'
+      - timed: '0 9 1-29/2 * *'
 - trigger:
     name: 'compass-k8-nosdn-nofeature-ha-baremetal-euphrates-trigger'
     triggers:
 - trigger:
     name: 'compass-os-odl-sfc-ha-baremetal-euphrates-trigger'
     triggers:
-      - timed: '0 17 * * *'
+      - timed: '0 17 2-30/2 * *'
+- trigger:
+    name: 'compass-os-nosdn-bar-ha-baremetal-euphrates-trigger'
+    triggers:
+      - timed: '0 21 1-29/2 * *'
 
 # ---------------------
 # noha-baremetal-euphrates
 - trigger:
     name: 'compass-os-odl_l3-nofeature-ha-virtual-master-trigger'
     triggers:
-      - timed: '0 19 * * *'
+      - timed: '0 19 2-30/2 * *'
 - trigger:
     name: 'compass-os-onos-nofeature-ha-virtual-master-trigger'
     triggers:
 - trigger:
     name: 'compass-os-odl_l2-moon-ha-virtual-master-trigger'
     triggers:
-      - timed: '30 12 * * *'
+      - timed: '30 12 1-29/2 * *'
 - trigger:
     name: 'compass-os-nosdn-kvm-ha-virtual-master-trigger'
     triggers:
-      - timed: '0 13 * * *'
+      - timed: '0 13 1-29/2 * *'
 - trigger:
     name: 'compass-os-nosdn-ovs_dpdk-ha-virtual-master-trigger'
     triggers:
-      - timed: '0 17 * * *'
+      - timed: '0 17 2-30/2 * *'
 - trigger:
     name: 'compass-k8-nosdn-nofeature-ha-virtual-master-trigger'
     triggers:
 - trigger:
     name: 'compass-os-odl-sfc-ha-virtual-master-trigger'
     triggers:
-      - timed: '0 16 * * *'
+      - timed: '0 16 2-30/2 * *'
+- trigger:
+    name: 'compass-os-nosdn-bar-ha-virtual-master-trigger'
+    triggers:
+      - timed: '0 17 1-29/2 * *'
 
 # -------------------
 # noha-virtual-master
 - trigger:
     name: 'compass-os-nosdn-kvm-noha-virtual-master-trigger'
     triggers:
-      - timed: '30 13 * * *'
+      - timed: '30 13 1-29/2 * *'
 - trigger:
     name: 'compass-os-nosdn-nofeature-noha-virtual-master-trigger'
     triggers:
-      - timed: '0 14 * * *'
+      - timed: '0 14 2-30/2 * *'
 - trigger:
     name: 'compass-os-odl_l3-nofeature-noha-virtual-master-trigger'
     triggers:
-      - timed: '0 15 * * *'
+      - timed: '0 15 1-29/2 * *'
 - trigger:
     name: 'compass-os-odl_l2-moon-noha-virtual-master-trigger'
     triggers:
-      - timed: '0 18 * * *'
+      - timed: '0 18 2-30/2 * *'
 - trigger:
     name: 'compass-os-odl-sfc-noha-virtual-master-trigger'
     triggers:
-      - timed: '0 20 * * *'
+      - timed: '0 20 1-29/2 * *'
 - trigger:
     name: 'compass-os-nosdn-ovs_dpdk-noha-virtual-master-trigger'
     triggers:
-      - timed: '0 11 * * *'
+      - timed: '0 11 2-30/2 * *'
 
 # -----------------
 # ha-virtual-euphrates
 - trigger:
     name: 'compass-os-nosdn-nofeature-ha-virtual-euphrates-trigger'
     triggers:
-      - timed: '0 23 * * *'
+      - timed: '0 23 1-29/2 * *'
 - trigger:
     name: 'compass-os-nosdn-openo-ha-virtual-euphrates-trigger'
     triggers:
 - trigger:
     name: 'compass-os-odl_l3-nofeature-ha-virtual-euphrates-trigger'
     triggers:
-      - timed: '0 22 * * *'
+      - timed: '0 22 2-30/2 * *'
 - trigger:
     name: 'compass-os-onos-nofeature-ha-virtual-euphrates-trigger'
     triggers:
 - trigger:
     name: 'compass-os-odl_l2-moon-ha-virtual-euphrates-trigger'
     triggers:
-      - timed: '0 20 * * *'
+      - timed: '0 20 1-29/2 * *'
 - trigger:
     name: 'compass-os-nosdn-kvm-ha-virtual-euphrates-trigger'
     triggers:
-      - timed: '0 16 * * *'
+      - timed: '0 16 2-30/2 * *'
 - trigger:
     name: 'compass-os-nosdn-ovs_dpdk-ha-virtual-euphrates-trigger'
     triggers:
-      - timed: '0 14 * * *'
+      - timed: '0 14 1-29/2 * *'
 - trigger:
     name: 'compass-os-odl-sfc-ha-virtual-euphrates-trigger'
     triggers:
-      - timed: '0 18 * * *'
+      - timed: '0 18 2-30/2 * *'
 - trigger:
     name: 'compass-k8-nosdn-nofeature-ha-virtual-euphrates-trigger'
     triggers:
-      - timed: '5 1 * * *'
+      - timed: '5 1 2-30/2 * *'
+- trigger:
+    name: 'compass-os-nosdn-bar-ha-virtual-euphrates-trigger'
+    triggers:
+      - timed: '0 19 1-29/2 * *'
 
 # -------------------
 # noha-virtual-euphrates
 - trigger:
     name: 'compass-os-nosdn-kvm-noha-virtual-euphrates-trigger'
     triggers:
-      - timed: '0 15 * * *'
+      - timed: '0 15 1-29/2 * *'
 - trigger:
     name: 'compass-os-nosdn-nofeature-noha-virtual-euphrates-trigger'
     triggers:
-      - timed: '0 17 * * *'
+      - timed: '0 17 2-30/2 * *'
 - trigger:
     name: 'compass-os-odl_l3-nofeature-noha-virtual-euphrates-trigger'
     triggers:
-      - timed: '0 23 * * *'
+      - timed: '0 23 1-29/2 * *'
 - trigger:
     name: 'compass-os-odl_l2-moon-noha-virtual-euphrates-trigger'
     triggers:
-      - timed: '0 21 * * *'
+      - timed: '0 21 2-30/2 * *'
 - trigger:
     name: 'compass-os-odl-sfc-noha-virtual-euphrates-trigger'
     triggers:
-      - timed: '0 19 * * *'
+      - timed: '0 19 1-29/2 * *'
 - trigger:
     name: 'compass-os-nosdn-ovs_dpdk-noha-virtual-euphrates-trigger'
     triggers:
-      - timed: '0 12 * * *'
+      - timed: '0 12 2-30/2 * *'
index ad069a5..ac649b9 100644 (file)
@@ -45,10 +45,6 @@ else
     export NETWORK_CONF_FILE=network.yml
 fi
 
-if [[ "$NODE_NAME" =~ "intel-pod8" ]]; then
-    export OS_MGMT_NIC=em4
-fi
-
 if [[ "$NODE_NAME" =~ "-virtual" ]]; then
     export NETWORK_CONF=$CONFDIR/vm_environment/$NODE_NAME/${NETWORK_CONF_FILE}
     export DHA_CONF=$CONFDIR/vm_environment/${DEPLOY_SCENARIO}.yml
@@ -58,7 +54,11 @@ if [[ "$NODE_NAME" =~ "-virtual" ]]; then
         export VIRT_NUMBER=2
     fi
 else
-    export INSTALL_NIC=eth1
+    if [[ "$NODE_NAME" =~ "intel-pod17" ]]; then
+        export INSTALL_NIC=eno2
+    else
+        export INSTALL_NIC=eth1
+    fi
     export NETWORK_CONF=$CONFDIR/hardware_environment/$NODE_NAME/${NETWORK_CONF_FILE}
     export DHA_CONF=$CONFDIR/hardware_environment/$NODE_NAME/${DEPLOY_SCENARIO}.yml
 fi
diff --git a/jjb/compass4nfv/compass-logs.sh b/jjb/compass4nfv/compass-logs.sh
new file mode 100644 (file)
index 0000000..c028194
--- /dev/null
@@ -0,0 +1,28 @@
+#!/bin/bash
+set -o nounset
+set -o pipefail
+
+# log info to console
+echo "Uploading the logs $INSTALLER_TYPE artifact. This could take some time..."
+echo "--------------------------------------------------------"
+echo
+
+# create the log directory if it doesn't exist
+[[ -d $LOG_DIRECTORY ]] || mkdir -p $LOG_DIRECTORY
+
+OPNFV_ARTIFACT_VERSION=$(date -u +"%Y-%m-%d_%H-%M-%S")
+COMPASS_LOG_FILENAME="${JOB_NAME}_${BUILD_NUMBER}_${OPNFV_ARTIFACT_VERSION}.log.tar.gz"
+
+
+sudo docker exec compass-tasks /bin/bash /opt/collect-log.sh
+sudo docker cp compass-tasks:/opt/log.tar.gz ${LOG_DIRECTORY}/${COMPASS_LOG_FILENAME}
+
+sudo chown $(whoami):$(whoami) ${LOG_DIRECTORY}/${COMPASS_LOG_FILENAME}
+
+gsutil cp "${LOG_DIRECTORY}/${COMPASS_LOG_FILENAME}" \
+     "gs://${GS_URL}/logs/${COMPASS_LOG_FILENAME}" > /dev/null 2>&1
+
+echo
+echo "--------------------------------------------------------"
+echo "Done!"
+echo "Artifact is available as http://${GS_URL}/logs/${COMPASS_LOG_FILENAME}"
index 6927145..444b173 100644 (file)
           disabled: false
           openstack-version: 'ocata'
           branch-type: 'master'
-      - danube:
+      - euphrates:
           branch: 'stable/{stream}'
           gs-pathname: '/{stream}'
           ppa-pathname: '/{stream}'
           disabled: false
-          openstack-version: 'newton'
-          branch-type: 'branch'
+          openstack-version: 'ocata'
+          branch-type: 'master'
 
     distro:
       - 'xenial':
           blocking-jobs:
             - 'compass-verify-[^-]*-[^-]*'
             - 'compass-os-.*?-virtual-daily-.*?'
+            - 'compass-k8-.*?-virtual-daily-.*?'
           block-level: 'NODE'
 
     wrappers:
       - ssh-agent-wrapper
       - timeout:
-          timeout: 240
+          timeout: 360
           fail: true
       - fix-workspace-permissions
 
               node-parameters: true
               kill-phase-on: FAILURE
               abort-all-job: true
-            - name: 'opnfv-yamllint-verify-{stream}'
-              current-parameters: true
-              node-parameters: true
-              kill-phase-on: FAILURE
-              abort-all-job: true
       - multijob:
           name: deploy-virtual
           condition: SUCCESSFUL
               node-parameters: true
               kill-phase-on: FAILURE
               abort-all-job: true
-            - name: 'opnfv-yamllint-verify-{stream}'
-              current-parameters: true
-              node-parameters: true
-              kill-phase-on: FAILURE
-              abort-all-job: true
       - multijob:
           name: deploy-virtual
           condition: SUCCESSFUL
     wrappers:
       - ssh-agent-wrapper
       - timeout:
-          timeout: 240
+          timeout: 360
           fail: true
       - fix-workspace-permissions
 
index 3e5e5de..8a128da 100644 (file)
@@ -1,64 +1,8 @@
 ---
-###################################################
-# All the jobs except verify have been removed!
-# They will only be enabled on request by projects!
-###################################################
 - project:
     name: conductor
 
     project: '{name}'
 
     jobs:
-      - 'conductor-verify-{stream}'
-
-    stream:
-      - master:
-          branch: '{stream}'
-          gs-pathname: ''
-          disabled: false
-      - euphrates:
-          branch: 'stable/{stream}'
-          gs-pathname: '/{stream}'
-          disabled: false
-
-- job-template:
-    name: 'conductor-verify-{stream}'
-
-    disabled: '{obj:disabled}'
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - 'opnfv-build-ubuntu-defaults'
-
-    scm:
-      - git-scm-gerrit
-
-    triggers:
-      - gerrit:
-          server-name: 'gerrit.opnfv.org'
-          trigger-on:
-            - patchset-created-event:
-                exclude-drafts: 'false'
-                exclude-trivial-rebase: 'false'
-                exclude-no-code-change: 'false'
-            - draft-published-event
-            - comment-added-contains-event:
-                comment-contains-value: 'recheck'
-            - comment-added-contains-event:
-                comment-contains-value: 'reverify'
-          projects:
-            - project-compare-type: 'ANT'
-              project-pattern: '{project}'
-              branches:
-                - branch-compare-type: 'ANT'
-                  branch-pattern: '**/{branch}'
-              disable-strict-forbidden-file-verification: 'true'
-              forbidden-file-paths:
-                - compare-type: ANT
-                  pattern: 'docs/**|.gitignore'
-
-    builders:
-      - shell: |
-          echo "Nothing to verify!"
+      - '{project}-verify-basic'
diff --git a/jjb/container4nfv/arm64/compass-build.sh b/jjb/container4nfv/arm64/compass-build.sh
new file mode 100755 (executable)
index 0000000..696f7ff
--- /dev/null
@@ -0,0 +1,14 @@
+#!/bin/bash
+set -e
+
+cd compass4nfv
+
+COMPASS_WORK_DIR=$WORKSPACE/../compass-work
+mkdir -p $COMPASS_WORK_DIR
+ln -s $COMPASS_WORK_DIR work
+
+#TODO: remove workaround after all arm64 patches merged
+curl -s http://people.linaro.org/~yibo.cai/compass/compass4nfv-arm64-fixup.sh | bash -s {scenario}
+
+# build tarball
+COMPASS_ISO_REPO='http://people.linaro.org/~yibo.cai/compass' ./build.sh
diff --git a/jjb/container4nfv/arm64/compass-deploy.sh b/jjb/container4nfv/arm64/compass-deploy.sh
new file mode 100755 (executable)
index 0000000..3c59927
--- /dev/null
@@ -0,0 +1,13 @@
+#!/bin/bash
+set -e
+
+cd compass4nfv
+
+export ADAPTER_OS_PATTERN='(?i)CentOS-7.*arm.*'
+export OS_VERSION="centos7"
+export KUBERNETES_VERSION="v1.7.3"
+export DHA="deploy/conf/vm_environment/k8-nosdn-nofeature-noha.yml"
+export NETWORK="deploy/conf/vm_environment/network.yml"
+export VIRT_NUMBER=2 VIRT_CPUS=2 VIRT_MEM=4096 VIRT_DISK=50G
+
+./deploy.sh
diff --git a/jjb/container4nfv/arm64/yardstick-arm64.sh b/jjb/container4nfv/arm64/yardstick-arm64.sh
new file mode 100755 (executable)
index 0000000..26c6fdc
--- /dev/null
@@ -0,0 +1,93 @@
+#!/bin/bash
+set -e
+
+sshpass -p root ssh root@10.1.0.50 \
+  "mkdir -p /etc/yardstick; rm -rf /etc/yardstick/admin.conf"
+
+
+sshpass -p root ssh root@10.1.0.50 \
+  kubectl config set-cluster yardstick --server=127.0.0.1:8080 --insecure-skip-tls-verify=true --kubeconfig=/etc/yardstick/admin.conf
+sshpass -p root ssh root@10.1.0.50 \
+  kubectl config set-context yardstick --cluster=yardstick --kubeconfig=/etc/yardstick/admin.conf
+sshpass -p root ssh root@10.1.0.50 \
+  kubectl config use-context yardstick --kubeconfig=/etc/yardstick/admin.conf 
+
+
+
+if [ ! -n "$redirect" ]; then
+  redirect="/dev/stdout"
+fi
+
+if [ ! -n "$DOCKER_TAG" ]; then
+  DOCKER_TAG='latest'
+fi
+
+if [ ! -n "$NODE_NAME" ]; then
+  NODE_NAME='arm-virtual03'
+fi
+
+if [ ! -n "$DEPLOY_SCENARIO" ]; then
+  DEPLOY_SCENARIO='k8-nosdn-lb-noha_daily'
+fi
+
+if [ ! -n "$YARDSTICK_DB_BACKEND" ]; then
+  YARDSTICK_DB_BACKEND='-i 104.197.68.199:8086'
+fi
+
+# Pull the image with correct tag
+DOCKER_REPO='opnfv/yardstick'
+if [ "$(uname -m)" = 'aarch64' ]; then
+    DOCKER_REPO="${DOCKER_REPO}_$(uname -m)"
+fi
+echo "Yardstick: Pulling image ${DOCKER_REPO}:${DOCKER_TAG}"
+sshpass -p root ssh root@10.1.0.50 \
+  docker pull ${DOCKER_REPO}:$DOCKER_TAG >$redirect
+
+if [ ! -n "$BRANCH" ]; then
+  BRANCH=master
+fi
+
+opts="--name=yardstick --privileged=true --net=host -d -it "
+envs="-e YARDSTICK_BRANCH=${BRANCH} -e BRANCH=${BRANCH} \
+  -e NODE_NAME=${NODE_NAME} \
+  -e DEPLOY_SCENARIO=${DEPLOY_SCENARIO}"
+rc_file_vol="-v /etc/yardstick/admin.conf:/etc/yardstick/admin.conf"
+cacert_file_vol=""
+map_log_dir=""
+sshkey=""
+YARDSTICK_SCENARIO_SUITE_NAME="opnfv_k8-nosdn-lb-noha_daily.yaml"
+
+# map log directory
+branch=${BRANCH##*/}
+#branch="master"
+dir_result="${HOME}/opnfv/yardstick/results/${branch}"
+mkdir -p ${dir_result}
+sudo rm -rf ${dir_result}/*
+map_log_dir="-v ${dir_result}:/tmp/yardstick"
+
+# Run docker
+cmd="docker rm -f yardstick || true"
+sshpass -p root ssh root@10.1.0.50 \
+  ${cmd}
+
+cmd="sudo docker run ${opts} ${envs} ${rc_file_vol} ${cacert_file_vol} ${map_log_dir} ${sshkey} ${DOCKER_REPO}:${DOCKER_TAG} /bin/bash"
+echo "Yardstick: Running docker cmd: ${cmd}"
+sshpass -p root ssh root@10.1.0.50 \
+  ${cmd}
+
+
+cmd='sudo docker exec yardstick sed -i.bak "/# execute tests/i\sed -i.bak \"s/openretriever\\\/yardstick/openretriever\\\/yardstick_aarch64/g\" \
+    $\{YARDSTICK_REPO_DIR\}/tests/opnfv/test_cases/opnfv_yardstick_tc080.yaml" /usr/local/bin/exec_tests.sh'
+sshpass -p root ssh root@10.1.0.50 \
+  ${cmd}
+
+echo "Yardstick: run tests: ${YARDSTICK_SCENARIO_SUITE_NAME}"
+cmd="sudo docker exec yardstick exec_tests.sh ${YARDSTICK_DB_BACKEND} ${YARDSTICK_SCENARIO_SUITE_NAME}"
+sshpass -p root ssh root@10.1.0.50 \
+  ${cmd}
+
+cmd="docker rm -f yardstick"
+sshpass -p root ssh root@10.1.0.50 \
+  ${cmd}
+
+echo "Yardstick: done!"
diff --git a/jjb/container4nfv/container4nfv-arm64.yml b/jjb/container4nfv/container4nfv-arm64.yml
new file mode 100644 (file)
index 0000000..16a10ea
--- /dev/null
@@ -0,0 +1,52 @@
+---
+
+- project:
+    name: 'container4nfv-arm64'
+    project: 'container4nfv'
+    installer: 'compass'
+    scenario:
+      - 'k8-multus-nofeature-noha':
+          disabled: false
+      - 'k8-sriov-nofeature-noha':
+          disabled: false
+    jobs:
+      - 'container4nfv-{scenario}-virtual-daily-master'
+
+
+- job-template:
+    name: 'container4nfv-{scenario}-virtual-daily-master'
+    disabled: '{obj:disabled}'
+    concurrent: false
+    node: arm-packet01
+
+    scm:
+      - git:
+          url: https://gerrit.opnfv.org/gerrit/compass4nfv
+          branches:
+            - origin/master
+          basedir: compass4nfv
+          wipe-workspace: true
+
+    triggers:
+      - 'trigger-{scenario}-virtual'
+
+    wrappers:
+      - timeout:
+          timeout: 120
+          fail: true
+
+    builders:
+      - shell:
+          !include-raw: arm64/compass-build.sh
+      - shell:
+          !include-raw: arm64/compass-deploy.sh
+
+
+- trigger:
+    name: 'trigger-k8-multus-nofeature-noha-virtual'
+    triggers:
+      - timed: '0 12 * * *'
+- trigger:
+    name: 'trigger-k8-sriov-nofeature-noha-virtual'
+    triggers:
+      - timed: '0 16 * * *'
index 9e2d313..03bbb65 100644 (file)
@@ -10,7 +10,8 @@
 
     jobs:
       - 'container4nfv-verify-{stream}'
-      - 'container4nfv-daily-{stream}'
+      - 'container4nfv-daily-upload-{stream}'
+      - 'container4nfv-daily-deploy-{stream}'
 
     stream:
       - master:
           cd $WORKSPACE/ci
           ./build.sh
 
+- job-template:
+    name: 'container4nfv-daily-upload-{stream}'
+
+    disabled: '{obj:disabled}'
+
+    concurrent: false
+
+    scm:
+      - git-scm
+
+    wrappers:
+      - fix-workspace-permissions
+
+    parameters:
+      - project-parameter:
+          project: '{project}'
+          branch: '{branch}'
+      - 'opnfv-build-ubuntu-defaults'
+      - 'container4nfv-defaults':
+          gs-pathname: '{gs-pathname}'
+
+    builders:
+      - shell: |
+          cd $WORKSPACE/ci
+          ./upload.sh
 
 - job-template:
-    name: 'container4nfv-daily-{stream}'
+    name: 'container4nfv-daily-deploy-{stream}'
 
     project-type: freestyle
 
     properties:
       - logrotate-default
 
+    wrappers:
+      - fix-workspace-permissions
+
     parameters:
       - project-parameter:
           project: '{project}'
       - shell: |
           cd $WORKSPACE/ci
           ./deploy.sh
+
+###################
+# parameter macros
+###################
+- parameter:
+    name: 'container4nfv-defaults'
+    parameters:
+      - string:
+          name: GS_URL
+          default: artifacts.opnfv.org/$PROJECT{gs-pathname}
+          description: "URL to Google Storage."
diff --git a/jjb/container4nfv/yardstick-arm64.yml b/jjb/container4nfv/yardstick-arm64.yml
new file mode 100644 (file)
index 0000000..bd1d8aa
--- /dev/null
@@ -0,0 +1,121 @@
+---
+###################################
+# job configuration for yardstick
+###################################
+- project:
+    name: yardstick-arm64
+
+    project: '{name}'
+
+    # -------------------------------
+    # BRANCH ANCHORS
+    # -------------------------------
+    master: &master
+      stream: master
+      branch: '{stream}'
+      gs-pathname: ''
+      docker-tag: 'latest'
+    # -------------------------------
+    # POD, INSTALLER, AND BRANCH MAPPING
+    # -------------------------------
+    #    Installers using labels
+    #            CI PODs
+    # This section should only contain the installers
+    # that have been switched using labels for slaves
+    # -------------------------------
+    pod:
+      # apex CI PODs
+      - arm-virtual03:
+          slave-label: arm-packet01
+          installer: compass
+          auto-trigger-name: 'daily-trigger-disabled'
+          <<: *master
+    # -------------------------------
+    testsuite:
+      - 'daily'
+
+    jobs:
+      - 'yardstick-arm64-{installer}-{pod}-{testsuite}-{stream}'
+
+################################
+# job templates
+################################
+- job-template:
+    name: 'yardstick-arm64-{installer}-{pod}-{testsuite}-{stream}'
+    disabled: false
+
+    concurrent: true
+
+    properties:
+      - logrotate-default
+      - throttle:
+          enabled: true
+          max-per-node: 1
+          option: 'project'
+
+    wrappers:
+      - build-name:
+          name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
+      - timeout:
+          timeout: 60
+          abort: true
+
+    triggers:
+      - '{auto-trigger-name}'
+
+    parameters:
+      - project-parameter:
+          project: '{project}'
+          branch: '{branch}'
+      - '{installer}-defaults'
+      - 'yardstick-params-{slave-label}'
+      - string:
+          name: DEPLOY_SCENARIO
+          default: 'k8-nosdn-lb-noha_daily'
+      - string:
+          name: DOCKER_TAG
+          default: '{docker-tag}'
+          description: 'Tag to pull docker image'
+      - string:
+          name: YARDSTICK_SCENARIO_SUITE_NAME
+          default: opnfv_${{DEPLOY_SCENARIO}}_{testsuite}.yaml
+          description: 'Path to test scenario suite'
+      - string:
+          name: CI_DEBUG
+          default: 'false'
+          description: "Show debug output information"
+
+    scm:
+      - git-scm
+
+    builders:
+      - description-setter:
+          description: "POD: $NODE_NAME"
+      - 'yardstick-arm64'
+
+    publishers:
+      - email:
+          recipients: trevor.tao@arm.com yibo.cai@arm.com
+      - email-jenkins-admins-on-failure
+
+########################
+# builder macros
+########################
+- builder:
+    name: yardstick-arm64
+    builders:
+      - shell:
+          !include-raw: arm64/yardstick-arm64.sh
+
+########################
+# parameter macros
+########################
+
+
+- parameter:
+    name: 'yardstick-params-arm-packet01'
+    parameters:
+      - string:
+          name: YARDSTICK_DB_BACKEND
+          default: '-i 104.197.68.199:8086'
+          description: 'Arguments to use in order to choose the backend DB'
index d9ac0b8..620aefd 100644 (file)
@@ -1,69 +1,8 @@
 ---
-###################################################
-# All the jobs except verify have been removed!
-# They will only be enabled on request by projects!
-###################################################
 - project:
     name: copper
 
     project: '{name}'
 
     jobs:
-      - 'copper-verify-{stream}'
-
-    stream:
-      - master:
-          branch: '{stream}'
-          gs-pathname: ''
-          disabled: false
-      - euphrates:
-          branch: 'stable/{stream}'
-          gs-pathname: '/{stream}'
-          disabled: false
-
-- job-template:
-    name: 'copper-verify-{stream}'
-
-    disabled: '{obj:disabled}'
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - 'opnfv-build-ubuntu-defaults'
-
-    scm:
-      - git-scm-gerrit
-
-    triggers:
-      - gerrit:
-          server-name: 'gerrit.opnfv.org'
-          trigger-on:
-            - patchset-created-event:
-                exclude-drafts: 'false'
-                exclude-trivial-rebase: 'false'
-                exclude-no-code-change: 'false'
-            - draft-published-event
-            - comment-added-contains-event:
-                comment-contains-value: 'recheck'
-            - comment-added-contains-event:
-                comment-contains-value: 'reverify'
-          projects:
-            - project-compare-type: 'ANT'
-              project-pattern: '{project}'
-              branches:
-                - branch-compare-type: 'ANT'
-                  branch-pattern: '**/{branch}'
-              disable-strict-forbidden-file-verification: 'true'
-              forbidden-file-paths:
-                - compare-type: ANT
-                  pattern: 'docs/**|.gitignore'
-
-    builders:
-      - shell: |
-          #!/bin/bash
-          set -o errexit
-          set -o nounset
-          set -o pipefail
-
-         # shellcheck -f tty tests/*.sh
+      - '{project}-verify-basic'
index 090d2e1..6f4643a 100644 (file)
@@ -50,7 +50,9 @@
       - baremetal:
           slave-label: zte-pod3
           <<: *master
-
+      - zte-pod9:
+          slave-label: zte-pod9
+          <<: *master
     # -------------------------------
     #        None-CI PODs
     # -------------------------------
@@ -68,6 +70,9 @@
       # ODL_L3 scenarios
       - 'os-odl-nofeature-ha':
           auto-trigger-name: 'daisy-{scenario}-{pod}-daily-{stream}-trigger'
+      # ovs_dpdk scenarios
+      - 'os-nosdn-ovs_dpdk-noha':
+          auto-trigger-name: 'daisy-{scenario}-{pod}-daily-{stream}-trigger'
 
     jobs:
       - '{project}-{scenario}-{pod}-daily-{stream}'
       - trigger-builds:
           - project: 'daisy-deploy-{pod}-daily-{stream}'
             current-parameters: false
-            predefined-parameters:
+            predefined-parameters: |
               DEPLOY_SCENARIO={scenario}
+              INSTALLER_VERSION={stream}
+              UPSTREAM_JOB_NAME=$JOB_NAME
+              UPSTREAM_BUILD_ID=$BUILD_ID
             same-node: true
             block: true
       - trigger-builds:
           - project: 'functest-daisy-{pod}-daily-{stream}'
             current-parameters: false
-            predefined-parameters:
+            predefined-parameters: |
               DEPLOY_SCENARIO={scenario}
+              INSTALLER_VERSION={stream}
+              UPSTREAM_JOB_NAME=$JOB_NAME
+              UPSTREAM_BUILD_ID=$BUILD_ID
             same-node: true
             block: true
             block-thresholds:
             - trigger-builds:
                 - project: 'yardstick-daisy-{pod}-daily-{stream}'
                   current-parameters: false
-                  predefined-parameters:
+                  predefined-parameters: |
                     DEPLOY_SCENARIO={scenario}
+                    INSTALLER_VERSION={stream}
+                    UPSTREAM_JOB_NAME=$JOB_NAME
+                    UPSTREAM_BUILD_ID=$BUILD_ID
                   block: true
                   same-node: true
                   block-thresholds:
     builders:
       - description-setter:
           description: "POD: $NODE_NAME"
+      - 'track-begin-timestamp'
       - shell:
           !include-raw-escape: ./daisy4nfv-download-artifact.sh
       - shell:
           !include-raw-escape: ./daisy-deploy.sh
+    publishers:
+      - 'report-provision-result'
 
 ########################
 # trigger macros
 - trigger:
     name: 'daisy-os-nosdn-nofeature-ha-baremetal-daily-master-trigger'
     triggers:
-      - timed: '0 18 * * *'
+      - timed: '0 12 * * *'
 # Basic NOHA Scenarios
 - trigger:
     name: 'daisy-os-nosdn-nofeature-noha-baremetal-daily-master-trigger'
 - trigger:
     name: 'daisy-os-odl-nofeature-ha-baremetal-daily-master-trigger'
     triggers:
-      - timed: '0 12 * * *'
+      - timed: '0 18 * * *'
+# ovs_dpdk Scenarios
+- trigger:
+    name: 'daisy-os-nosdn-ovs_dpdk-noha-baremetal-daily-master-trigger'
+    triggers:
+      - timed: ''
 
 # ----------------------------------------------
 # Triggers for job running on daisy-virtual against master branch
     name: 'daisy-os-odl-nofeature-ha-virtual-daily-master-trigger'
     triggers:
       - timed: '0 12 * * *'
+# ovs_dpdk Scenarios
+- trigger:
+    name: 'daisy-os-nosdn-ovs_dpdk-noha-virtual-daily-master-trigger'
+    triggers:
+      - timed: ''
 
 # ----------------------------------------------
 # Triggers for job running on daisy-baremetal against euphrates branch
     name: 'daisy-os-odl-nofeature-ha-baremetal-daily-euphrates-trigger'
     triggers:
       - timed: '0 20 * * *'
+# ovs_dpdk Scenarios
+- trigger:
+    name: 'daisy-os-nosdn-ovs_dpdk-noha-baremetal-daily-euphrates-trigger'
+    triggers:
+      - timed: ''
 
 # ----------------------------------------------
 # Triggers for job running on daisy-virtual against euphrates branch
     name: 'daisy-os-odl-nofeature-ha-virtual-daily-euphrates-trigger'
     triggers:
       - timed: '0 20 * * *'
+# ovs_dpdk Scenarios
+- trigger:
+    name: 'daisy-os-nosdn-ovs_dpdk-noha-virtual-daily-euphrates-trigger'
+    triggers:
+      - timed: ''
+
+# ----------------------------------------------
+# ZTE POD9 Triggers running against master branch
+# ----------------------------------------------
+# ovs_dpdk Scenarios
+- trigger:
+    name: 'daisy-os-nosdn-ovs_dpdk-noha-zte-pod9-daily-master-trigger'
+    triggers:
+      - timed: '0 10 * * *'
+# Basic HA Scenarios
+- trigger:
+    name: 'daisy-os-nosdn-nofeature-ha-zte-pod9-daily-master-trigger'
+    triggers:
+      - timed: ''
+# Basic NOHA Scenarios
+- trigger:
+    name: 'daisy-os-nosdn-nofeature-noha-zte-pod9-daily-master-trigger'
+    triggers:
+      - timed: ''
+# ODL Scenarios
+- trigger:
+    name: 'daisy-os-odl-nofeature-ha-zte-pod9-daily-master-trigger'
+    triggers:
+      - timed: ''
index 803ff5b..f729c38 100755 (executable)
@@ -19,14 +19,22 @@ fi
 
 # clone the securedlab repo
 cd $WORKSPACE
-SECURELAB_DIR=/var/tmp/opnfv-securedlab
 
-echo "Cloning securedlab repo $BRANCH to $SECURELAB_DIR"
-rm -rf $SECURELAB_DIR
-git clone ssh://jenkins-zte@gerrit.opnfv.org:29418/securedlab --quiet \
-    --branch $BRANCH $SECURELAB_DIR
+# There are no PDFs in euphrates branch of pharos repo.
+if [[  "$BRANCH" =~ "euphrates" ]]; then
+    CONFIG_REPO_NAME=securedlab
+else
+    CONFIG_REPO_NAME=pharos
+fi
+
+LABS_DIR=/var/tmp/opnfv-${CONFIG_REPO_NAME}
+
+echo "Cloning ${CONFIG_REPO_NAME} repo $BRANCH to $LABS_DIR"
+sudo rm -rf $LABS_DIR
+git clone ssh://jenkins-zte@gerrit.opnfv.org:29418/${CONFIG_REPO_NAME} \
+    --quiet --branch $BRANCH $LABS_DIR
 
-DEPLOY_COMMAND="sudo -E ./ci/deploy/deploy.sh -L $SECURELAB_DIR \
+DEPLOY_COMMAND="sudo -E ./ci/deploy/deploy.sh -L $LABS_DIR \
                 -l $LAB_NAME -p $POD_NAME -B $BRIDGE -s $DEPLOY_SCENARIO"
 
 # log info to console
index 9a1e2fc..0441ea1 100755 (executable)
@@ -14,35 +14,9 @@ set -o errexit
 set -o nounset
 set -o pipefail
 
-importkey () {
-    # clone releng repository
-    echo "Cloning releng repository..."
-    [ -d releng ] && rm -rf releng
-    git clone https://gerrit.opnfv.org/gerrit/releng ./releng/ &> /dev/null
-    #this is where we import the siging key
-    if [ -f ./releng/utils/gpg_import_key.sh ]; then
-        source ./releng/utils/gpg_import_key.sh
-    fi
-}
-
 upload_image_to_opnfv () {
     image=$1
 
-    importkey
-    if gpg2 --list-keys | grep "opnfv-helpdesk@rt.linuxfoundation.org"; then
-        echo "Signing Key avaliable"
-        SIGN_ARTIFACT="true"
-    fi
-
-    if [[ -n "$SIGN_ARTIFACT" && "$SIGN_ARTIFACT" == "true" ]]; then
-        gpg2 -vvv --batch --yes --no-tty \
-            --default-key opnfv-helpdesk@rt.linuxfoundation.org  \
-            --passphrase besteffort \
-            --detach-sig $image
-        gsutil cp $image.sig gs://$GS_URL/upstream/$image.sig
-        echo "Image signature upload complete!"
-    fi
-
     sha512sum -b $image > $image.sha512sum
     gsutil cp $image.sha512sum gs://$GS_URL/upstream/$image.sha512sum
 
index a64c80e..ae5ca38 100755 (executable)
@@ -68,7 +68,15 @@ else
     DOWNLOAD_CMD="curl -L -s -o $WORKSPACE/opnfv.bin"
 fi
 
-$DOWNLOAD_CMD http://$OPNFV_ARTIFACT_URL > gsutil.bin.log 2>&1
+maxretries=3
+cnt=0
+rc=1
+while [ $cnt -lt $maxretries ] && [ $rc -ne 0 ]
+do
+    cnt=$[cnt + 1]
+    $DOWNLOAD_CMD http://$OPNFV_ARTIFACT_URL > gsutil.bin.log 2>&1
+    rc=$?
+done
 
 # list the file
 ls -al $WORKSPACE/opnfv.bin
index e28f744..d8f43c9 100644 (file)
           disabled: false
 
     installer:
-      - apex:
-          slave-label: 'doctor-apex-verify'
-      - fuel:
-          slave-label: 'doctor-fuel-verify'
-    # - joid:
-    #     slave-label: 'ool-virtual3'
-    #     pod: 'ool-virtual3'
+      - 'apex'
+      - 'fuel'
+    # - 'joid'
 
-    phase:
-      - 'build-x86_64':
-          slave-label: 'opnfv-build-ubuntu'
-      - 'build-aarch64':
-          slave-label: 'opnfv-build-ubuntu-arm'
+    arch:
+      - 'x86_64'
+      - 'aarch64'
 
     inspector:
       - 'sample'
     task:
       - verify:
           auto-trigger-name: 'doctor-verify'
-          is-python: false
-      - python-verify:
-          auto-trigger-name: 'doctor-verify'
-          is-python: true
 
     exclude:
       - installer: 'apex'
-        phase: 'build-aarch64'
+        arch: 'aarch64'
 
     jobs:
       - 'doctor-verify-{stream}'
       - 'doctor-verify-unit-test-{stream}'
-      - 'doctor-{task}-{installer}-{inspector}-{stream}'
-      - 'doctor-{task}-{installer}-{inspector}-{phase}-{stream}'
+      - 'doctor-{task}-{inspector}-{stream}'
+      - 'doctor-{task}-{installer}-{inspector}-{arch}-{stream}'
 
 - job-template:
     name: 'doctor-verify-{stream}'
-
     disabled: '{obj:disabled}'
-
     project-type: 'multijob'
-
     parameters:
       - project-parameter:
           project: '{project}'
           branch: '{branch}'
       - 'opnfv-build-ubuntu-defaults'
-
     scm:
       - git-scm-gerrit
-
     triggers:
       - gerrit:
           server-name: 'gerrit.opnfv.org'
@@ -91,7 +76,6 @@
               file-paths:
                 - compare-type: ANT
                   pattern: 'doctor_tests/**'
-
     builders:
       - shell: |
          #!/bin/bash
           execution-type: PARALLEL
           projects:
             - name: 'doctor-verify-unit-test-{stream}'
-              current-parameters: false
               predefined-parameters: |
                 GERRIT_BRANCH=$GERRIT_BRANCH
                 GERRIT_REFSPEC=$GERRIT_REFSPEC
                 GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
                 GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-              git-revision: true
-              node-parameters: false
               kill-phase-on: FAILURE
-              abort-all-job: false
 
 - job-template:
     name: 'doctor-verify-unit-test-{stream}'
     publishers:
       - 'doctor-verify-unit-test-publishers-macro'
 
-
 - job-template:
-    name: 'doctor-{task}-{installer}-{inspector}-{stream}'
-
+    name: 'doctor-{task}-{inspector}-{stream}'
     disabled: '{obj:disabled}'
-
     project-type: 'multijob'
+    scm:
+      - git-scm-gerrit
+    triggers:
+      - '{auto-trigger-name}':
+          project: '{project}'
+          branch: '{branch}'
+          files: 'doctor_tests/**'
+    builders:
+      - shell: |
+         #!/bin/bash
+         # we do nothing here as the main stuff will be done
+         # in phase jobs
+         echo "Triggering phase jobs!"
+      - multijob:
+          name: 'doctor-verify-apex-inspector'
+          execution-type: PARALLEL
+          projects:
+            - name: 'doctor-{task}-apex-{inspector}-x86_64-{stream}'
+              predefined-parameters: |
+                GERRIT_BRANCH=$GERRIT_BRANCH
+                GERRIT_REFSPEC=$GERRIT_REFSPEC
+                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+                GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+              kill-phase-on: FAILURE
+      - multijob:
+          name: 'doctor-verify-fuel-inspector'
+          execution-type: PARALLEL
+          projects:
+            - name: 'doctor-{task}-fuel-{inspector}-x86_64-{stream}'
+              predefined-parameters: |
+                GERRIT_BRANCH=$GERRIT_BRANCH
+                GERRIT_REFSPEC=$GERRIT_REFSPEC
+                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+                GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+              kill-phase-on: FAILURE
+      - multijob:
+          name: 'doctor-verify-fuel-inspector'
+          execution-type: PARALLEL
+          projects:
+            - name: 'doctor-{task}-fuel-{inspector}-aarch64-{stream}'
+              predefined-parameters: |
+                GERRIT_BRANCH=$GERRIT_BRANCH
+                GERRIT_REFSPEC=$GERRIT_REFSPEC
+                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+                GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+              kill-phase-on: FAILURE
 
-    node: '{slave-label}'
-
+- job-template:
+    name: 'doctor-{task}-{installer}-{inspector}-{arch}-{stream}'
+    disabled: '{obj:disabled}'
+    node: 'doctor-{installer}-{arch}'
+    wrappers:
+      - ssh-agent-wrapper
+      - build-timeout:
+          timeout: 30
     parameters:
       - project-parameter:
           project: '{project}'
           branch: '{branch}'
+      - '{installer}-defaults'
+      - 'doctor-slave-parameter'
+      - 'doctor-parameter'
+      - 'doctor-functest-parameter'
+    scm:
+      - git-scm-gerrit
+    builders:
+      - 'doctor-verify-installer-inspector-builders-macro'
+    publishers:
+      - 'doctor-verify-publishers-macro'
+
+
+# -------------------------------
+# parameter macros
+# -------------------------------
+- parameter:
+    name: 'doctor-parameter'
+    parameters:
       - string:
           name: OS_CREDS
           default: /home/jenkins/openstack.creds
           description: 'OpenStack credentials'
-      - '{slave-label}-defaults'
-      - '{installer}-defaults'
       - string:
           name: DOCKER_TAG
           default: '{docker-tag}'
           name: DEPLOY_SCENARIO
           default: 'os-nosdn-nofeature-ha'
           description: 'Scenario to deploy and test'
+
+- parameter:
+    name: 'doctor-functest-parameter'
+    parameters:
       # functest-suite-parameter
       - string:
           name: FUNCTEST_MODE
       - string:
           name: TESTCASE_OPTIONS
           # yamllint disable rule:line-length
-          default: '-e INSPECTOR_TYPE={inspector} -e PYTHON_ENABLE={is-python} -v $WORKSPACE:/home/opnfv/repos/doctor'
+          default: '-e INSPECTOR_TYPE={inspector} -v $WORKSPACE:/home/opnfv/repos/doctor'
           # yamllint enable rule:line-length
           description: 'Addtional parameters specific to test case(s)'
       # functest-parameter
           name: CI_DEBUG
           default: 'true'
           description: "Show debug output information"
-
-    scm:
-      - git-scm-gerrit
-
-    triggers:
-      - '{auto-trigger-name}':
-          project: '{project}'
-          branch: '{branch}'
-          files: 'doctor_tests/**'
-
-    builders:
-      - shell: |
-         #!/bin/bash
-         # we do nothing here as the main stuff will be done
-         # in phase jobs
-         echo "Triggering phase jobs!"
-      - multijob:
-          name: 'doctor-verify-installer-inspector'
-          execution-type: PARALLEL
-          projects:
-            - name: 'doctor-{task}-{installer}-{inspector}-build-x86_64-{stream}'
-              current-parameters: false
-              git-revision: true
-              node-parameters: false
-              kill-phase-on: FAILURE
-              abort-all-job: false
-            - name: 'doctor-{task}-{installer}-{inspector}-build-aarch64-{stream}'
-              current-parameters: false
-              git-revision: true
-              node-parameters: false
-              kill-phase-on: FAILURE
-              abort-all-job: false
-
-- job-template:
-    name: 'doctor-{task}-{installer}-{inspector}-{phase}-{stream}'
-    disabled: '{obj:disabled}'
-    wrappers:
-      - ssh-agent-wrapper
-      - build-timeout:
-          timeout: 30
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - '{slave-label}-defaults'
-    scm:
-      - git-scm-gerrit
-    builders:
-      - 'doctor-verify-installer-inspector-builders-macro'
-    publishers:
-      - 'doctor-verify-publishers-macro'
 # -------------------------------
 # builder macros
 # -------------------------------
 - builder:
     name: 'doctor-verify-unit-test-builders-macro'
     builders:
-      - shell: "[ -e tests/run.sh ] && bash -n ./tests/run.sh"
+      - shell: "tox -e pep8"
+
 - builder:
     name: 'doctor-verify-installer-inspector-builders-macro'
     builders:
       - archive:
           artifacts: 'functest_results/$FUNCTEST_SUITE_NAME.log'
       - email-jenkins-admins-on-failure
+
 - publisher:
     name: 'doctor-verify-unit-test-publishers-macro'
     publishers:
       - email-jenkins-admins-on-failure
+      - archive:
+          artifacts: '.tox/'
+
 
 #####################################
 # trigger macros
index 6efe128..5e5b6e1 100644 (file)
       dovetail-branch: '{stream}'
       gs-pathname: ''
       docker-tag: 'latest'
-    danube: &danube
-      stream: danube
+    euphrates: &euphrates
+      stream: euphrates
       branch: 'stable/{stream}'
       dovetail-branch: master
       gs-pathname: '/{stream}'
-      docker-tag: 'cvp.0.8.0'
+      docker-tag: 'latest'
 
     # ----------------------------------
     # POD, PLATFORM, AND BRANCH MAPPING
           slave-label: fuel-baremetal
           SUT: fuel
           auto-trigger-name: 'daily-trigger-disabled'
-          <<: *danube
+          <<: *euphrates
       - virtual:
           slave-label: fuel-virtual
           SUT: fuel
           auto-trigger-name: 'daily-trigger-disabled'
-          <<: *danube
+          <<: *euphrates
       # compass CI PODs
       - baremetal:
           slave-label: compass-baremetal
           slave-label: compass-baremetal
           SUT: compass
           auto-trigger-name: 'daily-trigger-disabled'
-          <<: *danube
+          <<: *euphrates
       - virtual:
           slave-label: compass-virtual
           SUT: compass
           auto-trigger-name: 'daily-trigger-disabled'
-          <<: *danube
+          <<: *euphrates
       # -------------------------------
       #    Installers not using labels
       #            CI PODs
           auto-trigger-name: 'daily-trigger-disabled'
           <<: *master
       - virtual:
-          slave-label: apex-virtual-danube
+          slave-label: apex-virtual-master
           SUT: apex
           auto-trigger-name: 'daily-trigger-disabled'
-          <<: *danube
+          <<: *euphrates
       - baremetal:
-          slave-label: apex-baremetal-danube
+          slave-label: apex-baremetal-master
           SUT: apex
           auto-trigger-name: 'daily-trigger-disabled'
-          <<: *danube
+          <<: *euphrates
       # armband CI PODs
       - armband-baremetal:
           slave-label: armband-baremetal
           slave-label: armband-baremetal
           SUT: fuel
           auto-trigger-name: 'daily-trigger-disabled'
-          <<: *danube
+          <<: *euphrates
       - armband-virtual:
           slave-label: armband-virtual
           SUT: fuel
           auto-trigger-name: 'daily-trigger-disabled'
-          <<: *danube
+          <<: *euphrates
       # -------------------------------
       #        None-CI PODs
       # -------------------------------
           slave-label: zte-pod1
           SUT: fuel
           auto-trigger-name: 'daily-trigger-disabled'
-          <<: *danube
+          <<: *euphrates
       - zte-pod3:
           slave-label: zte-pod3
           SUT: fuel
           auto-trigger-name: 'daily-trigger-disabled'
-          <<: *danube
+          <<: *euphrates
       - huawei-pod4:
           slave-label: huawei-pod4
           SUT: apex
           auto-trigger-name: 'daily-trigger-disabled'
-          <<: *danube
+          <<: *euphrates
       - huawei-pod7:
           slave-label: huawei-pod7
           SUT: compass
           auto-trigger-name: 'daily-trigger-disabled'
-          <<: *danube
+          <<: *euphrates
 
     # -------------------------------
     testsuite:
-      - 'compliance_set'
+      - 'default'
       - 'proposed_tests'
 
     jobs:
index c38ec96..1accffc 100644 (file)
       - master:
           branch: '{stream}'
           disabled: false
+      - danube:
+          branch: 'stable/{stream}'
+          gs-pathname: '/{stream}'
+          disabled: false
 
 ################################
 # job templates
index 2cbb947..451662a 100755 (executable)
@@ -13,6 +13,9 @@
 set -e
 [[ $CI_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
 
+DEPLOY_TYPE=baremetal
+[[ $BUILD_TAG =~ "virtual" ]] && DEPLOY_TYPE=virt
+
 DOVETAIL_HOME=${WORKSPACE}/cvp
 [ -d ${DOVETAIL_HOME} ] && sudo rm -rf ${DOVETAIL_HOME}
 
@@ -21,9 +24,12 @@ mkdir -p ${DOVETAIL_HOME}
 DOVETAIL_CONFIG=${DOVETAIL_HOME}/pre_config
 mkdir -p ${DOVETAIL_CONFIG}
 
+ssh_options="-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
+
 sshkey=""
 # The path of openrc.sh is defined in fetch_os_creds.sh
 OPENRC=${DOVETAIL_CONFIG}/env_config.sh
+CACERT=${DOVETAIL_CONFIG}/os_cacert
 if [[ ${INSTALLER_TYPE} == 'apex' ]]; then
     instack_mac=$(sudo virsh domiflist undercloud | grep default | \
                   grep -Eo "[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+")
@@ -56,50 +62,93 @@ if [[ ${INSTALLER_TYPE} != 'joid' ]]; then
     echo "dovetail branch is $BRANCH"
     BRANCH_BACKUP=$BRANCH
     export BRANCH=$SUT_BRANCH
-    ${releng_repo}/utils/fetch_os_creds.sh -d ${OPENRC} -i ${INSTALLER_TYPE} -a ${INSTALLER_IP} >${redirect}
+    ${releng_repo}/utils/fetch_os_creds.sh -d ${OPENRC} -i ${INSTALLER_TYPE} -a ${INSTALLER_IP} -o ${CACERT} >${redirect}
     export BRANCH=$BRANCH_BACKUP
 fi
 
 if [[ -f $OPENRC ]]; then
     echo "INFO: openstack credentials path is $OPENRC"
-    cat $OPENRC
+    if [[ ! "${SUT_BRANCH}" =~ "danube" && ${INSTALLER_TYPE} == "compass" ]]; then
+        if [[ -f ${CACERT} ]]; then
+            echo "INFO: ${INSTALLER_TYPE} openstack cacert file is ${CACERT}"
+            echo "export OS_CACERT=${CACERT}" >> ${OPENRC}
+        else
+            echo "ERROR: Can't find ${INSTALLER_TYPE} openstack cacert file. Please check if it is existing."
+            sudo ls -al ${DOVETAIL_CONFIG}
+            exit 1
+        fi
+    fi
 else
     echo "ERROR: cannot find file $OPENRC. Please check if it is existing."
     sudo ls -al ${DOVETAIL_CONFIG}
     exit 1
 fi
 
-set +e
+if [[ ! "${SUT_BRANCH}" =~ "danube" && ${INSTALLER_TYPE} == "fuel" ]]; then
+    sed -i "s#/etc/ssl/certs/mcp_os_cacert#${CACERT}#g" ${OPENRC}
+fi
+cat $OPENRC
 
-sudo pip install virtualenv
+if [[ ! "${SUT_BRANCH}" =~ "danube" && ${INSTALLER_TYPE} == "compass" ]]; then
+    cat << EOF >${DOVETAIL_CONFIG}/pod.yaml
+nodes:
+- {ip: 10.1.0.52, name: node1, password: root, role: controller, user: root}
+- {ip: 10.1.0.51, name: node2, password: root, role: controller, user: root}
+- {ip: 10.1.0.50, name: node3, password: root, role: controller, user: root}
+- {ip: 10.1.0.54, name: node4, password: root, role: compute, user: root}
+- {ip: 10.1.0.53, name: node5, password: root, role: compute, user: root}
 
-cd ${releng_repo}/modules
-sudo virtualenv venv
-source venv/bin/activate
-sudo pip install -e ./ >/dev/null
-sudo pip install netaddr
+EOF
+fi
 
-if [[ ${INSTALLER_TYPE} == compass ]]; then
-    options="-u root -p root"
-elif [[ ${INSTALLER_TYPE} == fuel ]]; then
-    options="-u root -p r00tme"
-elif [[ ${INSTALLER_TYPE} == apex ]]; then
-    options="-u stack -k /root/.ssh/id_rsa"
-else
-    echo "Don't support to generate pod.yaml on ${INSTALLER_TYPE} currently."
-    echo "HA test cases may not run properly."
+if [[ ! "${SUT_BRANCH}" =~ "danube" && ${INSTALLER_TYPE} == 'fuel' && ${DEPLOY_TYPE} == 'baremetal' ]]; then
+    fuel_ctl_ssh_options="${ssh_options} -i ${SSH_KEY}"
+    ssh_user="ubuntu"
+    fuel_ctl_ip=$(ssh 2>/dev/null ${fuel_ctl_ssh_options} "${ssh_user}@${INSTALLER_IP}" \
+            "sudo salt --out yaml 'ctl*' pillar.get _param:openstack_control_address | \
+                awk '{print \$2; exit}'") &> /dev/null
+    cat << EOF >${DOVETAIL_CONFIG}/pod.yaml
+nodes:
+- {ip: ${fuel_ctl_ip}, name: node1, key_filename: /root/.ssh/id_rsa, role: controller, user: ${ssh_user}}
+
+EOF
 fi
 
-cmd="sudo python ${releng_repo}/utils/create_pod_file.py -t ${INSTALLER_TYPE} \
-     -i ${INSTALLER_IP} ${options} -f ${DOVETAIL_CONFIG}/pod.yaml"
-echo ${cmd}
-${cmd}
+if [[ ! -f ${DOVETAIL_CONFIG}/pod.yaml ]]; then
+    set +e
+
+    sudo pip install virtualenv
+
+    cd ${releng_repo}/modules
+    sudo virtualenv venv
+    source venv/bin/activate
+    sudo pip install -e ./ >/dev/null
+    sudo pip install netaddr
+
+    if [[ ${INSTALLER_TYPE} == compass ]]; then
+        options="-u root -p root"
+    elif [[ ${INSTALLER_TYPE} == fuel ]]; then
+        options="-u root -p r00tme"
+    elif [[ ${INSTALLER_TYPE} == apex ]]; then
+        options="-u stack -k /root/.ssh/id_rsa"
+    elif [[ ${INSTALLER_TYPE} == daisy ]]; then
+        options="-u root -p r00tme"
+    else
+        echo "Don't support to generate pod.yaml on ${INSTALLER_TYPE} currently."
+        echo "HA test cases may not run properly."
+    fi
 
-deactivate
+    cmd="sudo python ${releng_repo}/utils/create_pod_file.py -t ${INSTALLER_TYPE} \
+         -i ${INSTALLER_IP} ${options} -f ${DOVETAIL_CONFIG}/pod.yaml"
+    echo ${cmd}
+    ${cmd}
 
-set -e
+    deactivate
+
+    set -e
 
-cd ${WORKSPACE}
+    cd ${WORKSPACE}
+fi
 
 if [ -f ${DOVETAIL_CONFIG}/pod.yaml ]; then
     echo "file ${DOVETAIL_CONFIG}/pod.yaml:"
@@ -110,11 +159,13 @@ else
     echo "HA test cases may not run properly."
 fi
 
-ssh_options="-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
-
 if [ "$INSTALLER_TYPE" == "fuel" ]; then
-    echo "Fetching id_rsa file from jump_server $INSTALLER_IP..."
-    sshpass -p r00tme sudo scp $ssh_options root@${INSTALLER_IP}:~/.ssh/id_rsa ${DOVETAIL_CONFIG}/id_rsa
+    if [[ "${SUT_BRANCH}" =~ "danube" ]]; then
+        echo "Fetching id_rsa file from jump_server $INSTALLER_IP..."
+        sshpass -p r00tme sudo scp $ssh_options root@${INSTALLER_IP}:~/.ssh/id_rsa ${DOVETAIL_CONFIG}/id_rsa
+    else
+        cp ${SSH_KEY} ${DOVETAIL_CONFIG}/id_rsa
+    fi
 fi
 
 if [ "$INSTALLER_TYPE" == "apex" ]; then
@@ -122,6 +173,12 @@ if [ "$INSTALLER_TYPE" == "apex" ]; then
     sudo scp $ssh_options stack@${INSTALLER_IP}:~/.ssh/id_rsa ${DOVETAIL_CONFIG}/id_rsa
 fi
 
+if [ "$INSTALLER_TYPE" == "daisy" ]; then
+    echo "Fetching id_dsa file from jump_server $INSTALLER_IP..."
+    sshpass -p r00tme sudo scp $ssh_options root@${INSTALLER_IP}:~/.ssh/id_dsa ${DOVETAIL_CONFIG}/id_rsa
+fi
+
+
 image_path=${HOME}/opnfv/dovetail/images
 if [[ ! -d ${image_path} ]]; then
     mkdir -p ${image_path}
@@ -149,20 +206,26 @@ docker_volume="-v /var/run/docker.sock:/var/run/docker.sock"
 dovetail_home_volume="-v ${DOVETAIL_HOME}:${DOVETAIL_HOME}"
 
 # Pull the image with correct tag
-echo "Dovetail: Pulling image opnfv/dovetail:${DOCKER_TAG}"
-docker pull opnfv/dovetail:$DOCKER_TAG >$redirect
+DOCKER_REPO='opnfv/dovetail'
+if [ "$(uname -m)" = 'aarch64' ]; then
+    DOCKER_REPO="${DOCKER_REPO}_$(uname -m)"
+    DOCKER_TAG="latest"
+fi
+
+echo "Dovetail: Pulling image ${DOCKER_REPO}:${DOCKER_TAG}"
+docker pull ${DOCKER_REPO}:$DOCKER_TAG >$redirect
 
 env4bgpvpn="-e INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP}"
 
 cmd="docker run ${opts} -e DOVETAIL_HOME=${DOVETAIL_HOME} ${docker_volume} ${dovetail_home_volume} \
-     ${sshkey} ${env4bgpvpn} opnfv/dovetail:${DOCKER_TAG} /bin/bash"
+     ${sshkey} ${env4bgpvpn} ${DOCKER_REPO}:${DOCKER_TAG} /bin/bash"
 echo "Dovetail: running docker run command: ${cmd}"
 ${cmd} >${redirect}
 sleep 5
-container_id=$(docker ps | grep "opnfv/dovetail:${DOCKER_TAG}" | awk '{print $1}' | head -1)
+container_id=$(docker ps | grep "${DOCKER_REPO}:${DOCKER_TAG}" | awk '{print $1}' | head -1)
 echo "Container ID=${container_id}"
 if [ -z ${container_id} ]; then
-    echo "Cannot find opnfv/dovetail container ID ${container_id}. Please check if it is existing."
+    echo "Cannot find ${DOCKER_REPO} container ID ${container_id}. Please check if it is existing."
     docker ps -a
     exit 1
 fi
@@ -170,11 +233,23 @@ echo "Container Start: docker start ${container_id}"
 docker start ${container_id}
 sleep 5
 docker ps >${redirect}
-if [ $(docker ps | grep "opnfv/dovetail:${DOCKER_TAG}" | wc -l) == 0 ]; then
-    echo "The container opnfv/dovetail with ID=${container_id} has not been properly started. Exiting..."
+if [ $(docker ps | grep "${DOCKER_REPO}:${DOCKER_TAG}" | wc -l) == 0 ]; then
+    echo "The container ${DOCKER_REPO} with ID=${container_id} has not been properly started. Exiting..."
     exit 1
 fi
 
+if [[ ! "${SUT_BRANCH}" =~ "danube" && ${INSTALLER_TYPE} == 'fuel' && ${DEPLOY_TYPE} == 'baremetal' ]]; then
+    source_cmd="source ${OPENRC}"
+    get_public_url_cmd="openstack --insecure endpoint list --service keystone --interface public | sed -n 4p | awk '{print \$14}'"
+    public_url=$(sudo docker exec "$container_id" /bin/bash -c "${source_cmd} && ${get_public_url_cmd}")
+    sed -i 's#OS_AUTH_URL=.*#OS_AUTH_URL='"${public_url}"'#g' ${OPENRC}
+    sed -i 's/internal/public/g' ${OPENRC}
+    if [[ ${public_url} =~ 'v2' ]]; then
+        sed -i "s/OS_IDENTITY_API_VERSION=3/OS_IDENTITY_API_VERSION=2.0/g" ${OPENRC}
+    fi
+    cat ${OPENRC}
+fi
+
 # Modify tempest_conf.yaml file
 tempest_conf_file=${DOVETAIL_CONFIG}/tempest_conf.yaml
 if [[ ${INSTALLER_TYPE} == 'compass' || ${INSTALLER_TYPE} == 'apex' ]]; then
@@ -198,10 +273,13 @@ cp_tempest_cmd="docker cp ${DOVETAIL_CONFIG}/tempest_conf.yaml $container_id:/ho
 echo "exec command: ${cp_tempest_cmd}"
 $cp_tempest_cmd
 
-list_cmd="dovetail list ${TESTSUITE}"
-run_cmd="dovetail run --testsuite ${TESTSUITE} -d"
-echo "Container exec command: ${list_cmd}"
-docker exec $container_id ${list_cmd}
+if [[ ${TESTSUITE} == 'default' ]]; then
+    testsuite=''
+else
+    testsuite="--testsuite ${TESTSUITE}"
+fi
+
+run_cmd="dovetail run ${testsuite} -d"
 echo "Container exec command: ${run_cmd}"
 docker exec $container_id ${run_cmd}
 
index 3501d27..a9a0914 100644 (file)
@@ -1,64 +1,8 @@
 ---
-###################################################
-# All the jobs except verify have been removed!
-# They will only be enabled on request by projects!
-###################################################
 - project:
     name: dpacc
 
     project: '{name}'
 
     jobs:
-      - 'dpacc-verify-{stream}'
-
-    stream:
-      - master:
-          branch: '{stream}'
-          gs-pathname: ''
-          disabled: false
-      - euphrates:
-          branch: 'stable/{stream}'
-          gs-pathname: '/{stream}'
-          disabled: false
-
-- job-template:
-    name: 'dpacc-verify-{stream}'
-
-    disabled: '{obj:disabled}'
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - 'opnfv-build-ubuntu-defaults'
-
-    scm:
-      - git-scm-gerrit
-
-    triggers:
-      - gerrit:
-          server-name: 'gerrit.opnfv.org'
-          trigger-on:
-            - patchset-created-event:
-                exclude-drafts: 'false'
-                exclude-trivial-rebase: 'false'
-                exclude-no-code-change: 'false'
-            - draft-published-event
-            - comment-added-contains-event:
-                comment-contains-value: 'recheck'
-            - comment-added-contains-event:
-                comment-contains-value: 'reverify'
-          projects:
-            - project-compare-type: 'ANT'
-              project-pattern: '{project}'
-              branches:
-                - branch-compare-type: 'ANT'
-                  branch-pattern: '**/{branch}'
-              disable-strict-forbidden-file-verification: 'true'
-              forbidden-file-paths:
-                - compare-type: ANT
-                  pattern: 'docs/**|.gitignore'
-
-    builders:
-      - shell: |
-          echo "Nothing to verify!"
+      - '{project}-verify-basic'
index 5dc8a72..1cb29d0 100644 (file)
       - zte-pod1:
           slave-label: zte-pod1
           <<: *master
-      - zte-pod3:
-          slave-label: zte-pod3
-          <<: *master
-      - zte-pod1:
-          slave-label: zte-pod1
-          <<: *euphrates
-      - zte-pod3:
-          slave-label: zte-pod3
-          <<: *euphrates
-      - zte-pod1:
-          slave-label: zte-pod1
-          <<: *danube
     # -------------------------------
     #       scenarios
     # -------------------------------
@@ -94,6 +82,8 @@
           auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
       - 'os-onos-nofeature-noha':
           auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
+      - 'os-ovn-nofeature-noha':
+          auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
       - 'os-nosdn-kvm-noha':
           auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
       - 'os-nosdn-ovs-noha':
                     build-step-failure-threshold: 'never'
                     failure-threshold: 'never'
                     unstable-threshold: 'FAILURE'
-      # 1.dovetail only has master, based on D release
-      # 2.here the stream means the SUT stream, dovetail stream is defined in its own job
-      # 3.only debug testsuite here(refstack, ha, ipv6, bgpvpn)
-      # 4.not used for release criteria or compliance,
-      #   only to debug the dovetail tool bugs with bgpvpn and nosdn-nofeature
-      # 5.only run against scenario os-odl-bgpvpn-ha(regex used here, can extend to more scenarios future)
-      # 6.ZTE pod1, os-nosdn-nofeature-ha and os-odl-bgpvpn-ha, run against danube
-      - conditional-step:
-          condition-kind: and
-          condition-operands:
-            - condition-kind: regex-match
-              regex: os-(nosdn-nofeature|odl_l2-bgpvpn)-ha
-              label: '{scenario}'
-            - condition-kind: regex-match
-              regex: 'danube'
-              label: '{stream}'
-          steps:
-            - trigger-builds:
-                - project: 'dovetail-fuel-{pod}-proposed_tests-master'
-                  current-parameters: false
-                  predefined-parameters:
-                    DEPLOY_SCENARIO={scenario}
-                  block: true
-                  same-node: true
-                  block-thresholds:
-                    build-step-failure-threshold: 'never'
-                    failure-threshold: 'never'
-                    unstable-threshold: 'FAILURE'
+      # 1.here the stream means the SUT stream, dovetail stream is defined in its own job
+      # 2.only debug testsuite here(refstack, ha, vping, ipv6, tempest, bgpvpn)
+      # 3.not used for release criteria or compliance, only to debug the dovetail tool bugs
+      # 4.ZTE pod1, os-nosdn-nofeature-ha and os-odl-bgpvpn-ha, run against danube
+      - trigger-builds:
+          - project: 'dovetail-fuel-{pod}-proposed_tests-{stream}'
+            current-parameters: false
+            predefined-parameters:
+              DEPLOY_SCENARIO={scenario}
+            block: true
+            same-node: true
+            block-thresholds:
+              build-step-failure-threshold: 'never'
+              failure-threshold: 'never'
+              unstable-threshold: 'FAILURE'
       - conditional-step:
           condition-kind: not
           condition-operand:
     name: 'fuel-os-onos-nofeature-noha-baremetal-daily-master-trigger'
     triggers:
       - timed: ''
+- trigger:
+    name: 'fuel-os-ovn-nofeature-noha-baremetal-daily-master-trigger'
+    triggers:
+      - timed: ''
 - trigger:
     name: 'fuel-os-nosdn-kvm-noha-baremetal-daily-master-trigger'
     triggers:
     name: 'fuel-os-onos-nofeature-noha-baremetal-daily-euphrates-trigger'
     triggers:
       - timed: ''
+- trigger:
+    name: 'fuel-os-ovn-nofeature-noha-baremetal-daily-euphrates-trigger'
+    triggers:
+      - timed: ''
 - trigger:
     name: 'fuel-os-nosdn-kvm-noha-baremetal-daily-euphrates-trigger'
     triggers:
     name: 'fuel-os-onos-nofeature-noha-virtual-daily-master-trigger'
     triggers:
       - timed: ''  # '5 23 * * *'
+- trigger:
+    name: 'fuel-os-ovn-nofeature-noha-virtual-daily-master-trigger'
+    triggers:
+      - timed: '5 23 * * *'
 - trigger:
     name: 'fuel-os-nosdn-kvm-noha-virtual-daily-master-trigger'
     triggers:
     name: 'fuel-os-onos-nofeature-noha-virtual-daily-euphrates-trigger'
     triggers:
       - timed: ''  # '0 23 * * *'
+- trigger:
+    name: 'fuel-os-ovn-nofeature-noha-virtual-daily-euphrates-trigger'
+    triggers:
+      - timed: ''
 - trigger:
     name: 'fuel-os-nosdn-kvm-noha-virtual-daily-euphrates-trigger'
     triggers:
     name: 'fuel-os-onos-nofeature-noha-zte-pod1-daily-master-trigger'
     triggers:
       - timed: ''
+- trigger:
+    name: 'fuel-os-ovn-nofeature-noha-zte-pod1-daily-master-trigger'
+    triggers:
+      - timed: ''
 - trigger:
     name: 'fuel-os-nosdn-kvm-noha-zte-pod1-daily-master-trigger'
     triggers:
     name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-zte-pod1-daily-master-trigger'
     triggers:
       - timed: ''
-# ----------------------------------------------
-# ZTE POD3 Triggers running against master branch
-# ----------------------------------------------
-- trigger:
-    name: 'fuel-os-nosdn-nofeature-ha-zte-pod3-daily-master-trigger'
-    triggers:
-      - timed: ''    # '0 10 * * *'
-- trigger:
-    name: 'fuel-os-odl-nofeature-ha-zte-pod3-daily-master-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-onos-sfc-ha-zte-pod3-daily-master-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-onos-nofeature-ha-zte-pod3-daily-master-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-kvm-ha-zte-pod3-daily-master-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-ovs-ha-zte-pod3-daily-master-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-zte-pod3-daily-master-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-zte-pod3-daily-master-trigger'
-    triggers:
-      - timed: ''
-# NOHA Scenarios
-- trigger:
-    name: 'fuel-os-nosdn-nofeature-noha-zte-pod3-daily-master-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-odl-nofeature-noha-zte-pod3-daily-master-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-onos-sfc-noha-zte-pod3-daily-master-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-onos-nofeature-noha-zte-pod3-daily-master-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-kvm-noha-zte-pod3-daily-master-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-ovs-noha-zte-pod3-daily-master-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-zte-pod3-daily-master-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-zte-pod3-daily-master-trigger'
-    triggers:
-      - timed: ''
-# ----------------------------------------------
-# ZTE POD1 Triggers running against euphrates branch
-# ----------------------------------------------
-- trigger:
-    name: 'fuel-os-nosdn-nofeature-ha-zte-pod1-daily-euphrates-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-odl-nofeature-ha-zte-pod1-daily-euphrates-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-onos-sfc-ha-zte-pod1-daily-euphrates-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-onos-nofeature-ha-zte-pod1-daily-euphrates-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-kvm-ha-zte-pod1-daily-euphrates-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-ovs-ha-zte-pod1-daily-euphrates-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-zte-pod1-daily-euphrates-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-zte-pod1-daily-euphrates-trigger'
-    triggers:
-      - timed: ''
-# NOHA Scenarios
-- trigger:
-    name: 'fuel-os-nosdn-nofeature-noha-zte-pod1-daily-euphrates-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-odl-nofeature-noha-zte-pod1-daily-euphrates-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-onos-sfc-noha-zte-pod1-daily-euphrates-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-onos-nofeature-noha-zte-pod1-daily-euphrates-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-kvm-noha-zte-pod1-daily-euphrates-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-ovs-noha-zte-pod1-daily-euphrates-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-zte-pod1-daily-euphrates-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-zte-pod1-daily-euphrates-trigger'
-    triggers:
-      - timed: ''
-# ----------------------------------------------
-# ZTE POD3 Triggers running against euphrates branch
-# ----------------------------------------------
-- trigger:
-    name: 'fuel-os-nosdn-nofeature-ha-zte-pod3-daily-euphrates-trigger'
-    triggers:
-      - timed: ''  # '0 18 * * *'
-- trigger:
-    name: 'fuel-os-odl-nofeature-ha-zte-pod3-daily-euphrates-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-onos-sfc-ha-zte-pod3-daily-euphrates-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-onos-nofeature-ha-zte-pod3-daily-euphrates-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-kvm-ha-zte-pod3-daily-euphrates-trigger'
-    triggers:
-      - timed: ''  # '0 2 * * *'
-- trigger:
-    name: 'fuel-os-nosdn-ovs-ha-zte-pod3-daily-euphrates-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-zte-pod3-daily-euphrates-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-zte-pod3-daily-euphrates-trigger'
-    triggers:
-      - timed: ''
-# NOHA Scenarios
-- trigger:
-    name: 'fuel-os-nosdn-nofeature-noha-zte-pod3-daily-euphrates-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-odl-nofeature-noha-zte-pod3-daily-euphrates-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-onos-sfc-noha-zte-pod3-daily-euphrates-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-onos-nofeature-noha-zte-pod3-daily-euphrates-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-kvm-noha-zte-pod3-daily-euphrates-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-ovs-noha-zte-pod3-daily-euphrates-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-zte-pod3-daily-euphrates-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-zte-pod3-daily-euphrates-trigger'
-    triggers:
-      - timed: ''
-# -----------------------------------------------
-# ZTE POD1 Triggers running against danube branch
-# -----------------------------------------------
-- trigger:
-    name: 'fuel-os-nosdn-nofeature-ha-zte-pod1-daily-danube-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-odl_l2-bgpvpn-ha-zte-pod1-daily-danube-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-odl-nofeature-ha-zte-pod1-daily-danube-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-onos-sfc-ha-zte-pod1-daily-danube-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-onos-nofeature-ha-zte-pod1-daily-danube-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-kvm-ha-zte-pod1-daily-danube-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-ovs-ha-zte-pod1-daily-danube-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-zte-pod1-daily-danube-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-zte-pod1-daily-danube-trigger'
-    triggers:
-      - timed: ''
-# NOHA Scenarios
-- trigger:
-    name: 'fuel-os-nosdn-nofeature-noha-zte-pod1-daily-danube-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-odl-nofeature-noha-zte-pod1-daily-danube-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-onos-sfc-noha-zte-pod1-daily-danube-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-onos-nofeature-noha-zte-pod1-daily-danube-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-kvm-noha-zte-pod1-daily-danube-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-ovs-noha-zte-pod1-daily-danube-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-zte-pod1-daily-danube-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-zte-pod1-daily-danube-trigger'
-    triggers:
-      - timed: ''
index 3a52d1d..35bf3dc 100755 (executable)
@@ -38,7 +38,6 @@ fi
 
 # set deployment parameters
 export TMPDIR=${HOME}/tmpdir
-BRIDGE=${BRIDGE:-pxebr}
 # shellcheck disable=SC2153
 LAB_NAME=${NODE_NAME/-*}
 # shellcheck disable=SC2153
@@ -49,10 +48,6 @@ LAB_CONFIG_URL=${LAB_CONFIG_URL:-'ssh://jenkins-ericsson@gerrit.opnfv.org:29418/
 # Fuel requires deploy script to be ran with sudo, Armband does not
 SUDO='sudo -E'
 if [ "${PROJECT}" = 'fuel' ]; then
-    # Fuel does not use any POD-specific configuration for virtual deploys
-    if [[ "${NODE_NAME}" =~ "virtual" ]]; then
-        POD_NAME="virtual_kvm"
-    fi
     # Fuel currently supports ericsson, intel, lf and zte labs
     if [[ ! "${LAB_NAME}" =~ (ericsson|intel|lf|zte) ]]; then
         echo "Unsupported/unidentified lab ${LAB_NAME}. Cannot continue!"
@@ -74,21 +69,26 @@ mkdir -p "${TMPDIR}"
 chmod a+x "${HOME}" "${TMPDIR}"
 
 cd "${WORKSPACE}" || exit 1
-if [[ "${LAB_CONFIG_URL}" =~ ^(git|ssh):// ]]; then
-    echo "Cloning securedlab repo ${BRANCH}"
-    LOCAL_CFG="${TMPDIR}/securedlab"
-    rm -rf "${LOCAL_CFG}"
-    git clone --quiet --branch "${BRANCH}" "${LAB_CONFIG_URL}" "${LOCAL_CFG}"
-    LAB_CONFIG_URL="file://${LOCAL_CFG}"
+if [[ "$BRANCH" =~ (danube|euphrates) ]]; then
+    if [[ "${LAB_CONFIG_URL}" =~ ^(git|ssh):// ]]; then
+        echo "Cloning securedlab repo ${BRANCH}"
+        LOCAL_CFG="${TMPDIR}/securedlab"
+        rm -rf "${LOCAL_CFG}"
+        git clone --quiet --branch "${BRANCH}" "${LAB_CONFIG_URL}" "${LOCAL_CFG}"
+        LAB_CONFIG_ARG="-b file://${LOCAL_CFG}"
+        BRIDGE_ARG="-B ${BRIDGE:-pxebr}"
+    else
+        LAB_CONFIG_ARG="-b ${LAB_CONFIG_URL}"
+    fi
 fi
 
 # log file name
 FUEL_LOG_FILENAME="${JOB_NAME}_${BUILD_NUMBER}.log.tar.gz"
 
 # construct the command
-DEPLOY_COMMAND="${SUDO} ${WORKSPACE}/ci/deploy.sh -b ${LAB_CONFIG_URL} \
+DEPLOY_COMMAND="${SUDO} ${WORKSPACE}/ci/deploy.sh ${LAB_CONFIG_ARG:-} \
     -l ${LAB_NAME} -p ${POD_NAME} -s ${DEPLOY_SCENARIO} ${ISO_FILE_ARG:-} \
-    -B ${DEFAULT_BRIDGE:-${BRIDGE}} -S ${TMPDIR} \
+    -S ${TMPDIR} ${BRIDGE_ARG:-} \
     -L ${WORKSPACE}/${FUEL_LOG_FILENAME}"
 
 # log info to console
index 3c1ac28..432bbbb 100755 (executable)
@@ -4,10 +4,37 @@ set -e
 set +u
 set +o pipefail
 
+CI_LOOP=${CI_LOOP:-daily}
+TEST_DB_URL=http://testresults.opnfv.org/test/api/v1/results
+ENERGY_RECORDER_API_URL=http://energy.opnfv.fr/resources
+
+check_os_deployment() {
+    FUNCTEST_IMAGE=opnfv/functest-healthcheck:${DOCKER_TAG}
+    echo "Functest: Pulling Functest Docker image ${FUNCTEST_IMAGE} ..."
+    docker pull ${FUNCTEST_IMAGE}>/dev/null
+    cmd="docker run --rm --privileged=true ${volumes} ${FUNCTEST_IMAGE} check_deployment"
+    echo "Checking deployment, CMD: ${cmd}"
+    eval ${cmd}
+    ret_value=$?
+    if [ ${ret_value} != 0 ]; then
+        echo "ERROR: Problem while checking OpenStack deployment."
+        exit 1
+    else
+        echo "OpenStack deployment OK."
+    fi
+
+}
+
+
 run_tiers() {
     tiers=$1
-    cmd_opt="prepare_env start && run_tests -r -t all"
-    [[ $BUILD_TAG =~ "suite" ]] && cmd_opt="prepare_env start && run_tests -t all"
+    if [[ ${BRANCH##*/} == "master" ]]; then
+        cmd_opt="run_tests -r -t all"
+        [[ $BUILD_TAG =~ "suite" ]] && cmd_opt="run_tests -t all"
+    else
+        cmd_opt="prepare_env start && run_tests -r -t all"
+        [[ $BUILD_TAG =~ "suite" ]] && cmd_opt="prepare_env start && run_tests -t all"
+    fi
     ret_val_file="${HOME}/opnfv/functest/results/${BRANCH##*/}/return_value"
     echo 0 > ${ret_val_file}
 
@@ -15,7 +42,7 @@ run_tiers() {
         FUNCTEST_IMAGE=opnfv/functest-${tier}:${DOCKER_TAG}
         echo "Functest: Pulling Functest Docker image ${FUNCTEST_IMAGE} ..."
         docker pull ${FUNCTEST_IMAGE}>/dev/null
-        cmd="docker run --privileged=true ${envs} ${volumes} ${TESTCASE_OPTIONS} ${FUNCTEST_IMAGE} /bin/bash -c '${cmd_opt}'"
+        cmd="docker run --rm  --privileged=true ${envs} ${volumes} ${TESTCASE_OPTIONS} ${FUNCTEST_IMAGE} /bin/bash -c '${cmd_opt}'"
         echo "Running Functest tier '${tier}'. CMD: ${cmd}"
         eval ${cmd}
         ret_value=$?
@@ -31,21 +58,24 @@ run_tiers() {
 
 run_test() {
     test_name=$1
-    cmd_opt="prepare_env start && run_tests -t ${test_name}"
-    [[ $BUILD_TAG =~ "suite" ]] && cmd_opt="prepare_env start && run_tests -t ${test_name}"
+    if [[ ${BRANCH##*/} == "master" ]]; then
+        cmd_opt="run_tests -t ${test_name}"
+    else
+        cmd_opt="prepare_env start && run_tests -t ${test_name}"
+    fi
     ret_val_file="${HOME}/opnfv/functest/results/${BRANCH##*/}/return_value"
     echo 0 > ${ret_val_file}
     # Determine which Functest image should be used for the test case
     case ${test_name} in
         connection_check|api_check|snaps_health_check)
             FUNCTEST_IMAGE=opnfv/functest-healthcheck:${DOCKER_TAG} ;;
-        vping_ssh|vping_userdata|tempest_smoke_serial|rally_sanity|refstack_defcore|odl|odl_netvirt|fds|snaps_smoke)
+        vping_ssh|vping_userdata|tempest_smoke_serial|rally_sanity|refstack_defcore|odl|odl_netvirt|snaps_smoke)
             FUNCTEST_IMAGE=opnfv/functest-smoke:${DOCKER_TAG} ;;
-        tempest_full_parallel|tempest_custom|rally_full)
+        tempest_full_parallel|rally_full)
             FUNCTEST_IMAGE=opnfv/functest-components:${DOCKER_TAG} ;;
         cloudify_ims|orchestra_openims|orchestra_clearwaterims|vyos_vrouter)
             FUNCTEST_IMAGE=opnfv/functest-vnf:${DOCKER_TAG} ;;
-        promise|doctor-notification|bgpvpn|functest-odl-sfc|domino-multinode|barometercollectd)
+        promise|doctor-notification|bgpvpn|functest-odl-sfc|domino-multinode|barometercollectd|fds)
             FUNCTEST_IMAGE=opnfv/functest-features:${DOCKER_TAG} ;;
         parser-basics)
             FUNCTEST_IMAGE=opnfv/functest-parser:${DOCKER_TAG} ;;
@@ -56,7 +86,7 @@ run_test() {
     esac
     echo "Functest: Pulling Functest Docker image ${FUNCTEST_IMAGE} ..."
     docker pull ${FUNCTEST_IMAGE}>/dev/null
-    cmd="docker run --privileged=true ${envs} ${volumes} ${TESTCASE_OPTIONS} ${FUNCTEST_IMAGE} /bin/bash -c '${cmd_opt}'"
+    cmd="docker run --rm --privileged=true ${envs} ${volumes} ${TESTCASE_OPTIONS} ${FUNCTEST_IMAGE} /bin/bash -c '${cmd_opt}'"
     echo "Running Functest test case '${test_name}'. CMD: ${cmd}"
     eval ${cmd}
     ret_value=$?
@@ -84,8 +114,12 @@ elif [[ ${INSTALLER_TYPE} == 'compass' ]]; then
 elif [[ ${INSTALLER_TYPE} == 'fuel' && ${DEPLOY_TYPE} == 'baremetal' ]]; then
     cacert_file_vol="-v ${HOME}/os_cacert:/etc/ssl/certs/mcp_os_cacert"
 fi
-rc_file_vol="-v ${rc_file}:${FUNCTEST_DIR}/conf/openstack.creds"
 
+if [[ ${BRANCH} == "stable/euphrates" ]]; then
+    rc_file_vol="-v ${rc_file}:${FUNCTEST_DIR}/conf/openstack.creds"
+else
+    rc_file_vol="-v ${rc_file}:${FUNCTEST_DIR}/conf/env_file"
+fi
 
 # Set iptables rule to allow forwarding return traffic for container
 if ! sudo iptables -C FORWARD -j RETURN 2> ${redirect} || ! sudo iptables -L FORWARD | awk 'NR==3' | grep RETURN 2> ${redirect}; then
@@ -115,7 +149,8 @@ test -f ${HOME}/opnfv/functest/custom/params_${DOCKER_TAG} && custom_params=$(ca
 
 envs="-e INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP} \
     -e NODE_NAME=${NODE_NAME} -e DEPLOY_SCENARIO=${DEPLOY_SCENARIO} \
-    -e BUILD_TAG=${BUILD_TAG} -e DEPLOY_TYPE=${DEPLOY_TYPE}"
+    -e BUILD_TAG=${BUILD_TAG} -e DEPLOY_TYPE=${DEPLOY_TYPE} -e CI_LOOP=${CI_LOOP} \
+    -e TEST_DB_URL=${TEST_DB_URL} -e ENERGY_RECORDER_API_URL=${ENERGY_RECORDER_API_URL}"
 
 ssh_options="-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
 
@@ -132,6 +167,7 @@ set +e
 
 
 if [[ ${DEPLOY_SCENARIO} =~ ^os-.* ]]; then
+    [[ ${BRANCH##*/} == "master" ]] && check_os_deployment
     if [ ${FUNCTEST_MODE} == 'testcase' ]; then
         echo "FUNCTEST_MODE=testcase, FUNCTEST_SUITE_NAME=${FUNCTEST_SUITE_NAME}"
         run_test ${FUNCTEST_SUITE_NAME}
index fc277b9..c21b543 100755 (executable)
@@ -3,11 +3,7 @@
 [[ $CI_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
 
 echo "Cleaning up docker containers/images..."
-HOST_ARCH=$(uname -m)
 FUNCTEST_IMAGE=opnfv/functest
-if [ "$HOST_ARCH" = "aarch64" ]; then
-    FUNCTEST_IMAGE="${FUNCTEST_IMAGE}_${HOST_ARCH}"
-fi
 
 # Remove containers along with image opnfv/functest*:<none>
 dangling_images=($(docker images -f "dangling=true" | grep $FUNCTEST_IMAGE | awk '{print $3}'))
index 2d5d397..099d01a 100644 (file)
           slave-label: '{pod}'
           installer: daisy
           <<: *euphrates
+      - zte-pod9:
+          slave-label: '{pod}'
+          installer: daisy
+          <<: *master
+    # -------------------------------
     # PODs for verify jobs triggered by each patch upload
     #   - ool-virtual1:
     #       slave-label: '{pod}'
       - 'suite':
           job-timeout: 60
       - 'daily':
-          job-timeout: 240
+          job-timeout: 300
       - 'arm-daily':
-          job-timeout: 240
+          job-timeout: 300
 
     jobs:
       - 'functest-{installer}-{pod}-{testsuite}-{stream}'
     name: functest-daily
     builders:
       # yamllint disable rule:indentation
-      - shell:
-          !include-raw:
-              - ./functest-env-presetup.sh
-              - ../../utils/fetch_os_creds.sh
-              - ./functest-alpine.sh
+      - conditional-step:
+          condition-kind: regex-match
+          regex: "os-.*"
+          label: '$DEPLOY_SCENARIO'
+          steps:
+            - shell:
+                !include-raw:
+                    - ./functest-env-presetup.sh
+                    - ../../utils/fetch_os_creds.sh
+                    - ./functest-alpine.sh
+      - conditional-step:
+          condition-kind: regex-match
+          regex: "k8-.*"
+          label: '$DEPLOY_SCENARIO'
+          steps:
+            - shell:
+                !include-raw:
+                    - ../../utils/fetch_k8_conf.sh
+                    - ./functest-k8.sh
 
 # yamllint enable rule:indentation
 - builder:
index cd15d71..b7d33e7 100755 (executable)
@@ -38,7 +38,7 @@ elif [[ ${INSTALLER_TYPE} == 'daisy' ]]; then
 
         installer_mac=$(sudo virsh domiflist daisy | grep vnet | \
                       grep -Eo "[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+")
-        export INSTALLER_IP=$(/usr/sbin/arp -e -i $bridge_name | grep ${installer_mac} | awk {'print $1'})
+        export INSTALLER_IP=$(/usr/sbin/arp -e -i $bridge_name | grep ${installer_mac} | head -n 1 | awk {'print $1'})
 
         echo "Installer ip is ${INSTALLER_IP}"
     else
diff --git a/jjb/functest/functest-k8.sh b/jjb/functest/functest-k8.sh
new file mode 100755 (executable)
index 0000000..6df5c53
--- /dev/null
@@ -0,0 +1,55 @@
+#!/bin/bash
+
+set -e
+set +u
+set +o pipefail
+
+[[ $CI_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
+FUNCTEST_DIR=/home/opnfv/functest
+
+rc_file=${HOME}/k8.creds
+sudo rm -rf $rc_file
+
+if [[ ${INSTALLER_TYPE} == 'compass' ]]; then
+    admin_conf_file_vol = "-v ${HOME}/admin.conf:/root/.kube/config"
+    echo "export KUBECONFIG=/root/.kube/config" >> $rc_file
+    echo "export KUBERNETES_PROVIDER=local" >> $rc_file
+    KUBE_MASTER_URL = $(cat ${HOME}/admin.conf|grep server| awk '{print $2}')
+    echo "export KUBE_MASTER_URL=$KUBE_MASTER_URL" >> $rc_file
+    KUBE_MASTER_IP = $(echo $KUBE_MASTER_URL|awk -F'https://|:[0-9]+' '$0=$2')
+    echo "export KUBE_MASTER_IP=$KUBE_MASTER_IP" >> $rc_file
+else
+    echo "Not supported by other installers yet"
+    exit 1
+fi
+
+rc_file_vol="-v ${rc_file}:${FUNCTEST_DIR}/conf/env_file"
+
+dir_result="${HOME}/opnfv/functest/results/${BRANCH##*/}"
+mkdir -p ${dir_result}
+sudo rm -rf ${dir_result}/*
+results_vol="-v ${dir_result}:${FUNCTEST_DIR}/results"
+
+volumes="${rc_file_vol} ${results_vol} ${admin_conf_file_vol}"
+
+# Set iptables rule to allow forwarding return traffic for container
+if ! sudo iptables -C FORWARD -j RETURN 2> ${redirect} || ! sudo iptables -L FORWARD | awk 'NR==3' | grep RETURN 2> ${redirect}; then
+    sudo iptables -I FORWARD -j RETURN
+fi
+
+envs="-e INSTALLER_TYPE=${INSTALLER_TYPE} \
+    -e NODE_NAME=${NODE_NAME} -e DEPLOY_SCENARIO=${DEPLOY_SCENARIO} \
+    -e BUILD_TAG=${BUILD_TAG} -e DEPLOY_TYPE=${DEPLOY_TYPE}"
+
+DOCKER_TAG=`[[ ${BRANCH##*/} == "master" ]] && echo "latest" || echo ${BRANCH##*/}`
+
+FUNCTEST_IMAGE=opnfv/functest-kubernetes:${DOCKER_TAG}
+docker pull ${FUNCTEST_IMAGE}>/dev/null
+cmd_opt="run_tests -r -t all"
+cmd="docker run --rm --privileged=true ${volumes} ${FUNCTEST_IMAGE} /bin/bash -c '${cmd_opt}'"
+echo "Running Functest k8s test cases, CMD: ${cmd}"
+eval ${cmd}
+ret_value=$?
+if [ ${ret_value} != 0 ]; then
+  echo ${ret_value} > ${ret_val_file}
+fi
diff --git a/jjb/global/basic-jobs.yml b/jjb/global/basic-jobs.yml
new file mode 100644 (file)
index 0000000..e55f068
--- /dev/null
@@ -0,0 +1,46 @@
+---
+##
+# Basic Job Config
+#
+# This is used for project which don't have any jobs of substance
+# defined yet, but still need 'Verified+1'.
+##
+- job-group:
+    name: '{project}-verify-basic'
+
+    stream:
+      - master:
+          branch: '{stream}'
+          gs-pathname: ''
+          disabled: false
+      - euphrates:
+          branch: 'stable/{stream}'
+          gs-pathname: '/{stream}'
+          disabled: false
+
+    jobs:
+      - '{project}-verify-{stream}'
+
+- job-template:
+    name: '{project}-verify-{stream}'
+
+    disabled: '{obj:disabled}'
+
+    parameters:
+      - project-parameter:
+          project: '{project}'
+          branch: '{branch}'
+      - 'opnfv-build-defaults'
+
+    scm:
+      - git-scm-gerrit
+
+    triggers:
+      - gerrit-trigger-patchset-created:
+          project: '{project}'
+          branch: '{branch}'
+          files: 'docs/**|.gitignore'
+
+    builders:
+      - shell: |
+          echo "Nothing to verify!"
index 916db80..683ef78 100644 (file)
           name: EXTERNAL_NETWORK
           default: 'floating_net'
           description: 'external network for test'
-      - string:
-          name: BRIDGE
-          default: 'pxebr'
-          description: 'Bridge(s) to be used by salt master'
       - string:
           name: GS_URL
           default: '$GS_BASE{gs-pathname}'
@@ -80,8 +76,8 @@
           description: 'Model to deploy (os|k8)'
       - string:
           name: OS_RELEASE
-          default: 'ocata'
-          description: 'OpenStack release (mitaka|newton|ocata)'
+          default: 'pike'
+          description: 'OpenStack release (mitaka|ocata|pike)'
       - string:
           name: EXTERNAL_NETWORK
           default: ext-net
diff --git a/jjb/global/installer-report.sh b/jjb/global/installer-report.sh
new file mode 100755 (executable)
index 0000000..e2fcfd6
--- /dev/null
@@ -0,0 +1,25 @@
+#!/bin/bash
+##############################################################################
+# Copyright (c) 2017 ZTE Corporation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+source $WORKSPACE/installer_track.sh
+echo """
+    INSTALLER: $INSTALLER
+    INSTALLER_VERSION: $INSTALLER_VERSION
+    JOB_NAME: $JOB_NAME
+    BUILD_ID: $BUILD_ID
+    SCENARIO: $DEPLOY_SCENARIO
+    UPSTREAM_JOB_NAME: $UPSTREAM_JOB_NAME
+    UPSTREAM_BUILD_ID: $UPSTREAM_BUILD_ID
+    PROVISION_RESULT: $PROVISION_RESULT
+    TIMESTAMP_START: $TIMESTAMP_START
+    TIMESTAMP_END: `date '+%Y-%m-%d %H:%M:%S.%3N'`
+    POD_NAME: $NODE_NAME
+"""
+
+# TODO call TestAPI to report installer provision result when API is ready
index 0876694..a7d947f 100644 (file)
           name: GERRIT_BRANCH
           default: '{branch}'
           description: "JJB configured GERRIT_BRANCH parameter (deprecated)"
+      - string:
+          name: GERRIT_REFSPEC
+          default: 'refs/heads/{branch}'
+          description: "Default refspec needed for manually triggering."
 
 - property:
     name: logrotate-default
           submodule:
             recursive: true
             timeout: 20
+
+- scm:
+    name: git-scm-openstack
+    scm:
+      - git: &git-scm-openstack-defaults
+          url: '$GIT_BASE'
+          branches:
+            - 'origin/$BRANCH'
+          timeout: 15
+
 - trigger:
     name: 'daily-trigger-disabled'
     triggers:
       - gerrit:
           server-name: 'gerrit.opnfv.org'
           trigger-on:
-            - ref-updated
+            - ref-updated-event
           projects:
             - project-compare-type: 'ANT'
               project-pattern: '{project}'
           fi
 
 - builder:
-    name: lint-python-code
+    name: upload-review-docs
+    builders:
+      - upload-under-review-docs-to-opnfv-artifacts
+      - report-build-result-to-gerrit
+
+- builder:
+    name: lint-init
     builders:
       - shell: |
           #!/bin/bash
-          set -o errexit
-          set -o pipefail
-          set -o xtrace
-          export PATH=$PATH:/usr/local/bin/
-
-          virtualenv -p python2.7 $WORKSPACE/releng_flake8
-          source $WORKSPACE/releng_flake8/bin/activate
-
-          # install python packages
-          pip install "flake8==2.6.2"
-
-          # generate and upload lint log
-          echo "Running flake8 code on $PROJECT ..."
-
-          # Get number of flake8 violations. If none, this will be an
-          # empty string: ""
-          FLAKE_COUNT="$(find . \
-              -path './releng_flake8' -prune -o \
-              -path './.tox' -prune -o \
-              -type f -name "*.py" -print | \
-              xargs flake8 --exit-zero -qq --count 2>&1)"
-
           # Ensure we start with a clean environment
-          rm -f lint.log
-
-          if [ ! -z $FLAKE_COUNT ]; then
-            echo "Flake8 Violations: $FLAKE_COUNT" > lint.log
-            find . \
-                -path './releng_flake8' -prune -o \
-                -path './.tox' -prune -o \
-                -type f -name "*.py" -print | \
-                xargs flake8 --exit-zero --first >> violation.log
-            SHOWN=$(wc -l violation.log | cut -d' ' -f1)
-            echo -e "First $SHOWN shown\n---" >> lint.log
-            cat violation.log >> lint.log
-            sed -r -i '4,$s/^/ /g' lint.log
-            rm violation.log
-          fi
-
-          deactivate
+          rm -f bash-violation.log python-violation.log yaml-violation.log violation.log
+          git --no-pager diff --diff-filter=MCRAT --name-only HEAD^1 > modified_files
 
 - builder:
-    name: report-lint-result-to-gerrit
+    name: lint-report
     builders:
       - shell: |
           #!/bin/bash
-          set -o errexit
-          set -o pipefail
-          set -o xtrace
-          export PATH=$PATH:/usr/local/bin/
-
-          # If no violations were found, no lint log will exist.
-          if [[ -e lint.log ]] ; then
-              echo -e "\nposting linting report to gerrit...\n"
-
-              cat lint.log
-              echo
-
-              ssh -p 29418 gerrit.opnfv.org \
-                  "gerrit review -p $GERRIT_PROJECT \
-                   -m \"$(cat lint.log)\" \
-                   $GERRIT_PATCHSET_REVISION \
-                   --notify NONE"
-
+          if [[ -s violation.log ]]; then
+              echo "Reporting lint result..."
+              msg="Found syntax error and/or coding style violation(s) in the files modified by your patchset."
+              sed -i -e "1s#^#${msg}\n\n#" violation.log
+              cmd="gerrit review -p $GERRIT_PROJECT -m \"$(cat violation.log)\" $GERRIT_PATCHSET_REVISION --notify NONE"
+              ssh -p 29418 gerrit.opnfv.org "$cmd"
+
+              # Make sure the caller job failed
               exit 1
           fi
 
 - builder:
-    name: upload-review-docs
+    name: lint-bash-code
     builders:
-      - upload-under-review-docs-to-opnfv-artifacts
-      - report-build-result-to-gerrit
+      - shell: |
+          #!/bin/bash
+          echo "Checking bash code..."
+          for f in $(egrep '\.sh$' modified_files)
+          do
+              bash -n "$f" 2>> bash-violation.log
+          done
+          if [[ -s bash-violation.log ]]; then
+              echo -e "Bash syntax error(s)\n---" >> violation.log
+              sed -e 's/^/ /g' bash-violation.log >> violation.log
+          fi
 
 - builder:
-    name: check-bash-syntax
+    name: lint-python-code
     builders:
-      - shell: "find . -name '*.sh' | xargs bash -n"
+      - shell: |
+          #!/bin/bash
+          # Install python package
+          sudo pip install "flake8==2.6.2"
+
+          echo "Checking python code..."
+          for f in $(egrep '\.py$' modified_files)
+          do
+              flake8 "$f" >> python-violation.log
+          done
+          if [[ -s python-violation.log ]]; then
+              echo -e "Python violation(s)\n---" >> violation.log
+              sed -e 's/^/ /g' python-violation.log >> violation.log
+          fi
 
 - builder:
     name: lint-yaml-code
     builders:
       - shell: |
           #!/bin/bash
-          set -o errexit
-          set -o pipefail
-          set -o xtrace
-          export PATH=$PATH:/usr/local/bin/
-
-          # install python packages
+          # Install python packages (requires sudo)
           sudo pip install "yamllint==1.8.2"
 
-          # generate and upload lint log
-          echo "Running yaml code on $PROJECT ..."
-
-          # Get list of yaml files
-          YAML_FILES=$(git --no-pager diff --diff-filter=MCRAT --name-only HEAD^1 | egrep "ya?ml$") || true
-
-          #If YAML_FILES is none exit with 0
-          if [ -z "$YAML_FILES" ]; then
-              exit 0
-          fi
-
-          # Ensure we start with a clean environment
-          rm -f yaml-violation.log lint.log
-
-          # Yamllint files only in patchset
-          for yamlfile in $YAML_FILES; do
-            yamllint $yamlfile >> yaml-violation.log || true
+          echo "Checking yaml file..."
+          for f in $(egrep '\.ya?ml$' modified_files)
+          do
+              yamllint "$f" >> yaml-violation.log
           done
-
-          if [ -s "yaml-violation.log" ]; then
-            SHOWN=$(grep -c -v "^$" yaml-violation.log)
-            echo -e "First $SHOWN shown\n---" > lint.log
-            cat yaml-violation.log >> lint.log
-            sed -r -i '4,$s/^/ /g' lint.log
+          if [[ -s yaml-violation.log ]]; then
+              echo -e "YAML violation(s)\n---" >> violation.log
+              sed -e 's/^/ /g' yaml-violation.log >> violation.log
           fi
 
+- builder:
+    name: lint-all-code
+    builders:
+      - lint-init
+      - lint-bash-code
+      - lint-python-code
+      - lint-yaml-code
+      - lint-report
+
 - builder:
     name: clean-workspace
     builders:
       - shell: |
           find $WORKSPACE -type f -name '*.log' | xargs rm -f
 
+- builder:
+    name: track-begin-timestamp
+    builders:
+      - shell: |
+          echo "export TIMESTAMP_START="\'`date '+%Y-%m-%d %H:%M:%S.%3N'`\' > $WORKSPACE/installer_track.sh
+
 - publisher:
     name: archive-artifacts
     publishers:
           failure: true
           send-to:
             - recipients
+
+# Email PTL publishers
+- email_ptl_defaults: &email_ptl_defaults
+    name: 'email_ptl_defaults'
+    content-type: text
+    attach-build-log: true
+    attachments: '*.log'
+    compress-log: true
+    always: true
+    subject: '{subject}'
+
+- publisher: &email_apex_ptl_defaults
+    name: 'email-apex-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            trozet@redhat.com
+- publisher:
+    name: 'email-apex-os-net-config-ptl'
+    <<: *email_apex_ptl_defaults
+- publisher:
+    name: 'email-apex-puppet-tripleo-ptl'
+    <<: *email_apex_ptl_defaults
+- publisher:
+    name: 'email-apex-tripleo-heat-templates-ptl'
+    <<: *email_apex_ptl_defaults
+
+- publisher:
+    name: 'email-armband-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            bob.monkman@arm.com
+
+- publisher:
+    name: 'email-auto-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            tina.tsou@arm.com
+
+- publisher:
+    name: 'email-availability-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            fuqiao@chinamobile.com
+
+- publisher:
+    name: 'email-bamboo-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            donaldh@cisco.com
+
+- publisher:
+    name: 'email-barometer-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            aasmith@redhat.com
+
+- publisher:
+    name: 'email-bottlenecks-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            gabriel.yuyang@huawei.com
+
+- publisher:
+    name: 'email-calipso-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            korlev@cisco.com
+
+- publisher:
+    name: 'email-clover-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            stephen.wong1@huawei.com
+
+- publisher: &email_compass4nfv_ptl_defaults
+    name: 'email-compass4nfv-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            chigang@huawei.com
+- publisher:
+    name: 'email-compass-containers-ptl'
+    <<: *email_compass4nfv_ptl_defaults
+
+- publisher:
+    name: 'email-conductor-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            limingjiang@huawei.com
+
+- publisher:
+    name: 'email-container4nfv-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            jiaxuan@chinamobile.com
+
+- publisher:
+    name: 'email-copper-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            aimeeu.opensource@gmail.com
+
+- publisher:
+    name: 'email-cperf-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            matt.welch@intel.com
+
+- publisher:
+    name: 'email-daisy-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            hu.zhijiang@zte.com.cn
+
+- publisher:
+    name: 'email-doctor-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            r-mibu@cq.jp.nec.com
+
+- publisher:
+    name: 'email-domino-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            ulas.kozat@huawei.com
+
+- publisher:
+    name: 'email-dovetail-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            hongbo.tianhongbo@huawei.com
+
+- publisher:
+    name: 'email-dpacc-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            denglingli@chinamobile.com
+
+- publisher:
+    name: 'email-enfv-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            JBuchanan@advaoptical.com
+
+- publisher:
+    name: 'email-escalator-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            kong.wei2@zte.com.cn
+
+- publisher:
+    name: 'email-fastpathmetrics-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            maryam.tahhan@intel.com
+
+- publisher:
+    name: 'email-fds-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            fbrockne@cisco.com
+
+- publisher:
+    name: 'email-fuel-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            gelkinbard@mirantis.com
+
+- publisher:
+    name: 'email-functest-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            cedric.ollivier@orange.com
+
+- publisher:
+    name: 'email-ipv6-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            bh526r@att.com
+
+- publisher:
+    name: 'email-joid-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            artur.tyloch@canonical.com
+
+- publisher:
+    name: 'email-kvmfornfv-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            raghuveer.reddy@intel.com
+
+- publisher:
+    name: 'email-models-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            bs3131@att.com
+
+- publisher:
+    name: 'email-moon-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            ruan.he@orange.com
+
+- publisher:
+    name: 'email-multisite-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            joehuang@huawei.com
+
+- publisher:
+    name: 'email-netready-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            georg.kunz@ericsson.com
+
+- publisher:
+    name: 'email-nfvbench-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            ahothan@cisco.com
+
+- publisher:
+    name: 'email-octopus-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            ulrich.kleber@huawei.com
+
+- publisher:
+    name: 'email-onosfw-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            su.wei@huawei.com
+
+- publisher:
+    name: 'email-openretriever-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            jiaxuan@chinamobile.com
+
+- publisher:
+    name: 'email-opera-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            Yingjun.li@huawei.com
+
+- publisher:
+    name: 'email-opnfvdocs-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            sofia.wallin@ericsson.com
+
+- publisher:
+    name: 'email-orchestra-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            giuseppe.carella@fokus.fraunhofer.de
+
+- publisher:
+    name: 'email-ovn4nfv-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            trinath.somanchi@gmail.com
+
+- publisher:
+    name: 'email-ovno-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            wsmackie@juniper.net
+
+- publisher:
+    name: 'email-ovsnfv-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            mark.d.gray@intel.com
+
+- publisher:
+    name: 'email-parser-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            shang.xiaodong@zte.com.cn
+
+- publisher: &email_pharos_ptl_defaults
+    name: 'email-pharos-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            jack.morgan@intel.com
+- publisher:
+    name: 'email-pharos-tools-ptl'
+    <<: *email_pharos_ptl_defaults
+
+- publisher:
+    name: 'email-promise-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            kunzmann@docomolab-euro.com
+
+- publisher:
+    name: 'email-qtip-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            wu.zhihui1@zte.com.cn
+
+- publisher: &email_releng_ptl_defaults
+    name: 'email-releng-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            fatih.degirmenci@ericsson.com
+- publisher:
+    name: 'email-releng-anteater-ptl'
+    <<: *email_releng_ptl_defaults
+- publisher:
+    name: 'email-releng-testresults-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            fatih.degirmenci@ericsson.com
+            feng.xiaowei@zte.com.cn
+- publisher:
+    name: 'email-releng-utils-ptl'
+    <<: *email_releng_ptl_defaults
+- publisher:
+    name: 'email-releng-xci-ptl'
+    <<: *email_releng_ptl_defaults
+
+- publisher:
+    name: 'email-samplevnf-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            deepak.s@intel.com
+
+- publisher:
+    name: 'email-sdnvpn-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            tim.irnich@ericsson.com
+
+- publisher:
+    name: 'email-securityscanning-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            lhinds@redhat.com
+
+- publisher:
+    name: 'email-sfc-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            mbuil@suse.com
+
+- publisher:
+    name: 'email-snaps-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            s.pisarski@cablelabs.com
+
+- publisher:
+    name: 'email-stor4nfv-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            shane.wang@intel.com
+
+- publisher:
+    name: 'email-storperf-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            mark.beierl@emc.com
+
+- publisher:
+    name: 'email-ves-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            bryan.sullivan@att.com
+
+- publisher:
+    name: 'email-vswitchperf-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            sridhar.rao@spirent.com
+
+- publisher:
+    name: 'email-yardstick-ptl'
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            ross.b.brattain@intel.com
+
+- publisher:
+    name: 'report-provision-result'
+    publishers:
+      - postbuildscript:
+          script-only-if-succeeded: true
+          builders:
+            - shell: |
+                echo "export PROVISION_RESULT=SUCCEED" >> $WORKSPACE/installer_track.sh
+                echo "export INSTALLER={installer}" >> $WORKSPACE/installer_track.sh
+            - shell:
+                !include-raw-escape: installer-report.sh
+      - postbuildscript:
+          script-only-if-failed: true
+          builders:
+            - shell: |
+                echo "export PROVISION_RESULT=FAIL" >> $WORKSPACE/installer_track.sh
+                echo "export INSTALLER={installer}" >> $WORKSPACE/installer_track.sh
+            - shell:
+                !include-raw-escape: installer-report.sh
index 04de1e0..86b369b 100644 (file)
           name: SSH_KEY
           default: /root/.ssh/id_rsa
           description: 'SSH key to use for Apex'
-      - node:
-          name: SLAVE_NAME
-          description: 'Slave name on Jenkins'
-          allowed-slaves:
-            - lf-pod1
-          default-slaves:
-            - lf-pod1
 
 - parameter:
     name: 'apex-baremetal-euphrates-defaults'
           name: SSH_KEY
           default: /root/.ssh/id_rsa
           description: 'SSH key to use for Apex'
-      - node:
-          name: SLAVE_NAME
-          description: 'Slave name on Jenkins'
-          allowed-slaves:
-            - lf-pod1
-          default-slaves:
-            - lf-pod1
 
 - parameter:
     name: 'apex-baremetal-danube-defaults'
           name: SSH_KEY
           default: /root/.ssh/id_rsa
           description: 'SSH key to use for Apex'
-      - node:
-          name: SLAVE_NAME
-          description: 'Slave name on Jenkins'
-          allowed-slaves:
-            - lf-pod1
-          default-slaves:
-            - lf-pod1
+
 
 - parameter:
     name: 'apex-virtual-master-defaults'
           name: SSH_KEY
           default: /root/.ssh/id_rsa
           description: 'SSH key to use for Apex'
-      - node:
-          name: SLAVE_NAME
-          description: 'Slave name on Jenkins'
-          allowed-slaves:
-            - lf-virtual2
-            - lf-virtual3
-          default-slaves:
-            - lf-virtual2
-            - lf-virtual3
 
 - parameter:
     name: 'apex-virtual-euphrates-defaults'
           name: SSH_KEY
           default: /root/.ssh/id_rsa
           description: 'SSH key to use for Apex'
-      - node:
-          name: SLAVE_NAME
-          description: 'Slave name on Jenkins'
-          allowed-slaves:
-            - lf-virtual2
-            - lf-virtual3
-          default-slaves:
-            - lf-virtual2
-            - lf-virtual3
 
 - parameter:
     name: 'apex-virtual-danube-defaults'
           name: SSH_KEY
           default: /root/.ssh/id_rsa
           description: 'SSH key to use for Apex'
+
+- parameter:
+    name: 'lf-pod1-defaults'
+    parameters:
+      - node:
+          name: SLAVE_NAME
+          description: 'Slave name on Jenkins'
+          allowed-slaves:
+            - lf-pod1
+          default-slaves:
+            - lf-pod1
+      - string:
+          name: GIT_BASE
+          default: https://gerrit.opnfv.org/gerrit/$PROJECT
+          description: 'Git URL to use on this Jenkins Slave'
+      - string:
+          name: SSH_KEY
+          default: /root/.ssh/id_rsa
+          description: 'SSH key to use for Apex'
+
+- parameter:
+    name: 'lf-pod3-defaults'
+    parameters:
       - node:
           name: SLAVE_NAME
           description: 'Slave name on Jenkins'
             - lf-pod3
           default-slaves:
             - lf-pod3
+      - string:
+          name: GIT_BASE
+          default: https://gerrit.opnfv.org/gerrit/$PROJECT
+          description: 'Git URL to use on this Jenkins Slave'
+      - string:
+          name: SSH_KEY
+          default: /root/.ssh/id_rsa
+          description: 'SSH key to use for Apex'
 
 - parameter:
-    name: 'lf-pod1-defaults'
+    name: 'lf-pod4-defaults'
     parameters:
       - node:
           name: SLAVE_NAME
           description: 'Slave name on Jenkins'
           allowed-slaves:
-            - lf-pod1
+            - lf-pod4
           default-slaves:
-            - lf-pod1
+            - lf-pod4
       - string:
           name: GIT_BASE
           default: https://gerrit.opnfv.org/gerrit/$PROJECT
           description: 'SSH key to use for Apex'
 
 - parameter:
-    name: 'lf-pod3-defaults'
+    name: 'lf-pod5-defaults'
     parameters:
       - node:
           name: SLAVE_NAME
           description: 'Slave name on Jenkins'
           allowed-slaves:
-            - lf-pod3
+            - lf-pod5
           default-slaves:
-            - lf-pod3
+            - lf-pod5
       - string:
           name: GIT_BASE
           default: https://gerrit.opnfv.org/gerrit/$PROJECT
           default: $WORKSPACE/build_output
           description: "Directory where the build artifact will be located upon the completion of the build."
 
+- parameter:
+    name: 'pharos-dashboard-defaults'
+    parameters:
+      - label:
+          name: SLAVE_LABEL
+          default: 'pharos-dashboard'
+          description: 'Slave label on Jenkins'
+      - string:
+          name: GIT_BASE
+          default: https://gerrit.opnfv.org/gerrit/$PROJECT
+          description: 'Git URL to use on this Jenkins Slave'
+      - string:
+          name: BUILD_DIRECTORY
+          default: $WORKSPACE/build_output
+          description: "Directory where the build artifact will be located upon the completion of the build."
+
 - parameter:
     name: 'opnfv-build-defaults'
     parameters:
           default: $WORKSPACE/build_output
           description: "Directory where the build artifact will be located upon the completion of the build."
 
+- parameter:
+    name: 'ericsson-build3-defaults'
+    parameters:
+      - label:
+          name: SLAVE_LABEL
+          default: 'ericsson-build3'
+          description: 'Slave label on Jenkins'
+      - string:
+          name: GIT_BASE
+          default: https://gerrit.opnfv.org/gerrit/$PROJECT
+          description: 'Git URL to use on this Jenkins Slave'
+
 - parameter:
     name: 'huawei-build-defaults'
     parameters:
           name: GIT_BASE
           default: https://gerrit.opnfv.org/gerrit/$PROJECT
 
+- parameter:
+    name: 'intel-pod17-defaults'
+    parameters:
+      - node:
+          name: SLAVE_NAME
+          description: 'Slave name on Jenkins'
+          allowed-slaves:
+            - intel-pod17
+          default-slaves:
+            - intel-pod17
+      - string:
+          name: GIT_BASE
+          default: https://gerrit.opnfv.org/gerrit/$PROJECT
+
 - parameter:
     name: 'huawei-virtual5-defaults'
     parameters:
           default: 'br0'
           description: 'pxe bridge for booting of Daisy master'
 
+- parameter:
+    name: 'zte-pod9-defaults'
+    parameters:
+      - node:
+          name: SLAVE_NAME
+          description: 'Slave name on Jenkins'
+          allowed-slaves:
+            - zte-pod9
+          default-slaves:
+            - zte-pod9
+      - string:
+          name: GIT_BASE
+          default: https://gerrit.opnfv.org/gerrit/$PROJECT
+          description: 'Git URL to use on this Jenkins Slave'
+      - string:
+          name: INSTALLER_IP
+          default: '10.20.7.2'
+          description: 'IP of the installer'
+      - string:
+          name: BRIDGE
+          default: 'br0'
+          description: 'pxe bridge for booting of Daisy master'
+
 - parameter:
     name: zte-virtual5-defaults
     parameters:
           description: 'Git URL to use on this Jenkins Slave'
 
 - parameter:
-    name: 'doctor-defaults'
+    name: 'doctor-slave-parameter'
     parameters:
       - node:
           name: SLAVE_NAME
           description: 'Slave name on Jenkins'
           allowed-slaves:
-            - '{default-slave}'
+            - 'doctor-apex-x86_64'
+            - 'doctor-fuel-x86_64'
+            - 'doctor-fuel-aarch64'
+            - 'doctor-joid-x86_64'
+            - 'not-specified'
           default-slaves:
-            - '{default-slave}'
+            - 'not-specified'
       - string:
           name: GIT_BASE
           default: https://gerrit.opnfv.org/gerrit/$PROJECT
           default: /root/.ssh/id_rsa
           description: 'SSH key to be used'
 
-- parameter:
-    name: 'doctor-apex-verify-defaults'
-    parameters:
-      - 'doctor-defaults':
-          default-slave: 'doctor-apex-verify'
-
-- parameter:
-    name: 'doctor-fuel-verify-defaults'
-    parameters:
-      - 'doctor-defaults':
-          default-slave: 'doctor-fuel-verify'
-
-- parameter:
-    name: 'doctor-joid-verify-defaults'
-    parameters:
-      - 'doctor-defaults':
-          default-slave: 'doctor-joid-verify'
-
 - parameter:
     name: 'multisite-virtual-defaults'
     parameters:
index f51c497..2946ec7 100644 (file)
@@ -1,64 +1,8 @@
 ---
-###################################################
-# All the jobs except verify have been removed!
-# They will only be enabled on request by projects!
-###################################################
 - project:
     name: ipv6
 
     project: '{name}'
 
     jobs:
-      - 'ipv6-verify-{stream}'
-
-    stream:
-      - master:
-          branch: '{stream}'
-          gs-pathname: ''
-          disabled: false
-      - euphrates:
-          branch: 'stable/{stream}'
-          gs-pathname: '/{stream}'
-          disabled: false
-
-- job-template:
-    name: 'ipv6-verify-{stream}'
-
-    disabled: '{obj:disabled}'
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - 'opnfv-build-ubuntu-defaults'
-
-    scm:
-      - git-scm-gerrit
-
-    triggers:
-      - gerrit:
-          server-name: 'gerrit.opnfv.org'
-          trigger-on:
-            - patchset-created-event:
-                exclude-drafts: 'false'
-                exclude-trivial-rebase: 'false'
-                exclude-no-code-change: 'false'
-            - draft-published-event
-            - comment-added-contains-event:
-                comment-contains-value: 'recheck'
-            - comment-added-contains-event:
-                comment-contains-value: 'reverify'
-          projects:
-            - project-compare-type: 'ANT'
-              project-pattern: '{project}'
-              branches:
-                - branch-compare-type: 'ANT'
-                  branch-pattern: '**/{branch}'
-              disable-strict-forbidden-file-verification: 'true'
-              forbidden-file-paths:
-                - compare-type: ANT
-                  pattern: 'docs/**|.gitignore'
-
-    builders:
-      - shell: |
-          echo "Nothing to verify!"
+      - '{project}-verify-basic'
index 569bff2..947b2f1 100644 (file)
       branch: 'stable/{stream}'
       disabled: false
       gs-pathname: '/{stream}'
+    master: &master
+      stream: master
+      branch: '{stream}'
+      disabled: false
+      gs-pathname: ''
     # -------------------------------
     # POD, INSTALLER, AND BRANCH MAPPING
     # -------------------------------
@@ -27,7 +32,9 @@
       - baremetal:
           slave-label: joid-baremetal
           <<: *euphrates
-
+      - baremetal:
+          slave-label: joid-baremetal
+          <<: *master
     # -------------------------------
     # scenarios
     # -------------------------------
           auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
       - 'os-nosdn-openbaton-ha':
           auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
+      - 'k8-canal-lb-noha':
+          auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
+      - 'k8-nosdn-lb_ceph-noha':
+          auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
+      - 'k8-multus-lb-noha':
+          auto-trigger-name: 'daily-trigger-disabled'
+      - 'os-ovn-nofeature-ha':
+          auto-trigger-name: 'daily-trigger-disabled'
 
     jobs:
       - 'joid-{scenario}-{pod}-daily-{stream}'
     name: 'joid-os-nosdn-nofeature-ha-baremetal-euphrates-trigger'
     triggers:
       - timed: '5 2 * * *'
+# os-nosdn-nofeature-ha trigger - branch: master
+- trigger:
+    name: 'joid-os-nosdn-nofeature-ha-baremetal-master-trigger'
+    triggers:
+      - timed: '5 2 * * *'
 # os-odl_l2-nofeature-ha trigger - branch: euphrates
 - trigger:
     name: 'joid-os-odl_l2-nofeature-ha-baremetal-euphrates-trigger'
     triggers:
       - timed: '5 7 * * *'
+# os-odl_l2-nofeature-ha trigger - branch: master
+- trigger:
+    name: 'joid-os-odl_l2-nofeature-ha-baremetal-master-trigger'
+    triggers:
+      - timed: '5 7 * * *'
 # os-nosdn-lxd-noha trigger - branch: euphrates
 - trigger:
     name: 'joid-os-nosdn-lxd-noha-baremetal-euphrates-trigger'
     triggers:
       - timed: '5 22 * * *'
+# os-nosdn-lxd-noha trigger - branch: master
+- trigger:
+    name: 'joid-os-nosdn-lxd-noha-baremetal-master-trigger'
+    triggers:
+      - timed: '5 22 * * *'
 # os-nosdn-lxd-ha trigger - branch: euphrates
 - trigger:
     name: 'joid-os-nosdn-lxd-ha-baremetal-euphrates-trigger'
     triggers:
       - timed: '5 10 * * *'
+# os-nosdn-lxd-ha trigger - branch: master
+- trigger:
+    name: 'joid-os-nosdn-lxd-ha-baremetal-master-trigger'
+    triggers:
+      - timed: '5 10 * * *'
 # os-nosdn-nofeature-noha trigger - branch: euphrates
 - trigger:
     name: 'joid-os-nosdn-nofeature-noha-baremetal-euphrates-trigger'
     triggers:
       - timed: '5 4 * * *'
+# os-nosdn-nofeature-noha trigger - branch: master
+- trigger:
+    name: 'joid-os-nosdn-nofeature-noha-baremetal-master-trigger'
+    triggers:
+      - timed: '5 4 * * *'
 # k8-nosdn-nofeature-noha trigger - branch: euphrates
 - trigger:
     name: 'joid-k8-nosdn-nofeature-noha-baremetal-euphrates-trigger'
     triggers:
       - timed: '5 15 * * *'
+# k8-nosdn-nofeature-noha trigger - branch: master
+- trigger:
+    name: 'joid-k8-nosdn-nofeature-noha-baremetal-master-trigger'
+    triggers:
+      - timed: '5 15 * * *'
 # k8-nosdn-lb-noha trigger - branch: euphrates
 - trigger:
     name: 'joid-k8-nosdn-lb-noha-baremetal-euphrates-trigger'
     triggers:
       - timed: '5 20 * * *'
+# k8-nosdn-lb-noha trigger - branch: master
+- trigger:
+    name: 'joid-k8-nosdn-lb-noha-baremetal-master-trigger'
+    triggers:
+      - timed: '5 20 * * *'
 # k8-ovn-lb-noha trigger - branch: euphrates
 - trigger:
     name: 'joid-k8-ovn-lb-noha-baremetal-euphrates-trigger'
     triggers:
       - timed: '5 17 * * *'
+# k8-ovn-lb-noha trigger - branch: master
+- trigger:
+    name: 'joid-k8-ovn-lb-noha-baremetal-master-trigger'
+    triggers:
+      - timed: '5 17 * * *'
 # os-nosdn-openbaton-ha trigger - branch: euphrates
 - trigger:
     name: 'joid-os-nosdn-openbaton-ha-baremetal-euphrates-trigger'
     triggers:
-      - timed: '5 25 * * *'
+      - timed: '5 23 * * *'
+# os-nosdn-openbaton-ha trigger - branch: master
+- trigger:
+    name: 'joid-os-nosdn-openbaton-ha-baremetal-master-trigger'
+    triggers:
+      - timed: '5 23 * * *'
 # os-ocl-nofeature-ha trigger - branch: euphrates
 - trigger:
     name: 'joid-os-ocl-nofeature-ha-baremetal-euphrates-trigger'
     triggers:
       - timed: '5 13 * * *'
-# os-ocl-nofeature-ha trigger - branch: euphrates
+# os-ocl-nofeature-ha trigger - branch: master
+- trigger:
+    name: 'joid-os-ocl-nofeature-ha-baremetal-master-trigger'
+    triggers:
+      - timed: '5 13 * * *'
+# os-ocl-nofeature-noha trigger - branch: euphrates
 - trigger:
     name: 'joid-os-ocl-nofeature-noha-baremetal-euphrates-trigger'
     triggers:
-      - timed: '5 17 * * *'
+      - timed: '5 16 * * *'
+# os-ocl-nofeature-noha trigger - branch: master
+- trigger:
+    name: 'joid-os-ocl-nofeature-noha-baremetal-master-trigger'
+    triggers:
+      - timed: '5 16 * * *'
+# k8-canal-lb-noha trigger - branch: euphrates
+- trigger:
+    name: 'joid-k8-canal-lb-noha-baremetal-euphrates-trigger'
+    triggers:
+      - timed: '5 14 * * *'
+# k8-canal-lb-noha trigger - branch: master
+- trigger:
+    name: 'joid-k8-canal-lb-noha-baremetal-master-trigger'
+    triggers:
+      - timed: '5 14 * * *'
+# k8-multus-lb-noha trigger - branch: euphrates
+- trigger:
+    name: 'joid-k8-multus-lb-noha-baremetal-euphrates-trigger'
+    triggers:
+      - timed: '5 18 * * *'
+# k8-multus-lb-noha trigger - branch: master
+- trigger:
+    name: 'joid-k8-multus-lb-noha-baremetal-master-trigger'
+    triggers:
+      - timed: '5 18 * * *'
+# k8-nosdn-lb_ceph-noha trigger - branch: euphrates
+- trigger:
+    name: 'joid-k8-nosdn-lb_ceph-noha-baremetal-euphrates-trigger'
+    triggers:
+      - timed: '5 18 * * *'
+# k8-nosdn-lb_ceph-noha trigger - branch: master
+- trigger:
+    name: 'joid-k8-nosdn-lb_ceph-noha-baremetal-master-trigger'
+    triggers:
+      - timed: '5 18 * * *'
+# os-ovn-nofeature-ha trigger - branch: euphrates
+- trigger:
+    name: 'joid-os-ovn-nofeature-ha-baremetal-euphrates-trigger'
+    triggers:
+      - timed: '5 19 * * *'
+# os-ovn-nofeature-ha trigger - branch: master
+- trigger:
+    name: 'joid-os-ovn-nofeature-ha-baremetal-master-trigger'
+    triggers:
+      - timed: '5 19 * * *'
index aae6e02..ad497e9 100644 (file)
@@ -8,7 +8,7 @@
       - master:
           branch: '{stream}'
           gs-pathname: ''
-          disabled: true
+          disabled: false
       - euphrates:
           branch: 'stable/{stream}'
           gs-pathname: '/{stream}'
index b317c05..40fcf68 100644 (file)
@@ -1,69 +1,8 @@
 ---
-###################################################
-# All the jobs except verify have been removed!
-# They will only be enabled on request by projects!
-###################################################
 - project:
     name: models
 
     project: '{name}'
 
     jobs:
-      - 'models-verify-{stream}'
-
-    stream:
-      - master:
-          branch: '{stream}'
-          gs-pathname: ''
-          disabled: false
-      - euphrates:
-          branch: 'stable/{stream}'
-          gs-pathname: '/{stream}'
-          disabled: false
-
-- job-template:
-    name: 'models-verify-{stream}'
-
-    disabled: '{obj:disabled}'
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - 'opnfv-build-ubuntu-defaults'
-
-    scm:
-      - git-scm-gerrit
-
-    triggers:
-      - gerrit:
-          server-name: 'gerrit.opnfv.org'
-          trigger-on:
-            - patchset-created-event:
-                exclude-drafts: 'false'
-                exclude-trivial-rebase: 'false'
-                exclude-no-code-change: 'false'
-            - draft-published-event
-            - comment-added-contains-event:
-                comment-contains-value: 'recheck'
-            - comment-added-contains-event:
-                comment-contains-value: 'reverify'
-          projects:
-            - project-compare-type: 'ANT'
-              project-pattern: '{project}'
-              branches:
-                - branch-compare-type: 'ANT'
-                  branch-pattern: '**/{branch}'
-              disable-strict-forbidden-file-verification: 'true'
-              forbidden-file-paths:
-                - compare-type: ANT
-                  pattern: 'docs/**|.gitignore'
-
-    builders:
-      - shell: |
-          #!/bin/bash
-          set -o errexit
-          set -o nounset
-          set -o pipefail
-
-          # shellcheck -f tty tests/*.sh
+      - '{project}-verify-basic'
index b72eeaa..7980293 100644 (file)
@@ -5,7 +5,7 @@
     project: '{name}'
 
     jobs:
-      - 'netready-verify-{stream}'
+      - '{project}-verify-{stream}'
       - 'netready-build-gluon-packages-daily-{stream}'
 
     stream:
           gs-pathname: ''
           disabled: false
 
-- job-template:
-    name: 'netready-verify-{stream}'
-
-    disabled: '{obj:disabled}'
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-    scm:
-      - git-scm-gerrit
-
-    triggers:
-      - gerrit:
-          server-name: 'gerrit.opnfv.org'
-          trigger-on:
-            - patchset-created-event:
-                exclude-drafts: 'false'
-                exclude-trivial-rebase: 'false'
-                exclude-no-code-change: 'false'
-            - draft-published-event
-            - comment-added-contains-event:
-                comment-contains-value: 'recheck'
-            - comment-added-contains-event:
-                comment-contains-value: 'reverify'
-          projects:
-            - project-compare-type: 'ANT'
-              project-pattern: '{project}'
-              branches:
-                - branch-compare-type: 'ANT'
-                  branch-pattern: '**/{branch}'
-              disable-strict-forbidden-file-verification: 'true'
-              forbidden-file-paths:
-                - compare-type: ANT
-                  pattern: 'docs/**'
-
-    builders:
-      - shell: |
-          echo "Nothing to verify!"
-
-
 - job-template:
     name: 'netready-build-gluon-packages-daily-{stream}'
 
index fb6263d..a04d7f1 100644 (file)
@@ -8,56 +8,4 @@
     project: '{name}'
 
     jobs:
-      - 'octopus-verify-{stream}'
-
-    stream:
-      - master:
-          branch: '{stream}'
-          gs-pathname: ''
-          disabled: false
-      - euphrates:
-          branch: 'stable/{stream}'
-          gs-pathname: '/{stream}'
-          disabled: false
-
-- job-template:
-    name: 'octopus-verify-{stream}'
-
-    disabled: '{obj:disabled}'
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - 'opnfv-build-ubuntu-defaults'
-
-    scm:
-      - git-scm-gerrit
-
-    triggers:
-      - gerrit:
-          server-name: 'gerrit.opnfv.org'
-          trigger-on:
-            - patchset-created-event:
-                exclude-drafts: 'false'
-                exclude-trivial-rebase: 'false'
-                exclude-no-code-change: 'false'
-            - draft-published-event
-            - comment-added-contains-event:
-                comment-contains-value: 'recheck'
-            - comment-added-contains-event:
-                comment-contains-value: 'reverify'
-          projects:
-            - project-compare-type: 'ANT'
-              project-pattern: '{project}'
-              branches:
-                - branch-compare-type: 'ANT'
-                  branch-pattern: '**/{branch}'
-              disable-strict-forbidden-file-verification: 'true'
-              forbidden-file-paths:
-                - compare-type: ANT
-                  pattern: 'docs/**|.gitignore'
-
-    builders:
-      - shell: |
-          echo "Nothing to verify!"
+      - '{project}-verify-basic'
index 5d7526b..f0841a3 100644 (file)
@@ -11,7 +11,7 @@
       stream: master
       branch: '{stream}'
       gs-pathname: ''
-      disabled: false
+      disabled: true
 
     #####################################
     # pod definitions
index ad93d46..97c9a43 100644 (file)
@@ -11,7 +11,7 @@
       - master:
           branch: '{stream}'
           gs-pathname: ''
-          disabled: false
+          disabled: true
 
     #####################################
     # patch verification phases
index 3dfb9d1..908ddba 100644 (file)
@@ -69,7 +69,7 @@
             notbuilt: true
 
     builders:
-      - check-bash-syntax
+      - lint-bash-code
 
 - job-template:
     name: 'opnfvdocs-merge-shellcheck-{stream}'
                   branch-pattern: '**/{branch}'
 
     builders:
-      - check-bash-syntax
+      - lint-bash-code
diff --git a/jjb/pharos/check-jinja2.yml b/jjb/pharos/check-jinja2.yml
new file mode 100644 (file)
index 0000000..7251d3f
--- /dev/null
@@ -0,0 +1,95 @@
+---
+########################
+# Job configuration to validate jinja2 files
+########################
+- project:
+
+    name: validate-pdf-templates
+
+    project: 'pharos'
+
+    jobs:
+      - 'validate-pdf-jinja2-templates-{stream}'
+
+    stream:
+      - master:
+          branch: '{stream}'
+          disabled: false
+      - euphrates:
+          branch: 'stable/{stream}'
+          disabled: false
+
+########################
+# job templates
+########################
+
+- job-template:
+    name: 'validate-pdf-jinja2-templates-{stream}'
+
+    disabled: '{obj:disabled}'
+
+    concurrent: true
+
+    parameters:
+      - project-parameter:
+          project: $GERRIT_PROJECT
+          branch: '{branch}'
+      - node:
+          name: SLAVE_NAME
+          description: Slave to execute jinja2 template test
+          default-slaves:
+            - lf-build1
+          allowed-multiselect: true
+          ignore-offline-nodes: true
+
+    scm:
+      - git-scm-gerrit
+
+    triggers:
+      - gerrit:
+          server-name: 'gerrit.opnfv.org'
+          trigger-on:
+            - patchset-created-event:
+                exclude-drafts: 'false'
+                exclude-trivial-rebase: 'false'
+                exclude-no-code-change: 'false'
+            - draft-published-event
+            - comment-added-contains-event:
+                comment-contains-value: 'recheck'
+            - comment-added-contains-event:
+                comment-contains-value: 'reverify'
+          projects:
+            - project-compare-type: 'REG_EXP'
+              project-pattern: '{project}'
+              branches:
+                - branch-compare-type: 'ANT'
+                  branch-pattern: '**/{branch}'
+              file-paths:
+                - compare-type: ANT
+                  pattern: '**/*.yaml'
+                - compare-type: ANT
+                  pattern: 'config/installers/**/*.j2'
+                - compare-type: ANT
+                  pattern: 'check-jinja2.sh'
+                - compare-type: ANT
+                  pattern: 'check-schema.sh'
+          skip-vote:
+            successful: false
+            failed: false
+            unstable: false
+            notbuilt: false
+
+    builders:
+      - check-pdf-jinja
+      - check-pdf-schema
+
+- builder:
+    name: check-pdf-jinja
+    builders:
+      - shell: |
+          $WORKSPACE/config/utils/check-jinja2.sh
+- builder:
+    name: check-pdf-schema
+    builders:
+      - shell: |
+          $WORKSPACE/config/utils/check-schema.sh
index 7af4f0f..acf1488 100644 (file)
@@ -1,8 +1,4 @@
 ---
-###################################################
-# All the jobs except verify have been removed!
-# They will only be enabled on request by projects!
-###################################################
 - project:
     name: pharos
 
       - '{name}'
       - '{name}-tools'
 
+    disabled: false
+
     jobs:
-      - '{project}-verify-{stream}'
+      - '{project}-verify-basic'
+      - 'backup-pharos-dashboard'
+      - 'deploy-pharos-dashboard'
+
+- job-template:
+    name: 'backup-pharos-dashboard'
+
+    disabled: '{obj:disabled}'
+
+    parameters:
+      - 'pharos-dashboard-defaults'
+
+    triggers:
+      - timed: '@daily'
 
-    stream:
-      - master:
-          branch: '{stream}'
-          gs-pathname: ''
-          disabled: false
-      - euphrates:
-          branch: 'stable/{stream}'
-          gs-pathname: '/{stream}'
-          disabled: false
+    builders:
+      - shell: |
+          BACKUP_DIR=$HOME/backups/
+          TMP_DIR=$HOME/tmp/
+          mkdir -p $BACKUP_DIR
+          echo "-- $(date +%Y%m%d) --"
+          echo "Backing up Pharos Dashboard data..."
+          sudo docker run --rm \
+            -v pharos-data:/pharos-data:ro \
+            -v $TMP_DIR:/backup \
+            alpine \
+            tar -czf /backup/pharos-dashboard-db-$(date +%Y%m%d).tar.gz -C /pharos-data ./
+          sudo mv $TMP_DIR/pharos-dashboard-db-$(date +%Y%m%d).tar.gz $BACKUP_DIR
+          sudo chown $USER:$USER $BACKUP_DIR/pharos-dashboard-db-$(date +%Y%m%d).tar.gz
+          echo "...complete"
 
 - job-template:
-    name: '{project}-verify-{stream}'
+    name: 'deploy-pharos-dashboard'
 
     disabled: '{obj:disabled}'
 
     parameters:
       - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - 'opnfv-build-ubuntu-defaults'
+          project: 'pharos-tools'
+          branch: 'master'
+      - 'pharos-dashboard-defaults'
 
     scm:
       - git-scm-gerrit
       - gerrit:
           server-name: 'gerrit.opnfv.org'
           trigger-on:
-            - patchset-created-event:
-                exclude-drafts: 'false'
-                exclude-trivial-rebase: 'false'
-                exclude-no-code-change: 'false'
-            - draft-published-event
-            - comment-added-contains-event:
-                comment-contains-value: 'recheck'
+            - ref-updated-event
             - comment-added-contains-event:
-                comment-contains-value: 'reverify'
+                comment-contains-value: '^redeploy$'
           projects:
             - project-compare-type: 'ANT'
-              project-pattern: '{project}'
+              project-pattern: 'pharos-tools'
               branches:
                 - branch-compare-type: 'ANT'
-                  branch-pattern: '**/{branch}'
-              disable-strict-forbidden-file-verification: 'true'
-              forbidden-file-paths:
-                - compare-type: ANT
-                  pattern: 'docs/**|.gitignore'
+                  branch-pattern: '**/master'
 
     builders:
       - shell: |
-          echo "Nothing to verify!"
+          cp $HOME/config.env $WORKSPACE/dashboard
+          cd $WORKSPACE/dashboard
+          sudo docker-compose build
+          sudo docker-compose up -d
index 5e6e834..b686d95 100644 (file)
@@ -1,64 +1,8 @@
 ---
-###################################################
-# All the jobs except verify have been removed!
-# They will only be enabled on request by projects!
-###################################################
 - project:
     name: prediction
 
     project: '{name}'
 
     jobs:
-      - 'prediction-verify-{stream}'
-
-    stream:
-      - master:
-          branch: '{stream}'
-          gs-pathname: ''
-          disabled: false
-      - euphrates:
-          branch: 'stable/{stream}'
-          gs-pathname: '/{stream}'
-          disabled: false
-
-- job-template:
-    name: 'prediction-verify-{stream}'
-
-    disabled: '{obj:disabled}'
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - 'opnfv-build-ubuntu-defaults'
-
-    scm:
-      - git-scm-gerrit
-
-    triggers:
-      - gerrit:
-          server-name: 'gerrit.opnfv.org'
-          trigger-on:
-            - patchset-created-event:
-                exclude-drafts: 'false'
-                exclude-trivial-rebase: 'false'
-                exclude-no-code-change: 'false'
-            - draft-published-event
-            - comment-added-contains-event:
-                comment-contains-value: 'recheck'
-            - comment-added-contains-event:
-                comment-contains-value: 'reverify'
-          projects:
-            - project-compare-type: 'ANT'
-              project-pattern: '{project}'
-              branches:
-                - branch-compare-type: 'ANT'
-                  branch-pattern: '**/{branch}'
-              disable-strict-forbidden-file-verification: 'true'
-              forbidden-file-paths:
-                - compare-type: ANT
-                  pattern: 'docs/**|.gitignore'
-
-    builders:
-      - shell: |
-          echo "Nothing to verify!"
+      - '{project}-verify-basic'
index 3f1d9e2..6e6fb54 100644 (file)
@@ -1,64 +1,8 @@
 ---
-###################################################
-# All the jobs except verify have been removed!
-# They will only be enabled on request by projects!
-###################################################
 - project:
     name: promise
 
     project: '{name}'
 
     jobs:
-      - 'promise-verify-{stream}'
-
-    stream:
-      - master:
-          branch: '{stream}'
-          gs-pathname: ''
-          disabled: false
-      - euphrates:
-          branch: 'stable/{stream}'
-          gs-pathname: '/{stream}'
-          disabled: false
-
-- job-template:
-    name: 'promise-verify-{stream}'
-
-    disabled: '{obj:disabled}'
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - 'opnfv-build-ubuntu-defaults'
-
-    scm:
-      - git-scm-gerrit
-
-    triggers:
-      - gerrit:
-          server-name: 'gerrit.opnfv.org'
-          trigger-on:
-            - patchset-created-event:
-                exclude-drafts: 'false'
-                exclude-trivial-rebase: 'false'
-                exclude-no-code-change: 'false'
-            - draft-published-event
-            - comment-added-contains-event:
-                comment-contains-value: 'recheck'
-            - comment-added-contains-event:
-                comment-contains-value: 'reverify'
-          projects:
-            - project-compare-type: 'ANT'
-              project-pattern: '{project}'
-              branches:
-                - branch-compare-type: 'ANT'
-                  branch-pattern: '**/{branch}'
-              disable-strict-forbidden-file-verification: 'true'
-              forbidden-file-paths:
-                - compare-type: ANT
-                  pattern: 'docs/**|.gitignore'
-
-    builders:
-      - shell: |
-          echo "Nothing to verify!"
+      - '{project}-verify-basic'
index db2fb8b..103069f 100644 (file)
@@ -7,7 +7,7 @@
     name: qtip-experimental-jobs
     project: qtip
     jobs:
-      - 'qtip-experimental-{stream}'
+      - 'qtip-experimental-{pod}-{stream}'
     stream:
       - master:
           branch: '{stream}'
           gs-pathname: '/{stream}'
           disabled: false
 
+    pod:
+      - zte-virtual6:
+          installer: fuel
+          pod: zte-virtual6
+
 ################################
 ## job templates
 #################################
 
 - job-template:
-    name: 'qtip-experimental-{stream}'
+    name: 'qtip-experimental-{pod}-{stream}'
 
     disabled: '{obj:disabled}'
 
@@ -31,9 +36,8 @@
       - project-parameter:
           project: '{project}'
           branch: '{branch}'
-      # Pin the tests on zte-pod6 with MCP deployment
-      - fuel-defaults
-      - zte-virtual6-defaults
+      - '{installer}-defaults'
+      - '{pod}-defaults'
     scm:
       - git-scm-gerrit
 
index 07f2431..d9d3e72 100644 (file)
     # JOB LIST
     # -------------------------------
     jobs:
-      - 'qtip-{qpi}-{installer}-{stream}'
+      - 'qtip-{qpi}-{installer}-{pod}-{stream}'
 
 ################################
 # job templates
 ################################
 - job-template:
-    name: 'qtip-{qpi}-{installer}-{stream}'
+    name: 'qtip-{qpi}-{installer}-{pod}-{stream}'
     disabled: false
 
     parameters:
@@ -89,7 +89,7 @@
 
     wrappers:
       - timeout:
-          timeout: 15
+          timeout: 30
           abort: true
       - timestamps
 
index 047d7f2..8698fa0 100644 (file)
@@ -8,8 +8,6 @@
     project: qtip
     jobs:
       - 'qtip-verify-{stream}'
-      - 'qtip-verify-notebook-{stream}'
-      - 'qtip-merged-notebook-{stream}'
     stream:
       - master:
           branch: '{stream}'
@@ -67,7 +65,8 @@
       - publish-coverage
       - email-jenkins-admins-on-failure
 
-# upload juypter notebook to artifacts for review
+# Upload Jupyter notebook to artifacts for review
+# TODO(yujunz): deal with *.ipynb deletion
 - job-template:
     name: 'qtip-verify-notebook-{stream}'
 
           name: GS_URL
           default: '$GS_BASE{gs-pathname}'
           description: "Directory where the build artifact will be located upon the completion of the build."
-      - string:
-          name: GERRIT_REFSPEC
-          default: 'refs/heads/{branch}'
-          description: "JJB configured GERRIT_REFSPEC parameter"
 
     scm:
       - git-scm
 
           mkdir -p $local_path
 
-          git diff HEAD~1 --name-only | grep -E ".+\.ipynb$" | xargs -I '{}' cp '{}' $local_path
+          git diff HEAD~1 --name-status | grep -E "[AM]\t.+\.ipynb$" | awk '{print $2}' \
+            | xargs -I '{}' cp '{}' $local_path
           gsutil -m cp -r "$local_path" "gs://$gs_base/"
 
           echo "Document link(s):" >> gerrit_comment.txt
index afeb358..6bd952a 100644 (file)
     builders:
       - mongodb-backup
 
+    publishers:
+      - email:
+          recipients: serena.feng.711@gmail.com
+          notify-every-unstable-build: true
+          send-to-individuals: true
+
 - job-template:
     name: '{module}-verify-{stream}'
 
diff --git a/jjb/releng/compass4nfv-docker.yml b/jjb/releng/compass4nfv-docker.yml
new file mode 100644 (file)
index 0000000..7f14224
--- /dev/null
@@ -0,0 +1,174 @@
+---
+##############################################
+# job configuration for docker build and push
+##############################################
+- project:
+
+    name: compass-docker
+
+    project: compass-containers
+
+    stream:
+      - master:
+          branch: '{stream}'
+          disabled: false
+      - euphrates:
+          branch: 'stable/{stream}'
+          disabled: true
+
+    arch_tag:
+      - 'amd64':
+          slave_label: 'opnfv-build-ubuntu'
+
+    # yamllint disable rule:key-duplicates
+    image:
+      - 'tasks'
+      - 'cobbler'
+      - 'db'
+      - 'deck'
+      - 'tasks-base'
+
+    # settings for jobs run in multijob phases
+    build-job-settings: &build-job-settings
+      current-parameters: false
+      git-revision: true
+      node-parameters: false
+      predefined-parameters: |
+        PUSH_IMAGE=$PUSH_IMAGE
+        COMMIT_ID=$COMMIT_ID
+        GERRIT_REFNAME=$GERRIT_REFNAME
+        DOCKERFILE=$DOCKERFILE
+      kill-phase-on: FAILURE
+      abort-all-jobs: false
+
+    # yamllint enable rule:key-duplicates
+    jobs:
+      - "compass-docker-{stream}"
+      - "compass-{image}-build-{arch_tag}-{stream}"
+
+########################
+# job templates
+########################
+- job-template:
+    name: 'compass-docker-{stream}'
+
+    project-type: multijob
+
+    disabled: '{obj:disabled}'
+
+    parameters:
+      - compass-job-parameters:
+          project: '{project}'
+          branch: '{branch}'
+          slave_label: 'opnfv-build-ubuntu'
+          arch_tag: 'amd64'
+
+    properties:
+      - throttle:
+          max-per-node: 1
+          option: 'project'
+
+    scm:
+      - git-scm
+
+    triggers:
+      - pollscm:
+          cron: "*/30 * * * *"
+
+    builders:
+      - multijob:
+          name: 'build compass-tasks-base images'
+          execution-type: PARALLEL
+          projects:
+            - name: 'compass-tasks-base-build-amd64-{stream}'
+              <<: *build-job-settings
+      - multijob:
+          name: 'build all compass images'
+          condition: SUCCESSFUL
+          execution-type: PARALLEL
+          projects:
+            - name: 'compass-cobbler-build-amd64-{stream}'
+              <<: *build-job-settings
+            - name: 'compass-db-build-amd64-{stream}'
+              <<: *build-job-settings
+            - name: 'compass-deck-build-amd64-{stream}'
+              <<: *build-job-settings
+            - name: 'compass-tasks-build-amd64-{stream}'
+              <<: *build-job-settings
+
+    publishers:
+      - 'compass-amd64-recipients'
+
+- job-template:
+    name: 'compass-{image}-build-{arch_tag}-{stream}'
+    disabled: '{obj:disabled}'
+    parameters:
+      - compass-job-parameters:
+          project: '{project}'
+          branch: '{branch}'
+          slave_label: '{slave_label}'
+          arch_tag: '{arch_tag}'
+      - string:
+          name: DOCKER_REPO_NAME
+          default: "opnfv/compass-{image}"
+          description: "Dockerhub repo to be pushed to."
+      - string:
+          name: DOCKER_DIR
+          default: "compass-{image}"
+          description: "Directory containing files needed by the Dockerfile"
+    scm:
+      - git-scm
+    builders:
+      - shell:
+          !include-raw-escape: ./opnfv-docker.sh
+
+# parameter macro
+- parameter:
+    name: compass-job-parameters
+    parameters:
+      - project-parameter:
+          project: '{project}'
+          branch: '{branch}'
+      - label:
+          name: SLAVE_LABEL
+          default: '{slave_label}'
+          description: 'Slave label on Jenkins'
+      - string:
+          name: GIT_BASE
+          default: https://gerrit.opnfv.org/gerrit/$PROJECT
+          description: 'Git URL to use on this Jenkins Slave'
+      - string:
+          name: PUSH_IMAGE
+          default: "true"
+          description: "To enable/disable pushing the image to Dockerhub."
+      - string:
+          name: COMMIT_ID
+          default: ""
+          description: "commit id to make a snapshot docker image"
+      - string:
+          name: GERRIT_REFNAME
+          default: ""
+          description: "Docker tag to be built, e.g. refs/tags/5.0.0, refs/tags/opnfv-5.0.0, refs/tags/5.0.RC1"
+      - string:
+          name: DOCKERFILE
+          default: "Dockerfile"
+          description: "Dockerfile to use for creating the image."
+      - string:
+          name: ARCH_TAG
+          default: ""
+          description: "If set, this value will be added to the docker image tag as a prefix"
+      - string:
+          name: PROJECT
+          default: "{project}"
+          description: "Project name used to enable job conditions"
+
+# publisher macros
+- publisher:
+    name: 'compass-amd64-recipients'
+    publishers:
+      - email:
+          recipients: >
+            chigang@huawei.com
+            huangxiangyui5@huawei.com
+            xueyifei@huawei.com
+            wutianwei1@huawei.com
index acf381f..584dc30 100644 (file)
@@ -41,7 +41,7 @@
       predefined-parameters: |
         PUSH_IMAGE=$PUSH_IMAGE
         COMMIT_ID=$COMMIT_ID
-        RELEASE_VERSION=$RELEASE_VERSION
+        GERRIT_REFNAME=$GERRIT_REFNAME
         DOCKERFILE=$DOCKERFILE
       kill-phase-on: FAILURE
       abort-all-jobs: false
       git-revision: true
       node-parameters: false
       predefined-parameters:
-        RELEASE_VERSION=$RELEASE_VERSION
+        GERRIT_REFNAME=$GERRIT_REFNAME
       kill-phase-on: FAILURE
       abort-all-jobs: false
 
     # yamllint enable rule:key-duplicates
     jobs:
       - "functest-docker-{stream}"
-      - "functest-{image}-build-{arch_tag}-{stream}"
-      - "functest-{image}-manifest-{stream}"
+      - "functest-{image}-docker-build-{arch_tag}-{stream}"
+      - "functest-{image}-docker-manifest-{stream}"
 
 ########################
 # job templates
     triggers:
       - pollscm:
           cron: "*/30 * * * *"
+      - gerrit-trigger-tag-created:
+          project: '{project}'
 
     builders:
       - multijob:
           name: 'build functest-core images'
           execution-type: PARALLEL
           projects:
-            - name: 'functest-core-build-amd64-{stream}'
+            - name: 'functest-core-docker-build-amd64-{stream}'
               <<: *build-job-settings
-            - name: 'functest-core-build-arm64-{stream}'
+            - name: 'functest-core-docker-build-arm64-{stream}'
               <<: *build-job-settings
       - multijob:
           name: 'publish functest-core manifests'
           execution-type: PARALLEL
           projects:
-            - name: 'functest-core-manifest-{stream}'
+            - name: 'functest-core-docker-manifest-{stream}'
               <<: *manifest-job-settings
       - multijob:
           name: 'build all functest images'
           condition: SUCCESSFUL
           execution-type: PARALLEL
           projects:
-            - name: 'functest-healthcheck-build-amd64-{stream}'
+            - name: 'functest-healthcheck-docker-build-amd64-{stream}'
+              <<: *build-job-settings
+            - name: 'functest-healthcheck-docker-build-arm64-{stream}'
               <<: *build-job-settings
-            - name: 'functest-healthcheck-build-arm64-{stream}'
+            - name: 'functest-features-docker-build-amd64-{stream}'
               <<: *build-job-settings
-            - name: 'functest-features-build-amd64-{stream}'
+            - name: 'functest-features-docker-build-arm64-{stream}'
               <<: *build-job-settings
-            - name: 'functest-features-build-arm64-{stream}'
+            - name: 'functest-components-docker-build-amd64-{stream}'
               <<: *build-job-settings
-            - name: 'functest-components-build-amd64-{stream}'
+            - name: 'functest-components-docker-build-arm64-{stream}'
               <<: *build-job-settings
-            - name: 'functest-components-build-arm64-{stream}'
+            - name: 'functest-parser-docker-build-amd64-{stream}'
               <<: *build-job-settings
-            - name: 'functest-parser-build-amd64-{stream}'
+            - name: 'functest-parser-docker-build-arm64-{stream}'
               <<: *build-job-settings
-            - name: 'functest-parser-build-arm64-{stream}'
+            - name: 'functest-smoke-docker-build-amd64-{stream}'
               <<: *build-job-settings
-            - name: 'functest-smoke-build-amd64-{stream}'
+            - name: 'functest-smoke-docker-build-arm64-{stream}'
               <<: *build-job-settings
-            - name: 'functest-smoke-build-arm64-{stream}'
+            - name: 'functest-vnf-docker-build-amd64-{stream}'
               <<: *build-job-settings
-            - name: 'functest-vnf-build-amd64-{stream}'
+            - name: 'functest-vnf-docker-build-arm64-{stream}'
               <<: *build-job-settings
-            - name: 'functest-restapi-build-amd64-{stream}'
+            - name: 'functest-restapi-docker-build-amd64-{stream}'
+              <<: *build-job-settings
+            - name: 'functest-restapi-docker-build-arm64-{stream}'
               <<: *build-job-settings
       - multijob:
           name: 'publish all manifests'
           condition: SUCCESSFUL
           execution-type: PARALLEL
           projects:
-            - name: 'functest-healthcheck-manifest-{stream}'
+            - name: 'functest-healthcheck-docker-manifest-{stream}'
               <<: *manifest-job-settings
-            - name: 'functest-features-manifest-{stream}'
+            - name: 'functest-features-docker-manifest-{stream}'
               <<: *manifest-job-settings
-            - name: 'functest-components-manifest-{stream}'
+            - name: 'functest-components-docker-manifest-{stream}'
               <<: *manifest-job-settings
-            - name: 'functest-parser-manifest-{stream}'
+            - name: 'functest-parser-docker-manifest-{stream}'
               <<: *manifest-job-settings
-            - name: 'functest-smoke-manifest-{stream}'
+            - name: 'functest-smoke-docker-manifest-{stream}'
               <<: *manifest-job-settings
-            - name: 'functest-vnf-manifest-{stream}'
+            - name: 'functest-vnf-docker-manifest-{stream}'
               <<: *manifest-job-settings
-            - name: 'functest-restapi-manifest-{stream}'
+            - name: 'functest-restapi-docker-manifest-{stream}'
               <<: *manifest-job-settings
 
     publishers:
       - 'functest-arm64-recipients'
 
 - job-template:
-    name: 'functest-{image}-build-{arch_tag}-{stream}'
+    name: 'functest-{image}-docker-build-{arch_tag}-{stream}'
     disabled: '{obj:disabled}'
     parameters:
       - job-parameters:
           exit $?
 
 - job-template:
-    name: 'functest-{image}-manifest-{stream}'
+    name: 'functest-{image}-docker-manifest-{stream}'
 
     parameters:
       - project-parameter:
           name: PROJECT
           default: "{project}"
           description: "Project name used to enable job conditions"
-      - string:
-          name: RELEASE_VERSION
-          default: ""
-          description: "Docker tag to be built, e.g. 5.0.0, opnfv-5.0.0, 5.0.RC1"
       - string:
           name: GIT_BASE
           default: https://gerrit.opnfv.org/gerrit/$PROJECT
           *)
               tag="{stream}" ;;
           esac
-          case "{image}" in
-          "vnf"|"restapi")
-              sudo manifest-tool push from-args \
-                  --platforms linux/amd64 \
-                  --template opnfv/functest-{image}:ARCH-$tag \
-                  --target opnfv/functest-{image}:$tag ;;
-          *)
-              sudo manifest-tool push from-args \
-                  --platforms linux/amd64,linux/arm64 \
-                  --template opnfv/functest-{image}:ARCH-$tag \
-                  --target opnfv/functest-{image}:$tag ;;
-          esac
+          sudo manifest-tool push from-args \
+              --platforms linux/amd64,linux/arm64 \
+              --template opnfv/functest-{image}:ARCH-$tag \
+              --target opnfv/functest-{image}:$tag
           exit $?
 
 # parameter macro
           default: ""
           description: "commit id to make a snapshot docker image"
       - string:
-          name: RELEASE_VERSION
+          name: GERRIT_REFNAME
           default: ""
-          description: "Docker tag to be built, e.g. 5.0.0, opnfv-5.0.0, 5.0.RC1"
+          description: "Docker tag to be built, e.g. refs/tags/5.0.0, refs/tags/opnfv-5.0.0, refs/tags/5.0.RC1"
       - string:
           name: DOCKERFILE
           default: "Dockerfile"
index d1566b4..fbf5658 100644 (file)
@@ -19,7 +19,6 @@
       receivers: >
           cristina.pauna@enea.com
           alexandru.avadanii@enea.com
-          alexandru.nemes@enea.com
     storperf-arm-receivers: &storperf-arm-receivers
       receivers: >
           cristina.pauna@enea.com
@@ -29,8 +28,6 @@
       receivers: >
           cristina.pauna@enea.com
           alexandru.avadanii@enea.com
-          alexandru.nemes@enea.com
-          catalina.focsa@enea.com
           delia.popescu@enea.com
     other-receivers: &other-receivers
       receivers: ''
           default: "{docker_repo_name}"
           description: "Dockerhub repo to be pushed to."
       - string:
-          name: RELEASE_VERSION
+          name: GERRIT_REFNAME
           default: ""
-          description: "Release version, e.g. 1.0, 2.0, 3.0"
+          description: "Docker tag to be built, e.g. refs/tags/5.0.0, refs/tags/opnfv-5.0.0, refs/tags/5.0.RC1"
       - string:
           name: DOCKER_DIR
           default: "{dockerdir}"
     triggers:
       - pollscm:
           cron: "*/30 * * * *"
+      - gerrit-trigger-tag-created:
+          project: '{project}'
 
     publishers:
       - email:
index c179b1d..28e5b7c 100644 (file)
@@ -12,11 +12,41 @@ set -o nounset
 set -o pipefail
 
 
-
-echo "Starting opnfv-docker for $DOCKER_REPO_NAME ..."
+echo "Using Docker $(docker --version) on $NODE_NAME"
+echo "Starting Docker build for $DOCKER_REPO_NAME ..."
 echo "--------------------------------------------------------"
 echo
 
+function remove_containers_images()
+{
+    # Remove previous running containers if exist
+    if [[ -n "$(docker ps -a | grep $DOCKER_REPO_NAME)" ]]; then
+        echo "Removing existing $DOCKER_REPO_NAME containers..."
+        docker ps -a | grep $DOCKER_REPO_NAME | awk '{print $1}' | xargs docker rm -f
+        t=60
+        # Wait max 60 sec for containers to be removed
+        while [[ $t -gt 0 ]] && [[ -n "$(docker ps| grep $DOCKER_REPO_NAME)" ]]; do
+            sleep 1
+            let t=t-1
+        done
+    fi
+
+
+    # Remove existing images if exist
+    if [[ -n "$(docker images | grep $DOCKER_REPO_NAME)" ]]; then
+        echo "Docker images to remove:"
+        docker images | head -1 && docker images | grep $DOCKER_REPO_NAME
+        image_ids=($(docker images | grep $DOCKER_REPO_NAME | awk '{print $3}'))
+        for id in "${image_ids[@]}"; do
+            if [[ -n "$(docker images|grep $DOCKER_REPO_NAME|grep $id)" ]]; then
+                echo "Removing docker image $DOCKER_REPO_NAME:$id..."
+                docker rmi -f $id
+            fi
+        done
+    fi
+}
+
+
 count=30 # docker build jobs might take up to ~30 min
 while [[ -n `ps -ef| grep 'docker build' | grep $DOCKER_REPO_NAME | grep -v grep` ]]; do
     echo "Build or cleanup of $DOCKER_REPO_NAME in progress. Waiting..."
@@ -28,33 +58,12 @@ while [[ -n `ps -ef| grep 'docker build' | grep $DOCKER_REPO_NAME | grep -v grep
     fi
 done
 
-# Remove previous running containers if exist
-if [[ -n "$(docker ps -a | grep $DOCKER_REPO_NAME)" ]]; then
-    echo "Removing existing $DOCKER_REPO_NAME containers..."
-    docker ps -a | grep $DOCKER_REPO_NAME | awk '{print $1}' | xargs docker rm -f
-    t=60
-    # Wait max 60 sec for containers to be removed
-    while [[ $t -gt 0 ]] && [[ -n "$(docker ps| grep $DOCKER_REPO_NAME)" ]]; do
-        sleep 1
-        let t=t-1
-    done
-fi
-
+# Remove the existing containers and images before building
+remove_containers_images
 
-# Remove existing images if exist
-if [[ -n "$(docker images | grep $DOCKER_REPO_NAME)" ]]; then
-    echo "Docker images to remove:"
-    docker images | head -1 && docker images | grep $DOCKER_REPO_NAME
-    image_ids=($(docker images | grep $DOCKER_REPO_NAME | awk '{print $3}'))
-    for id in "${image_ids[@]}"; do
-        if [[ -n "$(docker images|grep $DOCKER_REPO_NAME|grep $id)" ]]; then
-            echo "Removing docker image $DOCKER_REPO_NAME:$id..."
-            docker rmi -f $id
-        fi
-    done
-fi
+DOCKER_PATH=$WORKSPACE/$DOCKER_DIR
 
-cd "$WORKSPACE/$DOCKER_DIR" || exit 1
+cd $DOCKER_PATH || exit 1
 HOST_ARCH="$(uname -m)"
 #If there is a patch for other arch then x86, apply the patch and
 #replace Dockerfile file
@@ -68,6 +77,9 @@ echo "Current branch: $BRANCH"
 
 BUILD_BRANCH=$BRANCH
 
+GERRIT_REFNAME=${GERRIT_REFNAME:-''}
+RELEASE_VERSION=${GERRIT_REFNAME##*/}
+
 if [[ "$BRANCH" == "master" ]]; then
     DOCKER_TAG="latest"
 elif [[ -n "${RELEASE_VERSION-}" ]]; then
@@ -100,7 +112,8 @@ echo "Building docker image: $DOCKER_REPO_NAME:$DOCKER_TAG"
 echo "--------------------------------------------------------"
 echo
 cmd="docker build --no-cache -t $DOCKER_REPO_NAME:$DOCKER_TAG --build-arg BRANCH=$BUILD_BRANCH
-    -f $DOCKERFILE ."
+    $ARCH_BUILD_ARG
+    -f $DOCKERFILE $DOCKER_PATH"
 
 echo ${cmd}
 ${cmd}
@@ -117,3 +130,6 @@ if [[ "$PUSH_IMAGE" == "true" ]]; then
     echo
     docker push $DOCKER_REPO_NAME:$DOCKER_TAG
 fi
+
+# Remove the existing containers and images after building
+remove_containers_images
index 8250bfe..317efc8 100644 (file)
           project: 'releng-anteater'
           <<: *master
           <<: *other-receivers
+      - 'barometer-collectd':
+          project: 'barometer'
+          dockerdir: 'docker/barometer-collectd'
+          <<: *master
+          <<: *other-receivers
+      - 'barometer-grafana':
+          project: 'barometer'
+          dockerdir: 'docker/barometer-grafana'
+          <<: *master
+          <<: *other-receivers
+      - 'barometer-influxdb':
+          project: 'barometer'
+          dockerdir: 'docker/barometer-influxdb'
+          <<: *master
+          <<: *other-receivers
+      - 'barometer-kafka':
+          project: 'barometer'
+          dockerdir: 'docker/barometer-kafka'
+          <<: *master
+          <<: *other-receivers
+      - 'barometer-ves':
+          project: 'barometer'
+          dockerdir: 'docker/barometer-ves'
+          <<: *master
+          <<: *other-receivers
       - 'bottlenecks':
           project: 'bottlenecks'
           <<: *master
@@ -57,6 +82,8 @@
           <<: *other-receivers
       - 'qtip':
           project: 'qtip'
+          dockerdir: '.'
+          dockerfile: 'docker/Dockerfile.local'
           <<: *master
           <<: *other-receivers
       - 'storperf-master':
           project: 'yardstick'
           <<: *master
           <<: *other-receivers
-      # projects with jobs for Danube
-      - 'qtip':
-          project: 'qtip'
-          <<: *euphrates
-          <<: *other-receivers
-      - 'yardstick':
-          project: 'yardstick'
-          <<: *euphrates
+      - 'functest-kubernetes':
+          project: 'functest-kubernetes'
+          <<: *master
           <<: *other-receivers
       # projects with jobs for euphrates
       - 'bottlenecks':
           project: 'bottlenecks'
           <<: *euphrates
           <<: *other-receivers
+      - 'nfvbench':
+          project: 'nfvbench'
+          <<: *euphrates
+          <<: *other-receivers
+      - 'qtip':
+          project: 'qtip'
+          <<: *euphrates
+          <<: *other-receivers
       - 'storperf-master':
           project: 'storperf'
           dockerdir: 'docker/storperf-master'
           arch_tag: 'x86_64'
           <<: *euphrates
           <<: *storperf-receivers
-      - 'nfvbench':
-          project: 'nfvbench'
+      - 'yardstick':
+          project: 'yardstick'
           <<: *euphrates
           <<: *other-receivers
+      # projects with jobs for danube
+      - 'dovetail':
+          project: 'dovetail'
+          <<: *danube
+          <<: *other-receivers
 
     # yamllint enable rule:key-duplicates
     jobs:
           default: ""
           description: "commit id to make a snapshot docker image"
       - string:
-          name: RELEASE_VERSION
+          name: GERRIT_REFNAME
           default: ""
-          description: "Docker tag to be built, e.g. 5.0.0, opnfv-5.0.0, 5.0.RC1"
+          description: "Docker tag to be built, e.g. refs/tags/5.0.0, refs/tags/opnfv-5.0.0, refs/tags/5.0.RC1"
       - string:
           name: DOCKERFILE
           default: "{dockerfile}"
     triggers:
       - pollscm:
           cron: "*/30 * * * *"
+      - gerrit-trigger-tag-created:
+          project: '{project}'
 
     publishers:
       - email:
index d566430..e9e929d 100644 (file)
 
     disabled: '{obj:disabled}'
 
+    concurrent: true
+
     parameters:
       - project-parameter:
           project: $GERRIT_PROJECT
           branch: '{branch}'
+      - node:
+          name: SLAVE_NAME
+          description: Slaves to execute lint
+          default-slaves:
+            - lf-build1
+          allowed-multiselect: true
+          ignore-offline-nodes: true
+
+    scm:
+      - git-scm-gerrit
+
+    triggers:
+      - gerrit:
+          server-name: 'gerrit.opnfv.org'
+          trigger-on:
+            - patchset-created-event:
+                exclude-drafts: 'false'
+                exclude-trivial-rebase: 'false'
+                exclude-no-code-change: 'false'
+            - draft-published-event
+            - comment-added-contains-event:
+                comment-contains-value: 'recheck'
+            - comment-added-contains-event:
+                comment-contains-value: 'reverify'
+          projects:
+            - project-compare-type: 'REG_EXP'
+              project-pattern: 'releng|doctor|pharos'
+              branches:
+                - branch-compare-type: 'ANT'
+                  branch-pattern: '**/{branch}'
+              file-paths:
+                - compare-type: ANT
+                  pattern: '**/*.py'
+                - compare-type: ANT
+                  pattern: '**/*.sh'
+                - compare-type: ANT
+                  pattern: '**/*.yml'
+                - compare-type: ANT
+                  pattern: '**/*.yaml'
+
+    builders:
+      - lint-all-code
+
+- job-template:
+    name: 'opnfv-pylint-verify-{stream}'
+
+    disabled: '{obj:disabled}'
+
+    concurrent: true
+
+    parameters:
+      - project-parameter:
+          project: $GERRIT_PROJECT
+          branch: '{branch}'
+      - node:
+          name: SLAVE_NAME
+          description: Slaves to execute pylint
+          default-slaves:
+            - lf-build1
+          allowed-multiselect: true
+          ignore-offline-nodes: true
 
     scm:
       - git-scm-gerrit
                 comment-contains-value: 'reverify'
           projects:
             - project-compare-type: 'REG_EXP'
-              project-pattern: 'functest|sdnvpn|qtip|daisy|sfc|escalator|releng'
+              project-pattern: 'functest|sdnvpn|qtip|daisy|sfc|escalator'
               branches:
                 - branch-compare-type: 'ANT'
                   branch-pattern: '**/{branch}'
                   pattern: '**/*.py'
 
     builders:
+      - lint-init
       - lint-python-code
-      - report-lint-result-to-gerrit
+      - lint-report
 
 - job-template:
     name: 'opnfv-yamllint-verify-{stream}'
                 comment-contains-value: 'reverify'
           projects:
             - project-compare-type: 'REG_EXP'
-              project-pattern: 'armband|fuel|octopus|pharos|releng|releng-anteater'
+              project-pattern: 'armband|fuel|octopus|releng-anteater'
               branches:
                 - branch-compare-type: 'ANT'
                   branch-pattern: '**/{branch}'
                   pattern: '**/*.yaml'
 
     builders:
+      - lint-init
       - lint-yaml-code
-      - report-lint-result-to-gerrit
+      - lint-report
index 2408c2a..6fa0aef 100644 (file)
@@ -61,6 +61,8 @@ echo "--------------------------------------"
 gsutil cp "$WORKSPACE/opnfv-archive-$DATE.tar.gz" \
     "gs://opnfv-archive/opnfv-archive-$DATE.tar.gz" 2>&1
 
+echo "https://storage.googleapis.com/opnfv-archive/opnfv-archive-$DATE.tar.gz" > archive-link.txt
+
 rm -f opnfv-archive-$DATE.tar.gz
 
 echo "Finished"
index 80cd08f..b12f663 100644 (file)
@@ -7,6 +7,7 @@
       - 'prune-docker-images'
       - 'archive-repositories'
       - 'check-status-of-slaves'
+      - 'ansible-build-server'
 
 ########################
 # job templates
           name: SLAVE_NAME
           description: Slaves to prune docker images
           default-slaves:
-            - arm-build2
+            - arm-build3
+            - arm-build4
+            - arm-build5
+            - arm-build6
+            - ericsson-build3
             - ericsson-build4
-            - ericsson-build5
             - lf-build2
           allowed-multiselect: true
           ignore-offline-nodes: true
     builders:
       - description-setter:
           description: "Built on $NODE_NAME"
+      # yamllint disable rule:line-length
       - shell: |
           #!/bin/bash
-
           (docker ps -q; docker ps -aq) | sort | uniq -u | xargs --no-run-if-empty docker rm
           docker images -f dangling=true -q | xargs --no-run-if-empty docker rmi
 
+
+    # yamllint enable rule:line-length
     triggers:
       - timed: '@midnight'
 
       - shell:
           !include-raw-escape: opnfv-repo-archiver.sh
 
+    publishers:
+      - email-ext:
+          content-type: 'text'
+          failure: false
+          always: true
+          body:
+            ${{FILE,path="archive-link.txt"}}
+          reply-to: >
+            helpdesk@opnfv.org
+          recipients: >
+            swinslow@linuxfoundation.org
+
+
 - job-template:
     name: 'check-status-of-slaves'
 
@@ -75,7 +94,7 @@
     parameters:
       - node:
           name: SLAVE_NAME
-          description: We don't want workspace wiped. so I just threw the script on the master
+          description: 'script lives on master node'
           default-slaves:
             - master
           allowed-multiselect: false
       - shell: |
           cd /opt/jenkins-ci/slavemonitor
           bash slave-monitor-0.1.sh | sort
+
+- job-template:
+    name: 'ansible-build-server'
+
+    project-type: freestyle
+
+    disabled: false
+    concurrent: true
+
+    parameters:
+      - node:
+          name: SLAVE_NAME
+          description: Build Servers
+          default-slaves:
+            - lf-build1
+            - lf-build2
+          allowed-multiselect: true
+          ignore-offline-nodes: true
+      - project-parameter:
+          project: releng
+          branch: master
+
+    scm:
+      - git-scm
+
+    triggers:
+      - timed: '@midnight'
+
+    builders:
+      - install-ansible
+      - run-ansible-build-server-playbook
+
+
+- builder:
+    name: install-ansible
+    builders:
+      - shell: |
+          # Install ansible here
+          if [ -f /etc/centos-release ] \
+          || [ -f /etc/redhat-release ] \
+          || [ -f /etc/system-release ]; then
+          sudo yum -y install ansible
+          fi
+          if [ -f /etc/debian_version ] \
+          || grep -qi ubuntu /etc/lsb-release \
+          || grep -qi ubuntu /etc/os-release; then
+          sudo apt-get -y install ansible
+          fi
+
+- builder:
+    name: run-ansible-build-server-playbook
+    builders:
+      - shell: |
+          # run playbook
+          sudo ansible-playbook -C -D -i \
+          $WORKSPACE/utils/build-server-ansible/inventory.ini \
+          $WORKSPACE/utils/build-server-ansible/main.yml
index c4b7c8d..ef99f5a 100644 (file)
           project: '{project}'
           branch: 'master'
 
+    properties:
+      - throttle:
+          max-per-node: 1
+          max-total: 1
+          option: 'project'
+
     scm:
       - git-scm
 
index bc3f393..a6c86c9 100644 (file)
@@ -15,7 +15,7 @@
       - master:
           branch: '{stream}'
           gs-pathname: ''
-          disabled: false
+          disabled: true
 
 - job-template:
     name: 'snaps-verify-{stream}'
index fc479b1..6e796cf 100644 (file)
@@ -1,64 +1,8 @@
 ---
-###################################################
-# All the jobs except verify have been removed!
-# They will only be enabled on request by projects!
-###################################################
 - project:
     name: stor4nfv
 
     project: '{name}'
 
     jobs:
-      - 'stor4nfv-verify-{stream}'
-
-    stream:
-      - master:
-          branch: '{stream}'
-          gs-pathname: ''
-          disabled: false
-      - euphrates:
-          branch: 'stable/{stream}'
-          gs-pathname: '/{stream}'
-          disabled: false
-
-- job-template:
-    name: 'stor4nfv-verify-{stream}'
-
-    disabled: '{obj:disabled}'
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - 'opnfv-build-ubuntu-defaults'
-
-    scm:
-      - git-scm-gerrit
-
-    triggers:
-      - gerrit:
-          server-name: 'gerrit.opnfv.org'
-          trigger-on:
-            - patchset-created-event:
-                exclude-drafts: 'false'
-                exclude-trivial-rebase: 'false'
-                exclude-no-code-change: 'false'
-            - draft-published-event
-            - comment-added-contains-event:
-                comment-contains-value: 'recheck'
-            - comment-added-contains-event:
-                comment-contains-value: 'reverify'
-          projects:
-            - project-compare-type: 'ANT'
-              project-pattern: '{project}'
-              branches:
-                - branch-compare-type: 'ANT'
-                  branch-pattern: '**/{branch}'
-              disable-strict-forbidden-file-verification: 'true'
-              forbidden-file-paths:
-                - compare-type: ANT
-                  pattern: 'docs/**|.gitignore'
-
-    builders:
-      - shell: |
-          echo "Nothing to verify!"
+      - '{project}-verify-basic'
index d2182cb..f8c5da2 100644 (file)
@@ -1,70 +1,8 @@
 ---
-###################################################
-# All the jobs except verify have been removed!
-# They will only be enabled on request by projects!
-###################################################
 - project:
     name: ves
 
     project: '{name}'
 
     jobs:
-      - 'ves-verify-{stream}'
-
-    stream:
-      - master:
-          branch: '{stream}'
-          gs-pathname: ''
-          disabled: false
-      - euphrates:
-          branch: 'stable/{stream}'
-          gs-pathname: '/{stream}'
-          disabled: false
-
-- job-template:
-    name: 'ves-verify-{stream}'
-
-    disabled: '{obj:disabled}'
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - 'opnfv-build-ubuntu-defaults'
-
-    scm:
-      - git-scm-gerrit
-
-    triggers:
-      - gerrit:
-          server-name: 'gerrit.opnfv.org'
-          trigger-on:
-            - patchset-created-event:
-                exclude-drafts: 'false'
-                exclude-trivial-rebase: 'false'
-                exclude-no-code-change: 'false'
-            - draft-published-event
-            - comment-added-contains-event:
-                comment-contains-value: 'recheck'
-            - comment-added-contains-event:
-                comment-contains-value: 'reverify'
-          projects:
-            - project-compare-type: 'ANT'
-              project-pattern: '{project}'
-              branches:
-                - branch-compare-type: 'ANT'
-                  branch-pattern: '**/{branch}'
-              disable-strict-forbidden-file-verification: 'true'
-              forbidden-file-paths:
-                - compare-type: ANT
-                  pattern: 'docs/**|.gitignore'
-
-    builders:
-      - shell: |
-          #!/bin/bash
-          set -o errexit
-          set -o nounset
-          set -o pipefail
-
-         # shellcheck -f tty tests/*.sh
-         # shellcheck -f tty utils/*.sh
+      - '{project}-verify-basic'
index 7f21032..b11c20f 100644 (file)
@@ -5,50 +5,4 @@
     project: '{name}'
 
     jobs:
-      - 'vnf_forwarding_graph-verify-{stream}'
-
-    stream:
-      - master:
-          branch: '{stream}'
-          gs-pathname: ''
-          disabled: false
-      - euphrates:
-          branch: 'stable/{stream}'
-          gs-pathname: '/{stream}'
-          disabled: false
-
-- job-template:
-    name: 'vnf_forwarding_graph-verify-{stream}'
-
-    disabled: '{obj:disabled}'
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-    scm:
-      - git-scm-gerrit
-
-    triggers:
-      - gerrit:
-          server-name: 'gerrit.opnfv.org'
-          trigger-on:
-            - patchset-created-event:
-                exclude-drafts: 'false'
-                exclude-trivial-rebase: 'false'
-                exclude-no-code-change: 'false'
-            - draft-published-event
-            - comment-added-contains-event:
-                comment-contains-value: 'recheck'
-            - comment-added-contains-event:
-                comment-contains-value: 'reverify'
-          projects:
-            - project-compare-type: 'ANT'
-              project-pattern: '{project}'
-              branches:
-                - branch-compare-type: 'ANT'
-                  branch-pattern: '**/{branch}'
-
-    builders:
-      - shell: |
-          echo "Nothing to verify!"
+      - '{project}-verify-basic'
index 9a0fb10..7fd875b 100644 (file)
@@ -56,7 +56,7 @@
           make clobber
           make MORE_MAKE_FLAGS="-j 10"
           cd ../ci
-          scl enable python33 "source ~/vsperfenv/bin/activate ; ./build-vsperf.sh daily"
+          scl enable rh-python34 "source ~/vsperfenv/bin/activate ; ./build-vsperf.sh daily"
 
 - job-template:
     name: 'vswitchperf-verify-{stream}'
index f8d39b2..e4c2d0e 100644 (file)
       - build-blocker:
           use-build-blocker: true
           blocking-jobs:
+            - 'xci-verify-{distro}-.*'
             - '.*-bifrost-verify.*-{type}'
+
           block-level: 'NODE'
 
     parameters:
       - string:
           name: PROJECT_REPO
           default: 'https://git.openstack.org/openstack/bifrost'
-      - string:
-          name: CLONE_LOCATION
-          default: '$WORKSPACE/bifrost'
       - string:
           name: DISTRO
           default: '{distro}'
             - comment-added-contains-event:
                 comment-contains-value: 'recheck'
           silent-start: true
+          custom-url: '* $JOB_NAME $BUILD_URL'
           projects:
             - project-compare-type: 'PLAIN'
               project-pattern: 'openstack/bifrost'
index a86ba91..451f33b 100755 (executable)
@@ -11,25 +11,25 @@ set -o errexit
 set -o nounset
 set -o pipefail
 
-git clone https://git.openstack.org/openstack/bifrost $WORKSPACE/bifrost
 git clone https://gerrit.opnfv.org/gerrit/releng-xci $WORKSPACE/releng-xci
 
-# checkout the patch
-cd $CLONE_LOCATION
+cd $WORKSPACE
 git fetch $PROJECT_REPO $GERRIT_REFSPEC && sudo git checkout FETCH_HEAD
 
 # combine opnfv and upstream scripts/playbooks
-/bin/cp -rf $WORKSPACE/releng-xci/bifrost/* $WORKSPACE/bifrost/
+/bin/cp -rf $WORKSPACE/releng-xci/bifrost/* $WORKSPACE/
 
 cd $WORKSPACE/releng-xci
 cat > bifrost_test.sh<<EOF
+#!/bin/bash
+set -o errexit
+set -o nounset
+set -o pipefail
+
 cd ~/bifrost
 # provision 3 VMs; xcimaster, controller, and compute
 ./scripts/bifrost-provision.sh
 
-# list the provisioned VMs
-source env-vars
-ironic node-list
 sudo -H -E virsh list
 EOF
 chmod a+x bifrost_test.sh
@@ -41,8 +41,11 @@ case ${DISTRO} in
        *suse*) VM_DISTRO=opensuse ;;
 esac
 
+export XCI_BUILD_CLEAN_VM_OS=false
+export XCI_UPDATE_CLEAN_VM_OS=true
+
 ./xci/scripts/vm/start-new-vm.sh $VM_DISTRO
 
-rsync -a $WORKSPACE/releng-xci ${VM_DISTRO}_xci_vm:~/bifrost
+rsync -a -e "ssh -F $HOME/.ssh/xci-vm-config" $WORKSPACE/ ${VM_DISTRO}_xci_vm:~/bifrost
 
-ssh -F $HOME/.ssh/xci-vm-config ${VM_DISTRO}_xci_vm "cd ~/bifrost && ./bifrost_test.sh"
+ssh -F $HOME/.ssh/xci-vm-config ${VM_DISTRO}_xci_vm "cd ~/bifrost/releng-xci && ./bifrost_test.sh"
index 26c1575..64daa69 100644 (file)
@@ -2,7 +2,7 @@
 - project:
     name: 'opnfv-osa-periodic'
 
-    project: 'releng-xci'
+    project: openstack-ansible
     # -------------------------------
     # branches
     # -------------------------------
     # distros
     # -------------------------------
     distro:
-      - 'xenial':
+      - ubuntu:
+          disabled: false
+      - centos:
+          disabled: false
+      - opensuse:
           disabled: false
-      - 'centos7':
-          disabled: true
-      - 'suse':
-          disabled: true
     # -------------------------------
     # type
     # -------------------------------
     type:
       - virtual
     # -------------------------------
-    # phases
+    # periodic deploy & test phases
     # -------------------------------
     phase:
       - 'deploy'
     # jobs
     # -------------------------------
     jobs:
-      - 'osa-periodic-{distro}-{type}-{stream}'
-      - 'osa-periodic-{phase}-{type}-{stream}'
-
+      - 'xci-osa-periodic-{distro}-{type}-{stream}'
+      - 'xci-osa-periodic-{distro}-{phase}-{type}-{stream}'
 # -------------------------------
 # job templates
 # -------------------------------
 - job-template:
-    name: 'osa-periodic-{distro}-{type}-{stream}'
+    name: 'xci-osa-periodic-{distro}-{type}-{stream}'
 
     project-type: multijob
 
     disabled: '{obj:disabled}'
 
-    concurrent: false
+    concurrent: true
 
     properties:
       - logrotate-default
       - build-blocker:
           use-build-blocker: true
           blocking-jobs:
-            - 'xci-verify-.*'
-            - 'bifrost-verify-.*'
-            - 'bifrost-periodic-.*'
-            - 'osa-verify-.*'
-            - 'osa-periodic-.*'
+            - 'xci-verify-{distro}-.*'
+            - 'bifrost-verify-{distro}-.*'
+            - 'bifrost-periodic-{distro}-.*'
+            - 'xci-osa-verify-{distro}-.*'
+            - 'xci-osa-periodic-{distro}-.*'
           block-level: 'NODE'
+      - throttle:
+          max-per-node: 2
+          max-total: 10
+          categories:
+            - xci-verify-virtual
+          option: category
 
     wrappers:
       - ssh-agent-wrapper
       - fix-workspace-permissions
 
     scm:
-      - git-scm-osa
-
-    triggers:
-      - pollscm:
-          cron: "@midnight"
-          ignore-post-commit-hooks: true
+      - git-scm-openstack
 
     parameters:
       - project-parameter:
           branch: '{branch}'
       - label:
           name: SLAVE_LABEL
-          default: 'xci-virtual-{distro}'
+          default: 'xci-virtual'
+      - string:
+          name: OPENSTACK_OSA_VERSION
+          default: 'master'
+      - string:
+          name: CLEAN_DIB_IMAGES
+          default: 'true'
+      - string:
+          name: GIT_BASE
+          default: 'https://git.openstack.org/openstack/$PROJECT'
+          description: 'Git URL to use on this Jenkins Slave'
 
     builders:
       - description-setter:
           name: deploy
           condition: SUCCESSFUL
           projects:
-            - name: 'osa-periodic-deploy-{type}-{stream}'
+            - name: 'xci-osa-periodic-{distro}-deploy-{type}-{stream}'
               current-parameters: true
               predefined-parameters: |
                 DISTRO={distro}
                 DEPLOY_SCENARIO=os-nosdn-nofeature-noha
-              git-revision: true
+                OPENSTACK_OSA_VERSION=$OPENSTACK_OSA_VERSION
+                CLEAN_DIB_IMAGES=$CLEAN_DIB_IMAGES
+                BRANCH=$BRANCH
               node-parameters: true
               kill-phase-on: FAILURE
               abort-all-job: true
+              git-revision: true
       - multijob:
           name: healthcheck
           condition: SUCCESSFUL
           projects:
-            - name: 'osa-periodic-healthcheck-{type}-{stream}'
+            - name: 'xci-osa-periodic-{distro}-healthcheck-{type}-{stream}'
               current-parameters: true
               predefined-parameters: |
                 DISTRO={distro}
                 DEPLOY_SCENARIO=os-nosdn-nofeature-noha
+                OPENSTACK_OSA_VERSION=$OPENSTACK_OSA_VERSION
+                CLEAN_DIB_IMAGES=$CLEAN_DIB_IMAGES
                 FUNCTEST_MODE=tier
                 FUNCTEST_TIER=healthcheck
+                BRANCH=$BRANCH
               node-parameters: true
               kill-phase-on: NEVER
-              abort-all-job: false
+              abort-all-job: true
 
 - job-template:
-    name: 'osa-periodic-{phase}-{type}-{stream}'
+    name: 'xci-osa-periodic-{distro}-{phase}-{type}-{stream}'
 
     disabled: false
 
       - build-blocker:
           use-build-blocker: true
           blocking-jobs:
-            - 'xci-verify-deploy-.*'
-            - 'xci-verify-healthcheck-.*'
-            - 'bifrost-verify-.*'
-            - 'bifrost-periodic-.*'
-            - 'osa-verify-deploy-.*'
-            - 'osa-verify-halthcheck-.*'
-            - 'osa-periodic-deploy-.*'
-            - 'osa-periodic-healthcheck-.*'
+            - '.*-bifrost-verify-.*'
+            - '.*-bifrost-periodic-.*'
           block-level: 'NODE'
 
     parameters:
       - project-parameter:
           project: '{project}'
           branch: '{branch}'
-      - label:
-          name: SLAVE_LABEL
-          default: 'xci-virtual-{distro}'
-      - string:
-          name: OPENSTACK_OSA_VERSION
-          default: 'master'
       - string:
           name: DISTRO
-          default: 'xenial'
+          default: 'ubuntu'
       - string:
           name: DEPLOY_SCENARIO
           default: 'os-nosdn-nofeature-noha'
       - string:
-          name: XCI_FLAVOR
-          default: 'mini'
-      - string:
-          name: XCI_LOOP
-          default: 'periodic'
-      - string:
-          name: OPNFV_RELENG_DEV_PATH
-          default: $WORKSPACE/releng-xci
+          name: OPENSTACK_OSA_VERSION
+          default: 'master'
       - string:
           name: FUNCTEST_MODE
           default: 'tier'
           name: FUNCTEST_SUITE_NAME
           default: 'healthcheck'
       - string:
-          name: FORCE_MASTER
+          name: XCI_FLAVOR
+          default: 'mini'
+      - string:
+          name: CLEAN_DIB_IMAGES
           default: 'true'
+      - string:
+          name: INSTALLER_TYPE
+          default: 'osa'
       - string:
           name: GIT_BASE
-          default: https://gerrit.opnfv.org/gerrit/$PROJECT
-
-    scm:
-      - git-scm-osa
+          default: 'https://git.openstack.org/openstack/$PROJECT'
+          description: 'Git URL to use on this Jenkins Slave'
 
     wrappers:
       - ssh-agent-wrapper
           timeout: 240
       - fix-workspace-permissions
 
+    scm:
+      - git-scm-openstack
+
     builders:
       - description-setter:
           description: "Built on $NODE_NAME"
-      - 'osa-periodic-{phase}-macro'
+      - 'xci-osa-periodic-{phase}-macro'
 
 # -------------------------------
 # builder macros
 # -------------------------------
 - builder:
-    name: 'osa-periodic-deploy-macro'
+    name: 'xci-osa-periodic-deploy-macro'
     builders:
       - shell: |
           #!/bin/bash
 
-          # here we will
-          # - clone releng-xci repo as the jobs are running against openstack gerrit
-          #   and we need to clone releng-xci ourselves to $OPNFV_RELENG_DEV_PATH
-          # - run sources-branch-updater.sh from osa to update/pin the role versions
-          #   at the time this job gets triggered against osa master in case if the
-          #   deployment succeeds and we decide to bump version used by xci
-          # - copy generated role versions into $OPNFV_RELENG_DEV_PATH/xci/file
-          # - start the deployment by executing xci-deploy.sh as usual
-          #
-          # we might also need to pin versions of openstack services as well.
+          cd $WORKSPACE
+
+          # The start-new-vm.sh script will copy the entire releng-xci directory
+          # so lets prepare the test script now so it can be copied by the script.
+          # Please do not move it elsewhere or you would have to move it to the VM
+          # yourself.
+          cat > xci_test.sh<<EOF
+          #!/bin/bash
+          export DISTRO=$DISTRO
+          export DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+          export OPENSTACK_OSA_VERSION=$OPENSTACK_OSA_VERSION
+          export FUNCTEST_MODE=$FUNCTEST_MODE
+          export FUNCTEST_SUITE_NAME=$FUNCTEST_SUITE_NAME
+          export XCI_FLAVOR=$XCI_FLAVOR
+          export CLEAN_DIB_IMAGES=$CLEAN_DIB_IMAGES
+          export OPNFV_RELENG_DEV_PATH=/home/devuser/releng-xci/
+          export INSTALLER_TYPE=$INSTALLER_TYPE
+          export GIT_BASE=$GIT_BASE
+          export JENKINS_HOME=$JENKINS_HOME
+
+          cd xci
+          ./xci-deploy.sh
+          EOF
+          chmod a+x xci_test.sh
+
+          export XCI_BUILD_CLEAN_VM_OS=false
+          export XCI_UPDATE_CLEAN_VM_OS=true
+
+          ./xci/scripts/vm/start-new-vm.sh $DISTRO
+      - shell: |
+          #!/bin/bash
+
+          ssh -F $HOME/.ssh/xci-vm-config ${DISTRO}_xci_vm "cd releng-xci && ./xci_test.sh"
 
-          echo "Hello World!"
 
 - builder:
-    name: 'osa-periodic-healthcheck-macro'
+    name: 'xci-osa-periodic-healthcheck-macro'
     builders:
       - shell: |
           #!/bin/bash
 
           echo "Hello World!"
-# -------------------------------
-# scm macro
-# -------------------------------
-- scm:
-    name: git-scm-osa
-    scm:
-      - git:
-          url: https://review.openstack.org/p/openstack/openstack-ansible.git
-          branches:
-            - master
-          timeout: 15
+      - shell: |
+          #!/bin/bash
+
+          sudo virsh destroy ${DISTRO}_xci_vm
+          sudo virsh undefine ${DISTRO}_xci_vm
+
+# this will be enabled once the xci is prepared
+# - builder:
+#    name: 'xci-verify-healthcheck-macro'
+#    builders:
+#        - shell:
+#            !include-raw: ../../utils/fetch_os_creds.sh
+#        - shell:
+#            !include-raw: ../functest/functest-alpine.sh
diff --git a/jjb/xci/xci-cleanup.sh b/jjb/xci/xci-cleanup.sh
new file mode 100755 (executable)
index 0000000..45b4ec2
--- /dev/null
@@ -0,0 +1,24 @@
+#!/bin/bash
+# SPDX-license-identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2018 SUSE and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+#----------------------------------------------------------------------
+# This script is used by CI and executed by Jenkins jobs.
+# You are not supposed to use this script manually if you don't know
+# what you are doing.
+#----------------------------------------------------------------------
+
+# skip the deployment if the patch doesn't impact the deployment
+if [[ "$GERRIT_TOPIC" =~ 'skip-verify' ]]; then
+    echo "Skipping the deployment!"
+    exit 0
+fi
+
+sudo virsh destroy ${DISTRO}_xci_vm
+sudo virsh undefine ${DISTRO}_xci_vm
diff --git a/jjb/xci/xci-merge-jobs.yml b/jjb/xci/xci-merge-jobs.yml
new file mode 100644 (file)
index 0000000..351fe22
--- /dev/null
@@ -0,0 +1,257 @@
+---
+- project:
+    name: 'opnfv-xci-merge'
+
+    # -------------------------------
+    # branches
+    # -------------------------------
+    stream:
+      - master:
+          branch: '{stream}'
+    # -------------------------------
+    # distros
+    # -------------------------------
+    distro:
+      - ubuntu:
+          disabled: false
+      - centos:
+          disabled: false
+      - opensuse:
+          disabled: false
+    # -------------------------------
+    # type
+    # -------------------------------
+    type:
+      - virtual
+    # -------------------------------
+    # patch verification phases
+    # -------------------------------
+    phase:
+      - 'deploy'
+      - 'healthcheck'
+      - 'promote'
+    # -------------------------------
+    # scenarios
+    # -------------------------------
+    scenario:
+      - 'os-odl-sfc':
+          project: sfc
+    # -------------------------------
+    # jobs
+    # -------------------------------
+    jobs:
+      - 'xci-{scenario}-{type}-{distro}-merge-{stream}'
+      - 'xci-{phase}-{type}-{distro}-merge-{stream}'
+# -------------------------------
+# job templates
+# -------------------------------
+- job-template:
+    name: 'xci-{scenario}-{type}-{distro}-merge-{stream}'
+
+    project-type: multijob
+
+    disabled: '{obj:disabled}'
+
+    concurrent: false
+
+    properties:
+      - logrotate-default
+      - build-blocker:
+          use-build-blocker: true
+          blocking-jobs:
+            - 'xci-verify-{distro}-.*'
+            - 'bifrost-verify-{distro}-.*'
+            - 'bifrost-periodic-{distro}-.*'
+            - 'xci-osa-verify-{distro}-.*'
+            - 'xci-osa-periodic-{distro}-.*'
+            - 'xci-(os|k8s).*?-virtual-{distro}-.*'
+          block-level: 'NODE'
+
+    wrappers:
+      - ssh-agent-wrapper
+      - build-timeout:
+          timeout: 240
+      - fix-workspace-permissions
+
+    scm:
+      - git:
+          credentials-id: '$SSH_CREDENTIAL_ID'
+          url: 'https://gerrit.opnfv.org/gerrit/releng-xci'
+          branches:
+            - 'origin/master'
+          timeout: 15
+
+    triggers:
+      - gerrit:
+          server-name: 'gerrit.opnfv.org'
+          trigger-on:
+            - change-merged-event
+            - comment-added-contains-event:
+                comment-contains-value: 'remerge'
+          projects:
+            - project-compare-type: 'ANT'
+              project-pattern: '{obj:project}'
+              branches:
+                - branch-compare-type: 'ANT'
+                  branch-pattern: '**/{branch}'
+              disable-strict-forbidden-file-verification: 'true'
+              file-paths:
+                - compare-type: ANT
+                  pattern: 'scenarios/**'
+          readable-message: true
+          custom-url: '* $JOB_NAME $BUILD_URL'
+
+    parameters:
+      - project-parameter:
+          project: '{obj:project}'
+          branch: '{branch}'
+      - label:
+          name: SLAVE_LABEL
+          default: 'xci-virtual'
+      - string:
+          name: DEPLOY_SCENARIO
+          default: '{scenario}'
+      - string:
+          name: CLEAN_DIB_IMAGES
+          default: 'true'
+      - string:
+          name: GIT_BASE
+          default: https://gerrit.opnfv.org/gerrit/$PROJECT
+          description: 'Git URL to use on this Jenkins Slave'
+
+    builders:
+      - description-setter:
+          description: "Built on $NODE_NAME"
+      - multijob:
+          name: deploy
+          condition: SUCCESSFUL
+          projects:
+            - name: 'xci-deploy-{type}-{distro}-merge-{stream}'
+              current-parameters: true
+              predefined-parameters: |
+                PROJECT=$PROJECT
+                DISTRO={distro}
+                DEPLOY_SCENARIO={scenario}
+                CLEAN_DIB_IMAGES=$CLEAN_DIB_IMAGES
+                GERRIT_BRANCH=$GERRIT_BRANCH
+                GERRIT_REFSPEC=$GERRIT_REFSPEC
+                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+                GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+              node-parameters: true
+              kill-phase-on: FAILURE
+              abort-all-job: true
+      - multijob:
+          name: healthcheck
+          condition: SUCCESSFUL
+          projects:
+            - name: 'xci-healthcheck-{type}-{distro}-merge-{stream}'
+              current-parameters: true
+              predefined-parameters: |
+                PROJECT=$PROJECT
+                DISTRO={distro}
+                DEPLOY_SCENARIO={scenario}
+                CLEAN_DIB_IMAGES=$CLEAN_DIB_IMAGES
+                FUNCTEST_MODE=tier
+                FUNCTEST_TIER=healthcheck
+                GERRIT_BRANCH=$GERRIT_BRANCH
+                GERRIT_REFSPEC=$GERRIT_REFSPEC
+                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+                GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+              node-parameters: true
+              kill-phase-on: NEVER
+              abort-all-job: true
+
+- job-template:
+    name: 'xci-{phase}-{type}-{distro}-merge-{stream}'
+
+    disabled: false
+
+    concurrent: true
+
+    properties:
+      - logrotate-default
+      - build-blocker:
+          use-build-blocker: true
+          blocking-jobs:
+            - '.*-bifrost-verify-.*'
+            - '.*-bifrost-periodic-.*'
+            - 'osa-verify-.*'
+            - 'osa-periodic-.*'
+          block-level: 'NODE'
+
+    parameters:
+      - string:
+          name: PROJECT
+          default: 'releng-xci'
+      - string:
+          name: DISTRO
+          default: 'ubuntu'
+      - string:
+          name: DEPLOY_SCENARIO
+          default: 'os-nosdn-nofeature-noha'
+      - string:
+          name: FUNCTEST_MODE
+          default: 'tier'
+      - string:
+          name: FUNCTEST_SUITE_NAME
+          default: 'healthcheck'
+      - string:
+          name: XCI_FLAVOR
+          default: 'mini'
+      - string:
+          name: CLEAN_DIB_IMAGES
+          default: 'true'
+      - string:
+          name: OPNFV_RELENG_DEV_PATH
+          default: $WORKSPACE/
+      - string:
+          name: INSTALLER_TYPE
+          default: 'osa'
+      - string:
+          name: GIT_BASE
+          default: https://gerrit.opnfv.org/gerrit/$PROJECT
+          description: 'Git URL to use on this Jenkins Slave'
+
+    wrappers:
+      - ssh-agent-wrapper
+      - build-timeout:
+          timeout: 240
+      - fix-workspace-permissions
+
+    scm:
+      - git:
+          credentials-id: '$SSH_CREDENTIAL_ID'
+          url: 'https://gerrit.opnfv.org/gerrit/releng-xci'
+          branches:
+            - 'origin/master'
+          timeout: 15
+
+    builders:
+      - description-setter:
+          description: "Built on $NODE_NAME"
+      - 'xci-merge-{phase}-macro'
+
+# -------------------------------
+# builder macros
+# -------------------------------
+- builder:
+    name: 'xci-merge-deploy-macro'
+    builders:
+      - shell:
+          !include-raw: ./xci-start-new-vm.sh
+      - shell:
+          !include-raw: ./xci-start-deployment.sh
+
+- builder:
+    name: 'xci-merge-healthcheck-macro'
+    builders:
+      - shell:
+          !include-raw: ./xci-run-functest.sh
+      - shell:
+          !include-raw: ./xci-cleanup.sh
+
+- builder:
+    name: 'xci-merge-promote-macro'
+    builders:
+      - shell:
+          !include-raw: ./xci-promote.sh
diff --git a/jjb/xci/xci-promote.sh b/jjb/xci/xci-promote.sh
new file mode 100755 (executable)
index 0000000..98ad7ff
--- /dev/null
@@ -0,0 +1,30 @@
+#!/bin/bash
+# SPDX-license-identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2018 Ericsson and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+#----------------------------------------------------------------------
+# This script is used by CI and executed by Jenkins jobs.
+# You are not supposed to use this script manually if you don't know
+# what you are doing.
+#----------------------------------------------------------------------
+
+# skip the healthcheck if the patch doesn't impact the deployment
+if [[ "$GERRIT_TOPIC" =~ skip-verify|skip-deployment ]]; then
+    echo "Skipping the healthcheck!"
+    exit 0
+fi
+
+# skip the deployment if the scenario is not supported on this distro
+OPNFV_SCENARIO_REQUIREMENTS=$WORKSPACE/xci/opnfv-scenario-requirements.yml
+if ! sed -n "/^- scenario: $DEPLOY_SCENARIO$/,/^$/p" $OPNFV_SCENARIO_REQUIREMENTS | grep -q $DISTRO; then
+    echo "# SKIPPED: Scenario $DEPLOY_SCENARIO is NOT supported on $DISTRO"
+    exit 0
+fi
+
+echo "Hello World!"
diff --git a/jjb/xci/xci-run-functest.sh b/jjb/xci/xci-run-functest.sh
new file mode 100755 (executable)
index 0000000..b2421de
--- /dev/null
@@ -0,0 +1,30 @@
+#!/bin/bash
+# SPDX-license-identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2018 SUSE and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+#----------------------------------------------------------------------
+# This script is used by CI and executed by Jenkins jobs.
+# You are not supposed to use this script manually if you don't know
+# what you are doing.
+#----------------------------------------------------------------------
+
+# skip the healthcheck if the patch doesn't impact the deployment
+if [[ "$GERRIT_TOPIC" =~ skip-verify|skip-deployment ]]; then
+    echo "Skipping the healthcheck!"
+    exit 0
+fi
+
+# skip the deployment if the scenario is not supported on this distro
+OPNFV_SCENARIO_REQUIREMENTS=$WORKSPACE/xci/opnfv-scenario-requirements.yml
+if ! sed -n "/^- scenario: $DEPLOY_SCENARIO$/,/^$/p" $OPNFV_SCENARIO_REQUIREMENTS | grep -q $DISTRO; then
+    echo "# SKIPPED: Scenario $DEPLOY_SCENARIO is NOT supported on $DISTRO"
+    exit 0
+fi
+
+ssh -F $HOME/.ssh/xci-vm-config ${DISTRO}_xci_vm "cd releng-xci/xci && PATH=/home/devuser/.local/bin:$PATH ansible-playbook -i installer/osa/files/$XCI_FLAVOR/inventory playbooks/run-functest.yml"
diff --git a/jjb/xci/xci-start-deployment.sh b/jjb/xci/xci-start-deployment.sh
new file mode 100755 (executable)
index 0000000..e86f5b3
--- /dev/null
@@ -0,0 +1,30 @@
+#!/bin/bash
+# SPDX-license-identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2018 SUSE and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+#----------------------------------------------------------------------
+# This script is used by CI and executed by Jenkins jobs.
+# You are not supposed to use this script manually if you don't know
+# what you are doing.
+#----------------------------------------------------------------------
+
+# skip the deployment if the patch doesn't impact the deployment
+if [[ "$GERRIT_TOPIC" =~ skip-verify|skip-deployment ]]; then
+    echo "Skipping the deployment!"
+    exit 0
+fi
+
+# skip the deployment if the scenario is not supported on this distro
+OPNFV_SCENARIO_REQUIREMENTS=$WORKSPACE/xci/opnfv-scenario-requirements.yml
+if ! sed -n "/^- scenario: $DEPLOY_SCENARIO$/,/^$/p" $OPNFV_SCENARIO_REQUIREMENTS | grep -q $DISTRO; then
+    echo "# SKIPPED: Scenario $DEPLOY_SCENARIO is NOT supported on $DISTRO"
+    exit 0
+fi
+
+ssh -F $HOME/.ssh/xci-vm-config ${DISTRO}_xci_vm "cd releng-xci && ./xci_test.sh"
diff --git a/jjb/xci/xci-start-new-vm.sh b/jjb/xci/xci-start-new-vm.sh
new file mode 100755 (executable)
index 0000000..b72c339
--- /dev/null
@@ -0,0 +1,63 @@
+#!/bin/bash
+# SPDX-license-identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2018 SUSE and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+#----------------------------------------------------------------------
+# This script is used by CI and executed by Jenkins jobs.
+# You are not supposed to use this script manually if you don't know
+# what you are doing.
+#----------------------------------------------------------------------
+
+# skip the deployment if the patch doesn't impact the deployment
+if [[ "$GERRIT_TOPIC" =~ 'skip-verify' ]]; then
+    echo "Skipping the deployment!"
+    exit 0
+fi
+
+# skip the deployment if the scenario is not supported on this distro
+OPNFV_SCENARIO_REQUIREMENTS=$WORKSPACE/xci/opnfv-scenario-requirements.yml
+if ! sed -n "/^- scenario: $DEPLOY_SCENARIO$/,/^$/p" $OPNFV_SCENARIO_REQUIREMENTS | grep -q $DISTRO; then
+    echo "# SKIPPED: Scenario $DEPLOY_SCENARIO is NOT supported on $DISTRO"
+    exit 0
+fi
+
+cd $WORKSPACE
+
+# The start-new-vm.sh script will copy the entire releng-xci directory
+# so lets prepare the test script now so it can be copied by the script.
+# Please do not move it elsewhere or you would have to move it to the VM
+# yourself.
+cat > xci_test.sh<<EOF
+#!/bin/bash
+export DISTRO=$DISTRO
+export DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+export FUNCTEST_MODE=$FUNCTEST_MODE
+export FUNCTEST_SUITE_NAME=$FUNCTEST_SUITE_NAME
+export XCI_FLAVOR=$XCI_FLAVOR
+export CLEAN_DIB_IMAGES=$CLEAN_DIB_IMAGES
+export OPNFV_RELENG_DEV_PATH=/home/devuser/releng-xci/
+export INSTALLER_TYPE=$INSTALLER_TYPE
+export GIT_BASE=$GIT_BASE
+export JENKINS_HOME=$JENKINS_HOME
+
+if [[ ! -z ${WORKSPACE+x} && $GERRIT_PROJECT != "releng-xci" ]]; then
+    git clone https://gerrit.opnfv.org/gerrit/$GERRIT_PROJECT xci/scenarios/$DEPLOY_SCENARIO && cd xci/scenarios/$DEPLOY_SCENARIO
+    git fetch https://gerrit.opnfv.org/gerrit/$GERRIT_PROJECT $GERRIT_REFSPEC && git checkout FETCH_HEAD
+    cd -
+fi
+
+cd xci
+./xci-deploy.sh
+EOF
+chmod a+x xci_test.sh
+
+export XCI_BUILD_CLEAN_VM_OS=false
+export XCI_UPDATE_CLEAN_VM_OS=true
+
+./xci/scripts/vm/start-new-vm.sh $DISTRO
index 93ca187..3c31456 100644 (file)
@@ -68,8 +68,8 @@
             - 'xci-verify-{distro}-.*'
             - 'bifrost-verify-{distro}-.*'
             - 'bifrost-periodic-{distro}-.*'
-            - 'osa-verify-{distro}-.*'
-            - 'osa-periodic-{distro}-.*'
+            - 'xci-osa-verify-{distro}-.*'
+            - 'xci-osa-periodic-{distro}-.*'
           block-level: 'NODE'
       - throttle:
           max-per-node: 2
               branches:
                 - branch-compare-type: 'ANT'
                   branch-pattern: '**/{branch}'
-              disable-strict-forbidden-file-verification: 'true'
-              file-paths:
-                - compare-type: ANT
-                  pattern: 'bifrost/**'
-                - compare-type: ANT
-                  pattern: 'xci/**'
+              disable-strict-forbidden-file-verification: 'false'
               forbidden-file-paths:
                 - compare-type: ANT
-                  pattern: 'prototypes/**'
-                - compare-type: ANT
-                  pattern: 'upstream/**'
-                - compare-type: ANT
-                  pattern: '**/README.rst'
-                - compare-type: ANT
-                  pattern: 'docs/**'
+                  pattern: 'xci/scripts/vm/**'
           readable-message: true
+          custom-url: '* $JOB_NAME $BUILD_URL'
           skip-vote:
             successful: '{obj:successful}'
             failed: '{obj:failed}'
               current-parameters: true
               predefined-parameters: |
                 DISTRO={distro}
-                DEPLOY_SCENARIO=os-nosdn-nofeature-noha
+                DEPLOY_SCENARIO=os-nosdn-nofeature
                 CLEAN_DIB_IMAGES=$CLEAN_DIB_IMAGES
                 GERRIT_BRANCH=$GERRIT_BRANCH
                 GERRIT_REFSPEC=$GERRIT_REFSPEC
               current-parameters: true
               predefined-parameters: |
                 DISTRO={distro}
-                DEPLOY_SCENARIO=os-nosdn-nofeature-noha
+                DEPLOY_SCENARIO=os-nosdn-nofeature
                 CLEAN_DIB_IMAGES=$CLEAN_DIB_IMAGES
                 FUNCTEST_MODE=tier
                 FUNCTEST_TIER=healthcheck
       - build-blocker:
           use-build-blocker: true
           blocking-jobs:
-            - 'bifrost-verify-.*'
-            - 'bifrost-periodic-.*'
+            - '.*-bifrost-verify-.*'
+            - '.*-bifrost-periodic-.*'
             - 'osa-verify-.*'
             - 'osa-periodic-.*'
           block-level: 'NODE'
           default: 'ubuntu'
       - string:
           name: DEPLOY_SCENARIO
-          default: 'os-nosdn-nofeature-noha'
+          default: 'os-nosdn-nofeature'
       - string:
           name: FUNCTEST_MODE
           default: 'tier'
 - builder:
     name: 'xci-verify-deploy-macro'
     builders:
-      - shell: |
-          #!/bin/bash
-
-          # skip the deployment if the patch doesn't impact the deployment
-          if [[ "$GERRIT_TOPIC" =~ 'skip-verify' ]]; then
-              echo "Skipping the deployment!"
-              exit 0
-          fi
-
-          cd $WORKSPACE
-
-          # The start-new-vm.sh script will copy the entire releng-xci directory
-          # so lets prepare the test script now so it can be copied by the script.
-          # Please do not move it elsewhere or you would have to move it to the VM
-          # yourself.
-          cat > xci_test.sh<<EOF
-          #!/bin/bash
-          export DISTRO=$DISTRO
-          export DEPLOY_SCENARIO=$DEPLOY_SCENARIO
-          export FUNCTEST_MODE=$FUNCTEST_MODE
-          export FUNCTEST_SUITE_NAME=$FUNCTEST_SUITE_NAME
-          export XCI_FLAVOR=$XCI_FLAVOR
-          export CLEAN_DIB_IMAGES=$CLEAN_DIB_IMAGES
-          export OPNFV_RELENG_DEV_PATH=/home/devuser/releng-xci/
-          export INSTALLER_TYPE=$INSTALLER_TYPE
-          export GIT_BASE=$GIT_BASE
-          export JENKINS_HOME=$JENKINS_HOME
-
-          cd xci
-          ./xci-deploy.sh
-          EOF
-          chmod a+x xci_test.sh
-
-          export XCI_BUILD_CLEAN_VM_OS=false
-          export XCI_UPDATE_CLEAN_VM_OS=true
-
-          ./xci/scripts/vm/start-new-vm.sh $DISTRO
-      - shell: |
-          #!/bin/bash
-
-          # skip the deployment if the patch doesn't impact the deployment
-          if [[ "$GERRIT_TOPIC" =~ skip-verify|skip-deployment ]]; then
-              echo "Skipping the deployment!"
-              exit 0
-          fi
-
-          ssh -F $HOME/.ssh/xci-vm-config ${DISTRO}_xci_vm "cd releng-xci && ./xci_test.sh"
+      - shell:
+          !include-raw: ./xci-start-new-vm.sh
 
+      - shell:
+          !include-raw: ./xci-start-deployment.sh
 
 - builder:
     name: 'xci-verify-healthcheck-macro'
     builders:
-      - shell: |
-          #!/bin/bash
-
-          # skip the healthcheck if the patch doesn't impact the deployment
-          if [[ "$GERRIT_TOPIC" =~ skip-verify|skip-deployment ]]; then
-              echo "Skipping the healthcheck!"
-              exit 0
-          fi
-
-          echo "Hello World!"
-      - shell: |
-          #!/bin/bash
-
-          # skip the deployment if the patch doesn't impact the deployment
-          if [[ "$GERRIT_TOPIC" =~ 'skip-verify' ]]; then
-              echo "Skipping the deployment!"
-              exit 0
-          fi
-
-          sudo virsh destroy ${DISTRO}_xci_vm
-          sudo virsh undefine ${DISTRO}_xci_vm
-
-# this will be enabled once the xci is prepared
-# - builder:
-#    name: 'xci-verify-healthcheck-macro'
-#    builders:
-#        - shell:
-#            !include-raw: ../../utils/fetch_os_creds.sh
-#        - shell:
-#            !include-raw: ../functest/functest-alpine.sh
+      - shell:
+          !include-raw: ./xci-cleanup.sh
index 4b8c5d2..f0e4f8e 100755 (executable)
@@ -22,7 +22,7 @@ fi
 
 if [[ ${INSTALLER_TYPE} == 'joid' ]]; then
     if [[ "${DEPLOY_SCENARIO:0:2}" == "k8" ]];then
-        rc_file_vol="-v /home/ubuntu/config:/etc/yardstick/admin.conf"
+        rc_file_vol="-v ${HOME}/admin.conf:/etc/yardstick/admin.conf"
     else
         # If production lab then creds may be retrieved dynamically
         # creds are on the jumphost, always in the same folder
@@ -31,8 +31,12 @@ if [[ ${INSTALLER_TYPE} == 'joid' ]]; then
         # replace the default one by the customized one provided by jenkins config
     fi
 elif [[ ${INSTALLER_TYPE} == 'compass' ]]; then
-    cacert_file_vol="-v ${HOME}/os_cacert:/etc/yardstick/os_cacert"
-    echo "export OS_CACERT=/etc/yardstick/os_cacert" >> ${HOME}/opnfv-openrc.sh
+    if [[ "${DEPLOY_SCENARIO:0:2}" == "k8" ]];then
+        rc_file_vol="-v ${HOME}/admin.conf:/etc/yardstick/admin.conf"
+    else
+        cacert_file_vol="-v ${HOME}/os_cacert:/etc/yardstick/os_cacert"
+        echo "export OS_CACERT=/etc/yardstick/os_cacert" >> ${HOME}/opnfv-openrc.sh
+    fi
 elif [[ ${INSTALLER_TYPE} == 'fuel' ]]; then
     cacert_file_vol="-v ${HOME}/os_cacert:/etc/ssl/certs/mcp_os_cacert"
     sshkey="-v ${SSH_KEY}:/root/.ssh/mcp.rsa"
@@ -68,13 +72,8 @@ sudo rm -rf ${dir_result}/*
 map_log_dir="-v ${dir_result}:/tmp/yardstick"
 
 # Run docker
-if [[ ${INSTALLER_TYPE} == "joid" && "${DEPLOY_SCENARIO:0:2}" == "k8" ]];then
-    juju ssh kubernetes-master/0 sudo apt-get install -y docker.io
-    cmd="juju ssh kubernetes-master/0 sudo docker run ${opts} ${envs} ${rc_file_vol} ${cacert_file_vol} ${map_log_dir} ${sshkey} ${DOCKER_REPO}:${DOCKER_TAG} exec_tests.sh ${YARDSTICK_DB_BACKEND} ${YARDSTICK_SCENARIO_SUITE_NAME}"
-else
-    cmd="sudo docker run ${opts} ${envs} ${rc_file_vol} ${cacert_file_vol} ${map_log_dir} ${sshkey} ${DOCKER_REPO}:${DOCKER_TAG} \
-    exec_tests.sh ${YARDSTICK_DB_BACKEND} ${YARDSTICK_SCENARIO_SUITE_NAME}"
-fi
+cmd="sudo docker run ${opts} ${envs} ${rc_file_vol} ${cacert_file_vol} ${map_log_dir} ${sshkey} ${DOCKER_REPO}:${DOCKER_TAG} \
+exec_tests.sh ${YARDSTICK_DB_BACKEND} ${YARDSTICK_SCENARIO_SUITE_NAME}"
 
 echo "Yardstick: Running docker cmd: ${cmd}"
 ${cmd}
index e93367f..22a6aa4 100755 (executable)
@@ -4,5 +4,9 @@ set -e
 dest_path="$HOME/admin.conf"
 
 if [[ "${DEPLOY_SCENARIO:0:2}" == "k8" ]];then
-    juju scp kubernetes-master/0:config "${dest_path}"
+    if [[ ${INSTALLER_TYPE} == 'joid' ]];then
+        juju scp kubernetes-master/0:config "${dest_path}"
+    elif [[ ${INSTALLER_TYPE} == 'compass' ]];then
+        docker cp compass-tasks:/opt/admin.conf "${dest_path}"
+    fi
 fi
index 7486d8a..e2fee29 100644 (file)
           fail: true
 
     builders:
-      - yardstick-unit-tests-and-docs-build
+      - yardstick-unit-tests-python-27
+      - yardstick-unit-tests-python-3
+      - yardstick-functional-tests-python-27
+      - yardstick-functional-tests-python-3
+      - yardstick-coverage-tests
+      - yardstick-pep8-tests
 
 - job-template:
     name: 'yardstick-merge-{stream}'
           fail: true
 
     builders:
-      - yardstick-unit-tests-and-docs-build
+      - yardstick-unit-tests-python-27
+      - yardstick-unit-tests-python-3
+      - yardstick-functional-tests-python-27
+      - yardstick-functional-tests-python-3
+      - yardstick-coverage-tests
+      - yardstick-pep8-tests
 
 ################################
 # job builders
 ################################
 
 - builder:
-    name: yardstick-unit-tests-and-docs-build
+    name: yardstick-unit-tests-python-27
     builders:
       - shell: |
           #!/bin/bash
 
           sudo apt-get install -y build-essential python-dev python3-dev
 
-          echo "Running unit tests..."
+          echo "Running unit tests in Python 2.7 ..."
           cd $WORKSPACE
-          tox
+          tox -epy27
+
+- builder:
+    name: yardstick-unit-tests-python-3
+    builders:
+      - shell: |
+          #!/bin/bash
+          set -o errexit
+          set -o pipefail
+
+          sudo apt-get install -y build-essential python-dev python3-dev
+
+          echo "Running unit tests in Python 3 ..."
+          cd $WORKSPACE
+          tox -epy3
+
+- builder:
+    name: yardstick-functional-tests-python-27
+    builders:
+      - shell: |
+          #!/bin/bash
+          set -o errexit
+          set -o pipefail
+
+          sudo apt-get install -y build-essential python-dev python3-dev
+
+          echo "Running functional tests in Python 2.7 ..."
+          cd $WORKSPACE
+          tox -efunctional
+
+- builder:
+    name: yardstick-functional-tests-python-3
+    builders:
+      - shell: |
+          #!/bin/bash
+          set -o errexit
+          set -o pipefail
+
+          sudo apt-get install -y build-essential python-dev python3-dev
+
+          echo "Running functional tests in Python 3 ..."
+          cd $WORKSPACE
+          tox -efunctional-py3
+
+- builder:
+    name: yardstick-coverage-tests
+    builders:
+      - shell: |
+          #!/bin/bash
+          set -o errexit
+          set -o pipefail
+
+          sudo apt-get install -y build-essential python-dev python3-dev
+
+          echo "Running coverage tests ..."
+          cd $WORKSPACE
+          tox -ecoverage
+
+- builder:
+    name: yardstick-pep8-tests
+    builders:
+      - shell: |
+          #!/bin/bash
+          set -o errexit
+          set -o pipefail
+
+          sudo apt-get install -y build-essential python-dev python3-dev
+
+          echo "Running style guidelines (PEP8) tests ..."
+          cd $WORKSPACE
+          tox -epep8
diff --git a/modules/opnfv/deployment/daisy/__init__.py b/modules/opnfv/deployment/daisy/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/modules/opnfv/deployment/daisy/adapter.py b/modules/opnfv/deployment/daisy/adapter.py
new file mode 100644 (file)
index 0000000..5634e24
--- /dev/null
@@ -0,0 +1,202 @@
+##############################################################################
+# Copyright (c) 2017 ZTE Corporation and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+
+from opnfv.deployment import manager
+from opnfv.utils import opnfv_logger as logger
+from opnfv.utils import ssh_utils
+
+logger = logger.Logger(__name__).getLogger()
+
+
+class DaisyAdapter(manager.DeploymentHandler):
+
+    def __init__(self, installer_ip, installer_user, installer_pwd):
+        super(DaisyAdapter, self).__init__(installer='daisy',
+                                           installer_ip=installer_ip,
+                                           installer_user=installer_user,
+                                           installer_pwd=installer_pwd,
+                                           pkey_file=None)
+
+    def _get_clusters(self):
+        clusters = []
+        cmd = 'source /root/daisyrc_admin; daisy cluster-list | grep -v "+--"'
+        output = self.installer_node.run_cmd(cmd)
+        lines = output.rsplit('\n')
+        if len(lines) < 2:
+            logger.info("No environments found in the deployment.")
+            return None
+        else:
+            fields = lines[0].rsplit('|')
+
+            index_id = -1
+            index_status = -1
+            index_name = -1
+            index_nodes = -1
+
+            for i in range(len(fields)):
+                if "ID" in fields[i]:
+                    index_id = i
+                elif "Status" in fields[i]:
+                    index_status = i
+                elif "Name" in fields[i]:
+                    index_name = i
+                elif "Nodes" in fields[i]:
+                    index_nodes = i
+
+            # order env info
+            for i in range(1, len(lines)):
+                fields = lines[i].rsplit('|')
+                dict = {"id": fields[index_id].strip(),
+                        "status": fields[index_status].strip(),
+                        "name": fields[index_name].strip(),
+                        "nodes": fields[index_nodes].strip()}
+                clusters.append(dict)
+
+        return clusters
+
+    def get_nodes(self, options=None):
+        if hasattr(self, 'nodes') and len(self.nodes) > 0:
+            if options and 'cluster' in options and options['cluster']:
+                nodes = []
+                for node in self.nodes:
+                    if str(node.info['cluster']) == str(options['cluster']):
+                        nodes.append(node)
+                return nodes
+            else:
+                return self.nodes
+
+        clusters = self._get_clusters()
+        nodes = []
+        for cluster in clusters:
+            if options and 'cluster' in options and options['cluster']:
+                if cluster["id"] != options['cluster']:
+                    continue
+            cmd = 'source /root/daisyrc_admin; daisy host-list ' \
+                  '--cluster-id {} | grep -v "+--"'.format(cluster["id"])
+            output = self.installer_node.run_cmd(cmd)
+            lines = output.rsplit('\n')
+            if len(lines) < 2:
+                logger.info("No nodes found in the cluster {}".format(
+                    cluster["id"]))
+                continue
+
+            fields = lines[0].rsplit('|')
+            index_id = -1
+            index_status = -1
+            index_name = -1
+
+            for i in range(len(fields)):
+                if "ID" in fields[i]:
+                    index_id = i
+                elif "Role_status" in fields[i]:
+                    index_status = i
+                elif "Name" in fields[i]:
+                    index_name = i
+
+            for i in range(1, len(lines)):
+                fields = lines[i].rsplit('|')
+                id = fields[index_id].strip().encode()
+                status_node = fields[index_status].strip().encode().lower()
+                name = fields[index_name].strip().encode()
+                ip = ".".join(name.split("-")[1:])
+
+                cmd_role = 'source /root/daisyrc_admin; ' \
+                           'daisy host-detail {} | grep "^| role"'.format(id)
+                output_role = self.installer_node.run_cmd(cmd_role)
+                role_all = output_role.rsplit('|')[2].strip().encode()
+                roles = []
+                if 'COMPUTER' in role_all:
+                    roles.append(manager.Role.COMPUTE)
+                if 'CONTROLLER_LB' in role_all or 'CONTROLLER_HA' in role_all:
+                    roles.append(manager.Role.CONTROLLER)
+
+                ssh_client = None
+                if status_node == 'active':
+                    status = manager.NodeStatus.STATUS_OK
+                    proxy = {'ip': self.installer_ip,
+                             'username': self.installer_user,
+                             'password': self.installer_pwd,
+                             'pkey_file': '/root/.ssh/id_dsa'}
+                    ssh_client = ssh_utils.get_ssh_client(hostname=ip,
+                                                          username='root',
+                                                          proxy=proxy)
+                else:
+                    status = manager.NodeStatus.STATUS_INACTIVE
+
+                node = DaisyNode(id, ip, name, status, roles, ssh_client)
+                nodes.append(node)
+        return nodes
+
+    def get_openstack_version(self):
+        cmd = 'docker exec nova_api nova-manage version 2>/dev/null'
+        version = None
+        for node in self.nodes:
+            if node.is_controller() and node.is_active():
+                version = node.run_cmd(cmd)
+                break
+        return version
+
+    def get_sdn_version(self):
+        version = None
+        for node in self.nodes:
+            if manager.Role.CONTROLLER in node.roles and node.is_active():
+                cmd = 'docker inspect --format=\'{{.Name}}\' `docker ps -q`'
+                output = node.run_cmd(cmd)
+                if '/opendaylight' in output.rsplit('\n'):
+                    cmd2 = 'docker exec opendaylight ' \
+                           'sudo yum info opendaylight 2>/dev/null ' \
+                           '| grep Version | tail -1'
+                    odl_ver = node.run_cmd(cmd2)
+                    if odl_ver:
+                        version = 'OpenDaylight: ' + odl_ver.split(' ')[-1]
+                    break
+        return version
+
+    def get_deployment_status(self):
+        clusters = self._get_clusters()
+        if clusters is None or len(clusters) == 0:
+            return 'unknown'
+        else:
+            return clusters[0]['status']
+
+
+class DaisyNode(manager.Node):
+
+    def __init__(self,
+                 id,
+                 ip,
+                 name,
+                 status,
+                 roles=None,
+                 ssh_client=None,
+                 info=None):
+        super(DaisyNode, self).__init__(id, ip, name, status,
+                                        roles, ssh_client, info)
+
+    def is_odl(self):
+        '''
+        Returns if the node is an opendaylight
+        '''
+        if manager.Role.CONTROLLER in self.roles and self.is_active():
+            cmd = 'docker inspect --format=\'{{.Name}}\' `docker ps -q`'
+            output = self.run_cmd(cmd)
+            if '/opendaylight' in output.rsplit('\n'):
+                return True
+        return False
+
+    def get_ovs_info(self):
+        '''
+        Returns the ovs version installed
+        '''
+        if self.is_active():
+            cmd = 'docker exec openvswitch_vswitchd ' \
+                  'ovs-vsctl --version | head -1 | awk \'{print $NF}\''
+            return self.run_cmd(cmd)
+        return None
index e14783f..2788e5e 100644 (file)
@@ -12,6 +12,7 @@ from opnfv.deployment.apex import adapter as apex_adapter
 from opnfv.deployment.compass import adapter as compass_adapter
 from opnfv.deployment.fuel import adapter as fuel_adapter
 from opnfv.deployment.osa import adapter as osa_adapter
+from opnfv.deployment.daisy import adapter as daisy_adapter
 from opnfv.utils import opnfv_logger as logger
 
 logger = logger.Logger(__name__).getLogger()
@@ -51,6 +52,10 @@ class Factory(object):
             return osa_adapter.OSAAdapter(installer_ip=installer_ip,
                                           installer_user=installer_user,
                                           pkey_file=pkey_file)
+        elif installer.lower() == "daisy":
+            return daisy_adapter.DaisyAdapter(installer_ip=installer_ip,
+                                              installer_user=installer_user,
+                                              installer_pwd=installer_pwd)
         else:
             raise Exception("Installer adapter is not implemented for "
                             "the given installer.")
index 4c5ff5c..175a380 100644 (file)
@@ -49,9 +49,11 @@ def get_ssh_client(hostname,
             client = paramiko.SSHClient()
         else:
             client = ProxyHopClient()
+            proxy_pkey_file = proxy.get('pkey_file', '/root/.ssh/id_rsa')
             client.configure_jump_host(proxy['ip'],
                                        proxy['username'],
-                                       proxy['password'])
+                                       proxy['password'],
+                                       proxy_pkey_file)
         if client is None:
             raise Exception('Could not connect to client')
 
@@ -115,6 +117,8 @@ class ProxyHopClient(paramiko.SSHClient):
                             jh_ssh_key='/root/.ssh/id_rsa'):
         self.proxy_ip = jh_ip
         self.proxy_ssh_key = jh_ssh_key
+        self.local_ssh_key = os.path.join(os.getcwd(),
+                                          jh_ssh_key.split('/')[-1])
         self.proxy_ssh = paramiko.SSHClient()
         self.proxy_ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
         self.proxy_ssh.connect(jh_ip,
@@ -138,8 +142,12 @@ class ProxyHopClient(paramiko.SSHClient):
                                     self.local_ssh_key)
             if get_file_res is None:
                 raise Exception('Could\'t fetch SSH key from jump host')
-            proxy_key = (paramiko.RSAKey
-                         .from_private_key_file(self.local_ssh_key))
+            if self.proxy_ssh_key.split('/')[-1] == 'id_dsa':
+                proxy_key = (paramiko.DSSKey
+                             .from_private_key_file(self.local_ssh_key))
+            else:
+                proxy_key = (paramiko.RSAKey
+                             .from_private_key_file(self.local_ssh_key))
 
             self.proxy_channel = self.proxy_transport.open_channel(
                 "direct-tcpip",
diff --git a/utils/build-server-ansible/inventory.ini b/utils/build-server-ansible/inventory.ini
new file mode 100644 (file)
index 0000000..115b130
--- /dev/null
@@ -0,0 +1,8 @@
+#############################################################################
+# Copyright (c) 2016 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+localhost              ansible_connection=local
diff --git a/utils/build-server-ansible/main.yml b/utils/build-server-ansible/main.yml
new file mode 100644 (file)
index 0000000..0fcce71
--- /dev/null
@@ -0,0 +1,37 @@
+############################################################################
+# Copyright (c) 2016 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+#############################################################################
+---
+- hosts: "localhost"
+  become: "True"
+  tasks:
+    - debug:
+        msg: "{{ inventory_hostname }} is {{ ansible_distribution }}"
+    - include_vars: vars/defaults.yml
+    - include: vars/CentOS.yml
+      when: ansible_distribution == "CentOS"
+    - include: vars/Ubuntu.yml
+      when: ansible_distribution == "Ubuntu"
+    - name: Install Docker.
+      package: name={{ docker_package }} state={{ docker_package_state }}
+    - name: Ensure Docker is started and enabled at boot.
+      service:
+        name: docker
+        state: started
+        enabled: "yes"
+    - name: install gsutil
+      pip:
+        name: gsutil
+        state: present
+    - name: install tox
+      pip:
+        name: tox
+        state: present
+    - include: vars/docker-compose-CentOS.yml
+      when: ansible_distribution == "CentOS"
+    - include: vars/docker-compose-Ubuntu.yml
+      when: ansible_distribution == "Ubuntu"
diff --git a/utils/build-server-ansible/vars/CentOS.yml b/utils/build-server-ansible/vars/CentOS.yml
new file mode 100644 (file)
index 0000000..0d5a011
--- /dev/null
@@ -0,0 +1,72 @@
+############################################################################
+# Copyright (c) 2016 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+############################################################################
+---
+- name: Ensure old versions of Docker are not installed.
+  package:
+    name: '{{ item }}'
+    state: absent
+  with_items:
+    - docker
+    - docker-common
+    - docker-engine
+
+- name: Add Docker GPG key.
+  rpm_key:
+    key: https://download.docker.com/linux/centos/gpg
+    state: present
+
+- name: Ensure epel is installed.
+  yum:
+    name: epel-release
+    state: present
+- name: Ensure dependencies are installed.
+  yum:
+    name: "{{ item }}"
+    state: present
+  with_items:
+    - python-pip
+    - rpm-build
+    - kernel-headers
+    - libpcap-devel
+    - zlib-devel
+    - numactl-devel
+    - doxygen
+    - python-sphinx
+    - libvirt-devel
+    - python-devel
+    - openssl-devel
+    - python-six
+    - net-tools
+    - bc
+
+- name: install the 'Development tools' package group
+  yum:
+    name: "@Development tools"
+    state: present
+
+- name: Add Docker repository.
+  get_url:
+    url: "{{ docker_yum_repo_url }}"
+    dest: '/etc/yum.repos.d/docker-ce.repo'
+    owner: root
+    group: root
+    mode: 0644
+
+- name: Configure Docker Edge repo.
+  ini_file:
+    dest: '/etc/yum.repos.d/docker-ce.repo'
+    section: 'docker-ce-edge'
+    option: enabled
+    value: '{{ docker_yum_repo_enable_edge }}'
+
+- name: Configure Docker Test repo.
+  ini_file:
+    dest: '/etc/yum.repos.d/docker-ce.repo'
+    section: 'docker-ce-test'
+    option: enabled
+    value: '{{ docker_yum_repo_enable_test }}'
diff --git a/utils/build-server-ansible/vars/Ubuntu.yml b/utils/build-server-ansible/vars/Ubuntu.yml
new file mode 100644 (file)
index 0000000..609c8d5
--- /dev/null
@@ -0,0 +1,84 @@
+#############################################################################
+# Copyright (c) 2016 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+#############################################################################
+---
+- name: Ensure old versions of Docker are not installed.
+  package:
+    name: '{{ item }}'
+    state: absent
+  with_items:
+    - docker
+    - docker-engine
+
+- name: Ensure dependencies are installed.
+  apt:
+    name: "{{ item }}"
+    state: present
+  with_items:
+    - apt-transport-https
+    - ca-certificates
+    - git
+    - build-essential
+    - curl
+    - wget
+    - rpm
+    - fuseiso
+    - createrepo
+    - genisoimage
+    - libfuse-dev
+    - dh-autoreconf
+    - pkg-config
+    - zlib1g-dev
+    - libglib2.0-dev
+    - libpixman-1-dev
+    - python-virtualenv
+    - python-dev
+    - libffi-dev
+    - libssl-dev
+    - libxml2-dev
+    - libxslt1-dev
+    - bc
+    - qemu-kvm
+    - libvirt-bin
+    - ubuntu-vm-builder
+    - bridge-utils
+    - monit
+    - openjdk-8-jre-headless
+    - python-nose
+    - dirmngr
+    - collectd
+    - flex
+    - bison
+    - libnuma-dev
+    - shellcheck
+    - python-pip
+
+- name: Add Docker apt key.
+  apt_key:
+    url: https://download.docker.com/linux/ubuntu/gpg
+    id: 9DC858229FC7DD38854AE2D88D81803C0EBFCD88
+    state: present
+  register: add_repository_key
+  ignore_errors: true
+
+- name: Ensure curl is present (on older systems without SNI).
+  package: name=curl state=present
+  when: add_repository_key|failed
+
+- name: Add Docker apt key (alternative for older systems without SNI).
+  # yamllint disable rule:line-length
+  shell: "curl -sSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add -"
+  # yamllint enable rule:line-length
+  args:
+    warn: "no"
+  when: add_repository_key|failed
+
+- name: Add Docker repository.
+  apt_repository:
+    repo: "{{ docker_apt_repository }}"
+    state: present
+    update_cache: "yes"
diff --git a/utils/build-server-ansible/vars/defaults.yml b/utils/build-server-ansible/vars/defaults.yml
new file mode 100644 (file)
index 0000000..8d83380
--- /dev/null
@@ -0,0 +1,23 @@
+#############################################################################
+# Copyright (c) 2016 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+#############################################################################
+---
+docker_package: "docker-ce"
+docker_package_state: present
+
+# Used only for Debian/Ubuntu. Switch 'stable' to 'edge' if needed.
+docker_apt_release_channel: stable
+# yamllint disable rule:line-length
+docker_apt_repository: "deb https://download.docker.com/linux/{{ ansible_distribution|lower }} {{ ansible_distribution_release }} {{ docker_apt_release_channel }}"
+# yamllint enable rule:line-length
+
+# Used only for RedHat/CentOS.
+# yamllint disable rule:line-length
+docker_yum_repo_url: https://download.docker.com/linux/centos/docker-ce.repo
+# yamllint enable rule:line-length
+docker_yum_repo_enable_edge: 0
+docker_yum_repo_enable_test: 0
diff --git a/utils/build-server-ansible/vars/docker-compose-CentOS.yml b/utils/build-server-ansible/vars/docker-compose-CentOS.yml
new file mode 100644 (file)
index 0000000..fc4bcba
--- /dev/null
@@ -0,0 +1,12 @@
+#############################################################################
+# Copyright (c) 2016 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+- name: Ensure docker compose is installed.
+  yum:
+    name: 'docker-compose'
+    state: present
diff --git a/utils/build-server-ansible/vars/docker-compose-Ubuntu.yml b/utils/build-server-ansible/vars/docker-compose-Ubuntu.yml
new file mode 100644 (file)
index 0000000..f985b6a
--- /dev/null
@@ -0,0 +1,12 @@
+#############################################################################
+# Copyright (c) 2016 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+#############################################################################
+---
+- name: Ensure docker compose is installed
+  apt:
+    name: 'docker-compose'
+    state: present
index def5ecc..a60ece4 100644 (file)
@@ -92,6 +92,9 @@ def create_file(handler, INSTALLER_TYPE):
     if args.INSTALLER_TYPE == 'compass':
         for item in node_list:
             item['password'] = 'root'
+    elif args.INSTALLER_TYPE == 'daisy':
+        for item in node_list:
+            item['key_filename'] = '/root/.ssh/id_dsa'
     else:
         for item in node_list:
             item['key_filename'] = args.sshkey
diff --git a/utils/fetch_k8_conf.sh b/utils/fetch_k8_conf.sh
new file mode 100755 (executable)
index 0000000..f82fa54
--- /dev/null
@@ -0,0 +1,63 @@
+#!/bin/bash
+##############################################################################
+# Copyright (c) 2018 Huawei and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+set -o errexit
+set -o nounset
+set -o pipefail
+
+info ()  {
+    logger -s -t "fetch_k8_conf.info" "$*"
+}
+
+
+error () {
+    logger -s -t "fetch_k8_conf.error" "$*"
+    exit 1
+}
+
+: ${DEPLOY_TYPE:=''}
+
+#Get options
+while getopts ":d:i:a:h:s:o:v" optchar; do
+    case "${optchar}" in
+        d) dest_path=${OPTARG} ;;
+        i) installer_type=${OPTARG} ;;
+        v) DEPLOY_TYPE="virt" ;;
+        *) echo "Non-option argument: '-${OPTARG}'" >&2
+           echo "Usage: $(basename "$0") [-d dest_path] [-i installer_type] [-v]" >&2
+           exit 2
+           ;;
+    esac
+done
+
+# set vars from env if not provided by user as options
+dest_path=${dest_path:-$HOME/admin.conf}
+installer_type=${installer_type:-$INSTALLER_TYPE}
+
+if [ -z $dest_path ] || [ -z $installer_type ]; then
+    echo "Usage: $(basename "$0") [-d dest_path] [-i installer_type] [-v]" >&2
+    exit 2
+fi
+
+# Checking if destination path is valid
+if [ -d $dest_path ]; then
+    error "Please provide the full destination path for the credentials file including the filename"
+else
+    # Check if we can create the file (e.g. path is correct)
+    touch $dest_path || error "Cannot create the file specified. Check that the path is correct and run the script again."
+fi
+
+info "Fetching admin.conf file..."
+if [ "$installer_type" == "compass" ]; then
+    sudo docker cp compass-tasks:/opt/admin.conf $dest_path &> /dev/null
+    sudo chown $(whoami):$(whoami) $dest_path
+    info "Fetch admin.conf successfully"
+else
+    error "Installer $installer_type is not supported by this script"
+fi
+
index 8e00b9c..0fcea0d 100644 (file)
@@ -23,7 +23,7 @@ dir_result="${HOME}/opnfv/$project/results/${branch}"
 node_list=(\
 'lf-pod1' 'lf-pod2' 'intel-pod2' 'intel-pod12' \
 'lf-virtual2' 'lf-virtual3' \
-'intel-pod5' 'intel-pod6' 'intel-pod7' 'intel-pod8' \
+'intel-pod5' 'intel-pod6' 'intel-pod7' 'intel-pod8' 'intel-pod18' \
 'ericsson-pod1' 'ericsson-pod2' \
 'ericsson-virtual1' 'ericsson-virtual2'  'ericsson-virtual3' \
 'ericsson-virtual4' 'ericsson-virtual5' 'ericsson-virtual12' \
@@ -32,7 +32,7 @@ node_list=(\
 'huawei-pod6' 'huawei-pod7' 'huawei-pod12' \
 'huawei-virtual1' 'huawei-virtual2' 'huawei-virtual3' 'huawei-virtual4' \
 'huawei-virtual5' 'huawei-virtual8' 'huawei-virtual9' \
-'zte-pod2' \
+'zte-pod2' 'zte-pod3' \
 'zte-virtual1')