Merge "Adding a new verification job for testapi UI."
author  Serena Feng <feng.xiaowei@zte.com.cn>
Tue, 20 Nov 2018 07:37:51 +0000 (07:37 +0000)
committer  Gerrit Code Review <gerrit@opnfv.org>
Tue, 20 Nov 2018 07:37:51 +0000 (07:37 +0000)
274 files changed:
.gitignore
.gitmodules
INFO [deleted file]
INFO.yaml
docs/ci/index.rst
docs/ci/resources.rst
docs/ci/tables/ci-baremetal-servers.rst [new file with mode: 0644]
docs/ci/tables/ci-build-servers.rst [new file with mode: 0644]
docs/ci/tables/ci-labels.rst [new file with mode: 0644]
docs/ci/tables/ci-virtual-servers.rst [new file with mode: 0644]
docs/ci/tables/none-ci-servers.rst [new file with mode: 0644]
docs/infra/index.rst
docs/infra/jenkins/connect-to-jenkins.rst
docs/infra/jenkins/jjb-usage.rst
docs/release/index.rst
global-jjb [new submodule]
jjb-sandbox/releng/releng-sandbox-jobs.yml [deleted file]
jjb-sandbox/releng/verify-sandbox-jobs.sh [deleted file]
jjb/3rd_party_ci/detect-snapshot.sh [new file with mode: 0755]
jjb/3rd_party_ci/download-netvirt-artifact.sh
jjb/3rd_party_ci/install-netvirt.sh
jjb/3rd_party_ci/odl-netvirt.yaml
jjb/apex/apex-build.sh
jjb/apex/apex-deploy.sh
jjb/apex/apex-download-artifact.sh
jjb/apex/apex-fetch-snap-info.sh [new file with mode: 0755]
jjb/apex/apex-functest-scenario.sh [new file with mode: 0644]
jjb/apex/apex-iso-verify.sh
jjb/apex/apex-jjb-renderer.py
jjb/apex/apex-project-jobs.yaml
jjb/apex/apex-rtd-jobs.yaml [new file with mode: 0644]
jjb/apex/apex-snapshot-create.sh
jjb/apex/apex-snapshot-deploy.sh
jjb/apex/apex-upload-artifact.sh
jjb/apex/apex-verify-jobs.yaml
jjb/apex/apex.yaml
jjb/apex/apex.yaml.j2
jjb/apex/scenarios.yaml.hidden
jjb/armband/armband-ci-jobs.yaml
jjb/armband/armband-rtd-jobs.yaml [new file with mode: 0644]
jjb/armband/armband-verify-jobs.yaml
jjb/auto/auto-rtd-jobs.yaml [new file with mode: 0644]
jjb/auto/auto.yaml
jjb/availability/availability-rtd-jobs.yaml [new file with mode: 0644]
jjb/barometer/barometer-rtd-jobs.yaml [new file with mode: 0644]
jjb/barometer/barometer.yaml
jjb/bottlenecks/bottlenecks-ci-jobs.yaml
jjb/bottlenecks/bottlenecks-project-jobs.yaml
jjb/bottlenecks/bottlenecks-rtd-jobs.yaml [new file with mode: 0644]
jjb/bottlenecks/bottlenecks-run-suite.sh
jjb/calipso/calipso-rtd-jobs.yaml [new file with mode: 0644]
jjb/ci_gate_security/opnfv-ci-gate-security.yaml
jjb/clover/clover-project.yaml
jjb/clover/clover-rtd-jobs.yaml [new file with mode: 0644]
jjb/compass4nfv/compass-ci-jobs.yaml
jjb/compass4nfv/compass-deploy.sh
jjb/compass4nfv/compass-dovetail-jobs.yaml
jjb/compass4nfv/compass-project-jobs.yaml
jjb/compass4nfv/compass-verify-jobs.yaml
jjb/compass4nfv/compass4nfv-rtd-jobs.yaml [new file with mode: 0644]
jjb/container4nfv/arm64/compass-build.sh
jjb/container4nfv/arm64/compass-deploy.sh
jjb/container4nfv/arm64/deploy-cni.sh [new file with mode: 0755]
jjb/container4nfv/container4nfv-arm64.yaml
jjb/container4nfv/container4nfv-project.yaml
jjb/container4nfv/container4nfv-rtd-jobs.yaml [new file with mode: 0644]
jjb/copper/copper-rtd-jobs.yaml [new file with mode: 0644]
jjb/cperf/cirros-upload.yaml.ansible [new file with mode: 0644]
jjb/cperf/cperf-ci-jobs.yaml
jjb/cperf/cperf-prepare-robot.sh [new file with mode: 0755]
jjb/cperf/cperf-robot-netvirt-csit.sh [new file with mode: 0755]
jjb/cperf/cperf-upload-logs-csit.sh [new file with mode: 0644]
jjb/cperf/csit-clean.yaml.ansible [new file with mode: 0644]
jjb/cperf/parse-node-yaml.py [new file with mode: 0644]
jjb/daisy4nfv/daisy-daily-jobs.yaml
jjb/daisy4nfv/daisy-deploy.sh
jjb/daisy4nfv/daisy-project-jobs.yaml
jjb/daisy4nfv/daisy-rtd-jobs.yaml [new file with mode: 0644]
jjb/daisy4nfv/daisy4nfv-merge-jobs.yaml
jjb/daisy4nfv/daisy4nfv-verify-jobs.yaml
jjb/doctor/doctor-env-presetup.sh [new file with mode: 0755]
jjb/doctor/doctor-rtd-jobs.yaml [new file with mode: 0644]
jjb/doctor/doctor.yaml
jjb/domino/domino-rtd-jobs.yaml [new file with mode: 0644]
jjb/dovetail-webportal/dovetail-webportal-project-jobs.yaml [new file with mode: 0644]
jjb/dovetail/dovetail-ci-jobs.yaml
jjb/dovetail/dovetail-rtd-jobs.yaml [new file with mode: 0644]
jjb/dovetail/dovetail-run.sh
jjb/edgecloud/edgecloud-rtd-jobs.yaml [new file with mode: 0644]
jjb/escalator/escalator-rtd-jobs.yaml [new file with mode: 0644]
jjb/fds/fds-rtd-jobs.yaml [new file with mode: 0644]
jjb/fuel/fuel-daily-jobs.yaml
jjb/fuel/fuel-deploy.sh
jjb/fuel/fuel-docker-jobs.yaml [new file with mode: 0644]
jjb/fuel/fuel-download-artifact.sh [deleted file]
jjb/fuel/fuel-lab-reconfig.sh [deleted file]
jjb/fuel/fuel-project-jobs.yaml [deleted file]
jjb/fuel/fuel-rtd-jobs.yaml [new file with mode: 0644]
jjb/fuel/fuel-set-scenario.sh [new file with mode: 0755]
jjb/fuel/fuel-verify-jobs.yaml
jjb/fuel/fuel-weekly-jobs.yaml [deleted file]
jjb/functest/functest-alpine.sh
jjb/functest/functest-daily-jobs.yaml
jjb/functest/functest-docker.yaml
jjb/functest/functest-env-presetup.sh
jjb/functest/functest-kubernetes-docker.yaml
jjb/functest/functest-kubernetes-project-jobs.yaml
jjb/functest/functest-project-jobs.yaml
jjb/functest/functest-rtd-jobs.yaml [new file with mode: 0644]
jjb/functest/functest-virtual.yaml [new file with mode: 0644]
jjb/functest/xtesting-docker.yaml
jjb/functest/xtesting-project-jobs.yaml
jjb/global-jjb [deleted submodule]
jjb/global-jjb/jjb [new symlink]
jjb/global-jjb/shell [new symlink]
jjb/global/basic-jobs.yaml
jjb/global/releng-defaults.yaml
jjb/global/releng-macros.yaml
jjb/global/slave-params.yaml
jjb/ipv6/ipv6-rtd-jobs.yaml [new file with mode: 0644]
jjb/joid/joid-daily-jobs.yaml
jjb/joid/joid-rtd-jobs.yaml [new file with mode: 0644]
jjb/joid/joid-verify-jobs.yaml
jjb/kvmfornfv/kvmfornfv-rtd-jobs.yaml [new file with mode: 0644]
jjb/moon/moon-rtd-jobs.yaml [new file with mode: 0644]
jjb/multisite/multisite-rtd-jobs.yaml [new file with mode: 0644]
jjb/netready/netready-rtd-jobs.yaml [new file with mode: 0644]
jjb/nfvbench/nfvbench-rtd-jobs.yaml [new file with mode: 0644]
jjb/nfvbench/nfvbench.yaml
jjb/octopus/octopus-rtd-jobs.yaml [new file with mode: 0644]
jjb/onosfw/onosfw-rtd-jobs.yaml [new file with mode: 0644]
jjb/openci/create-ane.sh [deleted file]
jjb/openci/create-ape.sh [new file with mode: 0755]
jjb/openci/create-cde.sh
jjb/openci/create-clme.sh
jjb/openci/openci-odl-daily-jobs.yaml
jjb/openci/openci-onap-daily-jobs.yaml
jjb/openci/openci-opnfv-daily-jobs.yaml
jjb/opera/opera-rtd-jobs.yaml [new file with mode: 0644]
jjb/opnfvdocs/docs-rtd.yaml
jjb/opnfvdocs/opnfvdocs.yaml
jjb/opnfvtsc/opnfvtsc-rtd-jobs.yaml [new file with mode: 0644]
jjb/orchestra/orchestra-daily-jobs.yaml
jjb/orchestra/orchestra-rtd-jobs.yaml [new file with mode: 0644]
jjb/ovn4nfv/golang-make-test.sh [new file with mode: 0644]
jjb/ovn4nfv/ovn4nfv-daily-jobs.yaml
jjb/ovn4nfv/ovn4nfv-k8s-plugins-project-jobs.yaml [new file with mode: 0644]
jjb/ovn4nfv/ovn4nfv-project-jobs.yaml
jjb/ovn4nfv/ovn4nfv-rtd-jobs.yaml [new file with mode: 0644]
jjb/ovno/ovno-rtd-jobs.yaml [new file with mode: 0644]
jjb/ovsnfv/ovsnfv-rtd-jobs.yaml [new file with mode: 0644]
jjb/parser/parser-rtd-jobs.yaml [new file with mode: 0644]
jjb/pharos/pharos-rtd-jobs.yaml [new file with mode: 0644]
jjb/pharos/pharos-tools.yml [new file with mode: 0644]
jjb/pharos/pharos.yaml
jjb/pharos/shell/backup-dashboard.sh [new file with mode: 0644]
jjb/pharos/shell/deploy-dashboard.sh [new file with mode: 0644]
jjb/promise/promise-rtd-jobs.yaml [new file with mode: 0644]
jjb/qtip/qtip-experimental-jobs.yaml
jjb/qtip/qtip-rtd-jobs.yaml [new file with mode: 0644]
jjb/qtip/qtip-validate-jobs.yaml
jjb/releng/compass4nfv-docker.yaml
jjb/releng/opnfv-docker-arm.yaml
jjb/releng/opnfv-docker.sh
jjb/releng/opnfv-docker.yaml
jjb/releng/opnfv-repo-archiver.sh
jjb/releng/opnfv-utils.yaml
jjb/releng/releng-ci-jobs.yaml [deleted file]
jjb/releng/releng-release-create-branch.sh
jjb/releng/releng-release-create-venv.sh [moved from jjb/releng/releng-release-verify.sh with 51% similarity]
jjb/releng/releng-release-jobs.yaml
jjb/releng/releng-release-tagging.sh [new file with mode: 0644]
jjb/releng/releng-rtd-jobs.yaml [new file with mode: 0644]
jjb/releng/testresults-automate.yaml
jjb/samplevnf/samplevnf-rtd-jobs.yaml [new file with mode: 0644]
jjb/sdnvpn/sdnvpn-rtd-jobs.yaml [new file with mode: 0644]
jjb/securedlab/check-jinja2.yaml [deleted file]
jjb/sfc/sfc-project-jobs.yaml
jjb/sfc/sfc-rtd-jobs.yaml [new file with mode: 0644]
jjb/snaps/snaps-rtd-jobs.yaml [new file with mode: 0644]
jjb/stor4nfv/stor4nfv-rtd-jobs.yaml [new file with mode: 0644]
jjb/storperf/storperf-daily-jobs.yaml
jjb/storperf/storperf-rtd-jobs.yaml [new file with mode: 0644]
jjb/storperf/storperf-verify-jobs.yaml
jjb/storperf/storperf.yaml
jjb/ves/ves-rtd-jobs.yaml [new file with mode: 0644]
jjb/vnf_forwarding_graph/vnf-forwarding-graph-rtd-jobs.yaml [new file with mode: 0644]
jjb/vswitchperf/vswitchperf-rtd-jobs.yaml [new file with mode: 0644]
jjb/vswitchperf/vswitchperf.yaml
jjb/xci/bifrost-periodic-jobs.yaml
jjb/xci/bifrost-verify-jobs.yaml
jjb/xci/osa-periodic-jobs.yaml
jjb/xci/xci-daily-jobs.yaml
jjb/xci/xci-merge-jobs.yaml
jjb/xci/xci-rtd-jobs.yaml [new file with mode: 0644]
jjb/xci/xci-run-functest.sh
jjb/xci/xci-set-scenario.sh
jjb/xci/xci-verify-jobs.yaml
jjb/yardstick/yardstick-daily-jobs.yaml
jjb/yardstick/yardstick-daily.sh
jjb/yardstick/yardstick-project-jobs.yaml
jjb/yardstick/yardstick-rtd-jobs.yaml [new file with mode: 0644]
modules/opnfv/deployment/example.py
modules/opnfv/deployment/factory.py
modules/opnfv/deployment/fuel/adapter.py
modules/opnfv/deployment/manager.py
modules/opnfv/utils/Credentials.py
modules/opnfv/utils/ssh_utils.py
modules/requirements.txt
releases/fraser/apex.yaml
releases/fraser/armband.yaml [moved from releases/fraser/armband.yml with 75% similarity]
releases/fraser/auto.yaml
releases/fraser/availability.yaml
releases/fraser/barometer.yaml
releases/fraser/bottlenecks.yaml
releases/fraser/calipso.yaml
releases/fraser/clover.yaml
releases/fraser/compass4nfv.yaml
releases/fraser/container4nfv.yaml
releases/fraser/doctor.yaml
releases/fraser/fds.yaml
releases/fraser/fuel.yaml
releases/fraser/functest.yaml
releases/fraser/ipv6.yaml
releases/fraser/joid.yaml
releases/fraser/nfvbench.yaml
releases/fraser/opnfvdocs.yaml
releases/fraser/ovn4nfv.yaml
releases/fraser/samplevnf.yaml
releases/fraser/sdnvpn.yaml
releases/fraser/sfc.yaml
releases/fraser/snaps.yaml
releases/fraser/stor4nfv.yaml
releases/fraser/storperf.yaml
releases/fraser/vswitchperf.yaml
releases/fraser/yardstick.yaml
releases/gambia/apex.yaml [new file with mode: 0644]
releases/gambia/armband.yaml [new file with mode: 0644]
releases/gambia/auto.yaml [new file with mode: 0644]
releases/gambia/availability.yaml [new file with mode: 0644]
releases/gambia/barometer.yaml [new file with mode: 0644]
releases/gambia/bottlenecks.yaml [new file with mode: 0644]
releases/gambia/clover.yaml [new file with mode: 0644]
releases/gambia/compass4nfv.yaml [new file with mode: 0644]
releases/gambia/container4nfv.yaml [new file with mode: 0644]
releases/gambia/doctor.yaml [new file with mode: 0644]
releases/gambia/edgecloud.yaml [new file with mode: 0644]
releases/gambia/fuel.yaml [new file with mode: 0644]
releases/gambia/functest.yaml [new file with mode: 0644]
releases/gambia/ipv6.yaml [new file with mode: 0644]
releases/gambia/nfvbench.yaml [new file with mode: 0644]
releases/gambia/opnfvdocs.yaml [new file with mode: 0644]
releases/gambia/ovn4nfv.yaml [new file with mode: 0644]
releases/gambia/samplevnf.yaml [new file with mode: 0644]
releases/gambia/sandbox.yaml [new file with mode: 0644]
releases/gambia/sdnvpn.yaml [new file with mode: 0644]
releases/gambia/sfc.yaml [new file with mode: 0644]
releases/gambia/stor4nfv.yaml [new file with mode: 0644]
releases/gambia/storperf.yaml [new file with mode: 0644]
releases/gambia/vswitchperf.yaml [new file with mode: 0644]
releases/gambia/yardstick.yaml [new file with mode: 0644]
releases/hunter/functest.yaml [new file with mode: 0644]
releases/scripts/create_branch.py [deleted file]
releases/scripts/release-status.sh [new file with mode: 0755]
releases/scripts/repos.py [new file with mode: 0644]
tox.ini
utils/artifacts.opnfv.org.sh
utils/build-server-ansible/main.yml
utils/build-server-ansible/vars/CentOS.yml
utils/build-server-ansible/vars/Ubuntu.yml
utils/build-server-ansible/vars/defaults.yml
utils/fetch_os_creds.sh
utils/gpg_import_key.sh
utils/push-test-logs.sh

diff --git a/.gitignore b/.gitignore
index 7790d46..9ee8c53 100644 (file)
@@ -1,5 +1,6 @@
 *~
 .*.sw?
+*.swp
 /docs_build/
 /docs_output/
 /releng/
diff --git a/.gitmodules b/.gitmodules
index 07b28be..9f7b778 100644 (file)
@@ -1,3 +1,3 @@
 [submodule "jjb/global-jjb"]
-       path = jjb/global-jjb
+       path = global-jjb
        url = https://github.com/lfit/releng-global-jjb
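The submodule path moves from jjb/global-jjb to the repository root, so existing clones need to pick up the new location. A minimal sketch of refreshing a local checkout after this change (standard git commands; assumes no local edits under the old path):

    # re-read .gitmodules and check the submodule out at its new path
    git submodule sync
    git submodule update --init global-jjb
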
diff --git a/INFO b/INFO
deleted file mode 100644 (file)
index d9051ab..0000000
--- a/INFO
+++ /dev/null
@@ -1,31 +0,0 @@
-Project: Release Engineering (Releng)
-Project Creation Date: July 14, 2015
-Project Category: Integration & Testing
-Lifecycle State: Incubation
-Primary Contact: Fatih Degirmenci
-Project Lead: Fatih Degirmenci
-Jira Project Name: Release Engineering
-Jira Project Prefix: RELENG
-Mailing list tag: [releng]
-IRC: Server:freenode.net Channel:#opnfv-octopus
-Repository: releng
-Other Repositories: releng-xci, releng-testresults
-
-Committers:
-Fatih Degirmenci (Ericsson, fatih.degirmenci@ericsson.com)
-Aric Gardner (Linux Foundation, agardner@linuxfoundation.org)
-Tim Rozet (Red Hat, trozet@redhat.com)
-Morgan Richomme (Orange, morgan.richomme@orange.com)
-Jose Lausuch (Ericsson, jose.lausuch@ericsson.com)
-Ryota Mibu (NEC, r-mibu@cq.jp.nec.com)
-Mei Mei (Huawei, meimei@huawei.com)
-Trevor Bramwell (Linux Foundation, tbramwell@linuxfoundation.org)
-Serena Feng (ZTE, feng.xiaowei@zte.com.cn)
-Yolanda Robla Mota (Red Hat, yroblamo@redhat.com)
-Markos Chandras (SUSE, mchandras@suse.de)
-Luke Hinds (Red Hat, lhinds@redhat.com)
-
-Link to TSC approval of the project: http://ircbot.wl.linuxfoundation.org/meetings/opnfv-meeting/2015/opnfv-meeting.2015-07-14-14.00.html
-Link to TSC voting for removal of Victor Laza as committer: http://meetbot.opnfv.org/meetings/opnfv-meeting/2016/opnfv-meeting.2016-02-16-14.59.html
-Link to nomination and acceptance of Mei Mei as committer: http://lists.opnfv.org/pipermail/opnfv-tsc/2016-March/002228.html
-Links to nomination and accceptance of Trevor Bramwell as committer: http://lists.opnfv.org/pipermail/opnfv-tech-discuss/2016-July/011659.html and http://lists.opnfv.org/pipermail/opnfv-tech-discuss/2016-July/011714.html
diff --git a/INFO.yaml b/INFO.yaml
index dae36c9..d265be3 100644 (file)
--- a/INFO.yaml
+++ b/INFO.yaml
@@ -93,6 +93,15 @@ committers:
       email: 'lhinds@redhat.com'
       id: 'lukehinds'
       timezone: 'Europe/London'
+    - name: 'Cédric Ollivier'
+      email: 'cedric.ollivier@orange.com'
+      company: 'orange'
+      id: 'ollivier'
+      timezone: 'Europe/Paris'
+    - name: 'Alexandru Avadanii'
+      email: 'Alexandru.Avadanii@enea.com'
+      company: 'enea.com'
+      id: 'AlexandruAvadanii'
 tsc:
     approval: 'http://ircbot.wl.linuxfoundation.org/meetings/opnfv-meeting/2015/opnfv-meeting.2015-07-14-14.00.html'
     changes:
diff --git a/docs/ci/index.rst b/docs/ci/index.rst
index da6fc7d..08f23ac 100644 (file)
@@ -11,7 +11,6 @@ OPNFV CI
 TBD
 
 .. toctree::
-   :numbered:
    :maxdepth: 2
 
    user-guide
diff --git a/docs/ci/resources.rst b/docs/ci/resources.rst
index 572852c..b4efe97 100644 (file)
@@ -13,49 +13,81 @@ verification needs. Each resource must meet a set of criteria in order
 to be part of CI for an OPNFV release. There are three types of
 resources:
 
-  - Baremetal PODs (PODs)
-  - Virtual PODs (vPODs)
-  - Build Servers
+- Baremetal PODs (PODs)
+- Virtual PODs (vPODs)
+- Build Servers
+
+
+.. _ci-resources-baremetal-pods:
 
---------------
 Baremetal PODs
 --------------
 
-TBD
+Baremetal PODs are used to deploy OPNFV onto baremetal hardware through
+one of the installer projects. They enable the full range of scenarios
+to be deployed and tested.
 
-~~~~~~~~~~~~~~~~~~~~~~~~~~~
-Baremetal PODs Requirements
-~~~~~~~~~~~~~~~~~~~~~~~~~~~
+**Requirements**
 
 In order of a POD to be considered CI-Ready the following requirements
 must be met:
 
-  #. Pharos Compliant and has a PDF
-  #. Connected to Jenkins
-  #. 24/7 Uptime
-  #. No Development
-  #. No manual intervention
+#. Pharos Compliant and has a PDF
+#. Connected to Jenkins
+#. 24/7 Uptime
+#. No Development
+#. No manual intervention
 
-------------
-Virtual PODS
+.. include:: tables/ci-baremetal-servers.rst
+
+
+.. _ci-resources-virtual-pods:
+
+Virtual PODs
 ------------
 
-TBD
+Virtual PODs are used to deploy OPNFV in a virtualized environment
+generally on top of KVM through libvirt.
 
-~~~~~~~~~~~~
-Requirements
-~~~~~~~~~~~~
+**Requirements**
 
-TBD
+#. Have required virtualization packages installed
+#. Meet the Pharos resource specification for virtual PODs
+#. Connected to Jenkins
+#. 24/7 Uptime
+
+.. include:: tables/ci-virtual-servers.rst
+
+.. _ci-resources-build-servers:
 
--------------
 Build Servers
 -------------
 
-TBD
+Build servers are used to build projects, run basic verifications (such
+as unit tests and linting), and generate documentation.
+
+**Requirements**
+
+#. Have required `packages`_ installed
+#. 24/7 Uptime
+#. Connected to Jenkins
+
+.. include:: tables/ci-build-servers.rst
+
+.. _dev-resources:
+
+=====================
+Development Resources
+=====================
+
+.. include:: tables/none-ci-servers.rst
+
+.. _ci-labels:
+
+===================
+CI Resources Labels
+===================
 
-~~~~~~~~~~~~
-Requirements
-~~~~~~~~~~~~
+.. include:: tables/ci-labels.rst
 
-TBD
+.. _packages: https://wiki.opnfv.org/display/INF/Continuous+Integration#ContinuousIntegration-BuildServers
diff --git a/docs/ci/tables/ci-baremetal-servers.rst b/docs/ci/tables/ci-baremetal-servers.rst
new file mode 100644 (file)
index 0000000..2efea0a
--- /dev/null
@@ -0,0 +1,143 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International License.
+.. SPDX-License-Identifier: CC-BY-4.0
+.. (c) Open Platform for NFV Project, Inc. and its contributors
+
+.. list-table:: CI Servers for Baremetal Deployment
+   :header-rows: 1
+   :stub-columns: 1
+
+   * - Node
+     - Usage
+     - Jumphost OS / Version
+     - PDF
+     - IDF
+   * - `arm-pod6 <https://build.opnfv.org/ci/computer/arm-pod6>`_
+     - Armband
+     - Ubuntu 16.04
+     - `PDF <https://git.opnfv.org/pharos/plain/labs/arm/pod6.yaml>`__
+     - `IDF <https://git.opnfv.org/pharos/plain/labs/arm/idf-pod6.yaml>`__
+   * - `ericsson-pod1 <https://build.opnfv.org/ci/computer/ericsson-pod1>`_
+     - Fuel
+     - Ubuntu 16.04
+     - `PDF <https://git.opnfv.org/pharos/plain/labs/ericsson/pod1.yaml>`__
+     - `IDF <https://git.opnfv.org/pharos/plain/labs/ericsson/idf-pod1.yaml>`__
+   * - `ericsson-pod2 <https://build.opnfv.org/ci/computer/ericsson-pod2>`_
+     - XCI
+     - Ubuntu 16.04
+     - `PDF <https://git.opnfv.org/pharos/plain/labs/ericsson/pod2.yaml>`__
+     - `IDF <https://git.opnfv.org/pharos/plain/labs/ericsson/idf-pod2.yaml>`__
+   * - `flex-pod1 <https://build.opnfv.org/ci/computer/flex-pod1>`_
+     - Yardstick
+     -
+     - PDF
+     - IDF
+   * - `flex-pod2 <https://build.opnfv.org/ci/computer/flex-pod2>`_
+     - Apex
+     -
+     - PDF
+     - IDF
+   * - `huawei-pod1 <https://build.opnfv.org/ci/computer/huawei-pod1>`_
+     - Compass4NFV
+     -
+     - `PDF <https://git.opnfv.org/pharos/plain/labs/huawei/pod1.yaml>`__
+     - `IDF <https://git.opnfv.org/pharos/plain/labs/huawei/idf-pod1.yaml>`__
+   * - `huawei-pod2 <https://build.opnfv.org/ci/computer/huawei-pod2>`_
+     - Compass4NFV
+     - Ubuntu 14.04
+     - PDF
+     - IDF
+   * - `huawei-pod3 <https://build.opnfv.org/ci/computer/huawei-pod3>`_
+     - Yardstick
+     - Ubuntu 14.04
+     - PDF
+     - IDF
+   * - `huawei-pod4 <https://build.opnfv.org/ci/computer/huawei-pod4>`_
+     - Dovetail
+     -
+     - PDF
+     - IDF
+   * - `huawei-pod6 <https://build.opnfv.org/ci/computer/huawei-pod6>`_
+     -
+     - Ubuntu 14.04
+     - PDF
+     - IDF
+   * - `huawei-pod7 <https://build.opnfv.org/ci/computer/huawei-pod7>`_
+     - Dovetail
+     - Ubuntu 14.04
+     - PDF
+     - IDF
+   * - `huawei-pod8 <https://build.opnfv.org/ci/computer/huawei-pod8>`_
+     - Compass4NFV
+     - Ubuntu 16.04 (aarch64)
+     - PDF
+     - IDF
+   * - `huawei-pod12 <https://build.opnfv.org/ci/computer/huawei-pod12>`_
+     - JOID
+     - Ubuntu 16.04
+     - PDF
+     - IDF
+   * - `intel-pod10 <https://build.opnfv.org/ci/computer/intel-pod10>`_
+     - KVMforNFV
+     - CentOS 7
+     - PDF
+     - IDF
+   * - `intel-pod11 <https://build.opnfv.org/ci/computer/intel-pod11>`_
+     - Apex
+     -
+     - PDF
+     - IDF
+   * - `intel-pod12 <https://build.opnfv.org/ci/computer/intel-pod12>`_
+     - VSPerf
+     - CentOS 7
+     - PDF
+     - IDF
+   * - `intel-pod17 <https://build.opnfv.org/ci/computer/intel-pod17>`_
+     - Compass4NFV
+     -
+     - PDF
+     - IDF
+   * - `intel-pod18 <https://build.opnfv.org/ci/computer/intel-pod18>`_
+     - JOID
+     - Ubuntu 16.04
+     - `PDF <https://git.opnfv.org/pharos/plain/labs/intel/pod18.yaml>`__
+     - `IDF <https://git.opnfv.org/pharos/plain/labs/intel/idf-pod18.yaml>`__
+   * - `lf-pod1 <https://build.opnfv.org/ci/computer/lf-pod1>`_
+     - Apex
+     - CentOS 7
+     - `PDF <https://git.opnfv.org/pharos/plain/labs/lf/pod1.yaml>`__
+     - IDF
+   * - `lf-pod2 <https://build.opnfv.org/ci/computer/lf-pod2>`_
+     - Fuel
+     - CentOS 7
+     - `PDF <https://git.opnfv.org/pharos/plain/labs/lf/pod2.yaml>`__
+     - `IDF <https://git.opnfv.org/pharos/plain/labs/lf/idf-pod2.yaml>`__
+   * - `unh-pod1 <https://build.opnfv.org/ci/computer/unh-pod1>`_
+     - Auto
+     - Ubuntu 16.04 (aarch64)
+     - PDF
+     - IDF
+   * - `zte-pod1 <https://build.opnfv.org/ci/computer/zte-pod1>`_
+     -
+     -
+     - `PDF <https://git.opnfv.org/pharos/plain/labs/zte/pod1.yaml>`__
+     - `IDF <https://git.opnfv.org/pharos/plain/labs/zte/idf-pod1.yaml>`__
+   * - `zte-pod2 <https://build.opnfv.org/ci/computer/zte-pod2>`_
+     -
+     -
+     - `PDF <https://git.opnfv.org/pharos/plain/labs/zte/pod2.yaml>`__
+     - `IDF <https://git.opnfv.org/pharos/plain/labs/zte/idf-pod2.yaml>`__
+   * - `zte-pod3 <https://build.opnfv.org/ci/computer/zte-pod3>`_
+     -
+     -
+     - `PDF <https://git.opnfv.org/pharos/plain/labs/zte/pod3.yaml>`__
+     - `IDF <https://git.opnfv.org/pharos/plain/labs/zte/idf-pod3.yaml>`__
+   * - `zte-pod4 <https://build.opnfv.org/ci/computer/zte-pod4>`_
+     -
+     -
+     - PDF
+     - IDF
+   * - `zte-pod9 <https://build.opnfv.org/ci/computer/zte-pod9>`_
+     -
+     -
+     - `PDF <https://git.opnfv.org/pharos/plain/labs/zte/pod9.yaml>`__
+     - `IDF <https://git.opnfv.org/pharos/plain/labs/zte/idf-pod9.yaml>`__
diff --git a/docs/ci/tables/ci-build-servers.rst b/docs/ci/tables/ci-build-servers.rst
new file mode 100644 (file)
index 0000000..b58b0fc
--- /dev/null
@@ -0,0 +1,48 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International License.
+.. SPDX-License-Identifier: CC-BY-4.0
+.. (c) Open Platform for NFV Project, Inc. and its contributors
+
+.. list-table:: CI Build Servers
+   :header-rows: 1
+   :stub-columns: 1
+
+   * - Node
+     - Architecture
+     - OS
+     - Contact
+   * - `arm-build3 <https://build.opnfv.org/ci/computer/arm-build3>`_
+     - aarch64
+     - CentOS 7.4
+     - `Armband ENEA Team`_
+   * - `arm-build4 <https://build.opnfv.org/ci/computer/arm-build4>`_
+     - aarch64
+     - CentOS 7.4
+     - `Armband ENEA Team`_
+   * - `arm-build5 <https://build.opnfv.org/ci/computer/arm-build5>`_
+     - aarch64
+     - CentOS 7.4
+     - `Armband ENEA Team`_
+   * - `arm-build6 <https://build.opnfv.org/ci/computer/arm-build6>`_
+     - aarch64
+     - CentOS 7.4
+     - `Armband ENEA Team`_
+   * - `ericsson-build3 <https://build.opnfv.org/ci/computer/ericsson-build3>`_
+     - x86_64
+     - Ubuntu 16.04
+     - `Dianfeng Du`_
+   * - `ericsson-build4 <https://build.opnfv.org/ci/computer/ericsson-build4>`_
+     - x86_64
+     - Ubuntu 16.04
+     - `Dianfeng Du`_
+   * - `lf-build1 <https://build.opnfv.org/ci/computer/lf-build1>`_
+     - x86_64
+     - CentOS 7.4
+     - `Linux Foundation`_
+   * - `lf-build2 <https://build.opnfv.org/ci/computer/lf-build2>`_
+     - x86_64
+     - Ubuntu 16.10
+     - `Linux Foundation`_
+
+.. _Linux Foundation: helpdesk@opnfv.org
+.. _Dianfeng Du: dianfeng.du@ericsson.com
+.. _Armband ENEA Team: armband@enea.com
diff --git a/docs/ci/tables/ci-labels.rst b/docs/ci/tables/ci-labels.rst
new file mode 100644 (file)
index 0000000..2865cc2
--- /dev/null
@@ -0,0 +1,13 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International License.
+.. SPDX-License-Identifier: CC-BY-4.0
+.. (c) Open Platform for NFV Project, Inc. and its contributors
+
+:ci-resource: Resource devoted to CI
+:ci-pod: POD devoted to CI
+:opnfv-build: Node is for builds - independent of OS
+:opnfv-build-centos: Node is for builds needing CentOS
+:opnfv-build-centos-arm: Node is for ARM builds on CentOS
+:opnfv-build-ubuntu: Node is for builds needing Ubuntu
+:opnfv-build-ubuntu-arm: Node is for ARM builds on Ubuntu
+:{installer}-baremetal: POD is devoted to {installer} for baremetal deployments
+:{installer}-virtual: Server is devoted to {installer} for virtual deployments
diff --git a/docs/ci/tables/ci-virtual-servers.rst b/docs/ci/tables/ci-virtual-servers.rst
new file mode 100644 (file)
index 0000000..e87c463
--- /dev/null
@@ -0,0 +1,164 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International License.
+.. SPDX-License-Identifier: CC-BY-4.0
+.. (c) Open Platform for NFV Project, Inc. and its contributors
+
+.. list-table:: CI Servers for Virtual Deployment
+   :header-rows: 1
+   :stub-columns: 1
+
+   * - Node
+     - Architecture
+     - OS
+     - Contact
+   * - `arm-virtual2 <https://build.opnfv.org/ci/computer/arm-virtual2>`_
+     - aarch64
+     - Ubuntu 16.04
+     - `Armband ENEA Team`_
+   * - `arm-virtual3 <https://build.opnfv.org/ci/computer/arm-virtual3>`_
+     - aarch64
+     - Ubuntu 16.04
+     - `Xuan Jia`_
+   * - `arm-virtual4 <https://build.opnfv.org/ci/computer/arm-virtual4>`_
+     - aarch64
+     - Ubuntu 16.04
+     - `Xuan Jia`_
+   * - `ericsson-virtual-pod1bl01 <https://build.opnfv.org/ci/computer/ericsson-virtual-pod1bl01>`_
+     - x86_64
+     - CentOS 7
+     -
+   * - `ericsson-virtual1 <https://build.opnfv.org/ci/computer/ericsson-virtual1>`_
+     - x86_64
+     - Ubuntu 16.04
+     -
+   * - `ericsson-virtual2 <https://build.opnfv.org/ci/computer/ericsson-virtual2>`_
+     - x86_64
+     - Ubuntu 16.04
+     -
+   * - `ericsson-virtual3 <https://build.opnfv.org/ci/computer/ericsson-virtual3>`_
+     - x86_64
+     - Ubuntu 16.04
+     -
+   * - `ericsson-virtual4 <https://build.opnfv.org/ci/computer/ericsson-virtual4>`_
+     - x86_64
+     - Ubuntu 16.04
+     -
+   * - `ericsson-virtual5 <https://build.opnfv.org/ci/computer/ericsson-virtual5>`_
+     - x86_64
+     - Ubuntu 16.04
+     -
+   * - `huawei-virtual1 <https://build.opnfv.org/ci/computer/huawei-virtual1>`_
+     - x86_64
+     - Ubuntu 14.04
+     -
+   * - `huawei-virtual2 <https://build.opnfv.org/ci/computer/huawei-virtual2>`_
+     - x86_64
+     - Ubuntu 14.04
+     -
+   * - `huawei-virtual3 <https://build.opnfv.org/ci/computer/huawei-virtual3>`_
+     - x86_64
+     - Ubuntu 14.04
+     -
+   * - `huawei-virtual4 <https://build.opnfv.org/ci/computer/huawei-virtual4>`_
+     - x86_64
+     - Ubuntu 14.04
+     -
+   * - `huawei-virtual5 <https://build.opnfv.org/ci/computer/huawei-virtual5>`_
+     - x86_64
+     -
+     -
+   * - `huawei-virtual6 <https://build.opnfv.org/ci/computer/huawei-virtual6>`_
+     - x86_64
+     - Ubuntu 16.04
+     -
+   * - `huawei-virtual7 <https://build.opnfv.org/ci/computer/huawei-virtual7>`_
+     - x86_64
+     - Ubuntu 14.04
+     -
+   * - `huawei-virtual8 <https://build.opnfv.org/ci/computer/huawei-virtual8>`_
+     - x86_64
+     - Ubuntu 14.04
+     -
+   * - `huawei-virtual9 <https://build.opnfv.org/ci/computer/huawei-virtual9>`_
+     - x86_64
+     - Ubuntu 14.04
+     -
+   * - `intel-virtual3 <https://build.opnfv.org/ci/computer/intel-virtual3>`_
+     - x86_64
+     -
+     -
+   * - `intel-virtual11 <https://build.opnfv.org/ci/computer/intel-virtual11>`_
+     - x86_64
+     -
+     -
+   * - `intel-virtual12 <https://build.opnfv.org/ci/computer/intel-virtual12>`_
+     - x86_64
+     -
+     -
+   * - `intel-virtual13 <https://build.opnfv.org/ci/computer/intel-virtual13>`_
+     - x86_64
+     -
+     -
+   * - `intel-virtual14 <https://build.opnfv.org/ci/computer/intel-virtual14>`_
+     - x86_64
+     -
+     -
+   * - `intel-virtual15 <https://build.opnfv.org/ci/computer/intel-virtual15>`_
+     - x86_64
+     -
+     -
+   * - `intel-virtual16 <https://build.opnfv.org/ci/computer/intel-virtual16>`_
+     - x86_64
+     -
+     -
+   * - `lf-virtual1 <https://build.opnfv.org/ci/computer/lf-virtual1>`_
+     - x86_64
+     - Ubuntu 14.04
+     - `Linux Foundation`_
+   * - `lf-virtual2 <https://build.opnfv.org/ci/computer/lf-virtual2>`_
+     - x86_64
+     - CentOS 7
+     - `Linux Foundation`_
+   * - `lf-virtual3 <https://build.opnfv.org/ci/computer/lf-virtual3>`_
+     - x86_64
+     - CentOS 7
+     - `Linux Foundation`_
+   * - `ool-virtual1 <https://build.opnfv.org/ci/computer/ool-virtual1>`_
+     - x86_64
+     -
+     -
+   * - `ool-virtual2 <https://build.opnfv.org/ci/computer/ool-virtual2>`_
+     - x86_64
+     -
+     -
+   * - `ool-virtual3 <https://build.opnfv.org/ci/computer/ool-virtual3>`_
+     - x86_64
+     -
+     -
+   * - `zte-virtual1 <https://build.opnfv.org/ci/computer/zte-virtual1>`_
+     - x86_64
+     -
+     -
+   * - `zte-virtual2 <https://build.opnfv.org/ci/computer/zte-virtual2>`_
+     - x86_64
+     -
+     -
+   * - `zte-virtual3 <https://build.opnfv.org/ci/computer/zte-virtual3>`_
+     - x86_64
+     -
+     -
+   * - `zte-virtual4 <https://build.opnfv.org/ci/computer/zte-virtual4>`_
+     - x86_64
+     -
+     -
+   * - `zte-virtual5 <https://build.opnfv.org/ci/computer/zte-virtual5>`_
+     - x86_64
+     -
+     -
+   * - `zte-virtual6 <https://build.opnfv.org/ci/computer/zte-virtual6>`_
+     - x86_64
+     -
+     -
+
+.. _Armband ENEA Team: armband@enea.com
+.. _Linux Foundation: helpdesk@opnfv.org
+.. _Xuan Jia: jason.jiaxuan@gmail.com
diff --git a/docs/ci/tables/none-ci-servers.rst b/docs/ci/tables/none-ci-servers.rst
new file mode 100644 (file)
index 0000000..c4f403a
--- /dev/null
@@ -0,0 +1,58 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International License.
+.. SPDX-License-Identifier: CC-BY-4.0
+.. (c) Open Platform for NFV Project, Inc. and its contributors
+
+.. list-table:: Baremetal Development Servers
+   :header-rows: 1
+   :stub-columns: 1
+
+   * - Node
+     - Usage
+     - Jumphost OS / Version
+     - PDF
+     - IDF
+   * - `arm-pod5 <https://build.opnfv.org/ci/computer/arm-pod5>`_
+     - Armband
+     - Ubuntu 16.04
+     - `PDF <https://git.opnfv.org/pharos/plain/labs/arm/pod5.yaml>`__
+     - `IDF <https://git.opnfv.org/pharos/plain/labs/arm/idf-pod5.yaml>`__
+   * - cacti-pod1
+     -
+     -
+     -
+     -
+   * - cengn-pod1
+     -
+     -
+     -
+     -
+   * - itri-pod1
+     -
+     -
+     -
+     -
+   * - lf-pod4
+     -
+     -
+     -
+     -
+   * - lf-pod5
+     -
+     -
+     -
+     -
+   * - nokia-pod1
+     -
+     -
+     -
+     -
+   * - ool-pod1
+     -
+     -
+     -
+     -
+   * - bii-pod1
+     -
+     -
+     -
+     -
diff --git a/docs/infra/index.rst b/docs/infra/index.rst
index 50c971e..248e823 100644 (file)
@@ -70,7 +70,7 @@ Issue and Bug Tracking
 JIRA
 
 .. toctree::
-   :maxdepth:
+   :maxdepth: 1
 
    jira/user-guide
 
diff --git a/docs/infra/jenkins/connect-to-jenkins.rst b/docs/infra/jenkins/connect-to-jenkins.rst
index e83cada..ddf345f 100644 (file)
@@ -113,54 +113,80 @@ Please follow below steps to connect a slave to OPNFV Jenkins.
 
   1. Create a user named **jenkins** on the machine you want to connect to OPNFV Jenkins and give the user sudo rights.
   2. Install needed software on the machine you want to connect to OPNFV Jenkins as slave.
+
     - openjdk 8
     - monit
+
   3. If the slave will be used for running virtual deployments, Functest, and Yardstick, install below software and make jenkins user the member of the groups.
+
     - docker
     - libvirt
+
   4. Create slave root in Jenkins user home directory.
+
     ``mkdir -p /home/jenkins/opnfv/slave_root``
+
   5. Clone OPNFV Releng Git repository.
+
     ``mkdir -p /home/jenkins/opnfv/repos``
 
     ``cd /home/jenkins/opnfv/repos``
 
     ``git clone https://gerrit.opnfv.org/gerrit/p/releng.git``
+
   6. Contact LF by sending mail to `OPNFV LF Helpdesk <opnfv-helpdesk@rt.linuxfoundation.org>`_ and request creation of a slave on OPNFV Jenkins. Include below information in your mail.
+
     - Slave root (/home/jenkins/opnfv/slave_root)
     - Public IP of the slave (You can get the IP by executing ``curl http://icanhazip.com/``)
     - PGP Key (attached to the mail or exported to a key server)
+
   7. Once you get confirmation from LF stating that your slave is created on OPNFV Jenkins, check if the firewall on LF is open for the server you are trying to connect to Jenkins.
+
     ``cp /home/jenkins/opnfv/repos/releng/utils/jenkins-jnlp-connect.sh /home/jenkins/``
     ``cd /home/jenkins/``
     ``sudo ./jenkins-jnlp-connect.sh -j /home/jenkins -u jenkins -n  <slave name on OPNFV Jenkins> -s <the token you received from LF> -f``
 
      - If you receive an error, follow the steps listed on the command output.
+
   8. Run the same script with test(-t) on foreground in order to make sure no problem on connection. You should see **INFO: Connected** in the console log.
+
     ``sudo ./jenkins-jnlp-connect.sh -j /home/jenkins -u jenkins -n <slave name on OPNFV Jenkins> -s <the token you received from LF> -t``
 
      - If you receive an error similar to the one shown `on this link <http://hastebin.com/ozadagirax.avrasm>`_, you need to check your firewall and allow outgoing connections for the port.
+
   9. Kill the Java slave.jar process.
   10. Run the same script normally without test(-t) in order to get monit script created.
+
     ``sudo ./jenkins-jnlp-connect.sh -j /home/jenkins -u jenkins -n <slave name on OPNFV Jenkins> -s <the token you received from LF>``
+
   11. Edit monit configuration and enable http interface. The file to edit is /etc/monit/monitrc on Ubuntu systems. Uncomment below lines.
+
     set httpd port 2812 and
         use address localhost  # only accept connection from localhost
         allow localhost        # allow localhost to connect to the server and
+
   12. Restart monit service.
+
     - Without systemd:
 
       ``sudo service monit restart``
+
     - With systemd: you have to enable monit service first and then restart it.
 
       ``sudo systemctl enable monit``
 
       ``sudo systemctl restart monit``
+
   13. Check to see if jenkins comes up as managed service in monit.
+
     ``sudo monit status``
+
   14. Connect slave to OPNFV Jenkins using monit.
+
     ``sudo monit start jenkins``
+
   15. Check slave on OPNFV Jenkins to verify the slave is reported as connected.
+
     - The slave on OPNFV Jenkins should have some executors in “Idle” state if the connection is successful.
 
 Notes
diff --git a/docs/infra/jenkins/jjb-usage.rst b/docs/infra/jenkins/jjb-usage.rst
index 6712781..d467dbe 100644 (file)
@@ -10,7 +10,7 @@ Creating/Configuring/Verifying Jenkins Jobs
 
 Clone and setup the repo::
 
-    git clone ssh://YOU@gerrit.opnfv.org:29418/releng
+    git clone --recursive ssh://YOU@gerrit.opnfv.org:29418/releng
     cd releng
     git review -s
 
@@ -29,13 +29,13 @@ Make changes::
 
 Test with tox::
 
-    tox -v -ejjb
+    tox -e jjb
 
 .. note:: You can also test the jobs under a single jjb directory by
     specifying the directory. For example to test only the releng jobs, you
     could run:
 
-     tox -v -e jjb -- jjb/releng
+     tox -e jjb -- jjb/global:jjb/global-jjb:jjb/releng
 
 Submit the change to gerrit::
 
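For reference, the tox environment above essentially drives jenkins-job-builder; a roughly equivalent manual run (a sketch only, assuming jenkins-job-builder is installed and the global-jjb submodule has been fetched) is:

    # expand the job templates locally without touching the Jenkins server
    jenkins-jobs test -r jjb/global:jjb/global-jjb:jjb/releng -o job_output
    ls job_output    # one rendered XML file per generated job
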
diff --git a/docs/release/index.rst b/docs/release/index.rst
index 49cd00b..e814bec 100644 (file)
@@ -9,7 +9,6 @@ Releasing OPNFV
 ===============
 
 .. toctree::
-   :numbered:
    :maxdepth: 2
 
    release-process
diff --git a/global-jjb b/global-jjb
new file mode 160000 (submodule)
index 0000000..5d1ddb5
--- /dev/null
@@ -0,0 +1 @@
+Subproject commit 5d1ddb578a5253fc360a73be6ceea89d65af043e
diff --git a/jjb-sandbox/releng/releng-sandbox-jobs.yml b/jjb-sandbox/releng/releng-sandbox-jobs.yml
deleted file mode 100644 (file)
index df8b1d5..0000000
+++ /dev/null
@@ -1,61 +0,0 @@
----
-- project:
-    name: 'releng-sandbox-jobs'
-    jobs:
-      - 'releng-deploy-sandbox'
-
-    project: 'releng'
-    node: 'releng-sandbox'
-
-- job-template:
-    name: 'releng-deploy-sandbox'
-    node: '{node}'
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: 'master'
-    scm:
-      - git-scm-gerrit
-
-    triggers:
-      - gerrit:
-          trigger-on:
-            - patchset-created-event:
-                exclude-drafts: 'false'
-                exclude-trivial-rebase: 'false'
-                exclude-no-code-change: 'false'
-            - draft-published-event
-            - comment-added-contains-event:
-                comment-contains-value: 'redeploy'
-          custom-url: '$BUILD_URL deploying to $JENKINS_URL'
-          silent-start: true
-          skip-vote:
-            successful: true
-            failed: true
-            unstable: true
-            notbuilt: true
-          projects:
-            - project-compare-type: 'ANT'
-              project-pattern: 'releng'
-              branches:
-                - branch-compare-type: 'ANT'
-                  branch-pattern: '**/master'
-              file-paths:
-                - compare-type: ANT
-                  pattern: jjb-sandbox/**
-
-    wrappers: ''
-
-    builders:
-      - shell:
-          !include-raw-escape: verify-sandbox-jobs.sh
-      # yamllint disable rule:line-length
-      - shell: |
-          #!/bin/bash
-          jenkins-jobs update --delete-old -r jjb/releng-defaults.yaml:jjb/releng-macros.yaml:jjb/opnfv/installer-params.yml:jjb/opnfv/slave-params.yml:jjb-sandbox
-      # yamllint enable
-    publishers:
-      - archive-artifacts:
-          artifacts: 'job_output/*'
-      - email-jenkins-admins-on-failure
diff --git a/jjb-sandbox/releng/verify-sandbox-jobs.sh b/jjb-sandbox/releng/verify-sandbox-jobs.sh
deleted file mode 100755 (executable)
index 5990161..0000000
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2016 Linux Foundation and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-#test for non-ascii characters, these can pass the test and end up breaking things in production
-for x in $(find . -name *\.yml); do
-
-  if LC_ALL=C grep -q '[^[:print:][:space:]]' "$x"; then
-    echo "file "$x" contains non-ascii characters"
-    exit 1
-  fi
-
-done
-
-jenkins-jobs test -r jjb/releng-defaults.yaml:jjb/releng-macros.yaml:jjb/opnfv/installer-params.yml:jjb/opnfv/slave-params.yml:jjb-sandbox \
-    -o job_output
diff --git a/jjb/3rd_party_ci/detect-snapshot.sh b/jjb/3rd_party_ci/detect-snapshot.sh
new file mode 100755 (executable)
index 0000000..77788aa
--- /dev/null
@@ -0,0 +1,30 @@
+#!/bin/bash
+set -o errexit
+set -o nounset
+set -o pipefail
+
+echo "Detecting requested OpenStack branch and topology type in gerrit comment"
+parsed_comment=$(echo $GERRIT_EVENT_COMMENT_TEXT | sed -n 's/^.*check-opnfv\s*//p')
+parsed_comment=$(echo $parsed_comment | sed -n 's/\s*$//p')
+if [ ! -z "$parsed_comment" ]; then
+  if echo $parsed_comment | grep -E '^[a-z]+-(no)?ha'; then
+    os_version=${parsed_comment%%"-"*}
+    topo=${parsed_comment#*"-"}
+    echo "OS version detected in gerrit comment: ${os_version}"
+    echo "Topology type detected in gerrit comment: ${topo}"
+  else
+    echo "Invalid format given for scenario in gerrit comment: ${parsed_comment}...aborting"
+    exit 1
+  fi
+else
+  echo "No scenario given in gerrit comment, will use default (master OpenStack, noha)"
+  os_version='master'
+  topo='noha'
+fi
+
+echo "Writing variables to file"
+cat > detected_snapshot << EOI
+OS_VERSION=$os_version
+TOPOLOGY=$topo
+SNAP_CACHE=$HOME/snap_cache/$os_version/$topo
+EOI
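As an illustration of the comment parsing in detect-snapshot.sh, a hypothetical trigger comment of the form "check-opnfv <os>-<topology>" is split as follows (the comment text here is invented for the example):

    GERRIT_EVENT_COMMENT_TEXT='Patch looks good. check-opnfv queens-noha'
    parsed=$(echo $GERRIT_EVENT_COMMENT_TEXT | sed -n 's/^.*check-opnfv\s*//p')   # -> queens-noha
    os_version=${parsed%%"-"*}   # -> queens
    topo=${parsed#*"-"}          # -> noha
    # detected_snapshot would then set SNAP_CACHE=$HOME/snap_cache/queens/noha
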
diff --git a/jjb/3rd_party_ci/download-netvirt-artifact.sh b/jjb/3rd_party_ci/download-netvirt-artifact.sh
index 7ecf8d7..ac7f76c 100755 (executable)
@@ -3,35 +3,36 @@ set -o errexit
 set -o nounset
 set -o pipefail
 
-ODL_ZIP=distribution-karaf-0.6.0-SNAPSHOT.zip
-
 echo "Attempting to fetch the artifact location from ODL Jenkins"
 if [ "$ODL_BRANCH" != 'master' ]; then
   DIST=$(echo ${ODL_BRANCH} | sed -rn 's#([a-zA-Z]+)/([a-zA-Z]+)#\2#p')
   ODL_BRANCH=$(echo ${ODL_BRANCH} | sed -rn 's#([a-zA-Z]+)/([a-zA-Z]+)#\1%2F\2#p')
 else
-  DIST='nitrogen'
+  DIST='neon'
 fi
-CHANGE_DETAILS_URL="https://git.opendaylight.org/gerrit/changes/netvirt~${ODL_BRANCH}~${GERRIT_CHANGE_ID}/detail"
+
+echo "ODL Distribution is ${DIST}"
+ODL_ZIP="karaf-SNAPSHOT.zip"
+CHANGE_DETAILS_URL="https://git.opendaylight.org/gerrit/changes/${GERRIT_PROJECT}~${ODL_BRANCH}~${GERRIT_CHANGE_ID}/detail"
 # due to limitation with the Jenkins Gerrit Trigger, we need to use Gerrit REST API to get the change details
-ODL_BUILD_JOB_NUM=$(curl --fail -s ${CHANGE_DETAILS_URL} | grep -Eo "netvirt-distribution-check-${DIST}/[0-9]+" | tail -1 | grep -Eo [0-9]+)
-DISTRO_CHECK_CONSOLE_LOG="https://logs.opendaylight.org/releng/jenkins092/netvirt-distribution-check-${DIST}/${ODL_BUILD_JOB_NUM}/console.log.gz"
-NETVIRT_ARTIFACT_URL=$(curl --fail -s --compressed ${DISTRO_CHECK_CONSOLE_LOG} | grep 'BUNDLE_URL' | cut -d = -f 2)
+ODL_BUILD_JOB_NUM=$(curl --fail ${CHANGE_DETAILS_URL} | grep -Eo "${GERRIT_PROJECT}-distribution-check-${DIST}/[0-9]+" | tail -1 | grep -Eo [0-9]+)
+DISTRO_CHECK_CONSOLE_LOG="https://logs.opendaylight.org/releng/vex-yul-odl-jenkins-1/${GERRIT_PROJECT}-distribution-check-${DIST}/${ODL_BUILD_JOB_NUM}/console.log.gz"
+NETVIRT_ARTIFACT_URL=$(curl --fail --compressed ${DISTRO_CHECK_CONSOLE_LOG} | grep 'BUNDLE_URL' | cut -d = -f 2)
 
 echo -e "URL to artifact is\n\t$NETVIRT_ARTIFACT_URL"
 
 echo "Downloading the artifact. This could take time..."
-wget -q -O $ODL_ZIP $NETVIRT_ARTIFACT_URL
-if [[ $? -ne 0 ]]; then
+if ! wget -q -O $ODL_ZIP $NETVIRT_ARTIFACT_URL; then
     echo "The artifact does not exist! Probably removed due to ODL Jenkins artifact retention policy."
-    echo "Rerun netvirt-patch-test-current-carbon to get artifact rebuilt."
+    echo "Use 'recheck' on the gerrit to get artifact rebuilt."
     exit 1
 fi
 
 #TODO(trozet) remove this once odl-pipeline accepts zip files
 echo "Converting artifact zip to tar.gz"
-unzip $ODL_ZIP
-tar czf /tmp/${NETVIRT_ARTIFACT} $(echo $ODL_ZIP | sed -n 's/\.zip//p')
+UNZIPPED_DIR=`dirname $(unzip -qql ${ODL_ZIP} | head -n1 | tr -s ' ' | cut -d' ' -f5-)`
+unzip ${ODL_ZIP}
+tar czf /tmp/${NETVIRT_ARTIFACT} ${UNZIPPED_DIR}
 
 echo "Download complete"
 ls -al /tmp/${NETVIRT_ARTIFACT}
diff --git a/jjb/3rd_party_ci/install-netvirt.sh b/jjb/3rd_party_ci/install-netvirt.sh
index ed1a12b..232d60e 100755 (executable)
@@ -3,7 +3,7 @@ set -o errexit
 set -o nounset
 set -o pipefail
 
-SNAP_CACHE=$HOME/snap_cache
+SNAP_CACHE=$HOME/snap_cache/$OS_VERSION/$TOPOLOGY
 # clone opnfv sdnvpn repo
 git clone https://gerrit.opnfv.org/gerrit/p/sdnvpn.git $WORKSPACE/sdnvpn
 
@@ -26,8 +26,7 @@ fi
 # but we really should check the cache here, and not use a single cache folder
 # for when we support multiple jobs on a single slave
 pushd sdnvpn/odl-pipeline/lib > /dev/null
-# FIXME (trozet) remove this once permissions are fixed in sdnvpn repo
-chmod +x odl_reinstaller.sh
+git fetch https://gerrit.opnfv.org/gerrit/sdnvpn refs/changes/17/59017/5 && git checkout FETCH_HEAD
 ./odl_reinstaller.sh --pod-config ${SNAP_CACHE}/node.yaml \
   --odl-artifact /tmp/${NETVIRT_ARTIFACT} --ssh-key-file ${SNAP_CACHE}/id_rsa
 popd > /dev/null
diff --git a/jjb/3rd_party_ci/odl-netvirt.yaml b/jjb/3rd_party_ci/odl-netvirt.yaml
index 863eb94..15d2848 100644 (file)
@@ -13,8 +13,8 @@
           branch: '{stream}'
           gs-pathname: ''
           disabled: false
-      - carbon:
-          branch: 'stable/carbon'
+      - oxygen:
+          branch: 'stable/oxygen'
           gs-pathname: ''
           disabled: false
     #####################################
     #####################################
     phase:
       - 'create-apex-vms':
-          slave-label: 'odl-netvirt-virtual-intel'
+          slave-label: 'apex-virtual-master'
       - 'install-netvirt':
-          slave-label: 'odl-netvirt-virtual-intel'
+          slave-label: 'apex-virtual-master'
       - 'postprocess':
-          slave-label: 'odl-netvirt-virtual-intel'
+          slave-label: 'apex-virtual-master'
     #####################################
     # jobs
     #####################################
           max-total: 5
           max-per-node: 1
           option: 'project'
-
+      - build-blocker:
+          use-build-blocker: true
+          blocking-jobs:
+            - 'apex-verify.*'
+            - 'apex-.*-promote.*'
+            - 'apex-virtual.*'
+            - 'odl-netvirt-verify-virtual-create-apex-vms-.*'
+            - 'odl-netvirt-verify-virtual-install-netvirt-.*'
+            - 'functest-netvirt-virtual-suite-.*'
+            - 'odl-netvirt-verify-virtual-postprocess-.*'
+          blocking-level: 'NODE'
     scm:
       - git:
           url: https://gerrit.opnfv.org/gerrit/apex
@@ -69,7 +79,7 @@
       - string:
           name: NETVIRT_ARTIFACT
           default: distribution-karaf.tar.gz
-      - 'odl-netvirt-virtual-intel-defaults'
+      - 'apex-virtual-master-defaults'
 
     triggers:
       - gerrit:
             #     comment-contains-value: 'https://jenkins.opendaylight.org/releng/job/netvirt-patch-test-current-carbon/.*?/ : UNSTABLE'
             # yamllint enable rule:line-length
             - comment-added-contains-event:
-                comment-contains-value: 'opnfv-test'
+                comment-contains-value: 'check-opnfv'
           projects:
             - project-compare-type: 'ANT'
-              project-pattern: '{project}'
+              project-pattern: '*'
               branches:
                 - branch-compare-type: 'ANT'
                   branch-pattern: '**/{branch}'
     builders:
       - description-setter:
           description: "Built on $NODE_NAME"
+      - detect-opnfv-snapshot
+      - inject:
+          properties-file: detected_snapshot
       - multijob:
           name: create-apex-vms
           condition: SUCCESSFUL
                 GERRIT_PATCHSET_REVISION=$GERRIT_PATCHSET_REVISION
                 NETVIRT_ARTIFACT=$NETVIRT_ARTIFACT
                 APEX_ENV_NUMBER=$APEX_ENV_NUMBER
+                GERRIT_EVENT_COMMENT_TEXT=$GERRIT_EVENT_COMMENT_TEXT
+                TOPOLOGY=$TOPOLOGY
+                OS_VERSION=$OS_VERSION
               node-parameters: true
               kill-phase-on: FAILURE
               abort-all-job: true
             - name: 'odl-netvirt-verify-virtual-install-netvirt-{stream}'
               current-parameters: false
               predefined-parameters: |
-                ODL_BRANCH={branch}
+                ODL_BRANCH=$BRANCH
                 BRANCH=$BRANCH
                 GERRIT_REFSPEC=$GERRIT_REFSPEC
                 GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
                 GERRIT_CHANGE_ID=$GERRIT_CHANGE_ID
                 GERRIT_PATCHSET_NUMBER=$GERRIT_PATCHSET_NUMBER
                 GERRIT_PATCHSET_REVISION=$GERRIT_PATCHSET_REVISION
+                GERRIT_PROJECT=$GERRIT_PROJECT
                 NETVIRT_ARTIFACT=$NETVIRT_ARTIFACT
+                TOPOLOGY=$TOPOLOGY
+                OS_VERSION=$OS_VERSION
               node-parameters: true
               kill-phase-on: FAILURE
               abort-all-job: true
       - multijob:
-          name: functest
-          condition: SUCCESSFUL
+          name: csit
+          condition: ALWAYS
           projects:
-            - name: 'functest-netvirt-virtual-suite-master'
+            - name: cperf-apex-csit-master
               predefined-parameters: |
-                DEPLOY_SCENARIO=os-odl_l3-nofeature-ha
-                FUNCTEST_MODE=testcase
-                FUNCTEST_SUITE_NAME=odl_netvirt
-                RC_FILE_PATH=$HOME/cloner-info/overcloudrc
+                ODL_BRANCH=$BRANCH
+                RC_FILE_PATH=$SNAP_CACHE/overcloudrc
+                NODE_FILE_PATH=$SNAP_CACHE/node.yaml
+                SSH_KEY_PATH=$SNAP_CACHE/id_rsa
+                ODL_CONTAINERIZED=false
+                OS_VERSION=$OS_VERSION
               node-parameters: true
               kill-phase-on: FAILURE
               abort-all-job: false
+      - multijob:
+          name: csit-collect-logs
+          condition: ALWAYS
+          projects:
+            - name: cperf-upload-logs-csit
+              predefined-parameters: |
+                ODL_BRANCH=$BRANCH
+                OS_VERSION=$OS_VERSION
+              node-parameters: true
+              kill-phase-on: FAILURE
+              abort-all-job: false
+      - multijob:
+          name: apex-fetch-logs
+          condition: ALWAYS
+          projects:
+            - name: 'apex-fetch-logs-{stream}'
+              current-parameters: false
+              node-parameters: true
+              kill-phase-on: NEVER
+              abort-all-job: true
+              git-revision: false
       - multijob:
           name: postprocess
           condition: ALWAYS
             - 'odl-netvirt-verify-virtual-install-netvirt-.*'
             - 'functest-netvirt-virtual-suite-.*'
             - 'odl-netvirt-verify-virtual-postprocess-.*'
-          block-level: 'NODE'
+          blocking-level: 'NODE'
 
     wrappers:
       - ssh-agent-wrapper
       - '{installer}-defaults'
       - string:
           name: DEPLOY_SCENARIO
-          default: 'os-odl_l2-bgpvpn-noha'
+          default: 'os-odl-nofeature-noha'
           description: 'Scenario to deploy and test'
       - string:
           name: GS_URL
     builders:
       - shell:
           !include-raw: ./postprocess-netvirt.sh
+
+- builder:
+    name: 'detect-opnfv-snapshot'
+    builders:
+      - shell:
+          !include-raw: ./detect-snapshot.sh
diff --git a/jjb/apex/apex-build.sh b/jjb/apex/apex-build.sh
index 0c58a3c..618d181 100755 (executable)
@@ -12,7 +12,7 @@ echo
 if echo $ARTIFACT_VERSION | grep "dev" 1> /dev/null; then
   GERRIT_PATCHSET_NUMBER=$(echo $GERRIT_REFSPEC | grep -Eo '[0-9]+$')
   export OPNFV_ARTIFACT_VERSION="dev${GERRIT_CHANGE_NUMBER}_${GERRIT_PATCHSET_NUMBER}"
-  if [ "$BRANCH" == 'master' ]; then
+  if [[ "$BRANCH" != 'stable/fraser' ]]; then
     # build rpm
     export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c $CACHE_DIRECTORY --rpms"
   else
@@ -23,15 +23,15 @@ elif echo $BUILD_TAG | grep "csit" 1> /dev/null; then
   export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c $CACHE_DIRECTORY"
 elif [ "$ARTIFACT_VERSION" == "daily" ]; then
   export OPNFV_ARTIFACT_VERSION=$(date -u +"%Y-%m-%d")
-  if [ "$BRANCH" == 'master' ]; then
-    export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c $CACHE_DIRECTORY"
+  if [[ "$BRANCH" != 'stable/fraser' ]]; then
+    export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c $CACHE_DIRECTORY --rpms"
   else
     export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c $CACHE_DIRECTORY --iso"
   fi
 else
   export OPNFV_ARTIFACT_VERSION=${ARTIFACT_VERSION}
-  if [ "$BRANCH" == 'master' ]; then
-    export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c $CACHE_DIRECTORY"
+  if [[ "$BRANCH" != 'stable/fraser' ]]; then
+    export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c $CACHE_DIRECTORY --rpms"
   else
     export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c $CACHE_DIRECTORY --iso"
   fi
@@ -59,7 +59,7 @@ echo "Cache Directory Contents:"
 echo "-------------------------"
 ls -al $CACHE_DIRECTORY
 
-if [[ "$BUILD_ARGS" =~ '--iso' && "$BRANCH" != 'master' ]]; then
+if [[ "$BUILD_ARGS" =~ '--iso' && "$BRANCH" == 'stable/fraser' ]]; then
   mkdir -p /tmp/apex-iso/
   rm -f /tmp/apex-iso/*.iso
   cp -f $BUILD_DIRECTORY/../.build/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso /tmp/apex-iso/
@@ -67,7 +67,7 @@ fi
 
 if ! echo $ARTIFACT_VERSION | grep "dev" 1> /dev/null; then
   echo "Writing opnfv.properties file"
-  if [ "$BRANCH" != master ]; then
+  if [ "$BRANCH" == 'stable/fraser' ]; then
     # save information regarding artifact into file
     (
       echo "OPNFV_ARTIFACT_VERSION=$OPNFV_ARTIFACT_VERSION"
diff --git a/jjb/apex/apex-deploy.sh b/jjb/apex/apex-deploy.sh
index 35c2b85..09d6ca6 100755 (executable)
@@ -4,13 +4,16 @@ set -o nounset
 set -o pipefail
 
 IPV6_FLAG=False
+ALLINONE_FLAG=False
+CSIT_ENV_FLAG=False
+FUNCTEST_ENV_FLAG=False
 
 # log info to console
 echo "Starting the Apex deployment."
 echo "--------------------------------------------------------"
 echo
 
-if [ -z "$DEPLOY_SCENARIO" ]; then
+if [ -z ${DEPLOY_SCENARIO+x} ]; then
   echo "Deploy scenario not set!"
   exit 1
 else
@@ -27,7 +30,7 @@ if [[ "$ARTIFACT_VERSION" =~ dev ]]; then
   # we want to use that built in mechanism to avoid re-downloading every job
   # so we use a dedicated folder to hold the upstream cache
   UPSTREAM_CACHE=$HOME/upstream_cache
-  if [ "$BRANCH" == 'master' ]; then
+  if [[ "$BRANCH" != 'stable/fraser' ]]; then
     mkdir -p ${UPSTREAM_CACHE}
     RESOURCES=$UPSTREAM_CACHE
   else
@@ -53,7 +56,7 @@ else
   # set to use different directory here because upon RPM removal this
   # directory will be wiped in daily
   UPSTREAM_CACHE=$HOME/upstream_cache
-  if [ "$BRANCH" == 'master' ]; then
+  if [[ "$BRANCH" != 'stable/fraser' ]]; then
     mkdir -p ${UPSTREAM_CACHE}
     RESOURCES=$UPSTREAM_CACHE
   else
@@ -99,12 +102,30 @@ if [ "$OPNFV_CLEAN" == 'yes' ]; then
   sudo ${CLEAN_CMD} ${clean_opts}
 fi
 
+# These are add-ons to regular scenarios where a suffix is appended, such as
+# os-nosdn-nofeature-noha-ipv6, or os-nosdn-nofeature-noha-allinone
 if echo ${DEPLOY_SCENARIO} | grep ipv6; then
   IPV6_FLAG=True
   DEPLOY_SCENARIO=$(echo ${DEPLOY_SCENARIO} |  sed 's/-ipv6//')
   echo "INFO: IPV6 Enabled"
 fi
 
+if echo ${DEPLOY_SCENARIO} | grep allinone; then
+  ALLINONE_FLAG=True
+  DEPLOY_SCENARIO=$(echo ${DEPLOY_SCENARIO} |  sed 's/-allinone//')
+  echo "INFO: All in one deployment detected"
+fi
+
+if echo ${DEPLOY_SCENARIO} | grep csit; then
+  CSIT_ENV_FLAG=True
+  DEPLOY_SCENARIO=$(echo ${DEPLOY_SCENARIO} |  sed 's/-csit//')
+  echo "INFO: CSIT env requested in deploy scenario"
+elif echo ${DEPLOY_SCENARIO} | grep functest; then
+  FUNCTEST_ENV_FLAG=True
+  DEPLOY_SCENARIO=$(echo ${DEPLOY_SCENARIO} |  sed 's/-functest//')
+  echo "INFO: Functest env requested in deploy scenario"
+fi
+
 echo "Deploy Scenario set to ${DEPLOY_SCENARIO}"
 DEPLOY_FILE="${DEPLOY_SETTINGS_DIR}/${DEPLOY_SCENARIO}.yaml"
 
@@ -118,11 +139,43 @@ if [[ "$JOB_NAME" =~ "virtual" ]]; then
   if [[ "${DEPLOY_SCENARIO}" =~ fdio|ovs ]]; then
     DEPLOY_CMD="${DEPLOY_CMD} --virtual-default-ram 12 --virtual-compute-ram 7"
   fi
-  if [[ "$JOB_NAME" == *csit* ]]; then
-    DEPLOY_CMD="${DEPLOY_CMD} -e csit-environment.yaml"
+  if [[ "$ALLINONE_FLAG" == "True" ]]; then
+    DEPLOY_CMD="${DEPLOY_CMD} --virtual-computes 0"
+  elif [[ "$PROMOTE" == "True" ]]; then
+    DEPLOY_CMD="${DEPLOY_CMD} --virtual-computes 2"
   fi
-  if [[ "$PROMOTE" == "True" ]]; then
-    DEPLOY_CMD="${DEPLOY_CMD} --virtual-computes 1"
+
+  if [[ "$FUNCTEST_ENV_FLAG" == "True"  || "$CSIT_ENV_FLAG" == "True" ]]; then
+    if [[ "$CSIT_ENV_FLAG" == "True" ]]; then
+      ENV_TYPE="csit"
+    else
+      ENV_TYPE="functest"
+    fi
+    if [ -z ${OS_VERSION+x} ]; then
+      echo "INFO: OS_VERSION not passed to deploy, detecting based on branch and scenario"
+      case $BRANCH in
+        master)
+          if [[ "$DEPLOY_SCENARIO" =~ "rocky" ]]; then
+            OS_VERSION=rocky
+          else
+            OS_VERSION=master
+          fi
+          ;;
+        *gambia)
+          OS_VERSION=queens
+          ;;
+        *)
+          echo "Unable to detection OS_VERSION, aborting"
+          exit 1
+          ;;
+      esac
+    fi
+    if [[ "$OS_VERSION" != "master" ]]; then
+      SNAP_ENV="${ENV_TYPE}-${OS_VERSION}-environment.yaml"
+    else
+      SNAP_ENV="${ENV_TYPE}-environment.yaml"
+    fi
+    DEPLOY_CMD="${DEPLOY_CMD} -e ${SNAP_ENV}"
   fi
 else
   # settings for bare metal deployment
@@ -137,13 +190,11 @@ else
   DEPLOY_CMD="${DEPLOY_CMD} -i ${INVENTORY_FILE}"
 fi
 
-if [[ "$BRANCH" == "master" ]]; then
-  echo "Upstream deployment detected"
-  DEPLOY_CMD="${DEPLOY_CMD} --upstream"
-fi
-
 if [ "$IPV6_FLAG" == "True" ]; then
   NETWORK_FILE="${NETWORK_SETTINGS_DIR}/network_settings_v6.yaml"
+elif [[ "$CSIT_ENV_FLAG" == "True"  || "$FUNCTEST_ENV_FLAG" == "True" ]]; then
+  # We use csit network settings which is single network for snapshots
+  NETWORK_FILE="${NETWORK_SETTINGS_DIR}/network_settings_csit.yaml"
 else
   NETWORK_FILE="${NETWORK_SETTINGS_DIR}/network_settings.yaml"
 fi
@@ -157,16 +208,6 @@ fi
 # start deployment
 sudo ${DEPLOY_CMD} -d ${DEPLOY_FILE} -n ${NETWORK_FILE} --debug
 
-if [[ "$JOB_NAME" == *csit* ]]; then
-  echo "CSIT job: setting host route for floating ip routing"
-  # csit route to allow docker container to reach floating ips
-  UNDERCLOUD=$(sudo virsh domifaddr undercloud | grep -Eo "[0-9\.]+{3}[0-9]+")
-  if sudo route | grep 192.168.37.128 > /dev/null; then
-    sudo route del -net 192.168.37.128 netmask 255.255.255.128
-  fi
-  sudo route add -net 192.168.37.128 netmask 255.255.255.128 gw ${UNDERCLOUD}
-fi
-
 echo
 echo "--------------------------------------------------------"
 echo "Done!"
index 3efe1cb..bc3311d 100755 (executable)
@@ -18,8 +18,8 @@ else
 fi
 
 if [[ "$ARTIFACT_VERSION" =~ dev ]]; then
-  if [ "$BRANCH" == 'master' ]; then
-    echo "Skipping download of artifacts for master branch"
+  if [[ "$BRANCH" != 'stable/fraser' ]]; then
+    echo "Skipping download of artifacts for master/gambia branch"
   else
     # dev build
     GERRIT_PATCHSET_NUMBER=$(echo $GERRIT_REFSPEC | grep -Eo '[0-9]+$')
@@ -45,10 +45,9 @@ else
 
   RPM_INSTALL_PATH=$(echo "http://"$OPNFV_RPM_URL | sed 's/\/'"$(basename $OPNFV_RPM_URL)"'//')
   RPM_LIST=$(basename $OPNFV_RPM_URL)
-
-  if [ "$BRANCH" != 'master' ]; then
-    # find version of RPM
-    VERSION_EXTENSION=$(echo $(basename $RPM_LIST) | grep -Eo '[0-9]+\.[0-9]+-([0-9]{8}|[a-z]+-[0-9]\.[0-9]+)')
+  # find version of RPM
+  VERSION_EXTENSION=$(echo $(basename $RPM_LIST) | grep -Eo '[0-9]+\.[0-9]+-([0-9]{8}|[a-z]+-[0-9]\.[0-9]+)')
+  if [ "$BRANCH" == 'stable/fraser' ]; then
     # build RPM List which already includes base Apex RPM
     RPM_LIST+=" opnfv-apex-undercloud-${VERSION_EXTENSION}.noarch.rpm"
     RPM_LIST+=" python34-opnfv-apex-${VERSION_EXTENSION}.noarch.rpm"
diff --git a/jjb/apex/apex-fetch-snap-info.sh b/jjb/apex/apex-fetch-snap-info.sh
new file mode 100755 (executable)
index 0000000..3324aca
--- /dev/null
@@ -0,0 +1,46 @@
+#!/usr/bin/env bash
+
+##############################################################################
+# Copyright (c) 2018 Tim Rozet (Red Hat) and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+set -o errexit
+set -o nounset
+set -o pipefail
+
+echo "Fetching overcloudrc, ssh key, and node.yaml from deployment..."
+
+SSH_OPTIONS=(-o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null -o LogLevel=error)
+
+tmp_dir=/tmp/snap
+rm -rf ${tmp_dir}
+mkdir -p ${tmp_dir}
+
+# TODO(trozet) remove this after fix goes in for tripleo_inspector to copy these
+pushd ${tmp_dir} > /dev/null
+echo "Copying overcloudrc and ssh key from Undercloud..."
+# Store overcloudrc
+UNDERCLOUD=$(sudo virsh domifaddr undercloud | grep -Eo '[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+')
+sudo scp ${SSH_OPTIONS[@]} stack@${UNDERCLOUD}:overcloudrc ./
+# Copy out ssh key of stack from undercloud
+sudo scp ${SSH_OPTIONS[@]} stack@${UNDERCLOUD}:.ssh/id_rsa ./
+sudo chmod 0600 id_rsa
+popd > /dev/null
+
+echo "Gathering introspection information"
+git clone https://gerrit.opnfv.org/gerrit/sdnvpn.git
+pushd sdnvpn/odl-pipeline/lib > /dev/null
+sudo ./tripleo_introspector.sh --out-file ${tmp_dir}/node.yaml
+popd > /dev/null
+sudo rm -rf sdnvpn
+
+sudo chown jenkins-ci:jenkins-ci ${tmp_dir}/*
+
+ls -lrt ${tmp_dir}
+
+echo "Fetch complete"
diff --git a/jjb/apex/apex-functest-scenario.sh b/jjb/apex/apex-functest-scenario.sh
new file mode 100644 (file)
index 0000000..dcbed44
--- /dev/null
@@ -0,0 +1,18 @@
+#!/bin/bash
+set -o errexit
+set -o nounset
+set -o pipefail
+
+features=$(echo $DEPLOY_SCENARIO | sed -r -n 's/os-.+-(.+)-(noha|ha)/\1/p')
+if [ "$features" == 'rocky' ]; then
+  functest_scenario=$(echo $DEPLOY_SCENARIO | sed -r -n 's/(os-.+?)-rocky-(noha|ha)/\1-nofeature-\2/p')
+  echo "DOCKER_TAG=hunter" > functest_scenario
+elif [[ "$features" =~ 'rocky' ]]; then
+  functest_scenario=$(echo $DEPLOY_SCENARIO | sed -r -n 's/(os-.+?)-(.+)_rocky-(noha|ha)/\1-\2-\3/p')
+  echo "DOCKER_TAG=hunter" > functest_scenario
+else
+  functest_scenario=$DEPLOY_SCENARIO
+  echo "DOCKER_TAG=$([[ ${BRANCH##*/} == "master" ]] && \
+    echo "latest" || echo ${BRANCH##*/})" > functest_scenario
+fi
+echo "DEPLOY_SCENARIO=$functest_scenario" >> functest_scenario
index f349376..c29d7cb 100755 (executable)
@@ -8,8 +8,8 @@ echo "Starting the Apex iso verify."
 echo "--------------------------------------------------------"
 echo
 
-if [ "$BRANCH" == 'master' ]; then
-  echo "Skipping Apex iso verify for master branch"
+if [ "$BRANCH" != 'stable/fraser' ]; then
+  echo "Skipping Apex iso verify for ${BRANCH} branch"
   exit 0
 fi
 
index 0da47b5..58dc4ff 100644 (file)
@@ -20,7 +20,8 @@ def render_jjb():
     gspathname = dict()
     branch = dict()
     build_slave = dict()
-    env = Environment(loader=FileSystemLoader('./'), autoescape=True)
+    env = Environment(loader=FileSystemLoader('./'), autoescape=True,
+                      keep_trailing_newline=True)
 
     with open('scenarios.yaml.hidden') as _:
         scenarios = yaml.safe_load(_)
@@ -45,5 +46,6 @@ def render_jjb():
     with open('./apex.yaml', 'w') as fh:
         fh.write(output)
 
+
 if __name__ == "__main__":
     render_jjb()
index 700ff60..42567ea 100644 (file)
@@ -9,6 +9,11 @@
           gs-pathname: ''
           concurrent-builds: 3
           disabled: false
+      - gambia: &gambia
+          branch: 'stable/{stream}'
+          gs-pathname: '/{stream}'
+          concurrent-builds: 3
+          disabled: false
       - fraser: &fraser
           branch: 'stable/{stream}'
           gs-pathname: '/{stream}'
@@ -68,7 +73,7 @@
           option: 'project'
       - build-blocker:
           use-build-blocker: true
-          block-level: 'NODE'
+          blocking-level: 'NODE'
           blocking-jobs:
             - 'apex-verify-iso-{stream}'
 
diff --git a/jjb/apex/apex-rtd-jobs.yaml b/jjb/apex/apex-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..48e4949
--- /dev/null
@@ -0,0 +1,20 @@
+---
+- project:
+    name: apex-rtd
+    project: apex
+    project-name: apex
+
+    project-pattern: 'apex'
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-apex/47262/'
+    rtd-token: '134db049c774ab06c41db432e3a042a982f50edf'
+
+    stream:
+      - master:
+          branch: '{stream}'
+          disabled: false
+      - gambia:
+          branch: 'stable/{stream}'
+          disabled: false
+
+    jobs:
+      - '{project-name}-rtd-jobs'
index 342896c..e8bf60b 100644 (file)
@@ -27,22 +27,8 @@ echo
 tmp_dir=$(pwd)/.tmp
 mkdir -p ${tmp_dir}
 
-# TODO(trozet) remove this after fix goes in for tripleo_inspector to copy these
-pushd ${tmp_dir} > /dev/null
-echo "Copying overcloudrc and ssh key from Undercloud..."
-# Store overcloudrc
-UNDERCLOUD=$(sudo virsh domifaddr undercloud | grep -Eo '[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+')
-sudo scp ${SSH_OPTIONS[@]} stack@${UNDERCLOUD}:overcloudrc ./
-# Copy out ssh key of stack from undercloud
-sudo scp ${SSH_OPTIONS[@]} stack@${UNDERCLOUD}:.ssh/id_rsa ./
-popd > /dev/null
-
-echo "Gathering introspection information"
-git clone https://gerrit.opnfv.org/gerrit/sdnvpn.git
-pushd sdnvpn/odl-pipeline/lib > /dev/null
-sudo ./tripleo_introspector.sh --out-file ${tmp_dir}/node.yaml
-popd > /dev/null
-sudo rm -rf sdnvpn
+# info should have already been collected in apex-fetch-snap-info so copy it
+cp -r /tmp/snap/* ${tmp_dir}/
 
 echo "Shutting down nodes"
 # Shut down nodes
@@ -78,9 +64,7 @@ for node in $nodes; do
 done
 
 # copy virsh net definitions
-for net in admin api external storage tenant; do
-  sudo virsh net-dumpxml ${net} > ${net}.xml
-done
+sudo virsh net-dumpxml admin > admin.xml
 
 sudo chown jenkins-ci:jenkins-ci *
 
@@ -92,13 +76,30 @@ sudo rm -rf ${tmp_dir}
 echo "Snapshot saved as apex-${SNAP_TYPE}-snap-${DATE}.tar.gz"
 
 # update opnfv properties file
-if [ "$SNAP_TYPE" == 'csit' ]; then
-  curl -O -L http://$GS_URL/snapshot.properties
-  sed -i '/^OPNFV_SNAP_URL=/{h;s#=.*#='${GS_URL}'/apex-csit-snap-'${DATE}'.tar.gz#};${x;/^$/{s##OPNFV_SNAP_URL='${GS_URL}'/apex-csit-snap-'${DATE}'.tar.gz#;H};x}' snapshot.properties
-  snap_sha=$(sha512sum apex-csit-snap-${DATE}.tar.gz | cut -d' ' -f1)
-  sed -i '/^OPNFV_SNAP_SHA512SUM=/{h;s/=.*/='${snap_sha}'/};${x;/^$/{s//OPNFV_SNAP_SHA512SUM='${snap_sha}'/;H};x}' snapshot.properties
-  echo "OPNFV_SNAP_URL=$GS_URL/apex-csit-snap-${DATE}.tar.gz"
-  echo "OPNFV_SNAP_SHA512SUM=$(sha512sum apex-csit-snap-${DATE}.tar.gz | cut -d' ' -f1)"
-  echo "Updated properties file: "
-  cat snapshot.properties
+snap_sha=$(sha512sum apex-${SNAP_TYPE}-snap-${DATE}.tar.gz | cut -d' ' -f1)
+
+if curl --fail -O -L http://$GS_URL/snapshot.properties; then
+  # TODO(trozet): deprecate OPNFV_SNAP_URL for CSIT_SNAP_URL
+  if [ "$SNAP_TYPE" == 'csit' ]; then
+    sed -i '/^OPNFV_SNAP_URL=/{h;s#=.*#='${GS_URL}'/apex-csit-snap-'${DATE}'.tar.gz#};${x;/^$/{s##OPNFV_SNAP_URL='${GS_URL}'/apex-csit-snap-'${DATE}'.tar.gz#;H};x}' snapshot.properties
+    sed -i '/^OPNFV_SNAP_SHA512SUM=/{h;s/=.*/='${snap_sha}'/};${x;/^$/{s//OPNFV_SNAP_SHA512SUM='${snap_sha}'/;H};x}' snapshot.properties
+  fi
+  sed -i '/^'${SNAP_TYPE}'_SNAP_URL=/{h;s#=.*#='${GS_URL}'/apex-'${SNAP_TYPE}'-snap-'${DATE}'.tar.gz#};${x;/^$/{s##'${SNAP_TYPE}'_SNAP_URL='${GS_URL}'/apex-'${SNAP_TYPE}'-snap-'${DATE}'.tar.gz#;H};x}' snapshot.properties
+  sed -i '/^'${SNAP_TYPE}'_SNAP_SHA512SUM=/{h;s/=.*/='${snap_sha}'/};${x;/^$/{s//'${SNAP_TYPE}'_SNAP_SHA512SUM='${snap_sha}'/;H};x}' snapshot.properties
+else
+  cat << EOF > snapshot.properties
+${SNAP_TYPE}_SNAP_URL=${GS_URL}/apex-${SNAP_TYPE}-snap-${DATE}.tar.gz
+${SNAP_TYPE}_SNAP_SHA512SUM=${snap_sha}
+EOF
+  # TODO(trozet): deprecate OPNFV_SNAP_URL for CSIT_SNAP_URL
+  if [ "$SNAP_TYPE" == 'csit' ]; then
+    cat << EOF >> snapshot.properties
+OPNFV_SNAP_URL=${GS_URL}/apex-csit-snap-${DATE}.tar.gz
+OPNFV_SNAP_SHA512SUM=${snap_sha}
+EOF
+  fi
 fi
+echo "${SNAP_TYPE}_SNAP_URL=$GS_URL/apex-${SNAP_TYPE}-snap-${DATE}.tar.gz"
+echo "${SNAP_TYPE}_SNAP_SHA512SUM=$(sha512sum apex-${SNAP_TYPE}-snap-${DATE}.tar.gz | cut -d' ' -f1)"
+echo "Updated properties file: "
+cat snapshot.properties
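
Note on the apex-snapshot-create.sh hunk above: each pair of sed expressions either rewrites an existing KEY= line in snapshot.properties or appends one when the key is absent. A standalone demonstration of the same add-or-update idiom, with a made-up key and value:

    # Sketch only: key and value are placeholders.
    props=$(mktemp)
    echo "OTHER_KEY=untouched" > "${props}"
    key="FUNCTEST_SNAP_URL"
    val="artifacts.example.org/apex-functest-snap-2018-11-20.tar.gz"
    # update KEY= in place if present, otherwise append it at the end
    sed -i '/^'${key}'=/{h;s#=.*#='${val}'#};${x;/^$/{s##'${key}'='${val}'#;H};x}' "${props}"
    cat "${props}"
    # OTHER_KEY=untouched
    # FUNCTEST_SNAP_URL=artifacts.example.org/apex-functest-snap-2018-11-20.tar.gz
    rm -f "${props}"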
index a93421c..dd69df3 100644 (file)
@@ -20,32 +20,44 @@ echo "Deploying Apex snapshot..."
 echo "--------------------------"
 echo
 
+if [ -z "$SNAP_TYPE" ]; then
+  echo "ERROR: SNAP_TYPE not provided...exiting"
+  exit 1
+fi
+
 echo "Cleaning server"
 pushd ci > /dev/null
 sudo opnfv-clean
 popd > /dev/null
 
+full_snap_url="gs://${GS_URL}/${OS_VERSION}/${TOPOLOGY}"
+
 echo "Downloading latest snapshot properties file"
-if ! wget -O $WORKSPACE/opnfv.properties http://$GS_URL/snapshot.properties; then
-  echo "ERROR: Unable to find snapshot.properties at ${GS_URL}...exiting"
+if ! gsutil cp ${full_snap_url}/snapshot.properties $WORKSPACE/opnfv.properties; then
+  echo "ERROR: Unable to find snapshot.properties at ${full_snap_url}...exiting"
   exit 1
 fi
 
+echo "Properties contents:"
+cat ${WORKSPACE}/opnfv.properties
+
 # find latest check sum
-latest_snap_checksum=$(cat opnfv.properties | grep OPNFV_SNAP_SHA512SUM | awk -F "=" '{print $2}')
+latest_snap_checksum=$(cat ${WORKSPACE}/opnfv.properties | grep ${SNAP_TYPE}_SNAP_SHA512SUM | awk -F "=" '{print $2}')
 if [ -z "$latest_snap_checksum" ]; then
   echo "ERROR: checksum of latest snapshot from snapshot.properties is null!"
   exit 1
 fi
 
 local_snap_checksum=""
+SNAP_CACHE=${SNAP_CACHE}/${OS_VERSION}/${TOPOLOGY}
 
 # check snap cache directory exists
 # if snapshot cache exists, find the checksum
 if [ -d "$SNAP_CACHE" ]; then
-  latest_snap=$(ls ${SNAP_CACHE} | grep tar.gz | tail -n 1)
+  latest_snap=$(ls ${SNAP_CACHE} | grep tar.gz | grep $SNAP_TYPE | tail -n 1)
   if [ -n "$latest_snap" ]; then
     local_snap_checksum=$(sha512sum ${SNAP_CACHE}/${latest_snap} | cut -d' ' -f1)
+    echo "Local snap checksum is: ${local_snap_checksum}"
   fi
 else
   mkdir -p ${SNAP_CACHE}
@@ -53,7 +65,12 @@ fi
 
 # compare check sum and download latest snap if not up to date
 if [ "$local_snap_checksum" != "$latest_snap_checksum" ]; then
-  snap_url=$(cat opnfv.properties | grep OPNFV_SNAP_URL | awk -F "=" '{print $2}')
+  snap_url=$(cat opnfv.properties | grep ${SNAP_TYPE}_SNAP_URL | awk -F "=" '{print $2}')
+  # TODO(trozet): Remove this once OPNFV url is deprecated
+  if [[ -z "$snap_url" && "$SNAP_TYPE" == 'csit' ]]; then
+      echo "WARN: Unable to find snap url for ${SNAP_TYPE}, attempting to use OPNFV"
+      snap_url=$(cat opnfv.properties | grep OPNFV_SNAP_URL | awk -F "=" '{print $2}')
+  fi
   if [ -z "$snap_url" ]; then
     echo "ERROR: Snap URL from snapshot.properties is null!"
     exit 1
@@ -61,7 +78,7 @@ if [ "$local_snap_checksum" != "$latest_snap_checksum" ]; then
   echo "INFO: SHA mismatch, will download latest snapshot"
   # wipe cache
   rm -rf ${SNAP_CACHE}/*
-  wget --directory-prefix=${SNAP_CACHE}/ ${snap_url}
+  gsutil cp "gs://${snap_url}" ${SNAP_CACHE}/
   snap_tar=$(basename ${snap_url})
 else
   snap_tar=${latest_snap}
@@ -141,22 +158,23 @@ done
 mkdir -p $HOME/cloner-info
 cp -f overcloudrc $HOME/cloner-info/
 
-admin_controller_ip=$(cat overcloudrc | grep -Eo -m 1 "192.0.2.[0-9]+")
+admin_controller_ip=$(cat overcloudrc | grep -Eo -m 1 "192.0.2.[0-9]+" | head -1)
 netvirt_url="http://${admin_controller_ip}:8081/restconf/operational/network-topology:network-topology/topology/netvirt:1"
 
 source overcloudrc
 counter=1
 while [ "$counter" -le 10 ]; do
-  if curl --fail --silent ${admin_controller_ip}:80 > /dev/null; then
-    echo "Overcloud Horizon is up...Checking if OpenDaylight NetVirt is up..."
-    if curl --fail --silent -u admin:admin ${netvirt_url} > /dev/null; then
+  echo "Checking if OpenStack is up"
+  if nc -z ${admin_controller_ip} 9696 > /dev/null; then
+    echo "Overcloud Neutron is up...Checking if OpenDaylight NetVirt is up..."
+    if curl --fail --silent -u admin:${SDN_CONTROLLER_PASSWORD} ${netvirt_url} > /dev/null; then
       echo "OpenDaylight is up.  Overcloud deployment complete"
       exit 0
     else
       echo "OpenDaylight not yet up, try ${counter}"
     fi
   else
-    echo "Horizon/Apache not yet up, try ${counter}"
+    echo "Neutron not yet up, try ${counter}"
   fi
   counter=$((counter+1))
   sleep 60
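
Note on the apex-snapshot-deploy.sh hunks above: the "head -1" added to the admin_controller_ip extraction matters because grep -m 1 only stops after the first matching line, not the first match; with -o, a single line carrying two addresses still yields two results. A small illustration with a fabricated overcloudrc line:

    # Sketch only: the overcloudrc line below is fabricated.
    line="export OS_AUTH_URL=http://192.0.2.5:5000 http://192.0.2.5:35357"
    echo "${line}" | grep -Eo -m 1 "192.0.2.[0-9]+"            # prints 192.0.2.5 twice
    echo "${line}" | grep -Eo -m 1 "192.0.2.[0-9]+" | head -1  # prints it once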
index 8743368..00a0a1c 100755 (executable)
@@ -24,7 +24,9 @@ importkey () {
   git clone https://gerrit.opnfv.org/gerrit/releng $WORKSPACE/releng/ &> /dev/null
   # this is where we import the signing key
   if [ -f $WORKSPACE/releng/utils/gpg_import_key.sh ]; then
-    source $WORKSPACE/releng/utils/gpg_import_key.sh
+    if ! $WORKSPACE/releng/utils/gpg_import_key.sh; then
+      echo "WARNING: Failed to run gpg key import"
+    fi
   fi
 }
 
@@ -75,14 +77,15 @@ uploadrpm () {
 uploadsnap () {
   # Uploads snapshot artifact and updated properties file
   echo "Uploading snapshot artifacts"
+  # the snapshot dir lives on this same node, in the create job's workspace
+  # (only 1 promotion job can run at a time on a slave)
+  snapshot_dir="${WORKSPACE}/../apex-create-snapshot"
   if [ -z "$SNAP_TYPE" ]; then
     echo "ERROR: SNAP_TYPE not provided...exiting"
     exit 1
   fi
-  gsutil cp $WORKSPACE/apex-${SNAP_TYPE}-snap-`date +%Y-%m-%d`.tar.gz gs://$GS_URL/ > gsutil.iso.log
-  if [ "$SNAP_TYPE" == 'csit' ]; then
-    gsutil cp $WORKSPACE/snapshot.properties gs://$GS_URL/snapshot.properties > gsutil.latest.log
-  fi
+  gsutil cp ${snapshot_dir}/apex-${SNAP_TYPE}-snap-`date +%Y-%m-%d`.tar.gz gs://$GS_URL/ > gsutil.iso.log
+  gsutil cp ${snapshot_dir}/snapshot.properties gs://$GS_URL/snapshot.properties > gsutil.latest.log
   echo "Upload complete for Snapshot"
 }
 
@@ -109,8 +112,8 @@ fi
 if [ "$ARTIFACT_TYPE" == 'snapshot' ]; then
   uploadsnap
 elif [ "$ARTIFACT_TYPE" == 'iso' ]; then
-  if [[ "$ARTIFACT_VERSION" =~ dev || "$BRANCH" == 'master' ]]; then
-    echo "Skipping ISO artifact upload for ${ARTIFACT_TYPE} due to dev/master build"
+  if [[ "$ARTIFACT_VERSION" =~ dev || "$BRANCH" != 'stable/fraser' ]]; then
+    echo "Skipping ISO artifact upload for ${ARTIFACT_TYPE} due to dev/${BRANCH} build"
     exit 0
   fi
   if [[ -n "$SIGN_ARTIFACT" && "$SIGN_ARTIFACT" == "true" ]]; then
@@ -119,8 +122,8 @@ elif [ "$ARTIFACT_TYPE" == 'iso' ]; then
   uploadiso
 elif [ "$ARTIFACT_TYPE" == 'rpm' ]; then
   if [[ "$ARTIFACT_VERSION" =~ dev ]]; then
-    if [ "$BRANCH" == 'master' ]; then
-      echo "will not upload artifacts, master uses upstream"
+    if [[ "$BRANCH" != 'stable/fraser' ]]; then
+      echo "will not upload artifacts, ${BRANCH} uses upstream"
       ARTIFACT_TYPE=none
     else
       echo "dev build detected, will upload image tarball"
@@ -133,7 +136,7 @@ elif [ "$ARTIFACT_TYPE" == 'rpm' ]; then
     RPM_LIST=$RPM_INSTALL_PATH/$(basename $OPNFV_RPM_URL)
     SRPM_INSTALL_PATH=$BUILD_DIRECTORY
     SRPM_LIST=$SRPM_INSTALL_PATH/$(basename $OPNFV_SRPM_URL)
-    if [ "$BRANCH" != 'master' ]; then
+    if [[ "$BRANCH" == 'stable/fraser' ]]; then
       VERSION_EXTENSION=$(echo $(basename $OPNFV_RPM_URL) | sed 's/opnfv-apex-//')
       RPM_LIST+=" ${RPM_INSTALL_PATH}/opnfv-apex-undercloud-${VERSION_EXTENSION}"
       RPM_LIST+=" ${RPM_INSTALL_PATH}/python34-opnfv-apex-${VERSION_EXTENSION}"
index 7dbd670..15e6826 100644 (file)
       - master: &master
           branch: '{stream}'
           gs-pathname: ''
-          verify-scenario: 'os-odl-nofeature-ha'
+          verify-scenario: 'os-nosdn-nofeature-noha'
+          disabled: false
+      - gambia: &gambia
+          branch: 'stable/{stream}'
+          gs-pathname: '/{stream}'
+          verify-scenario: 'os-nosdn-nofeature-ha'
           disabled: false
       - fraser: &fraser
           branch: 'stable/{stream}'
           gs-pathname: '/{stream}'
-          verify-scenario: 'os-odl-nofeature-ha'
+          verify-scenario: 'os-nosdn-nofeature-ha'
           disabled: false
       - danube: &danube
           branch: 'stable/{stream}'
           max-per-node: 3
           max-total: 10
           option: 'project'
-
+      - build-blocker:
+          use-build-blocker: true
+          blocking-level: 'NODE'
+          blocking-jobs:
+            - 'apex-.*-promote.*'
     builders:
       - description-setter:
           description: "Built on $NODE_NAME"
                 GERRIT_REFSPEC=$GERRIT_REFSPEC
                 GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
                 GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-              node-parameters: false
+                FUNCTEST_SUITE_NAME=tempest_smoke
+                FUNCTEST_MODE=testcase
+              node-parameters: true
               kill-phase-on: FAILURE
               abort-all-job: true
               git-revision: true
       - logrotate-default
       - build-blocker:
           use-build-blocker: true
-          block-level: 'NODE'
+          blocking-level: 'NODE'
           blocking-jobs:
             - 'apex-verify.*'
             - 'apex-virtual.*'
+            - 'apex-.*-promote.*'
+            - 'odl-netvirt.*'
       - throttle:
           max-per-node: 1
           max-total: 10
               kill-phase-on: FAILURE
               abort-all-job: true
               git-revision: true
+      - shell: |
+          features=$(echo $DEPLOY_SCENARIO | sed -r -n 's/os-.+-(.+)-(noha|ha)/\1/p')
+          if [ "$features" == 'rocky' ]; then
+            echo "DOCKER_TAG=hunter" > functest_tag
+          elif [[ "$features" =~ 'rocky' ]]; then
+            echo "DOCKER_TAG=hunter" > functest_tag
+          else
+            echo "DOCKER_TAG=''" > functest_tag
+          fi
+      - inject:
+          properties-file: functest_tag
+          override-build-parameters: true
       - multijob:
           name: functest-smoke
           condition: ALWAYS
               current-parameters: false
               predefined-parameters: |
                 DEPLOY_SCENARIO=$DEPLOY_SCENARIO
-                FUNCTEST_MODE=tier
-                FUNCTEST_TIER=healthcheck
+                DOCKER_TAG=$DOCKER_TAG
+                FUNCTEST_SUITE_NAME=tempest_smoke
+                FUNCTEST_MODE=testcase
                 GERRIT_BRANCH=$GERRIT_BRANCH
                 GERRIT_REFSPEC=$GERRIT_REFSPEC
                 GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
                 GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
               node-parameters: true
               kill-phase-on: NEVER
+              enable-condition: "def m = '$DEPLOY_SCENARIO' != 'k8s-nosdn-nofeature-noha'"
               abort-all-job: true
               git-revision: false
       - multijob:
index 4e8e7cf..80a4d82 100644 (file)
@@ -8,12 +8,13 @@
       - 'apex-virtual-{stream}'
       - 'apex-deploy-{platform}-{stream}'
       - 'apex-daily-{stream}'
-      - 'apex-csit-promote-daily-{stream}-{os_version}'
+      - 'apex-{snap_type}-promote-daily-{stream}-os-{os_version}-{topology}'
       - 'apex-fdio-promote-daily-{stream}'
       - 'apex-{scenario}-baremetal-{scenario_stream}'
       - 'apex-testsuite-{scenario}-baremetal-{scenario_stream}'
       - 'apex-upload-snapshot'
       - 'apex-create-snapshot'
+      - 'apex-fetch-snap-info'
       - 'apex-flex-daily-os-nosdn-nofeature-ha-{stream}'
       - 'apex-dovetail-daily-os-nosdn-nofeature-ha-baremetal-danube'
     # stream:    branch with - in place of / (eg. stable-arno)
           build-slave: 'apex-build-master'
           virtual-slave: 'apex-virtual-master'
           baremetal-slave: 'apex-baremetal-master'
-          verify-scenario: 'os-odl-nofeature-ha'
+          verify-scenario: 'os-nosdn-nofeature-noha'
           scenario_stream: 'master'
           disable_daily: false
           disable_promote: false
+      - gambia: &gambia
+          branch: 'stable/gambia'
+          gs-pathname: '/gambia'
+          build-slave: 'apex-build-master'
+          virtual-slave: 'apex-virtual-master'
+          baremetal-slave: 'apex-baremetal-master'
+          verify-scenario: 'os-nosdn-nofeature-ha'
+          scenario_stream: 'gambia'
+          disable_daily: false
+          disable_promote: false
       - fraser: &fraser
           branch: 'stable/fraser'
           gs-pathname: '/fraser'
           build-slave: 'apex-build-master'
           virtual-slave: 'apex-virtual-master'
           baremetal-slave: 'apex-baremetal-master'
-          verify-scenario: 'os-odl-nofeature-ha'
+          verify-scenario: 'os-nosdn-nofeature-ha'
           scenario_stream: 'fraser'
           disable_daily: false
           disable_promote: true
           disable_promote: true
 
     scenario:
-      - 'os-nosdn-nofeature-noha':
-          <<: *fraser
       - 'os-nosdn-nofeature-ha':
           <<: *fraser
-      - 'os-odl-nofeature-ha':
+      - 'os-odl-bgpvpn-ha':
           <<: *fraser
+      - 'os-nosdn-nofeature-noha':
+          <<: *gambia
+      - 'os-nosdn-nofeature-ha':
+          <<: *gambia
+      - 'os-nosdn-nofeature-ha-ipv6':
+          <<: *gambia
       - 'os-odl-nofeature-noha':
-          <<: *fraser
+          <<: *gambia
+      - 'os-odl-nofeature-ha':
+          <<: *gambia
+      - 'k8s-nosdn-nofeature-noha':
+          <<: *gambia
       - 'os-odl-bgpvpn-ha':
-          <<: *fraser
-      - 'os-ovn-nofeature-noha':
-          <<: *fraser
-      - 'os-nosdn-fdio-noha':
-          <<: *fraser
-      - 'os-nosdn-fdio-ha':
-          <<: *fraser
-      - 'os-nosdn-bar-ha':
-          <<: *fraser
-      - 'os-nosdn-bar-noha':
-          <<: *fraser
-      - 'os-nosdn-ovs_dpdk-noha':
-          <<: *fraser
-      - 'os-nosdn-ovs_dpdk-ha':
-          <<: *fraser
-      - 'os-odl-sfc-noha':
-          <<: *fraser
+          <<: *gambia
+      - 'os-odl-bgpvpn-noha':
+          <<: *gambia
       - 'os-odl-sfc-ha':
-          <<: *fraser
+          <<: *gambia
+      - 'os-odl-sfc-noha':
+          <<: *gambia
       - 'os-nosdn-calipso-noha':
-          <<: *fraser
+          <<: *gambia
+      - 'os-ovn-nofeature-ha':
+          <<: *gambia
       - 'os-nosdn-nofeature-noha':
           <<: *danube
       - 'os-nosdn-nofeature-ha':
           <<: *danube
       - 'os-ovn-nofeature-noha':
           <<: *danube
+      - 'os-nosdn-nofeature-noha':
+          <<: *master
+      - 'os-nosdn-nofeature-ha':
+          <<: *master
+      - 'os-nosdn-nofeature-ha-ipv6':
+          <<: *master
       - 'os-odl-nofeature-noha':
           <<: *master
       - 'os-odl-nofeature-ha':
           <<: *master
-      - 'os-odl-queens-noha':
+      - 'os-nosdn-rocky-noha':
+          <<: *master
+      - 'os-nosdn-rocky-ha':
+          <<: *master
+      - 'os-nosdn-rocky-ha-ipv6':
+          <<: *master
+      - 'os-odl-rocky-noha':
+          <<: *master
+      - 'os-odl-rocky-ha':
           <<: *master
-      - 'os-odl-queens-ha':
+      - 'k8s-nosdn-nofeature-noha':
+          <<: *master
+      - 'os-odl-bgpvpn-ha':
+          <<: *master
+      - 'os-odl-bgpvpn-noha':
+          <<: *master
+      - 'os-odl-bgpvpn_queens-ha':
+          <<: *master
+      - 'os-odl-bgpvpn_queens-noha':
+          <<: *master
+      - 'os-odl-sfc-ha':
+          <<: *master
+      - 'os-odl-sfc-noha':
+          <<: *master
+      - 'os-odl-sfc_rocky-ha':
+          <<: *master
+      - 'os-odl-sfc_rocky-noha':
+          <<: *master
+      - 'os-nosdn-calipso-noha':
+          <<: *master
+      - 'os-nosdn-calipso_rocky-noha':
+          <<: *master
+      - 'os-ovn-nofeature-ha':
+          <<: *master
+      - 'os-ovn-rocky-ha':
           <<: *master
       - 'os-nosdn-nofeature-noha':
           <<: *euphrates
           <<: *euphrates
       - 'os-odl-sfc-ha':
           <<: *euphrates
-      - 'os-nosdn-calipso-noha':
-          <<: *euphrates
 
     platform:
       - 'baremetal'
       - 'virtual'
 
     os_version:
-      - 'pike'
-      - 'queens'
-      - 'master'
-
-
+      - 'queens':
+          os_scenario: 'nofeature'
+          odl_branch: 'stable/oxygen'
+      - 'rocky':
+          os_scenario: 'rocky'
+          odl_branch: 'stable/oxygen'
+      - 'master':
+          os_scenario: 'nofeature'
+          odl_branch: 'stable/fluorine'
+
+    topology:
+      - 'noha'
+      - 'ha'
+      - 'noha-allinone'
+
+    snap_type:
+      - csit:
+          sdn: 'odl'
+      - functest:
+          sdn: 'nosdn'
 # Fetch Logs Job
 - job-template:
     name: 'apex-fetch-logs-{stream}'
       - logrotate-default
       - build-blocker:
           use-build-blocker: false
-          block-level: 'NODE'
+          blocking-level: 'NODE'
           blocking-jobs:
             - 'apex-deploy.*'
       - throttle:
       - logrotate-default
       - build-blocker:
           use-build-blocker: true
-          block-level: 'NODE'
+          blocking-level: 'NODE'
           blocking-jobs:
             - 'apex-deploy.*'
             - 'functest.*'
             - 'yardstick.*'
             - 'dovetail.*'
             - 'storperf.*'
+            - 'odl-netvirt.*'
       - throttle:
           max-per-node: 1
           max-total: 10
     parameters:
       - '{project}-defaults'
       - '{project}-virtual-{stream}-defaults'
+      - 'functest-suite-parameter'
       - project-parameter:
           project: '{project}'
           branch: '{branch}'
       - logrotate-default
       - build-blocker:
           use-build-blocker: true
-          block-level: 'NODE'
+          blocking-level: 'NODE'
           blocking-jobs:
             - 'apex-runner.*'
-            - 'apex-.*-promote.*'
             - 'apex-run.*'
             - 'apex-virtual-.*'
             - 'apex-verify-gate-.*'
+            - 'odl-netvirt.*'
+            - 'apex-.*-promote.*'
       - throttle:
           max-per-node: 1
           max-total: 10
                 GERRIT_REFSPEC=$GERRIT_REFSPEC
                 GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
                 GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+                PROMOTE=$PROMOTE
               node-parameters: true
               kill-phase-on: FAILURE
               abort-all-job: true
               current-parameters: false
               predefined-parameters: |
                 DEPLOY_SCENARIO=$DEPLOY_SCENARIO
-                FUNCTEST_SUITE_NAME=healthcheck
+                FUNCTEST_MODE=$FUNCTEST_MODE
+                FUNCTEST_SUITE_NAME=$FUNCTEST_SUITE_NAME
                 GERRIT_BRANCH=$GERRIT_BRANCH
                 GERRIT_REFSPEC=$GERRIT_REFSPEC
                 GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
       - logrotate-default
       - build-blocker:
           use-build-blocker: true
-          block-level: 'NODE'
+          blocking-level: 'NODE'
           blocking-jobs:
             - 'apex-verify.*'
             - 'apex-runner.*'
               kill-phase-on: NEVER
               abort-all-job: true
               git-revision: false
+      - shell:
+          !include-raw-escape: ./apex-functest-scenario.sh
+      - inject:
+          properties-file: functest_scenario
+          override-build-parameters: true
       - multijob:
           name: 'OPNFV Test Suite'
           condition: ALWAYS
             - name: 'apex-testsuite-{scenario}-baremetal-{scenario_stream}'
               node-parameters: true
               current-parameters: false
-              predefined-parameters:
+              predefined-parameters: |
                 DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+                DOCKER_TAG=$DOCKER_TAG
               kill-phase-on: NEVER
               abort-all-job: true
               git-revision: false
           name: DEPLOY_SCENARIO
           default: '{scenario}'
           description: "Scenario to deploy with."
+      - string:
+          name: DOCKER_TAG
+          default: ''
+          description: Default docker tag to pass to functest
 
     properties:
       - logrotate-default
       - build-blocker:
           use-build-blocker: true
-          block-level: 'NODE'
+          blocking-level: 'NODE'
           blocking-jobs:
             - 'apex-verify.*'
             - 'apex-runner.*'
-            - 'apex-.*-promote.*'
             - 'apex-run.*'
             - 'apex-testsuite-.+-baremetal-.+'
       - throttle:
             - name: 'functest-apex-baremetal-daily-{scenario_stream}'
               node-parameters: true
               current-parameters: false
-              predefined-parameters:
+              predefined-parameters: |
                 DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+                DOCKER_TAG=$DOCKER_TAG
               kill-phase-on: NEVER
               abort-all-job: false
               git-revision: false
           name: Dovetail-proposed_tests
           condition: ALWAYS
           projects:
-            - name: 'dovetail-apex-baremetal-proposed_tests-{scenario_stream}'
+            - name: 'dovetail-apex-baremetal-default-mandatory-{scenario_stream}'
               node-parameters: true
               current-parameters: false
               predefined-parameters:
                 DEPLOY_SCENARIO=$DEPLOY_SCENARIO
               kill-phase-on: NEVER
-              enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-(nosdn-nofeature|odl-bgpvpn)-ha/
-                                 && $BUILD_NUMBER % 2 == 1"
+              enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-(nosdn-nofeature|odl-bgpvpn)-ha/"
               abort-all-job: false
               git-revision: false
       - multijob:
           name: Dovetail-default
           condition: ALWAYS
           projects:
-            - name: 'dovetail-apex-baremetal-default-{scenario_stream}'
+            - name: 'dovetail-apex-baremetal-default-optional-{scenario_stream}'
               node-parameters: true
               current-parameters: false
               predefined-parameters:
                 DEPLOY_SCENARIO=$DEPLOY_SCENARIO
               kill-phase-on: NEVER
-              enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-(nosdn-nofeature|odl-bgpvpn)-ha/
-                                 && $BUILD_NUMBER % 2 == 0"
+              enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-(nosdn-nofeature|odl-bgpvpn)-ha/"
               abort-all-job: false
               git-revision: false
       - multijob:
       - logrotate-default
       - build-blocker:
           use-build-blocker: true
-          block-level: 'NODE'
+          blocking-level: 'NODE'
           blocking-jobs:
             - 'apex-daily.*'
 
               git-revision: true
       - apex-builder-{stream}
 
+# snapshot info fetch
+- job-template:
+    name: 'apex-fetch-snap-info'
+
+    disabled: false
+
+    parameters:
+      - '{project}-defaults'
+
+    builders:
+      - shell:
+          !include-raw-escape: ./apex-fetch-snap-info.sh
+
 # snapshot create
 - job-template:
     name: 'apex-create-snapshot'
 
-    # Job template for clean
-    #
-    # Required Variables:
-    #     stream:    branch with - in place of / (eg. stable)
-
     disabled: false
 
+    parameters:
+      - '{project}-defaults'
+
     builders:
       - shell:
           !include-raw-escape: ./apex-snapshot-create.sh
 - job-template:
     name: 'apex-upload-snapshot'
 
-    # Job template for clean
-    #
-    # Required Variables:
-    #     stream:    branch with - in place of / (eg. stable)
-
     disabled: false
 
+    parameters:
+      - '{project}-defaults'
+
     builders:
       - inject:
           properties-content: ARTIFACT_TYPE=snapshot
 
 # CSIT promote
 - job-template:
-    name: 'apex-csit-promote-daily-{stream}-{os_version}'
+    name: 'apex-{snap_type}-promote-daily-{stream}-os-{os_version}-{topology}'
 
     # Job template for promoting CSIT Snapshots
     #
           branch: '{branch}'
       - apex-parameter:
           gs-pathname: '{gs-pathname}'
-
+      - string:
+          name: ARTIFACT_VERSION
+          default: dev
+          description: "Used for overriding the ARTIFACT_VERSION"
+      - string:
+          name: PROMOTE
+          default: 'True'
+          description: "Used for overriding the PROMOTE"
+      - string:
+          name: GS_URL
+          default: 'artifacts.opnfv.org/apex/{os_version}/{topology}'
+          description: "User for overriding GS_URL from apex params"
+      - string:
+          name: OS_VERSION
+          default: '{os_version}'
+          description: OpenStack version short name
+      - string:
+          name: ODL_BRANCH
+          default: '{odl_branch}'
+          description: ODL branch being used
+      - string:
+          name: FORCE_PROMOTE
+          default: 'False'
+          description: "Used to force promotion and skip CSIT"
+      - string:
+          name: SNAP_TYPE
+          default: '{snap_type}'
+          description: Type of snapshot to promote
     properties:
       - build-blocker:
           use-build-blocker: true
-          block-level: 'NODE'
+          blocking-level: 'NODE'
           blocking-jobs:
             - 'apex-verify.*'
-            - 'apex-deploy.*'
             - 'apex-runner.*'
             - 'apex-daily.*'
+            - 'apex-.*-promote.*'
+            - 'odl-netvirt.*'
+      - throttle:
+          max-per-node: 1
+          max-total: 10
+          option: 'project'
 
     triggers:
-      - timed: '0 12 * * 0'
+      - '{stream}-{snap_type}-{os_version}'
 
     builders:
       - multijob:
-          name: deploy-virtual
+          name: apex-virtual-deploy
           condition: SUCCESSFUL
           projects:
             - name: 'apex-deploy-virtual-{stream}'
-              current-parameters: false
+              current-parameters: true
               predefined-parameters: |
-                DEPLOY_SCENARIO=os-odl-{os_version}-noha
+                DEPLOY_SCENARIO=os-{sdn}-{os_scenario}-{topology}
                 OPNFV_CLEAN=yes
                 GERRIT_BRANCH=$GERRIT_BRANCH
                 GERRIT_REFSPEC=$GERRIT_REFSPEC
                 GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
                 GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-                PROMOTE=True
+              node-parameters: true
+              kill-phase-on: FAILURE
+              abort-all-job: true
+              git-revision: true
+      - multijob:
+          name: fetch snapshot info
+          condition: SUCCESSFUL
+          projects:
+            - name: 'apex-fetch-snap-info'
+              current-parameters: true
               node-parameters: true
               kill-phase-on: FAILURE
               abort-all-job: true
               git-revision: false
       - multijob:
-          name: functest-smoke
+          name: test phase
           condition: SUCCESSFUL
+          execution-type: SEQUENTIALLY
           projects:
+            - name: cperf-apex-csit-master
+              predefined-parameters: |
+                ODL_BRANCH=$ODL_BRANCH
+                RC_FILE_PATH=/tmp/csit/overcloudrc
+                NODE_FILE_PATH=/tmp/csit/node.yaml
+                SSH_KEY_PATH=/tmp/csit/id_rsa
+                ODL_CONTAINERIZED=true
+                OS_VERSION=$OS_VERSION
+                SKIP_CSIT=$FORCE_PROMOTE
+                SNAP_TYPE=$SNAP_TYPE
+              node-parameters: true
+              kill-phase-on: NEVER
+              abort-all-job: false
+              enable-condition: "def m = '$SNAP_TYPE' ==~ /csit/"
+            - name: cperf-upload-logs-csit
+              predefined-parameters: |
+                ODL_BRANCH=$ODL_BRANCH
+                OS_VERSION=$OS_VERSION
+                SNAP_TYPE=$SNAP_TYPE
+              node-parameters: true
+              kill-phase-on: FAILURE
+              abort-all-job: false
+              enable-condition: "def m = '$SNAP_TYPE' ==~ /csit/"
             - name: 'functest-apex-virtual-suite-{stream}'
               current-parameters: false
               predefined-parameters: |
-                DEPLOY_SCENARIO=os-odl-{os_version}-noha
-                FUNCTEST_SUITE_NAME=tempest_smoke_serial
+                DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+                DOCKER_TAG=$DOCKER_TAG
+                FUNCTEST_SUITE_NAME=tempest_smoke
+                FUNCTEST_MODE=testcase
                 GERRIT_BRANCH=$GERRIT_BRANCH
                 GERRIT_REFSPEC=$GERRIT_REFSPEC
                 GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
                 GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
               node-parameters: true
-              kill-phase-on: FAILURE
+              kill-phase-on: NEVER
+              enable-condition: "def m = '$SNAP_TYPE' ==~ /functest/"
+              abort-all-job: true
+              git-revision: false
+            - name: 'apex-fetch-logs-{stream}'
+              current-parameters: false
+              predefined-parameters: |
+                GERRIT_BRANCH=$GERRIT_BRANCH
+                GERRIT_REFSPEC=$GERRIT_REFSPEC
+                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+                GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+              node-parameters: true
+              kill-phase-on: NEVER
               abort-all-job: true
               git-revision: false
       - multijob:
           condition: SUCCESSFUL
           projects:
             - name: 'apex-create-snapshot'
-              current-parameters: false
-              predefined-parameters: |
-                SNAP_TYPE=csit
+              current-parameters: true
               node-parameters: true
               kill-phase-on: FAILURE
               abort-all-job: true
           condition: SUCCESSFUL
           projects:
             - name: 'apex-upload-snapshot'
-              current-parameters: false
-              predefined-parameters: |
-                SNAP_TYPE=csit
+              current-parameters: true
               node-parameters: true
               kill-phase-on: FAILURE
               abort-all-job: true
     properties:
       - build-blocker:
           use-build-blocker: true
-          block-level: 'NODE'
+          blocking-level: 'NODE'
           blocking-jobs:
             - 'apex-verify.*'
             - 'apex-deploy.*'
 
     project-type: 'multijob'
 
-    disabled: false
+    disabled: true
 
     node: 'flex-pod2'
 
       - logrotate-default
       - build-blocker:
           use-build-blocker: true
-          block-level: 'NODE'
+          blocking-level: 'NODE'
           blocking-jobs:
             - 'apex-verify.*'
             - 'apex-runner.*'
       - logrotate-default
       - build-blocker:
           use-build-blocker: true
-          block-level: 'NODE'
+          blocking-level: 'NODE'
           blocking-jobs:
             - 'apex-verify.*'
             - 'apex-runner.*'
           name: Baremetal Deploy and Test Phase
           condition: SUCCESSFUL
           projects:
-            - name: 'apex-os-nosdn-nofeature-noha-baremetal-fraser'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
             - name: 'apex-os-nosdn-nofeature-ha-baremetal-fraser'
               node-parameters: false
               current-parameters: false
               kill-phase-on: NEVER
               abort-all-job: true
               git-revision: false
-            - name: 'apex-os-odl-nofeature-ha-baremetal-fraser'
+            - name: 'apex-os-odl-bgpvpn-ha-baremetal-fraser'
               node-parameters: false
               current-parameters: false
               predefined-parameters: |
               kill-phase-on: NEVER
               abort-all-job: true
               git-revision: false
-            - name: 'apex-os-odl-nofeature-noha-baremetal-fraser'
+
+# gambia Builder
+- builder:
+    name: apex-builder-gambia
+    builders:
+      - multijob:
+          name: Baremetal Deploy and Test Phase
+          condition: SUCCESSFUL
+          projects:
+            - name: 'apex-os-nosdn-nofeature-noha-baremetal-gambia'
               node-parameters: false
               current-parameters: false
               predefined-parameters: |
               kill-phase-on: NEVER
               abort-all-job: true
               git-revision: false
-            - name: 'apex-os-odl-bgpvpn-ha-baremetal-fraser'
+            - name: 'apex-os-nosdn-nofeature-ha-baremetal-gambia'
               node-parameters: false
               current-parameters: false
               predefined-parameters: |
               kill-phase-on: NEVER
               abort-all-job: true
               git-revision: false
-            - name: 'apex-os-ovn-nofeature-noha-baremetal-fraser'
+            - name: 'apex-os-nosdn-nofeature-ha-ipv6-baremetal-gambia'
               node-parameters: false
               current-parameters: false
               predefined-parameters: |
               kill-phase-on: NEVER
               abort-all-job: true
               git-revision: false
-            - name: 'apex-os-nosdn-fdio-noha-baremetal-fraser'
+            - name: 'apex-os-odl-nofeature-noha-baremetal-gambia'
               node-parameters: false
               current-parameters: false
               predefined-parameters: |
               kill-phase-on: NEVER
               abort-all-job: true
               git-revision: false
-            - name: 'apex-os-nosdn-fdio-ha-baremetal-fraser'
+            - name: 'apex-os-odl-nofeature-ha-baremetal-gambia'
               node-parameters: false
               current-parameters: false
               predefined-parameters: |
               kill-phase-on: NEVER
               abort-all-job: true
               git-revision: false
-            - name: 'apex-os-nosdn-bar-ha-baremetal-fraser'
+            - name: 'apex-k8s-nosdn-nofeature-noha-baremetal-gambia'
               node-parameters: false
               current-parameters: false
               predefined-parameters: |
               kill-phase-on: NEVER
               abort-all-job: true
               git-revision: false
-            - name: 'apex-os-nosdn-bar-noha-baremetal-fraser'
+            - name: 'apex-os-odl-bgpvpn-ha-baremetal-gambia'
               node-parameters: false
               current-parameters: false
               predefined-parameters: |
               kill-phase-on: NEVER
               abort-all-job: true
               git-revision: false
-            - name: 'apex-os-nosdn-ovs_dpdk-noha-baremetal-fraser'
+            - name: 'apex-os-odl-bgpvpn-noha-baremetal-gambia'
               node-parameters: false
               current-parameters: false
               predefined-parameters: |
               kill-phase-on: NEVER
               abort-all-job: true
               git-revision: false
-            - name: 'apex-os-nosdn-ovs_dpdk-ha-baremetal-fraser'
+            - name: 'apex-os-odl-sfc-ha-baremetal-gambia'
               node-parameters: false
               current-parameters: false
               predefined-parameters: |
               kill-phase-on: NEVER
               abort-all-job: true
               git-revision: false
-            - name: 'apex-os-odl-sfc-noha-baremetal-fraser'
+            - name: 'apex-os-odl-sfc-noha-baremetal-gambia'
               node-parameters: false
               current-parameters: false
               predefined-parameters: |
               kill-phase-on: NEVER
               abort-all-job: true
               git-revision: false
-            - name: 'apex-os-odl-sfc-ha-baremetal-fraser'
+            - name: 'apex-os-nosdn-calipso-noha-baremetal-gambia'
               node-parameters: false
               current-parameters: false
               predefined-parameters: |
               kill-phase-on: NEVER
               abort-all-job: true
               git-revision: false
-            - name: 'apex-os-nosdn-calipso-noha-baremetal-fraser'
+            - name: 'apex-os-ovn-nofeature-ha-baremetal-gambia'
               node-parameters: false
               current-parameters: false
               predefined-parameters: |
           name: Baremetal Deploy and Test Phase
           condition: SUCCESSFUL
           projects:
+            - name: 'apex-os-nosdn-nofeature-noha-baremetal-master'
+              node-parameters: false
+              current-parameters: false
+              predefined-parameters: |
+                OPNFV_CLEAN=yes
+              kill-phase-on: NEVER
+              abort-all-job: true
+              git-revision: false
+            - name: 'apex-os-nosdn-nofeature-ha-baremetal-master'
+              node-parameters: false
+              current-parameters: false
+              predefined-parameters: |
+                OPNFV_CLEAN=yes
+              kill-phase-on: NEVER
+              abort-all-job: true
+              git-revision: false
+            - name: 'apex-os-nosdn-nofeature-ha-ipv6-baremetal-master'
+              node-parameters: false
+              current-parameters: false
+              predefined-parameters: |
+                OPNFV_CLEAN=yes
+              kill-phase-on: NEVER
+              abort-all-job: true
+              git-revision: false
             - name: 'apex-os-odl-nofeature-noha-baremetal-master'
               node-parameters: false
               current-parameters: false
               kill-phase-on: NEVER
               abort-all-job: true
               git-revision: false
-            - name: 'apex-os-odl-queens-noha-baremetal-master'
+            - name: 'apex-os-nosdn-rocky-noha-baremetal-master'
+              node-parameters: false
+              current-parameters: false
+              predefined-parameters: |
+                OPNFV_CLEAN=yes
+              kill-phase-on: NEVER
+              abort-all-job: true
+              git-revision: false
+            - name: 'apex-os-nosdn-rocky-ha-baremetal-master'
+              node-parameters: false
+              current-parameters: false
+              predefined-parameters: |
+                OPNFV_CLEAN=yes
+              kill-phase-on: NEVER
+              abort-all-job: true
+              git-revision: false
+            - name: 'apex-os-nosdn-rocky-ha-ipv6-baremetal-master'
+              node-parameters: false
+              current-parameters: false
+              predefined-parameters: |
+                OPNFV_CLEAN=yes
+              kill-phase-on: NEVER
+              abort-all-job: true
+              git-revision: false
+            - name: 'apex-os-odl-rocky-noha-baremetal-master'
+              node-parameters: false
+              current-parameters: false
+              predefined-parameters: |
+                OPNFV_CLEAN=yes
+              kill-phase-on: NEVER
+              abort-all-job: true
+              git-revision: false
+            - name: 'apex-os-odl-rocky-ha-baremetal-master'
+              node-parameters: false
+              current-parameters: false
+              predefined-parameters: |
+                OPNFV_CLEAN=yes
+              kill-phase-on: NEVER
+              abort-all-job: true
+              git-revision: false
+            - name: 'apex-k8s-nosdn-nofeature-noha-baremetal-master'
+              node-parameters: false
+              current-parameters: false
+              predefined-parameters: |
+                OPNFV_CLEAN=yes
+              kill-phase-on: NEVER
+              abort-all-job: true
+              git-revision: false
+            - name: 'apex-os-odl-bgpvpn-ha-baremetal-master'
+              node-parameters: false
+              current-parameters: false
+              predefined-parameters: |
+                OPNFV_CLEAN=yes
+              kill-phase-on: NEVER
+              abort-all-job: true
+              git-revision: false
+            - name: 'apex-os-odl-bgpvpn-noha-baremetal-master'
               node-parameters: false
               current-parameters: false
               predefined-parameters: |
               kill-phase-on: NEVER
               abort-all-job: true
               git-revision: false
-            - name: 'apex-os-odl-queens-ha-baremetal-master'
+            - name: 'apex-os-odl-bgpvpn_queens-ha-baremetal-master'
+              node-parameters: false
+              current-parameters: false
+              predefined-parameters: |
+                OPNFV_CLEAN=yes
+              kill-phase-on: NEVER
+              abort-all-job: true
+              git-revision: false
+            - name: 'apex-os-odl-bgpvpn_queens-noha-baremetal-master'
+              node-parameters: false
+              current-parameters: false
+              predefined-parameters: |
+                OPNFV_CLEAN=yes
+              kill-phase-on: NEVER
+              abort-all-job: true
+              git-revision: false
+            - name: 'apex-os-odl-sfc-ha-baremetal-master'
+              node-parameters: false
+              current-parameters: false
+              predefined-parameters: |
+                OPNFV_CLEAN=yes
+              kill-phase-on: NEVER
+              abort-all-job: true
+              git-revision: false
+            - name: 'apex-os-odl-sfc-noha-baremetal-master'
+              node-parameters: false
+              current-parameters: false
+              predefined-parameters: |
+                OPNFV_CLEAN=yes
+              kill-phase-on: NEVER
+              abort-all-job: true
+              git-revision: false
+            - name: 'apex-os-odl-sfc_rocky-ha-baremetal-master'
+              node-parameters: false
+              current-parameters: false
+              predefined-parameters: |
+                OPNFV_CLEAN=yes
+              kill-phase-on: NEVER
+              abort-all-job: true
+              git-revision: false
+            - name: 'apex-os-odl-sfc_rocky-noha-baremetal-master'
+              node-parameters: false
+              current-parameters: false
+              predefined-parameters: |
+                OPNFV_CLEAN=yes
+              kill-phase-on: NEVER
+              abort-all-job: true
+              git-revision: false
+            - name: 'apex-os-nosdn-calipso-noha-baremetal-master'
+              node-parameters: false
+              current-parameters: false
+              predefined-parameters: |
+                OPNFV_CLEAN=yes
+              kill-phase-on: NEVER
+              abort-all-job: true
+              git-revision: false
+            - name: 'apex-os-nosdn-calipso_rocky-noha-baremetal-master'
+              node-parameters: false
+              current-parameters: false
+              predefined-parameters: |
+                OPNFV_CLEAN=yes
+              kill-phase-on: NEVER
+              abort-all-job: true
+              git-revision: false
+            - name: 'apex-os-ovn-nofeature-ha-baremetal-master'
+              node-parameters: false
+              current-parameters: false
+              predefined-parameters: |
+                OPNFV_CLEAN=yes
+              kill-phase-on: NEVER
+              abort-all-job: true
+              git-revision: false
+            - name: 'apex-os-ovn-rocky-ha-baremetal-master'
               node-parameters: false
               current-parameters: false
               predefined-parameters: |
               kill-phase-on: NEVER
               abort-all-job: true
               git-revision: false
-            - name: 'apex-os-nosdn-calipso-noha-baremetal-euphrates'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
 - builder:
     name: 'apex-upload-artifact'
     builders:
 
 #######################
 # trigger macros
+# timed is in cron format: 'minute hour day-of-month month day-of-week'
 ########################
 - trigger:
     name: 'apex-master'
     triggers:
       - timed: '0 0 1-31/2 * *'
 
+- trigger:
+    name: 'apex-gambia'
+    triggers:
+      - timed: '0 4 2-30/2 * *'
+
 - trigger:
     name: 'apex-fraser'
     triggers:
     name: 'apex-danube'
     triggers:
       - timed: '0 3 1 1 7'
+
+- trigger:
+    name: 'master-csit-master'
+    triggers:
+      - timed: '0 5 * * *'
+
+- trigger:
+    name: 'master-csit-rocky'
+    triggers:
+      - timed: '0 5 * * *'
+
+- trigger:
+    name: 'master-csit-queens'
+    triggers:
+      - timed: ''
+
+- trigger:
+    name: 'gambia-csit-master'
+    triggers:
+      - timed: ''
+
+- trigger:
+    name: 'gambia-csit-rocky'
+    triggers:
+      - timed: ''
+
+- trigger:
+    name: 'gambia-csit-queens'
+    triggers:
+      - timed: '0 5 * * *'
+
+- trigger:
+    name: 'fraser-csit-master'
+    triggers:
+      - timed: ''
+
+- trigger:
+    name: 'fraser-csit-rocky'
+    triggers:
+      - timed: ''
+
+- trigger:
+    name: 'fraser-csit-queens'
+    triggers:
+      - timed: ''
+
+- trigger:
+    name: 'euphrates-csit-master'
+    triggers:
+      - timed: ''
+
+- trigger:
+    name: 'euphrates-csit-rocky'
+    triggers:
+      - timed: ''
+
+- trigger:
+    name: 'euphrates-csit-queens'
+    triggers:
+      - timed: ''
+
+- trigger:
+    name: 'danube-csit-master'
+    triggers:
+      - timed: ''
+
+- trigger:
+    name: 'danube-csit-rocky'
+    triggers:
+      - timed: ''
+
+- trigger:
+    name: 'danube-csit-queens'
+    triggers:
+      - timed: ''
+
+- trigger:
+    name: 'master-functest-master'
+    triggers:
+      - timed: '0 3 * * *'
+
+- trigger:
+    name: 'master-functest-rocky'
+    triggers:
+      - timed: '0 3 * * *'
+
+- trigger:
+    name: 'master-functest-queens'
+    triggers:
+      - timed: ''
+
+- trigger:
+    name: 'gambia-functest-master'
+    triggers:
+      - timed: ''
+
+- trigger:
+    name: 'gambia-functest-rocky'
+    triggers:
+      - timed: ''
+
+- trigger:
+    name: 'gambia-functest-queens'
+    triggers:
+      - timed: '0 3 * * *'
+
+- trigger:
+    name: 'fraser-functest-master'
+    triggers:
+      - timed: ''
+
+- trigger:
+    name: 'fraser-functest-rocky'
+    triggers:
+      - timed: ''
+
+- trigger:
+    name: 'fraser-functest-queens'
+    triggers:
+      - timed: ''
+
+- trigger:
+    name: 'euphrates-functest-master'
+    triggers:
+      - timed: ''
+
+- trigger:
+    name: 'euphrates-functest-rocky'
+    triggers:
+      - timed: ''
+
+- trigger:
+    name: 'euphrates-functest-queens'
+    triggers:
+      - timed: ''
+
+- trigger:
+    name: 'danube-functest-master'
+    triggers:
+      - timed: ''
+
+- trigger:
+    name: 'danube-functest-rocky'
+    triggers:
+      - timed: ''
+
+- trigger:
+    name: 'danube-functest-queens'
+    triggers:
+      - timed: ''
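
Note on the trigger macros above: the timed strings follow the cron field order noted in the comment. For example, the existing master daily trigger and the new gambia one appear staggered so the two branches do not deploy on the same day:

    # '0 0 1-31/2 * *'  -> 00:00 on odd days of the month  (apex-master)
    # '0 4 2-30/2 * *'  -> 04:00 on even days of the month (apex-gambia)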
index 5230e7a..7203cb4 100644 (file)
@@ -8,12 +8,13 @@
       - 'apex-virtual-{stream}'
       - 'apex-deploy-{platform}-{stream}'
       - 'apex-daily-{stream}'
-      - 'apex-csit-promote-daily-{stream}-{os_version}'
+      - 'apex-{snap_type}-promote-daily-{stream}-os-{os_version}-{topology}'
       - 'apex-fdio-promote-daily-{stream}'
       - 'apex-{scenario}-baremetal-{scenario_stream}'
       - 'apex-testsuite-{scenario}-baremetal-{scenario_stream}'
       - 'apex-upload-snapshot'
       - 'apex-create-snapshot'
+      - 'apex-fetch-snap-info'
       - 'apex-flex-daily-os-nosdn-nofeature-ha-{stream}'
       - 'apex-dovetail-daily-os-nosdn-nofeature-ha-baremetal-danube'
     # stream:    branch with - in place of / (eg. stable-arno)
           build-slave: 'apex-build-master'
           virtual-slave: 'apex-virtual-master'
           baremetal-slave: 'apex-baremetal-master'
-          verify-scenario: 'os-odl-nofeature-ha'
+          verify-scenario: 'os-nosdn-nofeature-noha'
           scenario_stream: 'master'
           disable_daily: false
           disable_promote: false
+      - gambia: &gambia
+          branch: 'stable/gambia'
+          gs-pathname: '/gambia'
+          build-slave: 'apex-build-master'
+          virtual-slave: 'apex-virtual-master'
+          baremetal-slave: 'apex-baremetal-master'
+          verify-scenario: 'os-nosdn-nofeature-ha'
+          scenario_stream: 'gambia'
+          disable_daily: false
+          disable_promote: false
       - fraser: &fraser
           branch: 'stable/fraser'
           gs-pathname: '/fraser'
           build-slave: 'apex-build-master'
           virtual-slave: 'apex-virtual-master'
           baremetal-slave: 'apex-baremetal-master'
-          verify-scenario: 'os-odl-nofeature-ha'
+          verify-scenario: 'os-nosdn-nofeature-ha'
           scenario_stream: 'fraser'
           disable_daily: false
           disable_promote: true
       - 'virtual'
 
     os_version:
-      - 'pike'
-      - 'queens'
-      - 'master'
-
-
+      - 'queens':
+          os_scenario: 'nofeature'
+          odl_branch: 'stable/oxygen'
+      - 'rocky':
+          os_scenario: 'rocky'
+          odl_branch: 'stable/oxygen'
+      - 'master':
+          os_scenario: 'nofeature'
+          odl_branch: 'stable/fluorine'
+
+    topology:
+      - 'noha'
+      - 'ha'
+      - 'noha-allinone'
+
+    snap_type:
+      - csit:
+          sdn: 'odl'
+      - functest:
+          sdn: 'nosdn'
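+    # e.g. these axes expand the promote job template below into names like 'apex-csit-promote-daily-master-os-rocky-noha'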
 # Fetch Logs Job
 - job-template:
     name: 'apex-fetch-logs-{stream}'
       - logrotate-default
       - build-blocker:
           use-build-blocker: false
-          block-level: 'NODE'
+          blocking-level: 'NODE'
           blocking-jobs:
             - 'apex-deploy.*'
       - throttle:
       - logrotate-default
       - build-blocker:
           use-build-blocker: true
-          block-level: 'NODE'
+          blocking-level: 'NODE'
           blocking-jobs:
             - 'apex-deploy.*'
             - 'functest.*'
             - 'yardstick.*'
             - 'dovetail.*'
             - 'storperf.*'
+            - 'odl-netvirt.*'
       - throttle:
           max-per-node: 1
           max-total: 10
     parameters:
       - '{project}-defaults'
       - '{project}-virtual-{stream}-defaults'
+      - 'functest-suite-parameter'
       - project-parameter:
           project: '{project}'
           branch: '{branch}'
       - logrotate-default
       - build-blocker:
           use-build-blocker: true
-          block-level: 'NODE'
+          blocking-level: 'NODE'
           blocking-jobs:
             - 'apex-runner.*'
-            - 'apex-.*-promote.*'
             - 'apex-run.*'
             - 'apex-virtual-.*'
             - 'apex-verify-gate-.*'
+            - 'odl-netvirt.*'
+            - 'apex-.*-promote.*'
       - throttle:
           max-per-node: 1
           max-total: 10
                 GERRIT_REFSPEC=$GERRIT_REFSPEC
                 GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
                 GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+                PROMOTE=$PROMOTE
               node-parameters: true
               kill-phase-on: FAILURE
               abort-all-job: true
               current-parameters: false
               predefined-parameters: |
                 DEPLOY_SCENARIO=$DEPLOY_SCENARIO
-                FUNCTEST_SUITE_NAME=healthcheck
+                FUNCTEST_MODE=$FUNCTEST_MODE
+                FUNCTEST_SUITE_NAME=$FUNCTEST_SUITE_NAME
                 GERRIT_BRANCH=$GERRIT_BRANCH
                 GERRIT_REFSPEC=$GERRIT_REFSPEC
                 GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
       - logrotate-default
       - build-blocker:
           use-build-blocker: true
-          block-level: 'NODE'
+          blocking-level: 'NODE'
           blocking-jobs:
             - 'apex-verify.*'
             - 'apex-runner.*'
               kill-phase-on: NEVER
               abort-all-job: true
               git-revision: false
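+      # apex-functest-scenario.sh writes a 'functest_scenario' properties file (presumably DEPLOY_SCENARIO/DOCKER_TAG) that the inject step below loads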
+      - shell:
+          !include-raw-escape: ./apex-functest-scenario.sh
+      - inject:
+          properties-file: functest_scenario
+          override-build-parameters: true
       - multijob:
           name: 'OPNFV Test Suite'
           condition: ALWAYS
             - name: 'apex-testsuite-{scenario}-baremetal-{scenario_stream}'
               node-parameters: true
               current-parameters: false
-              predefined-parameters:
+              predefined-parameters: |
                 DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+                DOCKER_TAG=$DOCKER_TAG
               kill-phase-on: NEVER
               abort-all-job: true
               git-revision: false
           name: DEPLOY_SCENARIO
           default: '{scenario}'
           description: "Scenario to deploy with."
+      - string:
+          name: DOCKER_TAG
+          default: ''
+          description: Default docker tag to pass to functest
 
     properties:
       - logrotate-default
       - build-blocker:
           use-build-blocker: true
-          block-level: 'NODE'
+          blocking-level: 'NODE'
           blocking-jobs:
             - 'apex-verify.*'
             - 'apex-runner.*'
-            - 'apex-.*-promote.*'
             - 'apex-run.*'
             - 'apex-testsuite-.+-baremetal-.+'
       - throttle:
             - name: 'functest-apex-baremetal-daily-{scenario_stream}'
               node-parameters: true
               current-parameters: false
-              predefined-parameters:
+              predefined-parameters: |
                 DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+                DOCKER_TAG=$DOCKER_TAG
               kill-phase-on: NEVER
               abort-all-job: false
               git-revision: false
           name: Dovetail-proposed_tests
           condition: ALWAYS
           projects:
-            - name: 'dovetail-apex-baremetal-proposed_tests-{scenario_stream}'
+            - name: 'dovetail-apex-baremetal-default-mandatory-{scenario_stream}'
               node-parameters: true
               current-parameters: false
               predefined-parameters:
                 DEPLOY_SCENARIO=$DEPLOY_SCENARIO
               kill-phase-on: NEVER
-              enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-(nosdn-nofeature|odl-bgpvpn)-ha/
-                                 && $BUILD_NUMBER % 2 == 1"
+              enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-(nosdn-nofeature|odl-bgpvpn)-ha/"
               abort-all-job: false
               git-revision: false
       - multijob:
           name: Dovetail-default
           condition: ALWAYS
           projects:
-            - name: 'dovetail-apex-baremetal-default-{scenario_stream}'
+            - name: 'dovetail-apex-baremetal-default-optional-{scenario_stream}'
               node-parameters: true
               current-parameters: false
               predefined-parameters:
                 DEPLOY_SCENARIO=$DEPLOY_SCENARIO
               kill-phase-on: NEVER
-              enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-(nosdn-nofeature|odl-bgpvpn)-ha/
-                                 && $BUILD_NUMBER % 2 == 0"
+              enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-(nosdn-nofeature|odl-bgpvpn)-ha/"
               abort-all-job: false
               git-revision: false
       - multijob:
       - logrotate-default
       - build-blocker:
           use-build-blocker: true
-          block-level: 'NODE'
+          blocking-level: 'NODE'
           blocking-jobs:
             - 'apex-daily.*'
 
               git-revision: true
       - apex-builder-{stream}
 
+# snapshot info fetch
+- job-template:
+    name: 'apex-fetch-snap-info'
+
+    disabled: false
+
+    parameters:
+      - '{project}-defaults'
+
+    builders:
+      - shell:
+          !include-raw-escape: ./apex-fetch-snap-info.sh
+
 # snapshot create
 - job-template:
     name: 'apex-create-snapshot'
 
-    # Job template for clean
-    #
-    # Required Variables:
-    #     stream:    branch with - in place of / (eg. stable)
-
     disabled: false
 
+    parameters:
+      - '{project}-defaults'
+
     builders:
       - shell:
           !include-raw-escape: ./apex-snapshot-create.sh
 - job-template:
     name: 'apex-upload-snapshot'
 
-    # Job template for clean
-    #
-    # Required Variables:
-    #     stream:    branch with - in place of / (eg. stable)
-
     disabled: false
 
+    parameters:
+      - '{project}-defaults'
+
     builders:
       - inject:
           properties-content: ARTIFACT_TYPE=snapshot
 
 # CSIT promote
 - job-template:
-    name: 'apex-csit-promote-daily-{stream}-{os_version}'
+    name: 'apex-{snap_type}-promote-daily-{stream}-os-{os_version}-{topology}'
 
     # Job template for promoting CSIT Snapshots
     #
           branch: '{branch}'
       - apex-parameter:
           gs-pathname: '{gs-pathname}'
-
+      - string:
+          name: ARTIFACT_VERSION
+          default: dev
+          description: "Used for overriding the ARTIFACT_VERSION"
+      - string:
+          name: PROMOTE
+          default: 'True'
+          description: "Used for overriding the PROMOTE"
+      - string:
+          name: GS_URL
+          default: 'artifacts.opnfv.org/apex/{os_version}/{topology}'
+          description: "User for overriding GS_URL from apex params"
+      - string:
+          name: OS_VERSION
+          default: '{os_version}'
+          description: OpenStack version short name
+      - string:
+          name: ODL_BRANCH
+          default: '{odl_branch}'
+          description: ODL branch being used
+      - string:
+          name: FORCE_PROMOTE
+          default: 'False'
+          description: "Used to force promotion and skip CSIT"
+      - string:
+          name: SNAP_TYPE
+          default: '{snap_type}'
+          description: Type of snapshot to promote
     properties:
       - build-blocker:
           use-build-blocker: true
-          block-level: 'NODE'
+          blocking-level: 'NODE'
           blocking-jobs:
             - 'apex-verify.*'
-            - 'apex-deploy.*'
             - 'apex-runner.*'
             - 'apex-daily.*'
+            - 'apex-.*-promote.*'
+            - 'odl-netvirt.*'
+      - throttle:
+          max-per-node: 1
+          max-total: 10
+          option: 'project'
 
     triggers:
-      - timed: '0 12 * * 0'
+      - '{stream}-{snap_type}-{os_version}'
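+      # resolves to one of the stream/snap_type/os_version trigger macros defined below, e.g. 'master-csit-rocky'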
 
     builders:
       - multijob:
-          name: deploy-virtual
+          name: apex-virtual-deploy
           condition: SUCCESSFUL
           projects:
             - name: 'apex-deploy-virtual-{stream}'
-              current-parameters: false
+              current-parameters: true
               predefined-parameters: |
-                DEPLOY_SCENARIO=os-odl-{os_version}-noha
+                DEPLOY_SCENARIO=os-{sdn}-{os_scenario}-{topology}
                 OPNFV_CLEAN=yes
                 GERRIT_BRANCH=$GERRIT_BRANCH
                 GERRIT_REFSPEC=$GERRIT_REFSPEC
                 GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
                 GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-                PROMOTE=True
+              node-parameters: true
+              kill-phase-on: FAILURE
+              abort-all-job: true
+              git-revision: true
+      - multijob:
+          name: fetch snapshot info
+          condition: SUCCESSFUL
+          projects:
+            - name: 'apex-fetch-snap-info'
+              current-parameters: true
               node-parameters: true
               kill-phase-on: FAILURE
               abort-all-job: true
               git-revision: false
       - multijob:
-          name: functest-smoke
+          name: test phase
           condition: SUCCESSFUL
+          execution-type: SEQUENTIALLY
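+          # SNAP_TYPE gates the CSIT (cperf) vs functest runs below; apex-fetch-logs runs in either case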
           projects:
+            - name: cperf-apex-csit-master
+              predefined-parameters: |
+                ODL_BRANCH=$ODL_BRANCH
+                RC_FILE_PATH=/tmp/csit/overcloudrc
+                NODE_FILE_PATH=/tmp/csit/node.yaml
+                SSH_KEY_PATH=/tmp/csit/id_rsa
+                ODL_CONTAINERIZED=true
+                OS_VERSION=$OS_VERSION
+                SKIP_CSIT=$FORCE_PROMOTE
+                SNAP_TYPE=$SNAP_TYPE
+              node-parameters: true
+              kill-phase-on: NEVER
+              abort-all-job: false
+              enable-condition: "def m = '$SNAP_TYPE' ==~ /csit/"
+            - name: cperf-upload-logs-csit
+              predefined-parameters: |
+                ODL_BRANCH=$ODL_BRANCH
+                OS_VERSION=$OS_VERSION
+                SNAP_TYPE=$SNAP_TYPE
+              node-parameters: true
+              kill-phase-on: FAILURE
+              abort-all-job: false
+              enable-condition: "def m = '$SNAP_TYPE' ==~ /csit/"
             - name: 'functest-apex-virtual-suite-{stream}'
               current-parameters: false
               predefined-parameters: |
-                DEPLOY_SCENARIO=os-odl-{os_version}-noha
-                FUNCTEST_SUITE_NAME=tempest_smoke_serial
+                DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+                DOCKER_TAG=$DOCKER_TAG
+                FUNCTEST_SUITE_NAME=tempest_smoke
+                FUNCTEST_MODE=testcase
                 GERRIT_BRANCH=$GERRIT_BRANCH
                 GERRIT_REFSPEC=$GERRIT_REFSPEC
                 GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
                 GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
               node-parameters: true
-              kill-phase-on: FAILURE
+              kill-phase-on: NEVER
+              enable-condition: "def m = '$SNAP_TYPE' ==~ /functest/"
+              abort-all-job: true
+              git-revision: false
+            - name: 'apex-fetch-logs-{stream}'
+              current-parameters: false
+              predefined-parameters: |
+                GERRIT_BRANCH=$GERRIT_BRANCH
+                GERRIT_REFSPEC=$GERRIT_REFSPEC
+                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+                GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+              node-parameters: true
+              kill-phase-on: NEVER
               abort-all-job: true
               git-revision: false
       - multijob:
           condition: SUCCESSFUL
           projects:
             - name: 'apex-create-snapshot'
-              current-parameters: false
-              predefined-parameters: |
-                SNAP_TYPE=csit
+              current-parameters: true
               node-parameters: true
               kill-phase-on: FAILURE
               abort-all-job: true
           condition: SUCCESSFUL
           projects:
             - name: 'apex-upload-snapshot'
-              current-parameters: false
-              predefined-parameters: |
-                SNAP_TYPE=csit
+              current-parameters: true
               node-parameters: true
               kill-phase-on: FAILURE
               abort-all-job: true
     properties:
       - build-blocker:
           use-build-blocker: true
-          block-level: 'NODE'
+          blocking-level: 'NODE'
           blocking-jobs:
             - 'apex-verify.*'
             - 'apex-deploy.*'
 
     project-type: 'multijob'
 
-    disabled: false
+    disabled: true
 
     node: 'flex-pod2'
 
       - logrotate-default
       - build-blocker:
           use-build-blocker: true
-          block-level: 'NODE'
+          blocking-level: 'NODE'
           blocking-jobs:
             - 'apex-verify.*'
             - 'apex-runner.*'
       - logrotate-default
       - build-blocker:
           use-build-blocker: true
-          block-level: 'NODE'
+          blocking-level: 'NODE'
           blocking-jobs:
             - 'apex-verify.*'
             - 'apex-runner.*'
 
 #######################
 # trigger macros
+# timed uses cron syntax: 'minute hour day-of-month month day-of-week'
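+# e.g. '0 5 * * *' fires once a day at 05:00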
 ########################
 - trigger:
     name: 'apex-master'
     triggers:
       - timed: '0 0 1-31/2 * *'
 
+- trigger:
+    name: 'apex-gambia'
+    triggers:
+      - timed: '0 4 2-30/2 * *'
+
 - trigger:
     name: 'apex-fraser'
     triggers:
     triggers:
       - timed: '0 3 1 1 7'
 
+- trigger:
+    name: 'master-csit-master'
+    triggers:
+      - timed: '0 5 * * *'
+
+- trigger:
+    name: 'master-csit-rocky'
+    triggers:
+      - timed: '0 5 * * *'
+
+- trigger:
+    name: 'master-csit-queens'
+    triggers:
+      - timed: ''
+
+- trigger:
+    name: 'gambia-csit-master'
+    triggers:
+      - timed: ''
+
+- trigger:
+    name: 'gambia-csit-rocky'
+    triggers:
+      - timed: ''
+
+- trigger:
+    name: 'gambia-csit-queens'
+    triggers:
+      - timed: '0 5 * * *'
+
+- trigger:
+    name: 'fraser-csit-master'
+    triggers:
+      - timed: ''
+
+- trigger:
+    name: 'fraser-csit-rocky'
+    triggers:
+      - timed: ''
+
+- trigger:
+    name: 'fraser-csit-queens'
+    triggers:
+      - timed: ''
+
+- trigger:
+    name: 'euphrates-csit-master'
+    triggers:
+      - timed: ''
+
+- trigger:
+    name: 'euphrates-csit-rocky'
+    triggers:
+      - timed: ''
+
+- trigger:
+    name: 'euphrates-csit-queens'
+    triggers:
+      - timed: ''
+
+- trigger:
+    name: 'danube-csit-master'
+    triggers:
+      - timed: ''
+
+- trigger:
+    name: 'danube-csit-rocky'
+    triggers:
+      - timed: ''
+
+- trigger:
+    name: 'danube-csit-queens'
+    triggers:
+      - timed: ''
+
+- trigger:
+    name: 'master-functest-master'
+    triggers:
+      - timed: '0 3 * * *'
+
+- trigger:
+    name: 'master-functest-rocky'
+    triggers:
+      - timed: '0 3 * * *'
+
+- trigger:
+    name: 'master-functest-queens'
+    triggers:
+      - timed: ''
+
+- trigger:
+    name: 'gambia-functest-master'
+    triggers:
+      - timed: ''
+
+- trigger:
+    name: 'gambia-functest-rocky'
+    triggers:
+      - timed: ''
+
+- trigger:
+    name: 'gambia-functest-queens'
+    triggers:
+      - timed: '0 3 * * *'
+
+- trigger:
+    name: 'fraser-functest-master'
+    triggers:
+      - timed: ''
+
+- trigger:
+    name: 'fraser-functest-rocky'
+    triggers:
+      - timed: ''
+
+- trigger:
+    name: 'fraser-functest-queens'
+    triggers:
+      - timed: ''
+
+- trigger:
+    name: 'euphrates-functest-master'
+    triggers:
+      - timed: ''
+
+- trigger:
+    name: 'euphrates-functest-rocky'
+    triggers:
+      - timed: ''
+
+- trigger:
+    name: 'euphrates-functest-queens'
+    triggers:
+      - timed: ''
+
+- trigger:
+    name: 'danube-functest-master'
+    triggers:
+      - timed: ''
+
+- trigger:
+    name: 'danube-functest-rocky'
+    triggers:
+      - timed: ''
+
+- trigger:
+    name: 'danube-functest-queens'
+    triggers:
+      - timed: ''
index 8375f61..a221699 100644 (file)
@@ -1,24 +1,43 @@
 master:
+  - 'os-nosdn-nofeature-noha'
+  - 'os-nosdn-nofeature-ha'
+  - 'os-nosdn-nofeature-ha-ipv6'
   - 'os-odl-nofeature-noha'
   - 'os-odl-nofeature-ha'
-  - 'os-odl-queens-noha'
-  - 'os-odl-queens-ha'
-fraser:
+  - 'os-nosdn-rocky-noha'
+  - 'os-nosdn-rocky-ha'
+  - 'os-nosdn-rocky-ha-ipv6'
+  - 'os-odl-rocky-noha'
+  - 'os-odl-rocky-ha'
+  - 'k8s-nosdn-nofeature-noha'
+  - 'os-odl-bgpvpn-ha'
+  - 'os-odl-bgpvpn-noha'
+  - 'os-odl-bgpvpn_queens-ha'
+  - 'os-odl-bgpvpn_queens-noha'
+  - 'os-odl-sfc-ha'
+  - 'os-odl-sfc-noha'
+  - 'os-odl-sfc_rocky-ha'
+  - 'os-odl-sfc_rocky-noha'
+  - 'os-nosdn-calipso-noha'
+  - 'os-nosdn-calipso_rocky-noha'
+  - 'os-ovn-nofeature-ha'
+  - 'os-ovn-rocky-ha'
+gambia:
   - 'os-nosdn-nofeature-noha'
   - 'os-nosdn-nofeature-ha'
-  - 'os-odl-nofeature-ha'
+  - 'os-nosdn-nofeature-ha-ipv6'
   - 'os-odl-nofeature-noha'
+  - 'os-odl-nofeature-ha'
+  - 'k8s-nosdn-nofeature-noha'
   - 'os-odl-bgpvpn-ha'
-  - 'os-ovn-nofeature-noha'
-  - 'os-nosdn-fdio-noha'
-  - 'os-nosdn-fdio-ha'
-  - 'os-nosdn-bar-ha'
-  - 'os-nosdn-bar-noha'
-  - 'os-nosdn-ovs_dpdk-noha'
-  - 'os-nosdn-ovs_dpdk-ha'
-  - 'os-odl-sfc-noha'
+  - 'os-odl-bgpvpn-noha'
   - 'os-odl-sfc-ha'
+  - 'os-odl-sfc-noha'
   - 'os-nosdn-calipso-noha'
+  - 'os-ovn-nofeature-ha'
+fraser:
+  - 'os-nosdn-nofeature-ha'
+  - 'os-odl-bgpvpn-ha'
 euphrates:
   - 'os-nosdn-nofeature-noha'
   - 'os-nosdn-nofeature-ha'
@@ -37,7 +56,6 @@ euphrates:
   - 'os-nosdn-kvm_ovs_dpdk-ha'
   - 'os-odl-sfc-noha'
   - 'os-odl-sfc-ha'
-  - 'os-nosdn-calipso-noha'
 danube:
   - 'os-nosdn-nofeature-noha'
   - 'os-nosdn-nofeature-ha'
index b8ffc16..2ab1a6d 100644 (file)
     fraser: &fraser
       stream: fraser
       branch: 'stable/{stream}'
+      disabled: false
+      gs-pathname: '/{stream}'
+    gambia: &gambia
+      stream: gambia
+      branch: 'stable/{stream}'
       gs-pathname: '/{stream}'
       disabled: false
     # -------------------------------
     # -------------------------------
     # CI POD's
     # -------------------------------
-    #        fraser
-    # -------------------------------
+    # yamllint disable rule:key-duplicates
     pod:
-      # yamllint disable rule:key-duplicates
+      # -------------------------------
+      #        fraser
+      # -------------------------------
       - armband-baremetal:
           <<: *baremetal
           <<: *fraser
+      # -------------------------------
+      #        gambia
+      # -------------------------------
+      - armband-baremetal:
+          <<: *baremetal
+          <<: *gambia
       - armband-virtual:
           <<: *virtual
-          <<: *fraser
+          <<: *gambia
       # -------------------------------
       #        master
       # -------------------------------
           auto-trigger-name: '{installer}-{scenario}-{pod}-{stream}-trigger'
       - 'os-nosdn-ovs-ha':
           auto-trigger-name: '{installer}-{scenario}-{pod}-{stream}-trigger'
+      - 'os-nosdn-vpp-ha':
+          auto-trigger-name: '{installer}-{scenario}-{pod}-{stream}-trigger'
 
       # NOHA scenarios
       - 'os-nosdn-nofeature-noha':
           auto-trigger-name: '{installer}-{scenario}-{pod}-{stream}-trigger'
 
+    exclude:
+      # Dovetail (and only it) should run against Fraser HA baremetal scenarios
+      - scenario: os-nosdn-nofeature-noha
+        stream: fraser
+      - scenario: os-ovn-nofeature-ha
+        stream: fraser
+      - scenario: os-nosdn-vpp-ha
+        stream: fraser
+
     jobs:
       - '{installer}-{scenario}-{pod}-daily-{stream}'
       - '{installer}-deploy-{pod}-daily-{stream}'
           blocking-jobs:
             - '{installer}-os-.*?-{pod}-daily-.*'
             - 'armband-verify-.*'
-          block-level: 'NODE'
+          blocking-level: 'NODE'
 
     wrappers:
       - build-name:
               DEPLOY_SCENARIO={scenario}
             same-node: true
             block: true
-      - trigger-builds:
-          - project: 'functest-{installer}-{pod}-arm-daily-{stream}'
-            current-parameters: false
-            predefined-parameters:
-              DEPLOY_SCENARIO={scenario}
-            same-node: true
-            block: true
-            block-thresholds:
-              build-step-failure-threshold: 'never'
-              failure-threshold: 'never'
-              unstable-threshold: 'FAILURE'
-      - trigger-builds:
-          - project: 'yardstick-{installer}-{pod}-daily-{stream}'
-            current-parameters: false
-            predefined-parameters:
-              DEPLOY_SCENARIO={scenario}
-            block: true
-            same-node: true
-            block-thresholds:
-              build-step-failure-threshold: 'never'
-              failure-threshold: 'never'
-              unstable-threshold: 'FAILURE'
+      - conditional-step:
+          condition-kind: not
+          condition-operand:
+            condition-kind: regex-match
+            regex: 'fraser'
+            label: '{stream}'
+          steps:
+            - trigger-builds:
+                - project: 'functest-{installer}-{pod}-daily-{stream}'
+                  current-parameters: false
+                  predefined-parameters:
+                    DEPLOY_SCENARIO={scenario}
+                  same-node: true
+                  block: true
+                  block-thresholds:
+                    build-step-failure-threshold: 'never'
+                    failure-threshold: 'never'
+                    unstable-threshold: 'FAILURE'
+      - conditional-step:
+          condition-kind: not
+          condition-operand:
+            condition-kind: regex-match
+            regex: 'fraser'
+            label: '{stream}'
+          steps:
+            - trigger-builds:
+                - project: 'yardstick-{installer}-{pod}-daily-{stream}'
+                  current-parameters: false
+                  predefined-parameters:
+                    DEPLOY_SCENARIO={scenario}
+                  block: true
+                  same-node: true
+                  block-thresholds:
+                    build-step-failure-threshold: 'never'
+                    failure-threshold: 'never'
+                    unstable-threshold: 'FAILURE'
       # 1.here the stream means the SUT stream, dovetail stream is defined in its own job
       # 2.testsuite proposed_tests here is for new test cases planning to add into OVP
       # 3.run proposed_tests and default against ha scenarios on the fraser stream
             - condition-kind: regex-match
               regex: '.*-ha'
               label: '{scenario}'
-            - condition-kind: day-of-week
-              day-selector: select-days
-              days:
-                SAT: true
-              use-build-time: true
+            - condition-kind: regex-match
+              regex: 'fraser'
+              label: '{stream}'
           steps:
             - trigger-builds:
                 - project: 'dovetail-{installer}-{pod}-proposed_tests-{stream}'
             - condition-kind: regex-match
               regex: '.*-ha'
               label: '{scenario}'
-            - condition-kind: day-of-week
-              day-selector: select-days
-              days:
-                SUN: true
-              use-build-time: true
+            - condition-kind: regex-match
+              regex: 'fraser'
+              label: '{stream}'
           steps:
             - trigger-builds:
                 - project: 'dovetail-{installer}-{pod}-default-{stream}'
                     failure-threshold: 'never'
                     unstable-threshold: 'FAILURE'
       # Armband uses Fuel's log collection project job, no need to duplicate
-      - conditional-step:
-          condition-kind: not
-          condition-operand:
-            condition-kind: regex-match
-            regex: 'danube'
-            label: '{stream}'
-          steps:
-            - trigger-builds:
-                - project: 'fuel-collect-logs-{deploy-type}-daily-{stream}'
-                  current-parameters: false
-                  predefined-parameters:
-                    DEPLOY_SCENARIO={scenario}
-                  block: true
-                  same-node: true
-                  block-thresholds:
-                    build-step-failure-threshold: 'never'
-                    failure-threshold: 'never'
-                    unstable-threshold: 'FAILURE'
+      - trigger-builds:
+          - project: 'fuel-collect-logs-{deploy-type}-daily-{stream}'
+            current-parameters: false
+            predefined-parameters:
+              DEPLOY_SCENARIO={scenario}
+            block: true
+            same-node: true
+            block-thresholds:
+              build-step-failure-threshold: 'never'
+              failure-threshold: 'never'
+              unstable-threshold: 'FAILURE'
 
 - job-template:
     name: '{installer}-deploy-{pod}-daily-{stream}'
           blocking-jobs:
             - '{installer}-deploy-{pod}-daily-{stream}'
             - '{installer}-deploy-generic-daily-.*'
-          block-level: 'NODE'
+          blocking-level: 'NODE'
 
     parameters:
       - project-parameter:
       - string:
           name: DEPLOY_SCENARIO
           default: 'os-odl-nofeature-ha'
+      - string:
+          name: PROJECT
+          default: '{project}'
 
     scm:
       - git-scm
 - trigger:
     name: 'fuel-os-nosdn-nofeature-ha-armband-baremetal-master-trigger'
     triggers:
-      - timed: '0 1 * * 1'
+      - timed: '0 1 * * 2,7'
 - trigger:
     name: 'fuel-os-nosdn-nofeature-noha-armband-baremetal-master-trigger'
     triggers:
 - trigger:
     name: 'fuel-os-odl-nofeature-ha-armband-baremetal-master-trigger'
     triggers:
-      - timed: '0 1 * * 3,7'
+      - timed: '0 1 * * 4,6'
 - trigger:
     name: 'fuel-os-ovn-nofeature-ha-armband-baremetal-master-trigger'
     triggers:
-      - timed: ''
+      - timed: '0 1 * * 1'
 - trigger:
     name: 'fuel-os-nosdn-ovs-ha-armband-baremetal-master-trigger'
     triggers:
-      - timed: '0 1 * * 4'
+      - timed: '0 1 * * 3,5'
+- trigger:
+    name: 'fuel-os-nosdn-vpp-ha-armband-baremetal-master-trigger'
+    triggers:
+      - timed: ''
 # ---------------------------------------------------------------------
-# Enea Armband CI Baremetal Triggers running against fraser branch
+# Enea Armband CI Baremetal Triggers running against gambia branch
 # ---------------------------------------------------------------------
 - trigger:
-    name: 'fuel-os-nosdn-nofeature-ha-armband-baremetal-fraser-trigger'
+    name: 'fuel-os-nosdn-nofeature-ha-armband-baremetal-gambia-trigger'
     triggers:
-      - timed: '0 1 * * 2'
+      - timed: '10 1 * * 1,3'
 - trigger:
-    name: 'fuel-os-nosdn-nofeature-noha-armband-baremetal-fraser-trigger'
+    name: 'fuel-os-nosdn-nofeature-noha-armband-baremetal-gambia-trigger'
     triggers:
       - timed: ''
 - trigger:
-    name: 'fuel-os-odl-nofeature-ha-armband-baremetal-fraser-trigger'
+    name: 'fuel-os-odl-nofeature-ha-armband-baremetal-gambia-trigger'
+    triggers:
+      - timed: '10 1 * * 5,7'
+- trigger:
+    name: 'fuel-os-ovn-nofeature-ha-armband-baremetal-gambia-trigger'
+    triggers:
+      - timed: '10 1 * * 2'
+- trigger:
+    name: 'fuel-os-nosdn-ovs-ha-armband-baremetal-gambia-trigger'
     triggers:
-      - timed: '0 1 * * 5'
+      - timed: '10 1 * * 4,6'
 - trigger:
-    name: 'fuel-os-ovn-nofeature-ha-armband-baremetal-fraser-trigger'
+    name: 'fuel-os-nosdn-vpp-ha-armband-baremetal-gambia-trigger'
     triggers:
       - timed: ''
+# -------------------------------------------------------------------
+# Enea Armband CI Baremetal Triggers running against fraser branch (for Dovetail)
+# -------------------------------------------------------------------
+- trigger:
+    name: 'fuel-os-nosdn-nofeature-ha-armband-baremetal-fraser-trigger'
+    triggers:
+      - timed: '5 13 * * 6,7'
+- trigger:
+    name: 'fuel-os-odl-nofeature-ha-armband-baremetal-fraser-trigger'
+    triggers:
+      - timed: '0 13 * * 6'
 - trigger:
     name: 'fuel-os-nosdn-ovs-ha-armband-baremetal-fraser-trigger'
     triggers:
-      - timed: '0 1 * * 6'
-
+      - timed: '0 13 * * 7'
 # --------------------------------------------------------------
 # Enea Armband CI Virtual Triggers running against master branch
 # --------------------------------------------------------------
     name: 'fuel-os-nosdn-ovs-ha-armband-virtual-master-trigger'
     triggers:
       - timed: ''
+- trigger:
+    name: 'fuel-os-nosdn-vpp-ha-armband-virtual-master-trigger'
+    triggers:
+      - timed: ''
 # -------------------------------------------------------------------
-# Enea Armband CI Virtual Triggers running against fraser branch
+# Enea Armband CI Virtual Triggers running against gambia branch
 # -------------------------------------------------------------------
 - trigger:
-    name: 'fuel-os-nosdn-nofeature-ha-armband-virtual-fraser-trigger'
+    name: 'fuel-os-nosdn-nofeature-ha-armband-virtual-gambia-trigger'
+    triggers:
+      - timed: ''
+- trigger:
+    name: 'fuel-os-nosdn-nofeature-noha-armband-virtual-gambia-trigger'
     triggers:
       - timed: ''
 - trigger:
-    name: 'fuel-os-nosdn-nofeature-noha-armband-virtual-fraser-trigger'
+    name: 'fuel-os-odl-nofeature-ha-armband-virtual-gambia-trigger'
     triggers:
       - timed: ''
 - trigger:
-    name: 'fuel-os-odl-nofeature-ha-armband-virtual-fraser-trigger'
+    name: 'fuel-os-ovn-nofeature-ha-armband-virtual-gambia-trigger'
     triggers:
       - timed: ''
 - trigger:
-    name: 'fuel-os-ovn-nofeature-ha-armband-virtual-fraser-trigger'
+    name: 'fuel-os-nosdn-ovs-ha-armband-virtual-gambia-trigger'
     triggers:
       - timed: ''
 - trigger:
-    name: 'fuel-os-nosdn-ovs-ha-armband-virtual-fraser-trigger'
+    name: 'fuel-os-nosdn-vpp-ha-armband-virtual-gambia-trigger'
     triggers:
       - timed: ''
diff --git a/jjb/armband/armband-rtd-jobs.yaml b/jjb/armband/armband-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..1304008
--- /dev/null
@@ -0,0 +1,20 @@
+---
+- project:
+    name: armband-rtd
+    project: armband
+    project-name: armband
+
+    project-pattern: 'armband'
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-armband/47349/'
+    rtd-token: 'b8f7de9a1f2baf063ccc6afb52dbc8e6308b6ab5'
+
+    stream:
+      - master:
+          branch: '{stream}'
+          disabled: false
+      - gambia:
+          branch: 'stable/{stream}'
+          disabled: false
+
+    jobs:
+      - '{project-name}-rtd-jobs'
index 08cf3c0..57d80ae 100644 (file)
@@ -4,7 +4,6 @@
 
     project: 'armband'
 
-    installer: 'fuel'
     #####################################
     # branch definitions
     #####################################
           branch: '{stream}'
           gs-pathname: ''
           disabled: false
-      - fraser:
+      - gambia:
           branch: 'stable/{stream}'
           gs-pathname: '/{stream}'
           disabled: false
     #####################################
-    # patch verification phases
-    #####################################
-    phase:
-      - 'deploy-virtual':
-          slave-label: 'armband-virtual'
-    #####################################
     # jobs
     #####################################
     jobs:
       - 'armband-verify-{stream}'
-      - 'armband-verify-{phase}-{stream}'
+
 #####################################
 # job templates
 #####################################
 - job-template:
     name: 'armband-verify-{stream}'
 
-    project-type: multijob
-
     disabled: '{obj:disabled}'
 
     concurrent: true
       - logrotate-default
       - throttle:
           enabled: true
-          max-total: 4
+          max-total: 2
           option: 'project'
-      - build-blocker:
-          use-build-blocker: true
-          blocking-jobs:
-            - 'fuel-os-.*?-virtual-daily-.*'
-            - 'armband-verify-.*'
-          block-level: 'NODE'
 
     scm:
       - git-scm-gerrit
       - project-parameter:
           project: '{project}'
           branch: '{branch}'
-      - 'armband-virtual-defaults':
-          installer: '{installer}'
-      - '{installer}-defaults':
-          gs-pathname: '{gs-pathname}'
-      - string:
-          name: DEPLOY_SCENARIO
-          default: 'os-nosdn-nofeature-ha'
+      - 'opnfv-build-ubuntu-arm-defaults'
 
     builders:
       - description-setter:
           description: "Built on $NODE_NAME"
-      - multijob:
-          name: deploy-virtual
-          condition: SUCCESSFUL
-          projects:
-            - name: 'armband-verify-deploy-virtual-{stream}'
-              current-parameters: false
-              predefined-parameters: |
-                BRANCH=$BRANCH
-                GERRIT_REFSPEC=$GERRIT_REFSPEC
-                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-                GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-              node-parameters: false
-              kill-phase-on: FAILURE
-              abort-all-job: true
-
-      - multijob:
-          name: smoke-test
-          condition: SUCCESSFUL
-          projects:
-            # Use Functest job definition from jjb/functest/functest-daily-jobs
-            - name: 'functest-fuel-armband-virtual-suite-{stream}'
-              current-parameters: false
-              predefined-parameters: |
-                FUNCTEST_MODE=tier
-                FUNCTEST_TIER=healthcheck
-                # Should be in sync with fuel-deploy.sh default scenario
-                DEPLOY_SCENARIO=$DEPLOY_SCENARIO
-                BRANCH=$BRANCH
-                GERRIT_REFSPEC=$GERRIT_REFSPEC
-                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-                GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-              node-parameters: true
-              kill-phase-on: NEVER
-              abort-all-job: true
-            - name: 'functest-fuel-armband-virtual-suite-{stream}'
-              current-parameters: false
-              predefined-parameters: |
-                FUNCTEST_MODE=testcase
-                FUNCTEST_SUITE_NAME=vping_ssh
-                # Should be in sync with fuel-deploy.sh default scenario
-                DEPLOY_SCENARIO=$DEPLOY_SCENARIO
-                BRANCH=$BRANCH
-                GERRIT_REFSPEC=$GERRIT_REFSPEC
-                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-                GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-              node-parameters: true
-              kill-phase-on: NEVER
-              abort-all-job: true
-
-- job-template:
-    name: 'armband-verify-{phase}-{stream}'
-
-    disabled: '{obj:disabled}'
-
-    concurrent: true
-
-    properties:
-      - logrotate-default
-      - throttle:
-          enabled: true
-          max-total: 2
-          max-per-node: 1
-          option: 'project'
-      - build-blocker:
-          use-build-blocker: true
-          blocking-jobs:
-            - 'armband-verify-deploy-.*'
-          block-level: 'NODE'
-
-    scm:
-      - git-scm-gerrit
+      - 'armband-verify-builder-macro'
 
-    wrappers:
-      - ssh-agent-wrapper
-      - timeout:
-          timeout: 360
-          fail: true
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - '{slave-label}-defaults'
-      - 'armband-virtual-defaults':
-          installer: '{installer}'
-      - '{installer}-defaults':
-          gs-pathname: '{gs-pathname}'
-
-    builders:
-      - description-setter:
-          description: "Built on $NODE_NAME"
-      - '{project}-verify-{phase}-macro'
 #####################################
 # builder macros
 #####################################
 - builder:
-    name: 'armband-verify-deploy-virtual-macro'
+    name: 'armband-verify-builder-macro'
     builders:
-      - shell:
-          !include-raw: ../fuel/fuel-deploy.sh
+      - shell: |
+          #!/bin/bash
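+          # sanity-check that the Armband patch series imports cleanly (assumed behaviour of this Makefile target)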
+          make fuel-patches-import
diff --git a/jjb/auto/auto-rtd-jobs.yaml b/jjb/auto/auto-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..f23fe95
--- /dev/null
@@ -0,0 +1,20 @@
+---
+- project:
+    name: auto-rtd
+    project: auto
+    project-name: auto
+
+    project-pattern: 'auto'
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-auto/47350/'
+    rtd-token: '3c2277762678c97e59d24c83201194d6b56d7983'
+
+    stream:
+      - master:
+          branch: '{stream}'
+          disabled: false
+      - gambia:
+          branch: 'stable/{stream}'
+          disabled: false
+
+    jobs:
+      - '{project-name}-rtd-jobs'
index c28dc56..abba9c8 100644 (file)
@@ -1,8 +1,219 @@
 ---
+# jenkins job templates for Auto
 - project:
-    name: auto
+    name: 'auto-ci-jobs'
+    project: 'auto'
 
-    project: '{name}'
+    # -------------------------------
+    # BRANCH ANCHORS
+    # -------------------------------
+    stream:
+      - master:
+          branch: '{stream}'
+          gs-pathname: ''
+          disabled: false
+      - gambia:
+          branch: 'stable/{stream}'
+          gs-pathname: '/{stream}'
+          disabled: false
+      - fraser:
+          branch: 'stable/{stream}'
+          gs-pathname: '/{stream}'
+          disabled: false
+    # -------------------------------
+    # DEPLOY TYPE ANCHORS
+    # -------------------------------
+    baremetal: &baremetal
+      installer: 'fuel'
+      slave-label: 'auto-baremetal'
+    # -------------------------------
+    # POD, INSTALLER, AND BRANCH MAPPING
+    # -------------------------------
+    # CI POD's
+    # -------------------------------
+    pod:
+      - auto-baremetal:
+          <<: *baremetal
+    # -------------------------------
+    #       scenarios
+    # -------------------------------
+    scenario:
+      # HA scenarios
+      - 'os-nosdn-onap-ha':
+          auto-trigger-name: 'fuel-{scenario}-{pod}-{stream}-trigger'
 
     jobs:
-      - '{project}-verify-basic'
+      - '{installer}-{scenario}-{pod}-auto-daily-{stream}'
+      - '{installer}-deploy-{pod}-daily-{stream}'
+      - 'auto-verify-{stream}'
+      - 'auto-merge-{stream}'
+
+########################
+# job templates
+########################
+- job-template:
+    name: '{installer}-{scenario}-{pod}-auto-daily-{stream}'
+
+    disabled: '{obj:disabled}'
+
+    concurrent: false
+
+    properties:
+      - logrotate-default
+      - throttle:
+          enabled: true
+          max-total: 4
+          max-per-node: 1
+          option: 'project'
+      - build-blocker:
+          use-build-blocker: true
+          blocking-jobs:
+            - '{installer}-os-.*?-{pod}-auto-daily-.*'
+          blocking-level: 'NODE'
+
+    wrappers:
+      - build-name:
+          name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
+
+    triggers:
+      - '{auto-trigger-name}'
+
+    parameters:
+      - project-parameter:
+          project: '{project}'
+          branch: '{branch}'
+      - '{installer}-defaults':
+          gs-pathname: '{gs-pathname}'
+      - '{slave-label}-defaults':
+          installer: '{installer}'
+      - string:
+          name: DEPLOY_SCENARIO
+          default: '{scenario}'
+
+    builders:
+      - trigger-builds:
+          - project: '{installer}-deploy-{pod}-daily-{stream}'
+            current-parameters: false
+            predefined-parameters: |
+              DEPLOY_SCENARIO=os-nosdn-nofeature-ha
+              PROJECT=armband
+            same-node: true
+            block: true
+
+- job-template:
+    name: 'auto-verify-{stream}'
+
+    disabled: '{obj:disabled}'
+
+    project-type: freestyle
+
+    concurrent: true
+
+    properties:
+      - logrotate-default
+
+    parameters:
+      - project-parameter:
+          project: '{project}'
+          branch: '{branch}'
+      - 'opnfv-build-ubuntu-defaults'
+
+    scm:
+      - git-scm-gerrit
+
+    triggers:
+      - gerrit:
+          server-name: 'gerrit.opnfv.org'
+          trigger-on:
+            - patchset-created-event:
+                exclude-drafts: 'false'
+                exclude-trivial-rebase: 'false'
+                exclude-no-code-change: 'false'
+            - draft-published-event
+            - comment-added-contains-event:
+                comment-contains-value: 'recheck'
+            - comment-added-contains-event:
+                comment-contains-value: 'reverify'
+          projects:
+            - project-compare-type: 'ANT'
+              project-pattern: '{project}'
+              branches:
+                - branch-compare-type: 'ANT'
+                  branch-pattern: '**/{branch}'
+              forbidden-file-paths:
+                - compare-type: ANT
+                  pattern: 'docs/**'
+
+    builders:
+      - shell: |
+          pwd
+          ./ci/build-auto.sh verify
+
+- job-template:
+    name: 'auto-merge-{stream}'
+
+    disabled: '{obj:disabled}'
+
+    project-type: freestyle
+
+    concurrent: true
+
+    properties:
+      - logrotate-default
+
+    parameters:
+      - project-parameter:
+          project: '{project}'
+          branch: '{branch}'
+      - 'opnfv-build-ubuntu-defaults'
+
+    scm:
+      - git-scm
+
+    triggers:
+      - gerrit:
+          server-name: 'gerrit.opnfv.org'
+          trigger-on:
+            - change-merged-event
+            - comment-added-contains-event:
+                comment-contains-value: 'remerge'
+          projects:
+            - project-compare-type: 'ANT'
+              project-pattern: '{project}'
+              branches:
+                - branch-compare-type: 'ANT'
+                  branch-pattern: '**/{branch}'
+              forbidden-file-paths:
+                - compare-type: ANT
+                  pattern: 'docs/**'
+
+    builders:
+      - shell: |
+          pwd
+          ./ci/build-auto.sh merge
+
+########################
+# trigger macros
+########################
+# CI PODs
+# ----------------------------------------------------------------
+# Auto CI Baremetal Triggers running against master branch
+# ----------------------------------------------------------------
+- trigger:
+    name: 'fuel-os-nosdn-onap-ha-auto-baremetal-master-trigger'
+    triggers:
+      - timed: ''
+# ---------------------------------------------------------------------
+# Auto CI Baremetal Triggers running against fraser branch
+# ---------------------------------------------------------------------
+- trigger:
+    name: 'fuel-os-nosdn-onap-ha-auto-baremetal-fraser-trigger'
+    triggers:
+      - timed: ''
+# ---------------------------------------------------------------------
+# Auto CI Baremetal Triggers running against gambia branch
+# ---------------------------------------------------------------------
+- trigger:
+    name: 'fuel-os-nosdn-onap-ha-auto-baremetal-gambia-trigger'
+    triggers:
+      - timed: ''
diff --git a/jjb/availability/availability-rtd-jobs.yaml b/jjb/availability/availability-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..e245e59
--- /dev/null
@@ -0,0 +1,21 @@
+---
+- project:
+    name: availability-rtd
+    project: availability
+    project-name: availability
+
+    gerrit-skip-vote: true
+    project-pattern: 'availability'
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-availability/47351/'
+    rtd-token: '3ae514b14073e1eacb697d3eddee62a26c8c891c'
+
+    stream:
+      - master:
+          branch: '{stream}'
+          disabled: false
+      - gambia:
+          branch: 'stable/{stream}'
+          disabled: false
+
+    jobs:
+      - '{project-name}-rtd-jobs'
diff --git a/jjb/barometer/barometer-rtd-jobs.yaml b/jjb/barometer/barometer-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..936d1f2
--- /dev/null
@@ -0,0 +1,20 @@
+---
+- project:
+    name: barometer-rtd
+    project: barometer
+    project-name: barometer
+
+    project-pattern: 'barometer'
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-barometer/47353/'
+    rtd-token: 'aef70b8a0148b295e25dd92474110bcd622bacb0'
+
+    stream:
+      - master:
+          branch: '{stream}'
+          disabled: false
+      - gambia:
+          branch: 'stable/{stream}'
+          disabled: false
+
+    jobs:
+      - '{project-name}-rtd-jobs'
index 9217620..9cb222a 100644 (file)
@@ -18,7 +18,7 @@
           branch: '{stream}'
           gs-pathname: ''
           disabled: false
-      - fraser: &fraser
+      - gambia: &gambia
           branch: 'stable/{stream}'
           gs-pathname: '/{stream}'
           disabled: false
@@ -32,7 +32,7 @@
       - project-parameter:
           project: '{project}'
           branch: '{branch}'
-      - 'opnfv-build-ubuntu-defaults'
+      - 'lf-build2-defaults'
 
     scm:
       - git-scm-gerrit
@@ -89,7 +89,7 @@
       - project-parameter:
           project: '{project}'
           branch: '{branch}'
-      - 'opnfv-build-ubuntu-defaults'
+      - 'lf-build2-defaults'
 
     scm:
       - git-scm
index 41b73a2..0a48eed 100644 (file)
@@ -19,8 +19,8 @@
       gs-packagepath: '/{suite}'
       # docker tag used for version control
       docker-tag: 'latest'
-    fraser: &fraser
-      stream: fraser
+    gambia: &gambia
+      stream: gambia
       branch: 'stable/{stream}'
       gs-pathname: '/{stream}'
       gs-packagepath: '/{stream}/{suite}'
           slave-label: compass-baremetal-branch
           installer: compass
           auto-trigger-name: 'daily-trigger-disabled'
-          <<: *fraser
+          <<: *gambia
       - virtual:
           slave-label: compass-virtual-branch
           installer: compass
           auto-trigger-name: 'daily-trigger-disabled'
-          <<: *fraser
+          <<: *gambia
 
     # -------------------------------
     #        None-CI PODs
@@ -79,6 +79,7 @@
       - 'posca_feature_moon_tenants'
       - 'posca_feature_vnf_scale_out'
       - 'posca_factor_soak_throughputs'
+      - 'kubestone_deployment_capacity'
 
     jobs:
       - 'bottlenecks-{installer}-{suite}-{pod}-daily-{stream}'
index d4930d7..dac1659 100644 (file)
           # This is used for different test suite dependent packages storage
           gs-packagepath: '/{suite}'
           disabled: false
-      - fraser: &fraser
+      - gambia:
+          branch: 'stable/{stream}'
+          gs-pathname: '/{stream}'
+          gs-packagepath: '/{stream}/{suite}'
+          disabled: false
+      - fraser:
           branch: 'stable/{stream}'
           gs-pathname: '/{stream}'
           gs-packagepath: '/{stream}/{suite}'
@@ -70,7 +75,6 @@
                   branch-pattern: '**/{branch}'
 
     builders:
-      # - bottlenecks-hello
       - bottlenecks-unit-tests
 
 - job-template:
 
     builders:
       - bottlenecks-hello
-      # - bottlenecks-unit-tests
 
 - job-template:
     name: 'bottlenecks-{suite}-upload-artifacts-{stream}'
diff --git a/jjb/bottlenecks/bottlenecks-rtd-jobs.yaml b/jjb/bottlenecks/bottlenecks-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..9730f24
--- /dev/null
@@ -0,0 +1,21 @@
+---
+- project:
+    name: bottlenecks-rtd
+    project: bottlenecks
+    project-name: bottlenecks
+
+    gerrit-skip-vote: true
+    project-pattern: 'bottlenecks'
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-bottlenecks/47355/'
+    rtd-token: '95dd0dbdde4a219b5196ffb86e15401b7b927885'
+
+    stream:
+      - master:
+          branch: '{stream}'
+          disabled: false
+      - gambia:
+          branch: 'stable/{stream}'
+          disabled: false
+
+    jobs:
+      - '{project-name}-rtd-jobs'
index 7a3db00..9b5e8ee 100644 (file)
@@ -24,7 +24,26 @@ OPENRC=/tmp/admin_rc.sh
 OS_CACERT=/tmp/os_cacert
 
 BOTTLENECKS_CONFIG=/tmp
-
+KUBESTONE_TEST_DIR=/home/opnfv/bottlenecks/testsuites/kubestone/testcases
+
+# Pulling Bottlenecks docker and passing environment variables
+echo "INFO: pulling Bottlenecks docker ${DOCKER_TAG}"
+docker pull opnfv/bottlenecks:${DOCKER_TAG} >$redirect
+
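+# keep a named container running detached so the per-suite "docker exec" calls below can reuse it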
+opts="--privileged=true -id"
+envs="-e INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP} \
+      -e NODE_NAME=${NODE_NAME} -e EXTERNAL_NET=${EXTERNAL_NETWORK} \
+      -e BRANCH=${BRANCH} -e GERRIT_REFSPEC_DEBUG=${GERRIT_REFSPEC_DEBUG} \
+      -e BOTTLENECKS_DB_TARGET=${BOTTLENECKS_DB_TARGET} -e PACKAGE_URL=${PACKAGE_URL} \
+      -e DEPLOY_SCENARIO=${DEPLOY_SCENARIO} -e BUILD_TAG=${BUILD_TAG}"
+docker_volume="-v /var/run/docker.sock:/var/run/docker.sock -v /tmp:/tmp"
+
+cmd="docker run ${opts} ${envs} --name bottlenecks-load-master ${docker_volume} opnfv/bottlenecks:${DOCKER_TAG} /bin/bash"
+echo "BOTTLENECKS INFO: running docker run commond: ${cmd}"
+${cmd} >$redirect
+sleep 5
+
+# Run test suite
 if [[ $SUITE_NAME == *posca* ]]; then
     POSCA_SCRIPT=/home/opnfv/bottlenecks/testsuites/posca
     sudo rm -f ${OPENRC}
@@ -116,23 +135,6 @@ if [[ $SUITE_NAME == *posca* ]]; then
         sudo ls -al ${BOTTLENECKS_CONFIG}
     fi
 
-    # Pulling Bottlenecks docker and passing environment variables
-    echo "INFO: pulling Bottlenecks docker ${DOCKER_TAG}"
-    docker pull opnfv/bottlenecks:${DOCKER_TAG} >$redirect
-
-    opts="--privileged=true -id"
-    envs="-e INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP} \
-          -e NODE_NAME=${NODE_NAME} -e EXTERNAL_NET=${EXTERNAL_NETWORK} \
-          -e BRANCH=${BRANCH} -e GERRIT_REFSPEC_DEBUG=${GERRIT_REFSPEC_DEBUG} \
-          -e BOTTLENECKS_DB_TARGET=${BOTTLENECKS_DB_TARGET} -e PACKAGE_URL=${PACKAGE_URL} \
-          -e DEPLOY_SCENARIO=${DEPLOY_SCENARIO} -e BUILD_TAG=${BUILD_TAG}"
-    docker_volume="-v /var/run/docker.sock:/var/run/docker.sock -v /tmp:/tmp"
-
-    cmd="docker run ${opts} ${envs} --name bottlenecks-load-master ${docker_volume} opnfv/bottlenecks:${DOCKER_TAG} /bin/bash"
-    echo "BOTTLENECKS INFO: running docker run commond: ${cmd}"
-    ${cmd} >$redirect
-    sleep 5
-
     # Running test cases through Bottlenecks docker
     if [[ $SUITE_NAME == posca_stress_traffic ]]; then
         TEST_CASE=posca_factor_system_bandwidth
@@ -144,4 +146,11 @@ if [[ $SUITE_NAME == *posca* ]]; then
     testcase_cmd="docker exec bottlenecks-load-master python ${POSCA_SCRIPT}/../run_testsuite.py testcase $TEST_CASE $REPORT"
     echo "BOTTLENECKS INFO: running test case ${TEST_CASE} with report indicator: ${testcase_cmd}"
     ${testcase_cmd} >$redirect
+elif [[ $SUITE_NAME == *kubestone* ]]; then
+    if [[ $SUITE_NAME == kubestone_deployment_capacity ]]; then
+        TEST_CASE=${KUBESTONE_TEST_DIR}/deployment_capacity.yaml
+    fi
+    testcase_cmd="docker exec bottlenecks-load-master python ${KUBESTONE_TEST_DIR}/../stress_test.py -c $TEST_CASE"
+    echo "BOTTLENECKS INFO: running test case ${TEST_CASE} with report indicator: ${testcase_cmd}"
+    ${testcase_cmd} >$redirect
 fi
diff --git a/jjb/calipso/calipso-rtd-jobs.yaml b/jjb/calipso/calipso-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..e6b61d7
--- /dev/null
@@ -0,0 +1,13 @@
+---
+- project:
+    name: calipso-rtd
+    project: calipso
+    project-name: calipso
+
+    gerrit-skip-vote: true
+    project-pattern: 'calipso'
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-calipso/47356/'
+    rtd-token: '9b88e25a769998fc316b25efe15eca2b7c1474f4'
+
+    jobs:
+      - '{project-name}-rtd-jobs'
index 2e500be..365afdd 100644 (file)
           name: SLAVE_LABEL
           default: 'opnfv-build'
           description: 'Slave label on Jenkins'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - project-parameter:
           project: $GERRIT_PROJECT
           branch: '{branch}'
index 31eed8f..4abbc08 100644 (file)
           branch: '{stream}'
           gs-pathname: ''
           disabled: false
-      - fraser: &fraser
+      - gambia:
+          branch: 'stable/{stream}'
+          gs-pathname: '/{stream}'
+          disabled: false
+      - fraser:
           branch: 'stable/{stream}'
           gs-pathname: '/{stream}'
           disabled: false
           blocking-jobs:
             - 'clover-daily-deploy-.*?'
             - 'container4nfv-daily-deploy-.*?'
-          block-level: 'NODE'
+          blocking-level: 'NODE'
 
     wrappers:
       - timeout:
diff --git a/jjb/clover/clover-rtd-jobs.yaml b/jjb/clover/clover-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..b58b71e
--- /dev/null
@@ -0,0 +1,21 @@
+---
+- project:
+    name: clover-rtd
+    project: clover
+    project-name: clover
+
+    gerrit-skip-vote: true
+    project-pattern: 'clover'
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-clover/47357/'
+    rtd-token: '8b47c0a3c1cfe7de885bf217628b58dd91f14f2e'
+
+    stream:
+      - master:
+          branch: '{stream}'
+          disabled: false
+      - gambia:
+          branch: 'stable/{stream}'
+          disabled: false
+
+    jobs:
+      - '{project-name}-rtd-jobs'
index 8eeaabf..9dc8074 100644 (file)
       gs-pathname: ''
       ppa-pathname: '/{stream}'
       disabled: false
-      openstack-version: pike
-    fraser: &fraser
-      stream: fraser
+      openstack-version: queens
+    gambia: &gambia
+      stream: gambia
       branch: 'stable/{stream}'
       disabled: false
       gs-pathname: '/{stream}'
       ppa-pathname: '/{stream}'
-      openstack-version: pike
+      openstack-version: queens
     danube: &danube
       stream: danube
       branch: 'stable/{stream}'
       - baremetal:
           slave-label: compass-baremetal-branch
           os-version: 'xenial'
-          <<: *fraser
+          <<: *gambia
       - virtual:
           slave-label: compass-virtual-branch
           os-version: 'xenial'
-          <<: *fraser
+          <<: *gambia
       # -------------------------------
       #        master
       # -------------------------------
       - 'k8-nosdn-stor4nfv-noha':
           disabled: false
           auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
+      - 'os-nosdn-stor4nfv-ha':
+          disabled: false
+          auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
 
     jobs:
       - 'compass-{scenario}-{pod}-daily-{stream}'
             - 'compass-os-.*?-baremetal-daily-.*?'
             - 'compass-k8-.*?-baremetal-daily-.*?'
             - 'compass-verify-[^-]*-[^-]*'
-          block-level: 'NODE'
+          blocking-level: 'NODE'
 
     wrappers:
       - fix-workspace-permissions
               unstable-threshold: 'FAILURE'
       # here the stream means the SUT stream, dovetail stream is defined in its own job
       # only run on os-(nosdn|odl_l3)-nofeature-ha scenario
-      # run with testsuite default, dovetail docker image with latest tag(Monday, Tuesday)
-      # run with testsuite proposed_tests, dovetail docker image with latest tag(Thursday, Friday)
+      # run with testsuite default, testarea mandatory, dovetail docker image with latest tag
+      # run with testsuite default, testarea optional, dovetail docker image with latest tag
       - conditional-step:
           condition-kind: and
           condition-operands:
             - condition-kind: regex-match
               regex: os-(nosdn|odl_l3)-nofeature-ha
               label: '{scenario}'
-            - condition-kind: day-of-week
-              day-selector: select-days
-              days:
-                MON: true
-                TUES: true
-              use-build-time: true
           steps:
             - trigger-builds:
-                - project: 'dovetail-compass-{pod}-default-{stream}'
+                - project: 'dovetail-compass-{pod}-default-mandatory-{stream}'
                   current-parameters: false
                   predefined-parameters: |
                     DOCKER_TAG=latest
             - condition-kind: regex-match
               regex: os-(nosdn|odl_l3)-nofeature-ha
               label: '{scenario}'
-            - condition-kind: day-of-week
-              day-selector: select-days
-              days:
-                THURS: true
-                FRI: true
-              use-build-time: true
           steps:
             - trigger-builds:
-                - project: 'dovetail-compass-{pod}-proposed_tests-{stream}'
+                - project: 'dovetail-compass-{pod}-default-optional-{stream}'
                   current-parameters: false
-                  predefined-parameters:
+                  predefined-parameters: |
+                    DOCKER_TAG=latest
                     DEPLOY_SCENARIO={scenario}
                   block: true
                   same-node: true
                     build-step-failure-threshold: 'never'
                     failure-threshold: 'never'
                     unstable-threshold: 'FAILURE'
+      - conditional-step:
+          condition-kind: and
+          condition-operands:
+            - condition-kind: regex-match
+              regex: k8-nosdn-nofeature-ha
+              label: '{scenario}'
+          steps:
+            - trigger-builds:
+                - project: 'bottlenecks-compass-kubestone_deployment_capacity-{pod}-daily-{stream}'
+                  current-parameters: false
+                  predefined-parameters:
+                    DEPLOY_SCENARIO={scenario}
+                  block: true
+                  same-node: true
+                  block-thresholds:
+                    build-step-failure-threshold: 'never'
+                    failure-threshold: 'never'
+                    unstable-threshold: 'FAILURE'
       - conditional-step:
           condition-kind: and
           condition-operands:
           blocking-jobs:
             - 'compass-deploy-{pod}-daily-.*?'
             - 'compass-verify-deploy-.*?'
-          block-level: 'NODE'
+          blocking-level: 'NODE'
 
     wrappers:
       - build-name:
                 !include-raw-escape: ./compass-deploy.sh
       - conditional-step:
           condition-kind: regex-match
-          regex: (danube|fraser)
+          regex: (danube|gambia)
           label: '{stream}'
           steps:
             - shell:
           description: "Directory where the cache to be used during the build is located."
       - string:
           name: PPA_REPO
-          default: "http://artifacts.opnfv.org/compass4nfv/package{ppa-pathname}"
+          default: "https://artifacts.opnfv.org/compass4nfv/package{ppa-pathname}"
       - string:
           name: PPA_CACHE
           default: "$WORKSPACE/work/repo/"
     name: 'compass-k8-nosdn-stor4nfv-ha-baremetal-centos-master-trigger'
     triggers:
       - timed: ''
+- trigger:
+    name: 'compass-os-nosdn-stor4nfv-ha-baremetal-centos-master-trigger'
+    triggers:
+      - timed: ''
 
 # ----------------------------
 # noha-baremetal-centos-master
     name: 'compass-k8-nosdn-stor4nfv-ha-huawei-pod7-danube-trigger'
     triggers:
       - timed: ''
+- trigger:
+    name: 'compass-os-nosdn-stor4nfv-ha-huawei-pod7-danube-trigger'
+    triggers:
+      - timed: ''
 
 # ----------------------------
 # noha-huawei-pod7-danube
     name: 'compass-k8-nosdn-stor4nfv-ha-baremetal-master-trigger'
     triggers:
       - timed: '0 16 1-29/2 * *'
+- trigger:
+    name: 'compass-os-nosdn-stor4nfv-ha-baremetal-master-trigger'
+    triggers:
+      - timed: '0 20 1-29/2 * *'
 
 # ---------------------
 # noha-baremetal-master
       - timed: ''
 
 # -------------------
-# ha-baremetal-fraser
+# ha-baremetal-gambia
 # -------------------
 - trigger:
-    name: 'compass-os-nosdn-nofeature-ha-baremetal-fraser-trigger'
+    name: 'compass-os-nosdn-nofeature-ha-baremetal-gambia-trigger'
     triggers:
       - timed: '0 1 1-29/2 * *'
 - trigger:
-    name: 'compass-os-nosdn-openo-ha-baremetal-fraser-trigger'
+    name: 'compass-os-nosdn-openo-ha-baremetal-gambia-trigger'
     triggers:
       - timed: ''
 - trigger:
-    name: 'compass-os-odl_l3-nofeature-ha-baremetal-fraser-trigger'
+    name: 'compass-os-odl_l3-nofeature-ha-baremetal-gambia-trigger'
     triggers:
       - timed: '0 21 2-30/2 * *'
 - trigger:
-    name: 'compass-os-onos-nofeature-ha-baremetal-fraser-trigger'
+    name: 'compass-os-onos-nofeature-ha-baremetal-gambia-trigger'
     triggers:
       - timed: ''
 - trigger:
-    name: 'compass-os-ocl-nofeature-ha-baremetal-fraser-trigger'
+    name: 'compass-os-ocl-nofeature-ha-baremetal-gambia-trigger'
     triggers:
       - timed: ''
 - trigger:
-    name: 'compass-os-onos-sfc-ha-baremetal-fraser-trigger'
+    name: 'compass-os-onos-sfc-ha-baremetal-gambia-trigger'
     triggers:
       - timed: ''
 - trigger:
-    name: 'compass-os-odl_l2-moon-ha-baremetal-fraser-trigger'
+    name: 'compass-os-odl_l2-moon-ha-baremetal-gambia-trigger'
     triggers:
       - timed: ''  # '0 5 1-29/2 * *'
 - trigger:
-    name: 'compass-os-nosdn-kvm-ha-baremetal-fraser-trigger'
+    name: 'compass-os-nosdn-kvm-ha-baremetal-gambia-trigger'
     triggers:
       - timed: '0 13 2-30/2 * *'
 - trigger:
-    name: 'compass-os-nosdn-ovs_dpdk-ha-baremetal-fraser-trigger'
+    name: 'compass-os-nosdn-ovs_dpdk-ha-baremetal-gambia-trigger'
     triggers:
       - timed: '0 9 1-29/2 * *'
 - trigger:
-    name: 'compass-k8-nosdn-nofeature-ha-baremetal-fraser-trigger'
+    name: 'compass-k8-nosdn-nofeature-ha-baremetal-gambia-trigger'
     triggers:
       - timed: '0 5 1-29/2 * *'
 - trigger:
-    name: 'compass-os-odl-sfc-ha-baremetal-fraser-trigger'
+    name: 'compass-os-odl-sfc-ha-baremetal-gambia-trigger'
     triggers:
       - timed: '0 17 2-30/2 * *'
 - trigger:
-    name: 'compass-os-nosdn-bar-ha-baremetal-fraser-trigger'
+    name: 'compass-os-nosdn-bar-ha-baremetal-gambia-trigger'
     triggers:
       - timed: '0 21 1-29/2 * *'
 - trigger:
-    name: 'compass-k8-nosdn-stor4nfv-ha-baremetal-fraser-trigger'
+    name: 'compass-k8-nosdn-stor4nfv-ha-baremetal-gambia-trigger'
     triggers:
       - timed: '0 7 2-30/2 * *'
+- trigger:
+    name: 'compass-os-nosdn-stor4nfv-ha-baremetal-gambia-trigger'
+    triggers:
+      - timed: ''
 
 # ---------------------
-# noha-baremetal-fraser
+# noha-baremetal-gambia
 # ---------------------
 - trigger:
-    name: 'compass-os-nosdn-kvm-noha-baremetal-fraser-trigger'
+    name: 'compass-os-nosdn-kvm-noha-baremetal-gambia-trigger'
     triggers:
       - timed: ''
 - trigger:
-    name: 'compass-os-nosdn-nofeature-noha-baremetal-fraser-trigger'
+    name: 'compass-os-nosdn-nofeature-noha-baremetal-gambia-trigger'
     triggers:
       - timed: ''
 - trigger:
-    name: 'compass-os-odl_l3-nofeature-noha-baremetal-fraser-trigger'
+    name: 'compass-os-odl_l3-nofeature-noha-baremetal-gambia-trigger'
     triggers:
       - timed: ''
 - trigger:
-    name: 'compass-os-odl_l2-moon-noha-baremetal-fraser-trigger'
+    name: 'compass-os-odl_l2-moon-noha-baremetal-gambia-trigger'
     triggers:
       - timed: ''
 - trigger:
-    name: 'compass-os-odl-sfc-noha-baremetal-fraser-trigger'
+    name: 'compass-os-odl-sfc-noha-baremetal-gambia-trigger'
     triggers:
       - timed: ''
 - trigger:
-    name: 'compass-os-nosdn-ovs_dpdk-noha-baremetal-fraser-trigger'
+    name: 'compass-os-nosdn-ovs_dpdk-noha-baremetal-gambia-trigger'
     triggers:
       - timed: ''
 - trigger:
-    name: 'compass-os-nosdn-bar-noha-baremetal-fraser-trigger'
+    name: 'compass-os-nosdn-bar-noha-baremetal-gambia-trigger'
     triggers:
       - timed: ''
 - trigger:
-    name: 'compass-k8-nosdn-stor4nfv-noha-baremetal-fraser-trigger'
+    name: 'compass-k8-nosdn-stor4nfv-noha-baremetal-gambia-trigger'
     triggers:
       - timed: ''
 
     name: 'compass-k8-nosdn-stor4nfv-ha-virtual-master-trigger'
     triggers:
       - timed: '0 15 2-30/2 * *'
+- trigger:
+    name: 'compass-os-nosdn-stor4nfv-ha-virtual-master-trigger'
+    triggers:
+      - timed: '0 19 2-30/2 * *'
 
 # -------------------
 # noha-virtual-master
       - timed: '0 10 2-30/2 * *'
 
 # -----------------
-# ha-virtual-fraser
+# ha-virtual-gambia
 # -----------------
 - trigger:
-    name: 'compass-os-nosdn-nofeature-ha-virtual-fraser-trigger'
+    name: 'compass-os-nosdn-nofeature-ha-virtual-gambia-trigger'
     triggers:
       - timed: '0 23 1-29/2 * *'
 - trigger:
-    name: 'compass-os-nosdn-openo-ha-virtual-fraser-trigger'
+    name: 'compass-os-nosdn-openo-ha-virtual-gambia-trigger'
     triggers:
       - timed: ''
 - trigger:
-    name: 'compass-os-odl_l3-nofeature-ha-virtual-fraser-trigger'
+    name: 'compass-os-odl_l3-nofeature-ha-virtual-gambia-trigger'
     triggers:
       - timed: '0 22 2-30/2 * *'
 - trigger:
-    name: 'compass-os-onos-nofeature-ha-virtual-fraser-trigger'
+    name: 'compass-os-onos-nofeature-ha-virtual-gambia-trigger'
     triggers:
       - timed: ''
 - trigger:
-    name: 'compass-os-ocl-nofeature-ha-virtual-fraser-trigger'
+    name: 'compass-os-ocl-nofeature-ha-virtual-gambia-trigger'
     triggers:
       - timed: ''
 - trigger:
-    name: 'compass-os-onos-sfc-ha-virtual-fraser-trigger'
+    name: 'compass-os-onos-sfc-ha-virtual-gambia-trigger'
     triggers:
       - timed: ''
 - trigger:
-    name: 'compass-os-odl_l2-moon-ha-virtual-fraser-trigger'
+    name: 'compass-os-odl_l2-moon-ha-virtual-gambia-trigger'
     triggers:
       - timed: '0 20 1-29/2 * *'
 - trigger:
-    name: 'compass-os-nosdn-kvm-ha-virtual-fraser-trigger'
+    name: 'compass-os-nosdn-kvm-ha-virtual-gambia-trigger'
     triggers:
       - timed: '0 16 2-30/2 * *'
 - trigger:
-    name: 'compass-os-nosdn-ovs_dpdk-ha-virtual-fraser-trigger'
+    name: 'compass-os-nosdn-ovs_dpdk-ha-virtual-gambia-trigger'
     triggers:
       - timed: '0 14 1-29/2 * *'
 - trigger:
-    name: 'compass-os-odl-sfc-ha-virtual-fraser-trigger'
+    name: 'compass-os-odl-sfc-ha-virtual-gambia-trigger'
     triggers:
       - timed: '0 18 2-30/2 * *'
 - trigger:
-    name: 'compass-k8-nosdn-nofeature-ha-virtual-fraser-trigger'
+    name: 'compass-k8-nosdn-nofeature-ha-virtual-gambia-trigger'
     triggers:
       - timed: '5 1 2-30/2 * *'
 - trigger:
-    name: 'compass-os-nosdn-bar-ha-virtual-fraser-trigger'
+    name: 'compass-os-nosdn-bar-ha-virtual-gambia-trigger'
     triggers:
       - timed: '0 19 1-29/2 * *'
 - trigger:
-    name: 'compass-k8-nosdn-stor4nfv-ha-virtual-fraser-trigger'
+    name: 'compass-k8-nosdn-stor4nfv-ha-virtual-gambia-trigger'
     triggers:
       - timed: '0 15 1-29/2 * *'
+- trigger:
+    name: 'compass-os-nosdn-stor4nfv-ha-virtual-gambia-trigger'
+    triggers:
+      - timed: ''
 
 # -------------------
-# noha-virtual-fraser
+# noha-virtual-gambia
 # -------------------
 - trigger:
-    name: 'compass-os-nosdn-kvm-noha-virtual-fraser-trigger'
+    name: 'compass-os-nosdn-kvm-noha-virtual-gambia-trigger'
     triggers:
       - timed: '0 15 1-29/2 * *'
 - trigger:
-    name: 'compass-os-nosdn-nofeature-noha-virtual-fraser-trigger'
+    name: 'compass-os-nosdn-nofeature-noha-virtual-gambia-trigger'
     triggers:
       - timed: '0 17 2-30/2 * *'
 - trigger:
-    name: 'compass-os-odl_l3-nofeature-noha-virtual-fraser-trigger'
+    name: 'compass-os-odl_l3-nofeature-noha-virtual-gambia-trigger'
     triggers:
       - timed: '0 23 1-29/2 * *'
 - trigger:
-    name: 'compass-os-odl_l2-moon-noha-virtual-fraser-trigger'
+    name: 'compass-os-odl_l2-moon-noha-virtual-gambia-trigger'
     triggers:
       - timed: '0 21 2-30/2 * *'
 - trigger:
-    name: 'compass-os-odl-sfc-noha-virtual-fraser-trigger'
+    name: 'compass-os-odl-sfc-noha-virtual-gambia-trigger'
     triggers:
       - timed: '0 19 1-29/2 * *'
 - trigger:
-    name: 'compass-os-nosdn-ovs_dpdk-noha-virtual-fraser-trigger'
+    name: 'compass-os-nosdn-ovs_dpdk-noha-virtual-gambia-trigger'
     triggers:
       - timed: '0 12 2-30/2 * *'
 - trigger:
-    name: 'compass-os-nosdn-bar-noha-virtual-fraser-trigger'
+    name: 'compass-os-nosdn-bar-noha-virtual-gambia-trigger'
     triggers:
       - timed: '0 12 1-29/2 * *'
 - trigger:
-    name: 'compass-k8-nosdn-stor4nfv-noha-virtual-fraser-trigger'
+    name: 'compass-k8-nosdn-stor4nfv-noha-virtual-gambia-trigger'
     triggers:
       - timed: '0 13 2-30/2 * *'
index ac649b9..4c5448e 100644 (file)
@@ -35,6 +35,8 @@ export OPENSTACK_VERSION=${COMPASS_OPENSTACK_VERSION}
 
 if [[ "${DEPLOY_SCENARIO}" =~ "-ocl" ]]; then
     export NETWORK_CONF_FILE=network_ocl.yml
+elif [[ "${DEPLOY_SCENARIO}" =~ "-odl" ]]; then
+    export NETWORK_CONF_FILE=network_odl.yml
 elif [[ "${DEPLOY_SCENARIO}" =~ "-onos" ]]; then
     export NETWORK_CONF_FILE=network_onos.yml
 elif [[ "${DEPLOY_SCENARIO}" =~ "-openo" ]]; then
index c090863..ec62da3 100644 (file)
@@ -51,7 +51,7 @@
           blocking-jobs:
             - 'compass-os-.*?-{pod}-daily-.*?'
             - 'compass-os-.*?-{pod}-weekly-.*?'
-          block-level: 'NODE'
+          blocking-level: 'NODE'
 
     wrappers:
       - build-name:
             - 'compass-deploy-{pod}-daily-.*?'
             - 'compass-deploy-{pod}-weekly-.*'
             - 'compass-verify-deploy-.*?'
-          block-level: 'NODE'
+          blocking-level: 'NODE'
 
     wrappers:
       - build-name:
index dc935f0..17745a2 100644 (file)
@@ -20,7 +20,7 @@
           gs-pathname: '/{stream}'
           ppa-pathname: '/{stream}'
           disabled: false
-      - fraser:
+      - gambia:
           branch: 'stable/{stream}'
           gs-pathname: '/{stream}'
           ppa-pathname: '/{stream}'
           description: "URL to Google Storage."
       - string:
           name: PPA_REPO
-          default: "http://artifacts.opnfv.org/compass4nfv/package{ppa-pathname}"
+          default: "https://artifacts.opnfv.org/compass4nfv/package{ppa-pathname}"
       - string:
           name: PPA_CACHE
           default: "$WORKSPACE/work/repo/"
index c357ff6..daedff7 100644 (file)
           gs-pathname: ''
           ppa-pathname: '/{stream}'
           disabled: false
-          openstack-version: 'pike'
+          openstack-version: 'queens'
           branch-type: 'master'
-      - fraser:
+      - gambia:
           branch: 'stable/{stream}'
           gs-pathname: '/{stream}'
           ppa-pathname: '/{stream}'
           disabled: false
-          openstack-version: 'pike'
+          openstack-version: 'queens'
           branch-type: 'master'
 
     distro:
@@ -71,7 +71,7 @@
             - 'compass-verify-[^-]*-[^-]*'
             - 'compass-os-.*?-virtual-daily-.*?'
             - 'compass-k8-.*?-virtual-daily-.*?'
-          block-level: 'NODE'
+          blocking-level: 'NODE'
 
     wrappers:
       - ssh-agent-wrapper
               node-parameters: true
               kill-phase-on: NEVER
               abort-all-job: true
-            - name: 'functest-compass-virtual-suite-{stream}'
-              current-parameters: false
-              predefined-parameters: |
-                FUNCTEST_MODE=testcase
-                FUNCTEST_SUITE_NAME=vping_ssh
-                DEPLOY_SCENARIO=os-nosdn-nofeature-ha
-              node-parameters: true
-              kill-phase-on: NEVER
-              abort-all-job: true
 
 - job-template:
     name: 'compass-verify-k8-{distro}-{stream}'
           blocking-jobs:
             - 'compass-verify-[^-]*-[^-]*'
             - 'compass-os-.*?-virtual-daily-.*?'
-          block-level: 'NODE'
+          blocking-level: 'NODE'
 
     wrappers:
       - ssh-agent-wrapper
             - 'compass-os-.*?-virtual-daily-.*?'
             - 'compass-verify-deploy-.*'
             - 'functest-compass-virtual.*'
-          block-level: 'NODE'
+          blocking-level: 'NODE'
 
     wrappers:
       - ssh-agent-wrapper
           description: "URL to Google Storage."
       - string:
           name: PPA_REPO
-          default: "http://artifacts.opnfv.org/compass4nfv/package{ppa-pathname}"
+          default: "https://artifacts.opnfv.org/compass4nfv/package{ppa-pathname}"
       - string:
           name: PPA_CACHE
           default: "$WORKSPACE/work/repo/"
diff --git a/jjb/compass4nfv/compass4nfv-rtd-jobs.yaml b/jjb/compass4nfv/compass4nfv-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..7d13688
--- /dev/null
@@ -0,0 +1,21 @@
+---
+- project:
+    name: compass4nfv-rtd
+    project: compass4nfv
+    project-name: compass4nfv
+
+    gerrit-skip-vote: true
+    project-pattern: 'compass4nfv'
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-compass4nfv/47358/'
+    rtd-token: '4208e8492be2e4eab7f18f1e70b5d78247d1b249'
+
+    stream:
+      - master:
+          branch: '{stream}'
+          disabled: false
+      - gambia:
+          branch: 'stable/{stream}'
+          disabled: false
+
+    jobs:
+      - '{project-name}-rtd-jobs'
index 696f7ff..48381cb 100755 (executable)
@@ -7,8 +7,8 @@ COMPASS_WORK_DIR=$WORKSPACE/../compass-work
 mkdir -p $COMPASS_WORK_DIR
 ln -s $COMPASS_WORK_DIR work
 
-#TODO: remove workaround after all arm64 patches merged
-curl -s http://people.linaro.org/~yibo.cai/compass/compass4nfv-arm64-fixup.sh | bash -s {scenario}
+sudo docker rm -f `docker ps | grep compass | cut -f1 -d' '` || true
 
-# build tarball
-COMPASS_ISO_REPO='http://people.linaro.org/~yibo.cai/compass' ./build.sh
+curl -s http://people.linaro.org/~yibo.cai/compass/compass4nfv-arm64-fixup.sh | bash || true
+
+./build.sh
index a22af09..7889c3b 100755 (executable)
@@ -5,11 +5,11 @@ cd compass4nfv
 
 export ADAPTER_OS_PATTERN='(?i)CentOS-7.*arm.*'
 export OS_VERSION="centos7"
-export KUBERNETES_VERSION="v1.7.5"
+export KUBERNETES_VERSION="v1.9.1"
 if [[ "$NODE_NAME" =~ "-virtual" ]]; then
     export DHA="deploy/conf/vm_environment/k8-nosdn-nofeature-noha.yml"
     export NETWORK="deploy/conf/vm_environment/network.yml"
-    export VIRT_NUMBER=2 VIRT_CPUS=4 VIRT_MEM=8192 VIRT_DISK=50G
+    export VIRT_NUMBER=2 VIRT_CPUS=8 VIRT_MEM=8192 VIRT_DISK=50G
 else
     export DHA="deploy/conf/hardware_environment/huawei-pod8/k8-nosdn-nofeature-noha.yml"
     export NETWORK="deploy/conf/hardware_environment/huawei-pod8/network.yml"
diff --git a/jjb/container4nfv/arm64/deploy-cni.sh b/jjb/container4nfv/arm64/deploy-cni.sh
new file mode 100755 (executable)
index 0000000..9afb980
--- /dev/null
@@ -0,0 +1,17 @@
+#!/bin/bash -e
+
+cd container4nfv/src/arm/cni-deploy
+
+DEPLOY_SCENARIO={scenario}
+
+virtualenv .venv
+source .venv/bin/activate
+pip install ansible==2.6.1
+
+ansible-playbook -i inventory/inventory.cfg deploy.yml --tags flannel,multus
+
+if [ "$DEPLOY_SCENARIO" == "k8-sriov-nofeature-noha" ]; then
+    ansible-playbook -i inventory/inventory.cfg deploy.yml --tags sriov
+elif [ "$DEPLOY_SCENARIO" == "k8-vpp-nofeature-noha" ]; then
+    ansible-playbook -i inventory/inventory.cfg deploy.yml --tags vhost-vpp
+fi
index 5f5bc86..c5ba3be 100644 (file)
@@ -7,28 +7,35 @@
     stream:
       - master:
           branch: master
-      - fraser:
-          branch: stable/fraser
+      - gambia:
+          branch: stable/gambia
     scenario:
       - 'k8-multus-nofeature-noha':
           disabled: false
       - 'k8-sriov-nofeature-noha':
           disabled: false
+      - 'k8-vpp-nofeature-noha':
+          disabled: false
     pod:
       - virtual:
           slave-label: arm-packet01
       - baremetal:
           slave-label: compass-baremetal-arm
     jobs:
+      - 'container4nfv-arm-deploy-{pod}-daily-{stream}'
       - 'container4nfv-{scenario}-{pod}-daily-{stream}'
 
 
 - job-template:
-    name: 'container4nfv-{scenario}-{pod}-daily-{stream}'
-    disabled: '{obj:disabled}'
-    concurrent: false
+    name: 'container4nfv-arm-deploy-{pod}-daily-{stream}'
+    disabled: false
     node: '{slave-label}'
 
+    wrappers:
+      - timeout:
+          timeout: 300
+          fail: true
+
     scm:
       - git:
           url: https://gerrit.opnfv.org/gerrit/compass4nfv
           wipe-workspace: true
 
     triggers:
-      - 'trigger-{scenario}-{pod}-{stream}'
-
-    wrappers:
-      - timeout:
-          timeout: 150
-          fail: true
+      - 'trigger-deploy-{pod}-{stream}'
 
     builders:
       - shell:
           - project: yardstick-arm64-compass-arm-virtual03-daily-master
             current-parameters: false
             same-node: true
-            block: false
+            block: true
+            block-thresholds:
+              build-step-failure-threshold: 'never'
+              failure-threshold: 'never'
+              unstable-threshold: 'Failure'
+          - project: functest-compass-arm-virtual-daily-master
+            current-parameters: false
+            predefined-parameters:
+              DEPLOY_SCENARIO='k8-nosdn-nofeature-ha'
+            same-node: true
+            block: true
+            block-thresholds:
+              build-step-failure-threshold: 'never'
+              failure-threshold: 'never'
+              unstable-threshold: 'Failure'
+          - project: container4nfv-k8-multus-nofeature-noha-{pod}-daily-{stream}
+            current-parameters: true
+            same-node: true
+            block: true
+            block-thresholds:
+              build-step-failure-threshold: 'never'
+              failure-threshold: 'never'
+              unstable-threshold: 'Failure'
+          - project: container4nfv-k8-sriov-nofeature-noha-{pod}-daily-{stream}
+            current-parameters: true
+            same-node: true
+            block: true
+            block-thresholds:
+              build-step-failure-threshold: 'never'
+              failure-threshold: 'never'
+              unstable-threshold: 'Failure'
+          - project: container4nfv-k8-vpp-nofeature-noha-{pod}-daily-{stream}
+            current-parameters: true
+            same-node: true
+            block: true
+            block-thresholds:
+              build-step-failure-threshold: 'never'
+              failure-threshold: 'never'
+              unstable-threshold: 'never'
 
 
-- trigger:
-    name: 'trigger-k8-multus-nofeature-noha-virtual-master'
-    triggers:
-      - timed: '0 12 * * *'
-- trigger:
-    name: 'trigger-k8-sriov-nofeature-noha-virtual-master'
-    triggers:
-      - timed: '0 15 * * *'
+- job-template:
+    name: 'container4nfv-{scenario}-{pod}-daily-{stream}'
+    disabled: '{obj:disabled}'
+    node: '{slave-label}'
 
-- trigger:
-    name: 'trigger-k8-multus-nofeature-noha-virtual-fraser'
-    triggers:
-      - timed: '0 18 * * *'
-- trigger:
-    name: 'trigger-k8-sriov-nofeature-noha-virtual-fraser'
-    triggers:
-      - timed: '0 21 * * *'
+    scm:
+      - git:
+          url: https://gerrit.opnfv.org/gerrit/container4nfv
+          branches:
+            - master
+          basedir: container4nfv
+          wipe-workspace: true
+
+    builders:
+      - shell:
+          !include-raw: arm64/deploy-cni.sh
 
 
 - trigger:
-    name: 'trigger-k8-multus-nofeature-noha-baremetal-master'
+    name: 'trigger-deploy-virtual-master'
     triggers:
       - timed: '0 12 * * *'
 - trigger:
-    name: 'trigger-k8-sriov-nofeature-noha-baremetal-master'
+    name: 'trigger-deploy-virtual-gambia'
     triggers:
-      - timed: '0 15 * * *'
-
+      - timed: '0 18 * * *'
 - trigger:
-    name: 'trigger-k8-multus-nofeature-noha-baremetal-fraser'
+    name: 'trigger-deploy-baremetal-master'
     triggers:
-      - timed: '0 18 * * *'
+      - timed: '0 12 * * *'
 - trigger:
-    name: 'trigger-k8-sriov-nofeature-noha-baremetal-fraser'
+    name: 'trigger-deploy-baremetal-gambia'
     triggers:
-      - timed: '0 21 * * *'
+      - timed: '0 18 * * *'
index 194a1a9..f1d58da 100644 (file)
@@ -18,7 +18,7 @@
           branch: '{stream}'
           gs-pathname: ''
           disabled: false
-      - fraser: &fraser
+      - gambia:
           branch: 'stable/{stream}'
           gs-pathname: '/{stream}'
           disabled: false
           blocking-jobs:
             - 'clover-daily-deploy-.*?'
             - 'container4nfv-daily-deploy-.*?'
-          block-level: 'NODE'
+          blocking-level: 'NODE'
 
     wrappers:
       - timeout:
diff --git a/jjb/container4nfv/container4nfv-rtd-jobs.yaml b/jjb/container4nfv/container4nfv-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..b55f958
--- /dev/null
@@ -0,0 +1,21 @@
+---
+- project:
+    name: container4nfv-rtd
+    project: container4nfv
+    project-name: container4nfv
+
+    gerrit-skip-vote: true
+    project-pattern: 'container4nfv'
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-container4nfv/47359/'
+    rtd-token: '82f917a788d006dc15df14ecd3c991115490bf8a'
+
+    stream:
+      - master:
+          branch: '{stream}'
+          disabled: false
+      - gambia:
+          branch: 'stable/{stream}'
+          disabled: false
+
+    jobs:
+      - '{project-name}-rtd-jobs'
diff --git a/jjb/copper/copper-rtd-jobs.yaml b/jjb/copper/copper-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..6198d22
--- /dev/null
@@ -0,0 +1,13 @@
+---
+- project:
+    name: copper-rtd
+    project: copper
+    project-name: copper
+
+    gerrit-skip-vote: true
+    project-pattern: 'copper'
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-copper/47360/'
+    rtd-token: '1b423ddc3af602f8078c1ece0d689e0b4d3c3f79'
+
+    jobs:
+      - '{project-name}-rtd-jobs'
diff --git a/jjb/cperf/cirros-upload.yaml.ansible b/jjb/cperf/cirros-upload.yaml.ansible
new file mode 100644 (file)
index 0000000..855bb1f
--- /dev/null
@@ -0,0 +1,39 @@
+---
+- hosts: all
+  tasks:
+    - copy:
+        src: "{{ lookup('env', 'WORKSPACE') }}/{{ item }}"
+        dest: "/home/heat-admin/{{ item }}"
+        owner: heat-admin
+        group: heat-admin
+        mode: 0775
+      with_items:
+        - cirros-0.3.5-x86_64-disk.img
+        - overcloudrc
+    - name: Upload cirros glance image
+      shell: >
+        source /home/heat-admin/overcloudrc && openstack image create
+        cirros-0.3.5-x86_64-disk --public
+        --file /home/heat-admin/cirros-0.3.5-x86_64-disk.img
+        --disk-format qcow2 --container-format bare
+    - name: Create nano flavor
+      shell: >
+        source /home/heat-admin/overcloudrc && openstack flavor create
+        --id 42 --ram 64 --disk 0 --vcpus 1 m1.nano
+    - name: Open CSIT TCP port for netcat
+      iptables:
+        chain: INPUT
+        action: insert
+        protocol: tcp
+        destination_port: 12345
+        jump: ACCEPT
+      become: yes
+    - name: Open CSIT UDP port for netcat
+      iptables:
+        chain: INPUT
+        action: insert
+        protocol: udp
+        destination_port: 12345
+        jump: ACCEPT
+      become: yes
+
index fdd3509..61bdebd 100644 (file)
@@ -9,47 +9,30 @@
     # -------------------------------
     # BRANCH ANCHORS
     # -------------------------------
-    master: &master
-      stream: master
-      branch: '{stream}'
-      gs-pathname: ''
-      docker-tag: 'latest'
-    danube: &danube
-      stream: danube
-      branch: 'stable/{stream}'
-      gs-pathname: '/{stream}'
-      docker-tag: 'stable'
+    stream: master
+    branch: '{stream}'
+    gs-pathname: ''
+    docker-tag: 'latest'
 
-    # -------------------------------
-    # POD, INSTALLER, AND BRANCH MAPPING
-    # -------------------------------
-    pod:
-      # -------------------------------
-      #        master
-      # -------------------------------
-      - intel-pod2:
-          installer: apex
-          <<: *master
-      - intel-pod2:
-          installer: apex
-          <<: *danube
+    installer: apex
 
     testsuite:
-      - 'daily'
+      - csit
+      - cbench
 
     jobs:
-      - 'cperf-{installer}-{pod}-{testsuite}-{stream}'
+      - 'cperf-{installer}-{testsuite}-{stream}'
+      - 'cperf-upload-logs-csit'
 
 ################################
 # job template
 ################################
 - job-template:
-    name: 'cperf-{installer}-{pod}-{testsuite}-{stream}'
+    name: 'cperf-{installer}-{testsuite}-{stream}'
 
     concurrent: true
 
     properties:
-      - logrotate-default
       - throttle:
           enabled: true
           max-per-node: 1
 
     wrappers:
       - build-name:
-          name: '$BUILD_NUMBER Suite: $CPERF_SUITE_NAME Scenario: $DEPLOY_SCENARIO'
+          name: '$BUILD_NUMBER Suite: $CPERF_SUITE_NAME ODL BRANCH: $ODL_BRANCH'
       - timeout:
           timeout: 400
           abort: true
 
     parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - '{pod}-defaults'
-      - '{installer}-defaults'
       - cperf-parameter:
           testsuite: '{testsuite}'
           gs-pathname: '{gs-pathname}'
           docker-tag: '{docker-tag}'
-
-    scm:
-      - git-scm
+          stream: '{stream}'
 
     builders:
       - 'cperf-{testsuite}-builder'
 
+- job-template:
+    name: 'cperf-upload-logs-csit'
+
+    concurrent: true
+
+    disabled: false
+
+    parameters:
+      - cperf-parameter:
+          testsuite: 'csit'
+          gs-pathname: '{gs-pathname}'
+          docker-tag: '{docker-tag}'
+          stream: '{stream}'
+
+    # yamllint enable rule:line-length
+    properties:
+      - logrotate-default
+      - throttle:
+          max-per-node: 1
+          max-total: 10
+          option: 'project'
+
+    builders:
+      - 'cperf-upload-logs-csit'
+
 ########################
 # parameter macros
 ########################
           name: CPERF_SUITE_NAME
           default: '{testsuite}'
           description: "Suite name to run"
+      - string:
+          name: ODL_BRANCH
+          default: 'master'
+          description: "Branch that OpenDaylight is running"
+      - string:
+          name: OS_VERSION
+          default: 'master'
+          description: "OpenStack version (short name, no stable/ prefix)"
       - string:
           name: GS_PATHNAME
           default: '{gs-pathname}'
           name: DOCKER_TAG
           default: '{docker-tag}'
           description: 'Tag to pull docker image'
+      - string:
+          name: RC_FILE_PATH
+          default: ''
+          description: "Path to the OS credentials file if given"
+      - string:
+          name: SSH_KEY_PATH
+          default: ''
+          description: "Path to the private SSH key to access OPNFV nodes"
+      - string:
+          name: NODE_FILE_PATH
+          default: ''
+          description: "Path to the yaml file describing overcloud nodes"
+      - string:
+          name: ODL_CONTAINERIZED
+          default: 'true'
+          description: "boolean set true if ODL on overcloud is a container"
 
 ########################
 # trigger macros
 # builder macros
 ########################
 - builder:
-    name: cperf-daily-builder
+    name: cperf-csit-builder
+    builders:
+      - 'cperf-cleanup'
+      - 'cperf-prepare-robot'
+      - 'cperf-robot-netvirt-csit'
+
+- builder:
+    name: cperf-cbench-builder
     builders:
       - 'cperf-cleanup'
+      - 'cperf-prepare-robot'
       - 'cperf-robot-cbench'
 
+- builder:
+    name: cperf-prepare-robot
+    builders:
+      - shell:
+          !include-raw: ./cperf-prepare-robot.sh
+
 - builder:
     name: cperf-robot-cbench
     builders:
           set -o errexit
           set -o nounset
           set -o pipefail
-          undercloud_mac=$(sudo virsh domiflist undercloud | grep default | \
-                            grep -Eo "[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+")
-          INSTALLER_IP=$(/usr/sbin/arp -e | grep ${undercloud_mac} | awk {'print $1'})
-
-          sudo scp -o StrictHostKeyChecking=no root@$INSTALLER_IP:/home/stack/overcloudrc /tmp/overcloudrc
-          sudo chmod 755 /tmp/overcloudrc
-          source /tmp/overcloudrc
-
-          # robot suites need the ssh key to log in to controller nodes, so throwing it
-          # in tmp, and mounting /tmp as $HOME as far as robot is concerned
-          sudo rm -rf /tmp/.ssh
-          sudo mkdir /tmp/.ssh
-          sudo chmod 0700 /tmp/.ssh
-          sudo scp -o StrictHostKeyChecking=no root@$INSTALLER_IP:/home/stack/.ssh/id_rsa /tmp/.ssh/
-          sudo chown -R jenkins-ci:jenkins-ci /tmp/.ssh
-          # done with sudo. jenkins-ci is the user from this point
-          chmod 0600 /tmp/.ssh/id_rsa
 
           # cbench requires the openflow drop test feature to be installed.
           sshpass -p karaf ssh -o StrictHostKeyChecking=no \
                                -p 8101 karaf@$SDN_CONTROLLER_IP \
                                 feature:install odl-openflowplugin-flow-services-ui odl-openflowplugin-drop-test
 
-          docker pull opnfv/cperf:$DOCKER_TAG
-
           robot_cmd="pybot -e exclude -L TRACE -d /tmp \
                       -v ODL_SYSTEM_1_IP:${SDN_CONTROLLER_IP} \
                       -v ODL_SYSTEM_IP:${SDN_CONTROLLER_IP} \
 
           docker run -i -v /tmp:/tmp opnfv/cperf:$DOCKER_TAG ${robot_cmd} ${robot_suite}
 
+- builder:
+    name: cperf-robot-netvirt-csit
+    builders:
+      - shell:
+          !include-raw: ./cperf-robot-netvirt-csit.sh
+
 - builder:
     name: cperf-cleanup
     builders:
               docker ps -a | grep opnfv/cperf | awk '{print $1}' | xargs docker rm -f >${redirect}
           fi
 
-          # Remove existing images if exist
-          if [[ ! -z $(docker images | grep opnfv/cperf) ]]; then
-              echo "Docker images to remove:"
-              docker images | head -1 && docker images | grep opnfv/cperf >${redirect}
-              image_tags=($(docker images | grep opnfv/cperf | awk '{print $2}'))
-              for tag in "${image_tags[@]}"; do
-                  echo "Removing docker image opnfv/cperf:$tag..."
-                  docker rmi opnfv/cperf:$tag >/dev/null
-              done
-          fi
+- builder:
+    name: cperf-upload-logs-csit
+    builders:
+      - shell: !include-raw: ./cperf-upload-logs-csit.sh
diff --git a/jjb/cperf/cperf-prepare-robot.sh b/jjb/cperf/cperf-prepare-robot.sh
new file mode 100755 (executable)
index 0000000..d88c6d5
--- /dev/null
@@ -0,0 +1,32 @@
+#!/usr/bin/env bash
+
+set -o errexit
+set -o nounset
+set -o pipefail
+
+if [ -z ${RC_FILE_PATH+x} ]; then
+  undercloud_mac=$(sudo virsh domiflist undercloud | grep default | \
+                   grep -Eo "[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+")
+  INSTALLER_IP=$(/usr/sbin/arp -e | grep ${undercloud_mac} | awk {'print $1'})
+  sudo scp -o StrictHostKeyChecking=no root@$INSTALLER_IP:/home/stack/overcloudrc /tmp/overcloudrc
+else
+  cp -f $RC_FILE_PATH ${WORKSPACE}/overcloudrc
+fi
+
+sudo chmod 755 ${WORKSPACE}/overcloudrc
+source ${WORKSPACE}/overcloudrc
+
+# copy ssh key for robot
+
+if [ -z ${SSH_KEY_PATH+x} ]; then
+  sudo scp -o StrictHostKeyChecking=no root@$INSTALLER_IP:/home/stack/.ssh/id_rsa ${WORKSPACE}/
+  sudo chown -R jenkins-ci:jenkins-ci ${WORKSPACE}/
+  # done with sudo. jenkins-ci is the user from this point
+  chmod 0600 ${WORKSPACE}/id_rsa
+else
+  cp -f ${SSH_KEY_PATH} ${WORKSPACE}/
+fi
+
+docker pull opnfv/cperf:$DOCKER_TAG
+
+sudo mkdir -p /tmp/robot_results
diff --git a/jjb/cperf/cperf-robot-netvirt-csit.sh b/jjb/cperf/cperf-robot-netvirt-csit.sh
new file mode 100755 (executable)
index 0000000..892f0aa
--- /dev/null
@@ -0,0 +1,188 @@
+#!/usr/bin/env bash
+
+set -o errexit
+set -o nounset
+set -o pipefail
+
+if [[ ! -z ${SKIP_CSIT+x} && "$SKIP_CSIT" == "True" ]]; then
+  echo "Skipping csit run"
+  exit 0
+fi
+
+if [ "$OS_VERSION" == 'master' ]; then
+  FULL_OS_VER='master'
+else
+  FULL_OS_VER="stable/${OS_VERSION}"
+fi
+
+if [ "$ODL_BRANCH" == 'master' ]; then
+  ODL_STREAM='neon'
+else
+  ODL_STREAM=${ODL_BRANCH#"stable/"}
+fi
+
+echo "ODL Stream set: ${ODL_STREAM} and OS Version is ${FULL_OS_VER}"
+
+sudo rm -rf releng
+git clone https://gerrit.opnfv.org/gerrit/releng.git
+REL_PATH='releng/jjb/cperf'
+
+# NOTE: sourcing overcloudrc unsets any variable with OS_ prefix
+source ${WORKSPACE}/overcloudrc
+# note SDN_CONTROLLER_IP is set in overcloudrc, which is the VIP
+# for admin/public network (since we are running single network deployment)
+
+NUM_CONTROL_NODES=$(python ${REL_PATH}/parse-node-yaml.py num_nodes --file $NODE_FILE_PATH)
+NUM_COMPUTE_NODES=$(python ${REL_PATH}/parse-node-yaml.py num_nodes --node-type compute --file $NODE_FILE_PATH)
+
+echo "Number of Control nodes found: ${NUM_CONTROL_NODES}"
+echo "Number of Compute nodes found: ${NUM_COMPUTE_NODES}"
+
+# Only 1 combo or ctrl node is specified, even for OS HA deployments
+# Currently supported combinations are:
+# 0cmb-1ctl-2cmp
+# 1cmb-0ctl-0cmp
+# 1cmb-0ctl-1cmp
+if [ "$NUM_COMPUTE_NODES" -eq 0 ]; then
+  OPENSTACK_TOPO="1cmb-0ctl-0cmp"
+else
+  OPENSTACK_TOPO="0cmb-1ctl-2cmp"
+fi
+
+idx=1
+EXTRA_ROBOT_ARGS=""
+for idx in `seq 1 $NUM_CONTROL_NODES`; do
+  CONTROLLER_IP=$(python ${REL_PATH}/parse-node-yaml.py get_value -k address --node-number ${idx} --file $NODE_FILE_PATH)
+  EXTRA_ROBOT_ARGS+=" -v ODL_SYSTEM_${idx}_IP:${CONTROLLER_IP} \
+                      -v OS_CONTROL_NODE_${idx}_IP:${CONTROLLER_IP} \
+                      -v ODL_SYSTEM_${idx}_IP:${CONTROLLER_IP} \
+                      -v HA_PROXY_${idx}_IP:${SDN_CONTROLLER_IP}"
+done
+
+# In all-in-one these Compute IPs still need to be passed to robot
+if [ "$NUM_COMPUTE_NODES" -eq 0 ]; then
+  EXTRA_ROBOT_ARGS+=" -v OS_COMPUTE_1_IP:'' -v OS_COMPUTE_2_IP:''"
+else
+  idx=1
+  for idx in `seq 1 $NUM_COMPUTE_NODES`; do
+    COMPUTE_IP=$(python ${REL_PATH}/parse-node-yaml.py get_value -k address --node-type compute --node-number ${idx} --file $NODE_FILE_PATH)
+    EXTRA_ROBOT_ARGS+=" -v OS_COMPUTE_${idx}_IP:${COMPUTE_IP}"
+  done
+fi
+
+CONTROLLER_1_IP=$(python ${REL_PATH}/parse-node-yaml.py get_value -k address --node-number 1 --file $NODE_FILE_PATH)
+
+if [ "$ODL_CONTAINERIZED" == 'false' ]; then
+  EXTRA_ROBOT_ARGS+=" -v NODE_KARAF_COUNT_COMMAND:'ps axf | grep org.apache.karaf | grep -v grep | wc -l || echo 0' \
+                      -v NODE_START_COMMAND:'sudo systemctl start opendaylight_api' \
+                      -v NODE_KILL_COMMAND:'sudo systemctl stop opendaylight_api' \
+                      -v NODE_STOP_COMMAND:'sudo systemctl stop opendaylight_api' \
+                      -v NODE_FREEZE_COMMAND:'sudo systemctl stop opendaylight_api' "
+else
+  EXTRA_ROBOT_ARGS+=" -v NODE_KARAF_COUNT_COMMAND:'sudo docker ps | grep opendaylight_api | wc -l || echo 0' \
+                      -v NODE_START_COMMAND:'sudo docker start opendaylight_api' \
+                      -v NODE_KILL_COMMAND:'sudo docker stop opendaylight_api' \
+                      -v NODE_STOP_COMMAND:'sudo docker stop opendaylight_api' \
+                      -v NODE_FREEZE_COMMAND:'sudo docker stop opendaylight_api' "
+fi
+
+# FIXME(trozet) remove this once it is fixed in csit
+# Upload glance image into openstack
+wget -O ${WORKSPACE}/cirros-0.3.5-x86_64-disk.img http://download.cirros-cloud.net/0.3.5/cirros-0.3.5-x86_64-disk.img
+export ANSIBLE_HOST_KEY_CHECKING=False
+ansible-playbook -i ${CONTROLLER_1_IP}, -u heat-admin --key-file ${WORKSPACE}/id_rsa ${REL_PATH}/cirros-upload.yaml.ansible -vvv
+
+LOGS_LOCATION=/tmp/robot_results
+
+robot_cmd="pybot \
+  --removekeywords wuks \
+  --xunit robotxunit.xml \
+  --name 'CSIT' \
+  -e exclude \
+  -d $LOGS_LOCATION \
+  -v BUNDLEFOLDER:/opt/opendaylight \
+  -v CONTROLLER_USER:heat-admin \
+  -v DEFAULT_LINUX_PROMPT:\$ \
+  -v DEFAULT_LINUX_PROMPT_STRICT:]\$ \
+  -v DEFAULT_USER:heat-admin \
+  -v DEVSTACK_DEPLOY_PATH:/tmp \
+  -v EXTERNAL_GATEWAY:$CONTROLLER_1_IP \
+  -v EXTERNAL_PNF:$CONTROLLER_1_IP \
+  -v EXTERNAL_SUBNET:192.0.2.0/24 \
+  -v EXTERNAL_SUBNET_ALLOCATION_POOL:start=192.0.2.100,end=192.0.2.200 \
+  -v EXTERNAL_INTERNET_ADDR:$CONTROLLER_1_IP  \
+  -v HA_PROXY_IP:$SDN_CONTROLLER_IP \
+  -v NUM_ODL_SYSTEM:$NUM_CONTROL_NODES \
+  -v NUM_OS_SYSTEM:$(($NUM_CONTROL_NODES + $NUM_COMPUTE_NODES)) \
+  -v NUM_TOOLS_SYSTEM:0 \
+  -v ODL_SNAT_MODE:conntrack \
+  -v ODL_STREAM:$ODL_STREAM \
+  -v ODL_SYSTEM_IP:$CONTROLLER_1_IP \
+  -v OS_CONTROL_NODE_IP:$CONTROLLER_1_IP \
+  -v OPENSTACK_BRANCH:$FULL_OS_VER \
+  -v OPENSTACK_TOPO:$OPENSTACK_TOPO \
+  -v OS_USER:heat-admin \
+  -v ODL_ENABLE_L3_FWD:yes \
+  -v ODL_SYSTEM_USER:heat-admin \
+  -v ODL_SYSTEM_PROMPT:\$ \
+  -v PRE_CLEAN_OPENSTACK_ALL:True \
+  -v PUBLIC_PHYSICAL_NETWORK:datacentre \
+  -v RESTCONFPORT:8081 \
+  -v ODL_RESTCONF_USER:admin \
+  -v ODL_RESTCONF_PASSWORD:$SDN_CONTROLLER_PASSWORD \
+  -v KARAF_PROMPT_LOGIN:'opendaylight-user' \
+  -v KARAF_PROMPT:'opendaylight-user.*root.*>' \
+  -v SECURITY_GROUP_MODE:stateful \
+  -v USER:heat-admin \
+  -v USER_HOME:\$HOME \
+  -v TOOLS_SYSTEM_IP:'' \
+  -v NODE_ROLE_INDEX_START:0 \
+  -v WORKSPACE:/tmp  \
+  $EXTRA_ROBOT_ARGS \
+  -v of_port:6653 "
+
+SUITE_HOME='/home/opnfv/repos/odl_test/csit/suites'
+
+# Disabled suites
+#
+# ${SUITE_HOME}/openstack/connectivity/live_migration.robot
+# Live migration will not work unless we use a shared storage backend like
+# Ceph which we do not currently use with CSIT images
+#
+# ${SUITE_HOME}/netvirt/vpnservice/vpn_basic_ipv6.robot
+# This suite fails with an error indicating the connection was closed
+# to the overcloud control node:
+# https://build.opnfv.org/ci/job/cperf-apex-csit-master/104/consoleFull
+#
+# Minimize HA CSIT as it does not pass all suites
+if [ "$NUM_CONTROL_NODES" -eq 3 ]; then
+  suites="${SUITE_HOME}/openstack/connectivity/l2.robot \
+          ${SUITE_HOME}/openstack/connectivity/l3.robot"
+else
+  suites="${SUITE_HOME}/openstack/connectivity/l2.robot \
+          ${SUITE_HOME}/openstack/connectivity/l3.robot \
+          ${SUITE_HOME}/openstack/connectivity/external_network.robot \
+          ${SUITE_HOME}/openstack/connectivity/security_group.robot \
+          ${SUITE_HOME}/openstack/securitygroup/neutron_security_group.robot \
+          ${SUITE_HOME}/openstack/securitygroup/security_group_l3bcast.robot \
+          ${SUITE_HOME}/netvirt/vpnservice/vpn_basic.robot \
+          ${SUITE_HOME}/netvirt/elan/elan.robot \
+          ${SUITE_HOME}/netvirt/vpnservice/arp_learning.robot \
+          ${SUITE_HOME}/netvirt/l2l3_gatewaymac_arp.robot \
+          ${SUITE_HOME}/integration/Create_JVM_Plots.robot"
+fi
+
+echo "Robot command set: ${robot_cmd}"
+echo "Running robot..."
+docker run -i --net=host \
+  -v ${LOGS_LOCATION}:${LOGS_LOCATION} \
+  -v ${WORKSPACE}/id_rsa:/tmp/id_rsa \
+  -v ${WORKSPACE}/overcloudrc:/tmp/overcloudrc \
+  opnfv/cperf:$DOCKER_TAG \
+  /bin/bash -c "source /tmp/overcloudrc; mkdir -p \$HOME/.ssh; cp /tmp/id_rsa \$HOME/.ssh; \
+  cd /home/opnfv/repos/odl_test/ && git pull origin master; \
+  pip install odltools; \
+  ${robot_cmd} ${suites};"
+
+echo "Running post CSIT clean"
+ansible-playbook -i ${CONTROLLER_1_IP}, -u heat-admin --key-file ${WORKSPACE}/id_rsa ${REL_PATH}/csit-clean.yaml.ansible -vvv
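To make the node-counting logic above concrete: with a hypothetical node file describing one controller and two computes (addresses below are RFC 5737 documentation values, and SDN_CONTROLLER_IP is assumed to be the overcloud VIP), the script selects the 0cmb-1ctl-2cmp topology and the loops build roughly these robot variables; note that ODL_SYSTEM_1_IP is appended twice by the control-node loop as written:

    # hypothetical expansion for NUM_CONTROL_NODES=1, NUM_COMPUTE_NODES=2
    OPENSTACK_TOPO="0cmb-1ctl-2cmp"
    EXTRA_ROBOT_ARGS=" -v ODL_SYSTEM_1_IP:192.0.2.10 \
                       -v OS_CONTROL_NODE_1_IP:192.0.2.10 \
                       -v ODL_SYSTEM_1_IP:192.0.2.10 \
                       -v HA_PROXY_1_IP:192.0.2.5"
    EXTRA_ROBOT_ARGS+=" -v OS_COMPUTE_1_IP:192.0.2.20 -v OS_COMPUTE_2_IP:192.0.2.21"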
diff --git a/jjb/cperf/cperf-upload-logs-csit.sh b/jjb/cperf/cperf-upload-logs-csit.sh
new file mode 100644 (file)
index 0000000..bd86804
--- /dev/null
@@ -0,0 +1,12 @@
+#!/usr/bin/env bash
+
+set -o errexit
+set -o nounset
+set -o pipefail
+
+ODL_STREAM=${ODL_BRANCH#"stable/"}
+
+LOGS_LOCATION=/tmp/robot_results
+UPLOAD_LOCATION=artifacts.opnfv.org/cperf/cperf-apex-csit-${ODL_STREAM}/${BUILD_NUMBER}/
+echo "Uploading robot logs to ${UPLOAD_LOCATION}"
+gsutil -m cp -r -v ${LOGS_LOCATION} gs://${UPLOAD_LOCATION} > gsutil.latest_logs.log
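As a hypothetical worked example: for ODL_BRANCH=stable/fluorine and BUILD_NUMBER=12, the prefix strip leaves ODL_STREAM=fluorine, so the robot results are copied to:

    # hypothetical expansion; branch and build number are illustrative
    ODL_STREAM=fluorine
    UPLOAD_LOCATION=artifacts.opnfv.org/cperf/cperf-apex-csit-fluorine/12/
    gsutil -m cp -r -v /tmp/robot_results gs://artifacts.opnfv.org/cperf/cperf-apex-csit-fluorine/12/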
diff --git a/jjb/cperf/csit-clean.yaml.ansible b/jjb/cperf/csit-clean.yaml.ansible
new file mode 100644 (file)
index 0000000..0151dd8
--- /dev/null
@@ -0,0 +1,11 @@
+---
+- hosts: all
+  tasks:
+    - name: Delete cirros glance image
+      shell: >
+        source /home/heat-admin/overcloudrc && openstack image delete
+        cirros-0.3.5-x86_64-disk
+    - name: Delete nano flavor
+      shell: >
+        source /home/heat-admin/overcloudrc && openstack flavor delete
+        m1.nano
diff --git a/jjb/cperf/parse-node-yaml.py b/jjb/cperf/parse-node-yaml.py
new file mode 100644 (file)
index 0000000..5a75755
--- /dev/null
@@ -0,0 +1,71 @@
+##############################################################################
+# Copyright (c) 2018 Tim Rozet (trozet@redhat.com) and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import argparse
+import sys
+import yaml
+
+
+def get_node_data_by_number(node_type, node_number):
+    node_idx = 1
+    for node_name, node_data in data['servers'].items():
+        if node_type == node_data['type']:
+            if node_idx == node_number:
+                return node_name, node_data
+            else:
+                node_idx += 1
+
+
+def get_node_value(node_type, node_number, key):
+    node_name, node_data = get_node_data_by_number(node_type, node_number)
+    if not key and node_name is not None:
+        return node_name
+    elif node_data and isinstance(node_data, dict) and key in node_data:
+        return node_data[key]
+
+
+def get_number_of_nodes(node_type):
+    nodes = data['servers']
+    num_nodes = 0
+    for node_name, node_data in nodes.items():
+        if node_data['type'] == node_type:
+            num_nodes += 1
+    return num_nodes
+
+
+FUNCTION_MAP = {'num_nodes':
+                {'func': get_number_of_nodes,
+                 'args': ['node_type']},
+                'get_value':
+                    {'func': get_node_value,
+                     'args': ['node_type', 'node_number', 'key']},
+                }
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser()
+    parser.add_argument('command', choices=FUNCTION_MAP.keys())
+    parser.add_argument('-f', '--file',
+                        dest='node_file',
+                        required=True)
+    parser.add_argument('--node-type',
+                        default='controller',
+                        required=False)
+    parser.add_argument('--node-number',
+                        default=1,
+                        type=int,
+                        required=False)
+    parser.add_argument('-k', '--key',
+                        required=False)
+    args = parser.parse_args(sys.argv[1:])
+    with open(args.node_file, 'r') as fh:
+        data = yaml.safe_load(fh)
+    assert 'servers' in data
+    func = FUNCTION_MAP[args.command]['func']
+    args = [getattr(args, x) for x in FUNCTION_MAP[args.command]['args']]
+    print(func(*args))
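The schema of the node file is not part of this change; the script only assumes a top-level 'servers' mapping whose entries carry a 'type' field plus whatever key is requested (the CSIT script above asks for 'address'). A minimal, hypothetical file and the kinds of calls made from cperf-robot-netvirt-csit.sh would look roughly like:

    # hypothetical node file and invocations; values are made up for illustration
    cat > nodes.yaml <<'EOF'
    servers:
      overcloud-controller-0:
        type: controller
        address: 192.0.2.10
      overcloud-novacompute-0:
        type: compute
        address: 192.0.2.20
    EOF
    python parse-node-yaml.py num_nodes --file nodes.yaml                             # prints 1 (controllers)
    python parse-node-yaml.py num_nodes --node-type compute --file nodes.yaml         # prints 1
    python parse-node-yaml.py get_value -k address --node-number 1 --file nodes.yaml  # prints 192.0.2.10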
index ba03bc3..4a7e6e9 100644 (file)
             - 'daisy-os-.*?-{pod}-daily-.*?'
             - 'daisy-daily-.*'
             - 'daisy-kolla-build-.*'
-          block-level: 'NODE'
+          blocking-level: 'NODE'
 
     wrappers:
       - build-name:
             - '{installer}-(build|deploy|test)-daily-(fraser|master)'
             - '{installer}-deploy-(baremetal|virtual|zte-pod3|zte-pod9)-daily-(fraser|master)'
             - '(functest|yardstick)-{installer}-(baremetal|virtual|zte-pod3|zte-pod9)-daily-(fraser|master)'
-          block-level: 'NODE'
+          blocking-level: 'NODE'
 
     parameters:
       - project-parameter:
index ea57ab1..950b9be 100755 (executable)
@@ -17,7 +17,7 @@ if [[ ! "$NODE_NAME" =~ "-virtual" ]] && [[ ! "$LAB_NAME" =~ (zte) ]]; then
     exit $deploy_ret
 fi
 
-# clone the securedlab repo
+# clone the securedlab/pharos repo
 cd $WORKSPACE
 
 # There are no PDFs in euphrates branch of pharos repo.
index ec5ba71..7914548 100644 (file)
@@ -60,7 +60,7 @@
             - '{installer}-kolla-build-.*'
             - 'daisy-deploy-.*'
             - 'daisy-daily-.*'
-          block-level: 'NODE'
+          blocking-level: 'NODE'
 
     scm:
       - git-scm
             - '{installer}-kolla-build-.*'
             - 'daisy4nfv-merge-build-.*'
             - 'daisy4nfv-verify-build-.*'
-          block-level: 'NODE'
+          blocking-level: 'NODE'
 
     scm:
       - git-scm
             - '{installer}-daily-(build|deploy|test)-(fraser|master)'
             - '{installer}-.*-(baremetal|virtual|zte-pod3|zte-pod9)-daily-(fraser|master)'
             - '(functest|yardstick)-{installer}-(baremetal|virtual|zte-pod3|zte-pod9)-daily-(fraser|master)'
-          block-level: 'NODE'
+          blocking-level: 'NODE'
 
     scm:
       - git-scm
diff --git a/jjb/daisy4nfv/daisy-rtd-jobs.yaml b/jjb/daisy4nfv/daisy-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..2e7689a
--- /dev/null
@@ -0,0 +1,12 @@
+---
+- project:
+    name: daisy-rtd
+    project: daisy
+    project-name: daisy
+
+    project-pattern: 'daisy'
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-daisy/47361/'
+    rtd-token: '265efe14ff0bb3fa0d4ea66d6be1b7b511d5d713'
+
+    jobs:
+      - '{project-name}-rtd-jobs'
index 7e03fab..c261c23 100644 (file)
@@ -61,7 +61,7 @@
           use-build-blocker: true
           blocking-jobs:
             - '{alias}-merge-(master|fraser)'
-          block-level: 'NODE'
+          blocking-level: 'NODE'
 
     scm:
       - git-scm
           blocking-jobs:
             - '{alias}-merge-{phase}-.*'
             - '{installer}-daily-.*'
-          block-level: 'NODE'
+          blocking-level: 'NODE'
 
     scm:
       - git-scm
index 033beeb..c8c1db0 100644 (file)
@@ -55,7 +55,7 @@
           use-build-blocker: true
           blocking-jobs:
             - '{alias}-merge-build-.*'
-          block-level: 'NODE'
+          blocking-level: 'NODE'
 
     scm:
       - git-scm-gerrit
             - '{alias}-merge-build-.*'
             - '{alias}-verify-build-.*'
             - '{installer}-daily-.*'
-          block-level: 'NODE'
+          blocking-level: 'NODE'
 
     scm:
       - git-scm-gerrit
diff --git a/jjb/doctor/doctor-env-presetup.sh b/jjb/doctor/doctor-env-presetup.sh
new file mode 100755 (executable)
index 0000000..61e65c7
--- /dev/null
@@ -0,0 +1,62 @@
+#!/usr/bin/env bash
+set -o errexit
+set -o pipefail
+
+# set vars from env if not provided by user as options
+installer_key_file=${installer_key_file:-$HOME/installer_key_file}
+opnfv_installer=${opnfv_installer:-$HOME/opnfv-installer.sh}
+
+# Fetch INSTALLER_IP for APEX deployments
+if [[ ${INSTALLER_TYPE} == 'apex' ]]; then
+
+    echo "Gathering IP information for Apex installer VM"
+    ssh_options="-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
+    if sudo virsh list | grep undercloud; then
+        echo "Installer VM detected"
+        undercloud_mac=$(sudo virsh domiflist undercloud | grep default | \
+                  grep -Eo "[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+")
+        export INSTALLER_IP=$(/usr/sbin/arp -e | grep ${undercloud_mac} | awk {'print $1'})
+        echo "Installer ip is ${INSTALLER_IP}"
+    else
+        echo "No available installer VM exists and no credentials provided...exiting"
+        exit 1
+    fi
+
+    sudo cp /root/.ssh/id_rsa ${installer_key_file}
+    sudo chown `whoami`:`whoami` ${installer_key_file}
+
+elif [[ ${INSTALLER_TYPE} == 'daisy' ]]; then
+    echo "Gathering IP information for Daisy installer VM"
+    if sudo virsh list | grep daisy; then
+        echo "Installer VM detected"
+
+        bridge_name=$(sudo virsh domiflist daisy | grep vnet | awk '{print $3}')
+        echo "Bridge is $bridge_name"
+
+        installer_mac=$(sudo virsh domiflist daisy | grep vnet | \
+                      grep -Eo "[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+")
+        export INSTALLER_IP=$(/usr/sbin/arp -e -i $bridge_name | grep ${installer_mac} | head -n 1 | awk {'print $1'})
+
+        echo "Installer ip is ${INSTALLER_IP}"
+    else
+        echo "No available installer VM exists...exiting"
+        exit 1
+    fi
+fi
+
+
+# Checking if destination path is valid
+if [ -d $opnfv_installer ]; then
+    error "Please provide the full destination path for the installer ip file including the filename"
+else
+    # Check if we can create the file (e.g. path is correct)
+    touch $opnfv_installer || error "Cannot create the file specified. Check that the path is correct and run the script again."
+fi
+
+
+# Write the installer info to the file
+echo export INSTALLER_TYPE=${INSTALLER_TYPE} > $opnfv_installer
+echo export INSTALLER_IP=${INSTALLER_IP} >> $opnfv_installer
+if [ -e ${installer_key_file} ]; then
+    echo export SSH_KEY=${installer_key_file} >> $opnfv_installer
+fi
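The net effect is a small sourceable file (by default $HOME/opnfv-installer.sh) capturing the installer details; for an Apex jump host it would contain something along these lines, with a placeholder IP rather than a value from this change:

    # hypothetical contents of $HOME/opnfv-installer.sh on an Apex jump host
    export INSTALLER_TYPE=apex
    export INSTALLER_IP=192.0.2.5
    export SSH_KEY=/home/jenkins/installer_key_file   # expanded from the default $HOME/installer_key_file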
diff --git a/jjb/doctor/doctor-rtd-jobs.yaml b/jjb/doctor/doctor-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..13ff860
--- /dev/null
@@ -0,0 +1,21 @@
+---
+- project:
+    name: doctor-rtd
+    project: doctor
+    project-name: doctor
+
+    gerrit-skip-vote: true
+    project-pattern: 'doctor'
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-doctor/47362/'
+    rtd-token: 'bf8640556a3ba3151e4e5602facc5ed982dd88c8'
+
+    stream:
+      - master:
+          branch: '{stream}'
+          disabled: false
+      - gambia:
+          branch: 'stable/{stream}'
+          disabled: false
+
+    jobs:
+      - '{project-name}-rtd-jobs'
index 3985356..1ebd22d 100644 (file)
@@ -3,28 +3,27 @@
     name: doctor
 
     project: '{name}'
+    project-name: '{name}'
+    project-pattern: '{project}'
 
     stream:
       - master:
           branch: '{stream}'
           gs-pathname: ''
-          docker-tag: 'latest'
           disabled: false
-      - fraser: &fraser
+      - gambia: &gambia
+          branch: 'stable/{stream}'
+          gs-pathname: '/{stream}'
+          disabled: false
+      - fraser:
           branch: 'stable/{stream}'
           gs-pathname: '/{stream}'
-          docker-tag: 'stable'
           disabled: false
 
-    # feature projects' tests are not triggered by functest
-    # doctor verify Pods need to deploy with these scenario
     installer:
-      - 'apex':
-          scenario: 'os-nosdn-kvm-ha'
-      - 'fuel':
-          scenario: 'os-nosdn-ovs-ha'
-      - 'daisy':
-          scenario: 'os-nosdn-ovs_dpdk-noha'
+      - 'apex'
+      - 'fuel'
+      - 'daisy'
 
     arch:
       - 'x86_64'
         arch: 'aarch64'
       - installer: 'daisy'
         arch: 'aarch64'
+      # disabling the following tests due to limitations of the PoD owners
+      # these would be enabled again once the PoDs are ready
+      - installer: 'fuel'
+        arch: 'x86_64'
 
     jobs:
-      - 'doctor-verify-{inspector}-{stream}'
-      - 'doctor-verify-{installer}-{inspector}-{arch}-{stream}'
+      - 'doctor-verify-{stream}'
+      - 'doctor-verify-fault_management-{installer}-{inspector}-{arch}-{stream}'
+      - 'doctor-verify-maintenance-{installer}-{inspector}-{arch}-{stream}'
+      - 'doctor-verify-all-{installer}-{inspector}-{arch}-{stream}'
 
 - job-template:
-    name: 'doctor-verify-{inspector}-{stream}'
+    name: 'doctor-verify-{stream}'
     disabled: '{obj:disabled}'
     project-type: 'multijob'
     parameters:
          # in phase jobs
          echo "Triggering phase jobs!"
       - multijob:
-          name: 'doctor-verify'
+          name: 'doctor-verify-sample'
           execution-type: PARALLEL
           projects:
-            - name: 'doctor-verify-apex-{inspector}-x86_64-{stream}'
-              predefined-parameters: |
-                PROJECT=$PROJECT
-                GERRIT_BRANCH=$GERRIT_BRANCH
-                GERRIT_REFSPEC=$GERRIT_REFSPEC
-                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-                GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-              kill-phase-on: FAILURE
-              git-revision: true
-            - name: 'doctor-verify-fuel-{inspector}-x86_64-{stream}'
+            - name: 'doctor-verify-fault_management-daisy-sample-x86_64-{stream}'
               predefined-parameters: |
                 PROJECT=$PROJECT
                 GERRIT_BRANCH=$GERRIT_BRANCH
                 GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
               kill-phase-on: FAILURE
               git-revision: true
-            - name: 'doctor-verify-fuel-{inspector}-aarch64-{stream}'
-              predefined-parameters: |
-                GERRIT_BRANCH=$GERRIT_BRANCH
-                GERRIT_REFSPEC=$GERRIT_REFSPEC
-                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-                GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-              kill-phase-on: FAILURE
-              git-revision: true
-            - name: 'doctor-verify-daisy-{inspector}-x86_64-{stream}'
+            - name: 'doctor-verify-sample-all'
+              execution-type: PARALLEL
+              projects:
+                - name: 'doctor-verify-all-apex-sample-x86_64-{stream}'
+                  predefined-parameters: |
+                    PROJECT=$PROJECT
+                    GERRIT_BRANCH=$GERRIT_BRANCH
+                    GERRIT_REFSPEC=$GERRIT_REFSPEC
+                    GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+                    GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+                  kill-phase-on: FAILURE
+                  git-revision: true
+      - multijob:
+          name: 'doctor-verify-congress'
+          execution-type: PARALLEL
+          projects:
+            - name: 'doctor-verify-fault_management-apex-congress-x86_64-{stream}'
               predefined-parameters: |
                 PROJECT=$PROJECT
                 GERRIT_BRANCH=$GERRIT_BRANCH
               git-revision: true
 
 - job-template:
-    name: 'doctor-verify-{installer}-{inspector}-{arch}-{stream}'
+    name: 'doctor-verify-fault_management-{installer}-{inspector}-{arch}-{stream}'
     disabled: '{obj:disabled}'
     node: 'doctor-{installer}-{arch}'
     wrappers:
       - '{installer}-defaults'
       - 'doctor-slave-parameter'
       - 'doctor-parameter':
-          docker-tag: '{docker-tag}'
-          scenario: '{scenario}'
-      - 'doctor-functest-parameter'
+          inspector: '{inspector}'
+          test_case: 'fault_management'
+    scm:
+      - git-scm-gerrit
+    builders:
+      - 'doctor-verify-installer-inspector-builders-macro'
+    publishers:
+      - 'doctor-verify-publishers-macro'
+
+- job-template:
+    name: 'doctor-verify-maintenance-{installer}-{inspector}-{arch}-{stream}'
+    disabled: '{obj:disabled}'
+    node: 'nokia-pod1'
+    wrappers:
+      - ssh-agent-wrapper
+      - build-timeout:
+          timeout: 40
+    parameters:
+      - project-parameter:
+          project: '{project}'
+          branch: '{branch}'
+      - '{installer}-defaults'
+      - 'doctor-slave-parameter'
+      - 'doctor-parameter':
+          inspector: '{inspector}'
+          test_case: 'maintenance'
+    scm:
+      - git-scm-gerrit
+    builders:
+      - 'doctor-verify-installer-inspector-builders-macro'
+    publishers:
+      - 'doctor-verify-publishers-macro'
+
+- job-template:
+    name: 'doctor-verify-all-{installer}-{inspector}-{arch}-{stream}'
+    disabled: '{obj:disabled}'
+    node: 'doctor-{installer}-{arch}'
+    wrappers:
+      - ssh-agent-wrapper
+      - build-timeout:
+          timeout: 50
+    parameters:
+      - project-parameter:
+          project: '{project}'
+          branch: '{branch}'
+      - '{installer}-defaults'
+      - 'doctor-slave-parameter'
+      - 'doctor-parameter':
+          inspector: '{inspector}'
+          test_case: 'all'
     scm:
       - git-scm-gerrit
     builders:
     name: 'doctor-parameter'
     parameters:
       - string:
-          name: OS_CREDS
-          default: /home/jenkins/openstack.creds
-          description: 'OpenStack credentials'
-      - string:
-          name: DOCKER_TAG
-          default: '{docker-tag}'
-          description: 'Tag to pull docker image'
+          name: INSPECTOR_TYPE
+          default: '{inspector}'
+          description: 'inspector component'
       - string:
-          name: CLEAN_DOCKER_IMAGES
-          default: 'false'
-          description: 'Remove downloaded docker images (opnfv/functest:*)'
-      - string:
-          name: DEPLOY_SCENARIO
-          default: '{scenario}'
-          description: 'Scenario to deploy and test'
+          name: TEST_CASE
+          default: '{test_case}'
+          description: 'test case: all, fault_management or maintenance'
+
 
-- parameter:
-    name: 'doctor-functest-parameter'
-    parameters:
-      # functest-suite-parameter
-      - string:
-          name: FUNCTEST_MODE
-          default: 'testcase'
-      - string:
-          name: FUNCTEST_SUITE_NAME
-          default: 'doctor-notification'
-      - string:
-          name: TESTCASE_OPTIONS
-          # yamllint disable rule:line-length
-          default: '-e INSPECTOR_TYPE={inspector} -v $WORKSPACE:/home/opnfv/repos/doctor'
-          # yamllint enable rule:line-length
-          description: 'Addtional parameters specific to test case(s)'
-      # functest-parameter
-      - string:
-          name: GS_PATHNAME
-          default: '{gs-pathname}'
-          # yamllint disable rule:line-length
-          description: "Version directory where the opnfv documents will be stored in gs repository"
-          # yamllint enable rule:line-length
-      - string:
-          name: FUNCTEST_REPO_DIR
-          default: "/home/opnfv/repos/functest"
-          description: "Directory where the Functest repository is cloned"
-      - string:
-          name: PUSH_RESULTS_TO_DB
-          default: "true"
-          description: "Push the results of all the tests to the resultDB"
-      - string:
-          name: CI_DEBUG
-          default: 'true'
-          description: "Show debug output information"
 # -------------------------------
 # builder macros
 # -------------------------------
 - builder:
     name: 'doctor-verify-installer-inspector-builders-macro'
     builders:
-      - 'clean-workspace-log'
-      # yamllint disable rule:line-length
-      - shell: |
-          # NOTE: Create symbolic link, so that we can archive file outside
-          #       of $WORKSPACE .
-          # NOTE: We are printing all logs under 'tests/' during test run,
-          #       so this symbolic link should not be in 'tests/'. Otherwise,
-          #       we'll have the same log twice in jenkins console log.
-          ln -sfn $HOME/opnfv/functest/results/{stream} functest_results
-          # NOTE: Get functest script in $WORKSPACE. This functest script is
-          #       needed to perform VM image download in set-functest-env.sh
-          #       from E release cycle.
-          mkdir -p functest/ci
-          wget https://git.opnfv.org/functest/plain/functest/ci/download_images.sh -O functest/ci/download_images.sh
-      - 'functest-suite-builder'
+      # yamllint disable rule:indentation
+      - shell:
+          !include-raw:
+              - ./doctor-env-presetup.sh
+              - ../../utils/fetch_os_creds.sh
       - shell: |
-          functest_log="$HOME/opnfv/functest/results/{stream}/$FUNCTEST_SUITE_NAME.log"
-          # NOTE: checking the test result, as the previous job could return
-          #       0 regardless the result of doctor test scenario.
-          grep -e 'doctor test successfully' $functest_log || exit 1
-      # yamllint enable rule:line-length
+          # prepare the env for test
+          source $HOME/opnfv-openrc.sh
+          if [ -f $HOME/os_cacert ]; then
+              export OS_CACERT=$HOME/os_cacert
+          fi
+          source $HOME/opnfv-installer.sh
+
+          # run tox to trigger the test
+          # the Jenkins user has no permission to send ICMP packets, so run with sudo
+          sudo -E tox -e py34
+
 
 # -------------------------------
 # publisher macros
     publishers:
       - archive:
           artifacts: 'doctor_tests/*.log'
-      - archive:
-          artifacts: 'functest_results/$FUNCTEST_SUITE_NAME.log'
       - email-jenkins-admins-on-failure
-
+      - workspace-cleanup
 
 #####################################
 # trigger macros
diff --git a/jjb/domino/domino-rtd-jobs.yaml b/jjb/domino/domino-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..26f2547
--- /dev/null
@@ -0,0 +1,13 @@
+---
+- project:
+    name: domino-rtd
+    project: domino
+    project-name: domino
+
+    gerrit-skip-vote: true
+    project-pattern: 'domino'
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-domino/47363/'
+    rtd-token: 'c89c7fc281d94a985df61b49be2b824f544ba6fd'
+
+    jobs:
+      - '{project-name}-rtd-jobs'
diff --git a/jjb/dovetail-webportal/dovetail-webportal-project-jobs.yaml b/jjb/dovetail-webportal/dovetail-webportal-project-jobs.yaml
new file mode 100644 (file)
index 0000000..a579af6
--- /dev/null
@@ -0,0 +1,102 @@
+---
+###################################################
+# Non-CI jobs for the dovetail-webportal project
+# They will only be enabled on request by projects!
+###################################################
+- project:
+    name: dovetail-webportal-project-jobs
+
+    project: 'dovetail-webportal'
+
+    jobs:
+      - 'dovetail-webportal-verify-{stream}'
+      - 'dovetail-webportal-merge-{stream}'
+
+    stream:
+      - master:
+          branch: '{stream}'
+          disabled: false
+
+################################
+# job templates
+################################
+
+- job-template:
+    name: 'dovetail-webportal-verify-{stream}'
+
+    disabled: '{obj:disabled}'
+
+    parameters:
+      - project-parameter:
+          project: '{project}'
+          branch: '{branch}'
+      - 'opnfv-build-ubuntu-defaults'
+
+    scm:
+      - git-scm-gerrit
+
+    triggers:
+      - gerrit:
+          server-name: 'gerrit.opnfv.org'
+          trigger-on:
+            - patchset-created-event:
+                exclude-drafts: 'false'
+                exclude-trivial-rebase: 'false'
+                exclude-no-code-change: 'false'
+            - draft-published-event
+            - comment-added-contains-event:
+                comment-contains-value: 'recheck'
+            - comment-added-contains-event:
+                comment-contains-value: 'reverify'
+          projects:
+            - project-compare-type: 'ANT'
+              project-pattern: '{project}'
+              branches:
+                - branch-compare-type: 'ANT'
+                  branch-pattern: '**/{branch}'
+    builders:
+      - dovetail-webportal-unit-tests
+
+- job-template:
+    name: 'dovetail-webportal-merge-{stream}'
+
+    disabled: '{obj:disabled}'
+
+    parameters:
+      - project-parameter:
+          project: '{project}'
+          branch: '{branch}'
+      - 'opnfv-build-ubuntu-defaults'
+
+    scm:
+      - git-scm
+
+    triggers:
+      - gerrit:
+          server-name: 'gerrit.opnfv.org'
+          trigger-on:
+            - change-merged-event
+            - comment-added-contains-event:
+                comment-contains-value: 'remerge'
+          projects:
+            - project-compare-type: 'ANT'
+              project-pattern: '{project}'
+              branches:
+                - branch-compare-type: 'ANT'
+                  branch-pattern: '**/{branch}'
+
+    builders:
+      - dovetail-webportal-unit-tests
+
+################################
+# builders for dovetail-webportal project
+################################
+- builder:
+    name: dovetail-webportal-unit-tests
+    builders:
+      - shell: |
+          #!/bin/bash
+          set -o errexit
+          set -o pipefail
+
+          tox
index a95617a..abbffb2 100644 (file)
       dovetail-branch: '{stream}'
       gs-pathname: ''
       docker-tag: 'latest'
+      disabled: true
     fraser: &fraser
       stream: fraser
       branch: 'stable/{stream}'
-      dovetail-branch: master
+      dovetail-branch: 'stable/{stream}'
+      gs-pathname: '/{stream}'
+      docker-tag: 'ovp-2.0.0'
+    gambia: &gambia
+      stream: gambia
+      branch: 'stable/{stream}'
+      dovetail-branch: 'master'
       gs-pathname: '/{stream}'
       docker-tag: 'latest'
 
           SUT: fuel
           auto-trigger-name: 'daily-trigger-disabled'
           <<: *fraser
+      - baremetal:
+          slave-label: fuel-baremetal
+          SUT: fuel
+          auto-trigger-name: 'daily-trigger-disabled'
+          <<: *gambia
+      - virtual:
+          slave-label: fuel-virtual
+          SUT: fuel
+          auto-trigger-name: 'daily-trigger-disabled'
+          <<: *gambia
       # compass CI PODs
       - baremetal:
           slave-label: compass-baremetal
           SUT: compass
           auto-trigger-name: 'daily-trigger-disabled'
           <<: *fraser
+      - baremetal:
+          slave-label: compass-baremetal
+          SUT: compass
+          auto-trigger-name: 'daily-trigger-disabled'
+          <<: *gambia
+      - virtual:
+          slave-label: compass-virtual
+          SUT: compass
+          auto-trigger-name: 'daily-trigger-disabled'
+          <<: *gambia
       # -------------------------------
       #    Installers not using labels
       #            CI PODs
           SUT: apex
           auto-trigger-name: 'daily-trigger-disabled'
           <<: *fraser
+      - virtual:
+          slave-label: apex-virtual-master
+          SUT: apex
+          auto-trigger-name: 'daily-trigger-disabled'
+          <<: *gambia
+      - baremetal:
+          slave-label: apex-baremetal-master
+          SUT: apex
+          auto-trigger-name: 'daily-trigger-disabled'
+          <<: *gambia
       # armband CI PODs
       - armband-baremetal:
           slave-label: armband-baremetal
       - 'default'
       - 'proposed_tests'
 
+    testarea:
+      - 'mandatory'
+      - 'optional'
+
     jobs:
       - 'dovetail-{SUT}-{pod}-{testsuite}-{stream}'
+      - 'dovetail-{SUT}-{pod}-{testsuite}-{testarea}-{stream}'
 
 ################################
 # job templates
           name: TESTSUITE
           default: '{testsuite}'
           description: "dovetail testsuite to run"
+      - string:
+          name: TESTAREA
+          default: 'all'
+          description: "dovetail testarea to run"
+      - string:
+          name: DOVETAIL_REPO_DIR
+          default: "/home/opnfv/dovetail"
+          description: "Directory where the dovetail repository is cloned"
+      - string:
+          name: SUT_BRANCH
+          default: '{branch}'
+          description: "SUT branch"
+
+    scm:
+      - git-scm
+
+    builders:
+      - description-setter:
+          description: "POD: $NODE_NAME"
+      - 'dovetail-cleanup'
+      - 'dovetail-run'
+
+    publishers:
+      - archive:
+          artifacts: 'results/**/*'
+          allow-empty: true
+          fingerprint: true
+      - email-jenkins-admins-on-failure
+
+- job-template:
+    name: 'dovetail-{SUT}-{pod}-{testsuite}-{testarea}-{stream}'
+
+    disabled: false
+
+    concurrent: true
+
+    properties:
+      - logrotate-default
+      - throttle:
+          enabled: true
+          max-per-node: 1
+          option: 'project'
+
+    wrappers:
+      - build-name:
+          name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
+      - timeout:
+          timeout: 300
+          abort: true
+      - fix-workspace-permissions
+
+    triggers:
+      - '{auto-trigger-name}'
+
+    parameters:
+      - project-parameter:
+          project: '{project}'
+          branch: '{dovetail-branch}'
+      - '{SUT}-defaults'
+      - '{slave-label}-defaults'
+      - string:
+          name: DEPLOY_SCENARIO
+          default: 'os-nosdn-nofeature-ha'
+      - string:
+          name: DOCKER_TAG
+          default: '{docker-tag}'
+          description: 'Tag to pull dovetail docker image'
+      - string:
+          name: CI_DEBUG
+          default: 'true'
+          description: "Show debug output information"
+      - string:
+          name: TESTSUITE
+          default: '{testsuite}'
+          description: "dovetail testsuite to run"
+      - string:
+          name: TESTAREA
+          default: '{testarea}'
+          description: "dovetail testarea to run"
       - string:
           name: DOVETAIL_REPO_DIR
           default: "/home/opnfv/dovetail"
diff --git a/jjb/dovetail/dovetail-rtd-jobs.yaml b/jjb/dovetail/dovetail-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..482ff46
--- /dev/null
@@ -0,0 +1,12 @@
+---
+- project:
+    name: dovetail-rtd
+    project: dovetail
+    project-name: dovetail
+
+    project-pattern: 'dovetail'
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-dovetail/47364/'
+    rtd-token: '47df9d7358b153666c2cf5cc139cd3a65d135688'
+
+    jobs:
+      - '{project-name}-rtd-jobs'
index df681dd..04b11fd 100755 (executable)
@@ -16,7 +16,7 @@ set -e
 DEPLOY_TYPE=baremetal
 [[ $BUILD_TAG =~ "virtual" ]] && DEPLOY_TYPE=virt
 
-DOVETAIL_HOME=${WORKSPACE}/cvp
+DOVETAIL_HOME=${WORKSPACE}/ovp
 [ -d ${DOVETAIL_HOME} ] && sudo rm -rf ${DOVETAIL_HOME}
 
 mkdir -p ${DOVETAIL_HOME}
@@ -27,13 +27,44 @@ mkdir -p ${DOVETAIL_CONFIG}
 DOVETAIL_IMAGES=${DOVETAIL_HOME}/images
 mkdir -p ${DOVETAIL_IMAGES}
 
+OPENRC=${DOVETAIL_CONFIG}/env_config.sh
+CACERT=${DOVETAIL_CONFIG}/os_cacert
+POD=${DOVETAIL_CONFIG}/pod.yaml
+
 ssh_options="-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
 
 sshkey=""
-# The path of openrc.sh is defined in fetch_os_creds.sh
-OPENRC=${DOVETAIL_CONFIG}/env_config.sh
-CACERT=${DOVETAIL_CONFIG}/os_cacert
-if [[ ${INSTALLER_TYPE} == 'apex' ]]; then
+
+check_file_exists() {
+    if [[ -f $1 ]]; then
+        echo 0
+    else
+        echo 1
+    fi
+}
+
+get_cred_file_with_scripts() {
+    echo "INFO: clone releng repo..."
+    releng_repo=${WORKSPACE}/releng
+    [ -d ${releng_repo} ] && sudo rm -rf ${releng_repo}
+    git clone https://gerrit.opnfv.org/gerrit/releng ${releng_repo} >/dev/null
+
+    echo "INFO: clone pharos repo..."
+    pharos_repo=${WORKSPACE}/pharos
+    [ -d ${pharos_repo} ] && sudo rm -rf ${pharos_repo}
+    git clone https://git.opnfv.org/pharos ${pharos_repo} >/dev/null
+
+    echo "INFO: SUT branch is $SUT_BRANCH"
+    echo "INFO: dovetail branch is $BRANCH"
+    BRANCH_BACKUP=$BRANCH
+    export BRANCH=$SUT_BRANCH
+    cmd="${releng_repo}/utils/fetch_os_creds.sh -d ${OPENRC} -i ${INSTALLER_TYPE} -a ${INSTALLER_IP} -o ${CACERT} >${redirect}"
+    echo "INFO: cmd is ${cmd}"
+    ${cmd}
+    export BRANCH=$BRANCH_BACKUP
+}
+
+get_apex_cred_file() {
     instack_mac=$(sudo virsh domiflist undercloud | grep default | \
                   grep -Eo "[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+")
     INSTALLER_IP=$(/usr/sbin/arp -e | grep ${instack_mac} | awk {'print $1'})
@@ -43,67 +74,93 @@ if [[ ${INSTALLER_TYPE} == 'apex' ]]; then
         sudo iptables -D FORWARD -o virbr0 -j REJECT --reject-with icmp-port-unreachable
         sudo iptables -D FORWARD -i virbr0 -j REJECT --reject-with icmp-port-unreachable
     fi
-elif [[ ${INSTALLER_TYPE} == 'joid' ]]; then
-    # If production lab then creds may be retrieved dynamically
-    # creds are on the jumphost, always in the same folder
-    sudo cp $LAB_CONFIG/admin-openrc $OPENRC
-    # If dev lab, credentials may not be the default ones, just provide a path to put them into docker
-    # replace the default one by the customized one provided by jenkins config
-fi
+    get_cred_file_with_scripts
+}
 
-# Set iptables rule to allow forwarding return traffic for container
-if ! sudo iptables -C FORWARD -j RETURN 2> ${redirect} || ! sudo iptables -L FORWARD | awk 'NR==3' | grep RETURN 2> ${redirect}; then
-    sudo iptables -I FORWARD -j RETURN
-fi
+get_compass_cred_file() {
+    get_cred_file_with_scripts
+}
 
-releng_repo=${WORKSPACE}/releng
-[ -d ${releng_repo} ] && sudo rm -rf ${releng_repo}
-git clone https://gerrit.opnfv.org/gerrit/releng ${releng_repo} >/dev/null
+get_fuel_cred_file() {
+    get_cred_file_with_scripts
+}
 
-if [[ ${INSTALLER_TYPE} != 'joid' ]]; then
-    echo "SUT branch is $SUT_BRANCH"
-    echo "dovetail branch is $BRANCH"
-    BRANCH_BACKUP=$BRANCH
-    export BRANCH=$SUT_BRANCH
-    ${releng_repo}/utils/fetch_os_creds.sh -d ${OPENRC} -i ${INSTALLER_TYPE} -a ${INSTALLER_IP} -o ${CACERT} >${redirect}
-    export BRANCH=$BRANCH_BACKUP
-fi
+get_joid_cred_file() {
+    # If production lab then creds may be retrieved dynamically
+    # creds are on the jumphost, always in the same folder
+    sudo cp $LAB_CONFIG/admin-openrc $OPENRC
+}
 
-if [[ -f $OPENRC ]]; then
-    echo "INFO: openstack credentials path is $OPENRC"
-    if [[ ! "${SUT_BRANCH}" =~ "danube" && ${INSTALLER_TYPE} == "compass" ]]; then
-        if [[ -f ${CACERT} ]]; then
-            echo "INFO: ${INSTALLER_TYPE} openstack cacert file is ${CACERT}"
+change_cred_file_cacert_path() {
+    if [[ ${INSTALLER_TYPE} == "apex" ]]; then
+        echo "INFO: apex doesn't need to set OS_CACERT."
+        return 0
+    fi
+    exists=`check_file_exists ${CACERT}`
+    if [[ $exists == 0 ]]; then
+        echo "INFO: set ${INSTALLER_TYPE} openstack cacert file to be ${CACERT}"
+        if [[ ${INSTALLER_TYPE} == "compass" ]]; then
             echo "export OS_CACERT=${CACERT}" >> ${OPENRC}
-        else
-            echo "ERROR: Can't find ${INSTALLER_TYPE} openstack cacert file. Please check if it is existing."
-            sudo ls -al ${DOVETAIL_CONFIG}
-            exit 1
+        elif [[ ${INSTALLER_TYPE} == "fuel" ]]; then
+            sed -i "s#/etc/ssl/certs/mcp_os_cacert#${CACERT}#g" ${OPENRC}
         fi
+    else
+        echo "ERROR: cannot find file ${CACERT}. Please check if it exists."
+        sudo ls -al ${DOVETAIL_CONFIG}
+        exit 1
     fi
-    echo "export EXTERNAL_NETWORK=${EXTERNAL_NETWORK}" >> ${OPENRC}
-else
-    echo "ERROR: cannot find file $OPENRC. Please check if it is existing."
-    sudo ls -al ${DOVETAIL_CONFIG}
-    exit 1
-fi
+}
 
-if [[ ! "${SUT_BRANCH}" =~ "danube" && ${INSTALLER_TYPE} == "fuel" ]]; then
-    sed -i "s#/etc/ssl/certs/mcp_os_cacert#${CACERT}#g" ${OPENRC}
-fi
-cat $OPENRC
+change_cred_file_ext_net() {
+    exists=`check_file_exists ${OPENRC}`
+    if [[ $exists == 0 ]]; then
+        echo "export EXTERNAL_NETWORK=${EXTERNAL_NETWORK}" >> ${OPENRC}
+    else
+        echo "ERROR: cannot find file $OPENRC. Please check if it exists."
+        sudo ls -al ${DOVETAIL_CONFIG}
+        exit 1
+    fi
+}
+
+get_cred_file() {
+    if [[ ${INSTALLER_TYPE} == 'apex' ]]; then
+        get_apex_cred_file
+    elif [[ ${INSTALLER_TYPE} == 'compass' ]]; then
+        get_compass_cred_file
+    elif [[ ${INSTALLER_TYPE} == 'fuel' ]]; then
+        get_fuel_cred_file
+    elif [[ ${INSTALLER_TYPE} == 'joid' ]]; then
+        get_joid_cred_file
+    fi
 
-if [[ ! "${SUT_BRANCH}" =~ "danube" && ${INSTALLER_TYPE} == "compass" ]]; then
+    exists=`check_file_exists ${OPENRC}`
+    if [[ $exists == 0 ]]; then
+        echo "INFO: original openstack credentials file is:"
+        cat $OPENRC
+        echo "INFO: change cacert file path in credentials file"
+        change_cred_file_cacert_path
+        echo "INFO: set external network in credentials file"
+        change_cred_file_ext_net
+        echo "INFO: final openstack credentials file is:"
+        cat $OPENRC
+    else
+        echo "ERROR: cannot find file $OPENRC. Please check if it exists."
+        sudo ls -al ${DOVETAIL_CONFIG}
+        exit 1
+    fi
+}
+
+get_compass_pod_file() {
     compass_repo=${WORKSPACE}/compass4nfv/
+    echo "INFO: clone compass repo..."
     git clone https://github.com/opnfv/compass4nfv.git ${compass_repo} >/dev/null
-    sudo pip install shyaml
     scenario_file=${compass_repo}/deploy/conf/hardware_environment/$NODE_NAME/os-nosdn-nofeature-ha.yml
     ipmiIp=$(cat ${scenario_file} | shyaml get-value hosts.0.ipmiIp)
     ipmiPass=$(cat ${scenario_file} | shyaml get-value hosts.0.ipmiPass)
     ipmiUser=root
     jumpserver_ip=$(ifconfig | grep -A 5 docker0 | grep "inet addr" | cut -d ':' -f 2 | cut -d ' ' -f 1)
 
-    cat << EOF >${DOVETAIL_CONFIG}/pod.yaml
+    cat << EOF >${POD}
 nodes:
 - {ip: ${jumpserver_ip}, name: node0, password: root, role: Jumpserver, user: root}
 - {ip: 10.1.0.50, name: node1, password: root, role: controller, user: root,
@@ -114,24 +171,51 @@ nodes:
 - {ip: 10.1.0.54, name: node5, password: root, role: compute, user: root}
 
 EOF
-fi
 
-if [[ ! "${SUT_BRANCH}" =~ "danube" && ${INSTALLER_TYPE} == 'fuel' && ${DEPLOY_TYPE} == 'baremetal' ]]; then
+}
+
+get_fuel_baremetal_pod_file() {
     fuel_ctl_ssh_options="${ssh_options} -i ${SSH_KEY}"
     ssh_user="ubuntu"
     fuel_ctl_ip=$(ssh 2>/dev/null ${fuel_ctl_ssh_options} "${ssh_user}@${INSTALLER_IP}" \
-            "sudo salt --out yaml 'ctl*' pillar.get _param:openstack_control_address | \
-                awk '{print \$2; exit}'") &> /dev/null
-    cat << EOF >${DOVETAIL_CONFIG}/pod.yaml
+            "sudo salt 'cfg*' pillar.get _param:openstack_control_address --out text| \
+                cut -f2 -d' '")
+    fuel_cmp_ip=$(ssh 2>/dev/null ${fuel_ctl_ssh_options} "${ssh_user}@${INSTALLER_IP}" \
+            "sudo salt 'cmp001*' pillar.get _param:openstack_control_address --out text| \
+                cut -f2 -d' '")
+    fuel_dbs_ip=$(ssh 2>/dev/null ${fuel_ctl_ssh_options} "${ssh_user}@${INSTALLER_IP}" \
+            "sudo salt 'dbs01*' pillar.get _param:openstack_database_node01_address --out text| \
+                cut -f2 -d' '")
+    fuel_msg_ip=$(ssh 2>/dev/null ${fuel_ctl_ssh_options} "${ssh_user}@${INSTALLER_IP}" \
+            "sudo salt 'msg01*' pillar.get _param:openstack_message_queue_node01_address --out text| \
+                cut -f2 -d' '")
+    ipmi_index=$(ssh 2>/dev/null ${fuel_ctl_ssh_options} "${ssh_user}@${INSTALLER_IP}" \
+            "sudo salt 'ctl*' network.ip_addrs cidr=${fuel_ctl_ip} --out text | grep ${fuel_ctl_ip} | cut -c 5")
+
+    organization="$(cut -d'-' -f1 <<< "${NODE_NAME}")"
+    pod_name="$(cut -d'-' -f2 <<< "${NODE_NAME}")"
+    pdf_file=${pharos_repo}/labs/${organization}/${pod_name}.yaml
+    ipmiIp=$(cat ${pdf_file} | shyaml get-value nodes.$[ipmi_index-1].remote_management.address)
+    ipmiIp="$(cut -d'/' -f1 <<< "${ipmiIp}")"
+    ipmiPass=$(cat ${pdf_file} | shyaml get-value nodes.$[ipmi_index-1].remote_management.pass)
+    ipmiUser=$(cat ${pdf_file} | shyaml get-value nodes.$[ipmi_index-1].remote_management.user)
+    [[ $ipmiUser == ENC* ]] && ipmiUser=$(eyaml decrypt -s ${ipmiUser//[[:blank:]]/})
+    [[ $ipmiPass == ENC* ]] && ipmiPass=$(eyaml decrypt -s ${ipmiPass//[[:blank:]]/})
+
+    cat << EOF >${POD}
 nodes:
-- {ip: ${fuel_ctl_ip}, name: node1, key_filename: /home/opnfv/userconfig/pre_config/id_rsa, role: controller, user: ${ssh_user}}
-
+- {ip: ${INSTALLER_IP}, name: node0, key_filename: /home/opnfv/userconfig/pre_config/id_rsa,
+   role: Jumpserver, user: ${ssh_user}}
+- {ip: ${fuel_ctl_ip}, name: node1, key_filename: /home/opnfv/userconfig/pre_config/id_rsa,
+   role: controller, user: ${ssh_user}, ipmi_ip: ${ipmiIp}, ipmi_user: ${ipmiUser}, ipmi_password: ${ipmiPass}}
+- {ip: ${fuel_msg_ip}, name: msg01, key_filename: /home/opnfv/userconfig/pre_config/id_rsa, role: controller, user: ${ssh_user}}
+- {ip: ${fuel_cmp_ip}, name: cmp01, key_filename: /home/opnfv/userconfig/pre_config/id_rsa, role: controller, user: ${ssh_user}}
+- {ip: ${fuel_dbs_ip}, name: dbs01, key_filename: /home/opnfv/userconfig/pre_config/id_rsa, role: controller, user: ${ssh_user}}
 EOF
-fi
+}
 
-if [[ ! -f ${DOVETAIL_CONFIG}/pod.yaml ]]; then
+get_pod_file_with_scripts() {
     set +e
-
     sudo pip install virtualenv
 
     cd ${releng_repo}/modules
@@ -143,45 +227,95 @@ if [[ ! -f ${DOVETAIL_CONFIG}/pod.yaml ]]; then
     if [[ ${INSTALLER_TYPE} == compass ]]; then
         options="-u root -p root"
     elif [[ ${INSTALLER_TYPE} == fuel ]]; then
-        options="-u root -p r00tme"
+        options="-u ubuntu -k /root/.ssh/id_rsa"
     elif [[ ${INSTALLER_TYPE} == apex ]]; then
         options="-u stack -k /root/.ssh/id_rsa"
     elif [[ ${INSTALLER_TYPE} == daisy ]]; then
         options="-u root -p r00tme"
     else
-        echo "Don't support to generate pod.yaml on ${INSTALLER_TYPE} currently."
-        echo "HA test cases may not run properly."
+        echo "WARNING: Don't support to generate ${POD} on ${INSTALLER_TYPE} currently."
+        echo "WARNING: HA test cases may not run properly."
     fi
 
     cmd="sudo python ${releng_repo}/utils/create_pod_file.py -t ${INSTALLER_TYPE} \
-         -i ${INSTALLER_IP} ${options} -f ${DOVETAIL_CONFIG}/pod.yaml \
+         -i ${INSTALLER_IP} ${options} -f ${POD} \
          -s /home/opnfv/userconfig/pre_config/id_rsa"
-    echo ${cmd}
+    echo "INFO: cmd is ${cmd}"
     ${cmd}
 
     deactivate
-
     set -e
-
     cd ${WORKSPACE}
-fi
+}
 
-if [ -f ${DOVETAIL_CONFIG}/pod.yaml ]; then
-    sudo chmod 666 ${DOVETAIL_CONFIG}/pod.yaml
-    echo "Adapt process info for $INSTALLER_TYPE ..."
-    attack_process='rabbitmq'
-    cat << EOF >> ${DOVETAIL_CONFIG}/pod.yaml
+change_apex_pod_file_process_info() {
+    cat << EOF >> ${POD}
 process_info:
-- {testcase_name: dovetail.ha.tc010, attack_process: ${attack_process}}
+- {testcase_name: yardstick.ha.rabbitmq, attack_process: rabbitmq_server}
+- {testcase_name: yardstick.ha.cinder_api, attack_process: cinder_wsgi}
+EOF
+}
 
+change_fuel_pod_file_process_info() {
+    cat << EOF >> ${POD}
+process_info:
+- {testcase_name: yardstick.ha.cinder_api, attack_process: cinder-wsgi}
+- {testcase_name: yardstick.ha.rabbitmq, attack_process: rabbitmq_server, attack_host: msg01}
+- {testcase_name: yardstick.ha.neutron_l3_agent, attack_process: neutron-l3-agent, attack_host: cmp01}
+- {testcase_name: yardstick.ha.database, attack_process: mysqld, attack_host: dbs01}
 EOF
-    echo "file ${DOVETAIL_CONFIG}/pod.yaml:"
-    cat ${DOVETAIL_CONFIG}/pod.yaml
-else
-    echo "Error: cannot find file ${DOVETAIL_CONFIG}/pod.yaml. Please check if it is existing."
-    sudo ls -al ${DOVETAIL_CONFIG}
-    echo "HA test cases may not run properly."
-fi
+}
+
+change_compass_pod_file_process_info() {
+    cat << EOF >> ${POD}
+process_info:
+- {testcase_name: yardstick.ha.rabbitmq, attack_process: rabbitmq}
+EOF
+}
+
+change_pod_file_process_info() {
+    sudo chmod 666 ${POD}
+    echo "INFO: adapt process info for $INSTALLER_TYPE ..."
+    if [ "$INSTALLER_TYPE" == "apex" ]; then
+        change_apex_pod_file_process_info
+    elif [ "$INSTALLER_TYPE" == "fuel" ]; then
+        change_fuel_pod_file_process_info
+    elif [ "$INSTALLER_TYPE" == "compass" ]; then
+        change_compass_pod_file_process_info
+    fi
+}
+
+get_pod_file() {
+    # These packages are used for parsing YAML files and decrypting the IPMI user and password.
+    sudo pip install shyaml
+    sudo yum install -y rubygems || sudo apt-get install -y ruby
+    sudo gem install hiera-eyaml
+    if [[ ${INSTALLER_TYPE} == 'compass' ]]; then
+        get_compass_pod_file
+    elif [[ ${INSTALLER_TYPE} == 'fuel' && ${DEPLOY_TYPE} == 'baremetal' ]]; then
+        get_fuel_baremetal_pod_file
+    fi
+
+    exists=`check_file_exists ${POD}`
+    if [[ $exists == 1 ]]; then
+        get_pod_file_with_scripts
+    fi
+
+    exists=`check_file_exists ${POD}`
+    if [[ $exists == 0 ]]; then
+        change_pod_file_process_info
+    else
+        echo "ERROR: cannot find file ${POD}. Please check if it exists."
+        sudo ls -al ${DOVETAIL_CONFIG}
+        exit 1
+    fi
+
+    echo "INFO: file ${POD} is:"
+    cat ${POD}
+}
+
+get_cred_file
+get_pod_file
 
 if [ "$INSTALLER_TYPE" == "fuel" ]; then
     if [[ "${SUT_BRANCH}" =~ "danube" ]]; then
@@ -211,7 +345,7 @@ fi
 ubuntu_image=${image_path}/ubuntu-16.04-server-cloudimg-amd64-disk1.img
 if [[ ! -f ${ubuntu_image} ]]; then
     echo "Download image ubuntu-16.04-server-cloudimg-amd64-disk1.img ..."
-    wget -q -nc http://artifacts.opnfv.org/sdnvpn/ubuntu-16.04-server-cloudimg-amd64-disk1.img -P ${image_path}
+    wget -q -nc https://artifacts.opnfv.org/sdnvpn/ubuntu-16.04-server-cloudimg-amd64-disk1.img -P ${image_path}
 fi
 sudo cp ${ubuntu_image} ${DOVETAIL_IMAGES}
 
@@ -276,7 +410,7 @@ sleep 5
 container_id=$(docker ps | grep "${DOCKER_REPO}:${DOCKER_TAG}" | awk '{print $1}' | head -1)
 echo "Container ID=${container_id}"
 if [ -z ${container_id} ]; then
-    echo "Cannot find ${DOCKER_REPO} container ID ${container_id}. Please check if it is existing."
+    echo "Cannot find ${DOCKER_REPO} container ID ${container_id}. Please check if it exists."
     docker ps -a
     exit 1
 fi
@@ -318,11 +452,28 @@ else
     testsuite="--testsuite ${TESTSUITE}"
 fi
 
-run_cmd="dovetail run ${testsuite} -d"
+if [[ ${TESTAREA} == 'mandatory' ]]; then
+    testarea='--mandatory'
+elif [[ ${TESTAREA} == 'optional' ]]; then
+    testarea="--optional"
+elif [[ ${TESTAREA} == 'all' ]]; then
+    testarea=""
+else
+    testarea="--testarea ${TESTAREA}"
+fi
+
+run_cmd="dovetail run ${testsuite} ${testarea} --deploy-scenario ${DEPLOY_SCENARIO} -d -r"
 echo "Container exec command: ${run_cmd}"
 docker exec $container_id ${run_cmd}
 
 sudo cp -r ${DOVETAIL_HOME}/results ./
+result_package=$(find ${DOVETAIL_HOME} -name 'logs_*')
+echo "Results package is ${result_package}"
+for item in ${result_package};
+do
+  sudo mv ${item} ./results
+done
+
 # To make sure the file owner is the current user, for the copied results files in the above line
 echo "Change owner of result files ..."
 CURRENT_USER=${SUDO_USER:-$USER}
@@ -333,7 +484,7 @@ sudo chown -R ${CURRENT_USER}:${PRIMARY_GROUP} ./results
 #remove useless files to save disk space
 sudo rm -rf ./results/workspace
 sudo rm -f ./results/yardstick.img
-sudo rm -f ./results/tmp*
+sudo rm -f ./results/bottlenecks/tmp*
 
 echo "Dovetail: done!"
 
diff --git a/jjb/edgecloud/edgecloud-rtd-jobs.yaml b/jjb/edgecloud/edgecloud-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..a3835f3
--- /dev/null
@@ -0,0 +1,20 @@
+---
+- project:
+    name: edgecloud-rtd
+    project: edgecloud
+    project-name: edgecloud
+
+    project-pattern: 'edgecloud'
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-edgecloud/52895/'
+    rtd-token: '47989bec8e8da44ab2f33491cd6031f0411d319b'
+
+    stream:
+      - master:
+          branch: '{stream}'
+          disabled: false
+      - gambia:
+          branch: 'stable/{stream}'
+          disabled: false
+
+    jobs:
+      - '{project-name}-rtd-jobs'
diff --git a/jjb/escalator/escalator-rtd-jobs.yaml b/jjb/escalator/escalator-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..762051c
--- /dev/null
@@ -0,0 +1,13 @@
+---
+- project:
+    name: escalator-rtd
+    project: escalator
+    project-name: escalator
+
+    gerrit-skip-vote: true
+    project-pattern: 'escalator'
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-escalator/47365/'
+    rtd-token: 'cb75c70dab564cb40e6d9e8f2a99ced96f5b79b1'
+
+    jobs:
+      - '{project-name}-rtd-jobs'
diff --git a/jjb/fds/fds-rtd-jobs.yaml b/jjb/fds/fds-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..9bf47b9
--- /dev/null
@@ -0,0 +1,12 @@
+---
+- project:
+    name: fds-rtd
+    project: fds
+    project-name: fds
+
+    project-pattern: 'fds'
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-fds/47367/'
+    rtd-token: '756989c50a7c8f3350c4943f3d39a73762a4cd85'
+
+    jobs:
+      - '{project-name}-rtd-jobs'
index 7110dae..fb619c3 100644 (file)
@@ -21,8 +21,8 @@
       branch: 'stable/{stream}'
       disabled: false
       gs-pathname: '/{stream}'
-    danube: &danube
-      stream: danube
+    gambia: &gambia
+      stream: gambia
       branch: 'stable/{stream}'
       disabled: false
       gs-pathname: '/{stream}'
           <<: *master
       - baremetal:
           slave-label: fuel-baremetal
-          <<: *fraser
+          <<: *gambia
       - virtual:
           slave-label: fuel-virtual
+          <<: *gambia
+      - baremetal:
+          slave-label: fuel-baremetal
           <<: *fraser
       # -------------------------------
       #        None-CI PODs
           auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
       - 'os-odl-nofeature-ha':
           auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
-      - 'os-onos-sfc-ha':
-          auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
-      - 'os-onos-nofeature-ha':
-          auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
       - 'os-ovn-nofeature-ha':
           auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
-      - 'os-nosdn-kvm-ha':
-          auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
       - 'os-nosdn-ovs-ha':
           auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
-      - 'os-nosdn-kvm_ovs-ha':
-          auto-trigger-name: 'daily-trigger-disabled'
-      - 'os-nosdn-kvm_ovs_dpdk-ha':
-          auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
-      - 'os-nosdn-kvm_ovs_dpdk_bar-ha':
+      - 'os-nosdn-vpp-ha':
           auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
       # NOHA scenarios
       - 'os-nosdn-nofeature-noha':
           auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
       - 'os-odl-nofeature-noha':
           auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
-      - 'os-onos-sfc-noha':
+      - 'os-odl-sfc-noha':
           auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
-      - 'os-onos-nofeature-noha':
+      - 'os-odl-bgpvpn-noha':
           auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
       - 'os-ovn-nofeature-noha':
           auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
-      - 'os-nosdn-kvm-noha':
+      - 'os-odl-ovs-noha':
           auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
       - 'os-nosdn-ovs-noha':
           auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
-      - 'os-nosdn-kvm_ovs_dpdk-noha':
-          auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
-      - 'os-nosdn-kvm_ovs_dpdk_bar-noha':
-          auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
-      # danube scenario for Dovetail only
-      - 'os-odl_l2-bgpvpn-ha':
+      - 'os-nosdn-vpp-noha':
           auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
 
     exclude:
-      - scenario: os-odl_l2-bgpvpn-ha
-        stream: master
-      - scenario: os-odl_l2-bgpvpn-ha
+      # Only Dovetail should run against the Fraser HA baremetal scenarios
+      - scenario: os-nosdn-nofeature-noha
+        stream: fraser
+      - scenario: os-odl-nofeature-noha
+        stream: fraser
+      - scenario: os-odl-sfc-noha
+        stream: fraser
+      - scenario: os-odl-bgpvpn-noha
+        stream: fraser
+      - scenario: os-odl-ovs-noha
         stream: fraser
       - scenario: os-ovn-nofeature-noha
-        stream: danube
+        stream: fraser
+      - scenario: os-nosdn-ovs-noha
+        stream: fraser
+      - scenario: os-nosdn-vpp-noha
+        stream: fraser
       - scenario: os-ovn-nofeature-ha
-        stream: danube
+        stream: fraser
+      - scenario: os-nosdn-vpp-ha
+        stream: fraser
 
     jobs:
       - 'fuel-{scenario}-{pod}-daily-{stream}'
           use-build-blocker: true
           blocking-jobs:
             - 'fuel-os-.*?-{pod}-daily-.*'
-            - 'fuel-os-.*?-{pod}-weekly-.*'
             - 'fuel-verify-.*'
-          block-level: 'NODE'
+          blocking-level: 'NODE'
 
     wrappers:
       - build-name:
               DEPLOY_SCENARIO={scenario}
             same-node: true
             block: true
-      - trigger-builds:
-          - project: 'functest-fuel-{pod}-daily-{stream}'
-            current-parameters: false
-            predefined-parameters:
-              DEPLOY_SCENARIO={scenario}
-            same-node: true
-            block: true
-            block-thresholds:
-              build-step-failure-threshold: 'never'
-              failure-threshold: 'never'
-              unstable-threshold: 'FAILURE'
-      - trigger-builds:
-          - project: 'yardstick-fuel-{pod}-daily-{stream}'
-            current-parameters: false
-            predefined-parameters:
-              DEPLOY_SCENARIO={scenario}
-            block: true
-            same-node: true
-            block-thresholds:
-              build-step-failure-threshold: 'never'
-              failure-threshold: 'never'
-              unstable-threshold: 'FAILURE'
-      # 1.here the stream means the SUT stream, dovetail stream is defined in its own job
-      # 2.testsuite proposed_tests here is for new test cases planning to add into OVP
-      # 3.run proposed_tests on Monday, Wednesday and Friday against ha scenario
-      # 4.testsuite default here is for the test cases already added into OVP
-      # 5.run default testsuite on Tuesday against ha scenario
-      # 6.not used for release criteria or compliance, only to debug the dovetail tool bugs
       - conditional-step:
-          condition-kind: and
-          condition-operands:
-            - condition-kind: regex-match
-              regex: '.*-ha'
-              label: '{scenario}'
-            - condition-kind: day-of-week
-              day-selector: select-days
-              days:
-                MON: true
-                WED: true
-                FRI: true
-              use-build-time: true
+          condition-kind: not
+          condition-operand:
+            condition-kind: regex-match
+            regex: 'fraser'
+            label: '{stream}'
+          steps:
+            - trigger-builds:
+                - project: 'functest-fuel-{pod}-daily-{stream}'
+                  current-parameters: false
+                  predefined-parameters:
+                    DEPLOY_SCENARIO={scenario}
+                  same-node: true
+                  block: true
+                  block-thresholds:
+                    build-step-failure-threshold: 'never'
+                    failure-threshold: 'never'
+                    unstable-threshold: 'FAILURE'
+      - conditional-step:
+          condition-kind: not
+          condition-operand:
+            condition-kind: regex-match
+            regex: 'fraser'
+            label: '{stream}'
           steps:
             - trigger-builds:
-                - project: 'dovetail-fuel-{pod}-proposed_tests-{stream}'
+                - project: 'yardstick-fuel-{pod}-daily-{stream}'
                   current-parameters: false
                   predefined-parameters:
                     DEPLOY_SCENARIO={scenario}
                     build-step-failure-threshold: 'never'
                     failure-threshold: 'never'
                     unstable-threshold: 'FAILURE'
+      # 1. here the stream means the SUT stream; the dovetail stream is defined in its own job
+      # 2. the default testsuite here is for the test cases already added into OVP
+      # 3. run the default testsuite mandatory test cases against ha scenarios
+      # 4. run the default testsuite optional test cases against ha scenarios twice a week
+      # 5. not used for release criteria or compliance, only to debug dovetail tool bugs
       - conditional-step:
           condition-kind: and
           condition-operands:
             - condition-kind: regex-match
               regex: '.*-ha'
               label: '{scenario}'
-            - condition-kind: day-of-week
-              day-selector: select-days
-              days:
-                TUES: true
-              use-build-time: true
           steps:
             - trigger-builds:
-                - project: 'dovetail-fuel-{pod}-default-{stream}'
+                - project: 'dovetail-fuel-{pod}-default-mandatory-{stream}'
                   current-parameters: false
                   predefined-parameters:
                     DEPLOY_SCENARIO={scenario}
                     failure-threshold: 'never'
                     unstable-threshold: 'FAILURE'
       - conditional-step:
-          condition-kind: not
-          condition-operand:
-            condition-kind: regex-match
-            regex: 'danube'
-            label: '{stream}'
+          condition-kind: and
+          condition-operands:
+            - condition-kind: regex-match
+              regex: '.*-ha'
+              label: '{scenario}'
+            - condition-kind: day-of-week
+              day-selector: select-days
+              days:
+                MON: true
+                WED: true
+              use-build-time: true
           steps:
             - trigger-builds:
-                - project: 'fuel-collect-logs-{pod}-daily-{stream}'
+                - project: 'dovetail-fuel-{pod}-default-optional-{stream}'
                   current-parameters: false
                   predefined-parameters:
                     DEPLOY_SCENARIO={scenario}
                     build-step-failure-threshold: 'never'
                     failure-threshold: 'never'
                     unstable-threshold: 'FAILURE'
+      - trigger-builds:
+          - project: 'fuel-collect-logs-{pod}-daily-{stream}'
+            current-parameters: false
+            predefined-parameters:
+              DEPLOY_SCENARIO={scenario}
+            block: true
+            same-node: true
+            block-thresholds:
+              build-step-failure-threshold: 'never'
+              failure-threshold: 'never'
+              unstable-threshold: 'FAILURE'
 
     publishers:
-      - email:
-          recipients: peter.barabas@ericsson.com fzhadaev@mirantis.com
+      - email-fuel-ptl
       - email-jenkins-admins-on-failure
 
 - job-template:
           use-build-blocker: true
           blocking-jobs:
             - 'fuel-deploy-{pod}-daily-.*'
-            - 'fuel-deploy-generic-daily-.*'
-            - 'fuel-deploy-{pod}-weekly-.*'
-            - 'fuel-deploy-generic-weekly-.*'
-          block-level: 'NODE'
+          blocking-level: 'NODE'
 
     parameters:
       - project-parameter:
       - description-setter:
           description: "Built on $NODE_NAME"
       - track-begin-timestamp
-      - shell:
-          !include-raw-escape: ./fuel-download-artifact.sh
       - shell:
           !include-raw-escape: ./fuel-deploy.sh
 
     publishers:
-      - email:
-          recipients: peter.barabas@ericsson.com fzhadaev@mirantis.com
+      - email-fuel-ptl
       - email-jenkins-admins-on-failure
       - report-provision-result
 
     name: 'fuel-os-nosdn-ovs-ha-baremetal-daily-master-trigger'
     triggers:
       - timed: '5 5 * * *'
-- trigger:
-    name: 'fuel-os-onos-sfc-ha-baremetal-daily-master-trigger'
-    triggers:
-      - timed: ''  # '5 5 * * *'
-- trigger:
-    name: 'fuel-os-onos-nofeature-ha-baremetal-daily-master-trigger'
-    triggers:
-      - timed: ''  # '5 8 * * *'
 - trigger:
     name: 'fuel-os-ovn-nofeature-ha-baremetal-daily-master-trigger'
     triggers:
       - timed: ''
 - trigger:
-    name: 'fuel-os-nosdn-kvm-ha-baremetal-daily-master-trigger'
+    name: 'fuel-os-nosdn-vpp-ha-baremetal-daily-master-trigger'
     triggers:
-      - timed: ''  # '5 17 * * *'
-- trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-baremetal-daily-master-trigger'
-    triggers:
-      - timed: ''  # '30 12 * * *'
-- trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-baremetal-daily-master-trigger'
-    triggers:
-      - timed: ''  # '30 8 * * *'
+      - timed: ''
+
 # NOHA Scenarios
 - trigger:
     name: 'fuel-os-nosdn-nofeature-noha-baremetal-daily-master-trigger'
     triggers:
       - timed: ''
 - trigger:
-    name: 'fuel-os-onos-sfc-noha-baremetal-daily-master-trigger'
+    name: 'fuel-os-odl-sfc-noha-baremetal-daily-master-trigger'
     triggers:
       - timed: ''
 - trigger:
-    name: 'fuel-os-onos-nofeature-noha-baremetal-daily-master-trigger'
+    name: 'fuel-os-odl-bgpvpn-noha-baremetal-daily-master-trigger'
     triggers:
       - timed: ''
 - trigger:
     triggers:
       - timed: ''
 - trigger:
-    name: 'fuel-os-nosdn-kvm-noha-baremetal-daily-master-trigger'
+    name: 'fuel-os-odl-ovs-noha-baremetal-daily-master-trigger'
     triggers:
       - timed: ''
 - trigger:
     triggers:
       - timed: ''
 - trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-baremetal-daily-master-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-baremetal-daily-master-trigger'
+    name: 'fuel-os-nosdn-vpp-noha-baremetal-daily-master-trigger'
     triggers:
       - timed: ''
 # ----------------------------------------------
-# Triggers for job running on fuel-baremetal against fraser branch
+# Triggers for job running on fuel-baremetal against gambia branch
 # ----------------------------------------------
 # HA Scenarios
 - trigger:
-    name: 'fuel-os-nosdn-nofeature-ha-baremetal-daily-fraser-trigger'
+    name: 'fuel-os-nosdn-nofeature-ha-baremetal-daily-gambia-trigger'
     triggers:
-      - timed: ''  # '0 20 * * *'
+      - timed: '0 20 * * *'
 - trigger:
-    name: 'fuel-os-odl-nofeature-ha-baremetal-daily-fraser-trigger'
+    name: 'fuel-os-odl-nofeature-ha-baremetal-daily-gambia-trigger'
     triggers:
-      - timed: ''  # '0 2 * * *'
+      - timed: '0 2 * * *'
 - trigger:
-    name: 'fuel-os-onos-sfc-ha-baremetal-daily-fraser-trigger'
-    triggers:
-      - timed: ''  # '0 5 * * *'
-- trigger:
-    name: 'fuel-os-ovn-nofeature-ha-baremetal-daily-fraser-trigger'
+    name: 'fuel-os-ovn-nofeature-ha-baremetal-daily-gambia-trigger'
     triggers:
       - timed: ''
 - trigger:
-    name: 'fuel-os-onos-nofeature-ha-baremetal-daily-fraser-trigger'
-    triggers:
-      - timed: ''  # '0 8 * * *'
-- trigger:
-    name: 'fuel-os-nosdn-kvm-ha-baremetal-daily-fraser-trigger'
-    triggers:
-      - timed: ''  # '0 17 * * *'
-- trigger:
-    name: 'fuel-os-nosdn-ovs-ha-baremetal-daily-fraser-trigger'
-    triggers:
-      - timed: ''  # '0 20 * * *'
-- trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-baremetal-daily-fraser-trigger'
+    name: 'fuel-os-nosdn-ovs-ha-baremetal-daily-gambia-trigger'
     triggers:
-      - timed: ''  # '0 12 * * *'
+      - timed: '0 5 * * *'
 - trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-baremetal-daily-fraser-trigger'
-    triggers:
-      - timed: ''  # '0 8 * * *'
-# NOHA Scenarios
-- trigger:
-    name: 'fuel-os-nosdn-nofeature-noha-baremetal-daily-fraser-trigger'
+    name: 'fuel-os-nosdn-vpp-ha-baremetal-daily-gambia-trigger'
     triggers:
       - timed: ''
+# NOHA Scenarios
 - trigger:
-    name: 'fuel-os-odl-nofeature-noha-baremetal-daily-fraser-trigger'
+    name: 'fuel-os-nosdn-nofeature-noha-baremetal-daily-gambia-trigger'
     triggers:
       - timed: ''
 - trigger:
-    name: 'fuel-os-onos-sfc-noha-baremetal-daily-fraser-trigger'
+    name: 'fuel-os-odl-nofeature-noha-baremetal-daily-gambia-trigger'
     triggers:
       - timed: ''
 - trigger:
-    name: 'fuel-os-onos-nofeature-noha-baremetal-daily-fraser-trigger'
+    name: 'fuel-os-odl-sfc-noha-baremetal-daily-gambia-trigger'
     triggers:
       - timed: ''
 - trigger:
-    name: 'fuel-os-ovn-nofeature-noha-baremetal-daily-fraser-trigger'
+    name: 'fuel-os-odl-bgpvpn-noha-baremetal-daily-gambia-trigger'
     triggers:
       - timed: ''
 - trigger:
-    name: 'fuel-os-nosdn-kvm-noha-baremetal-daily-fraser-trigger'
+    name: 'fuel-os-ovn-nofeature-noha-baremetal-daily-gambia-trigger'
     triggers:
       - timed: ''
 - trigger:
-    name: 'fuel-os-nosdn-ovs-noha-baremetal-daily-fraser-trigger'
+    name: 'fuel-os-odl-ovs-noha-baremetal-daily-gambia-trigger'
     triggers:
       - timed: ''
 - trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-baremetal-daily-fraser-trigger'
+    name: 'fuel-os-nosdn-ovs-noha-baremetal-daily-gambia-trigger'
     triggers:
       - timed: ''
 - trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-baremetal-daily-fraser-trigger'
+    name: 'fuel-os-nosdn-vpp-noha-baremetal-daily-gambia-trigger'
     triggers:
       - timed: ''
 # ----------------------------------------------
     name: 'fuel-os-odl-nofeature-ha-virtual-daily-master-trigger'
     triggers:
       - timed: ''
-- trigger:
-    name: 'fuel-os-onos-sfc-ha-virtual-daily-master-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-onos-nofeature-ha-virtual-daily-master-trigger'
-    triggers:
-      - timed: ''
 - trigger:
     name: 'fuel-os-ovn-nofeature-ha-virtual-daily-master-trigger'
     triggers:
       - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-kvm-ha-virtual-daily-master-trigger'
-    triggers:
-      - timed: ''
 - trigger:
     name: 'fuel-os-nosdn-ovs-ha-virtual-daily-master-trigger'
     triggers:
       - timed: ''
 - trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-virtual-daily-master-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-virtual-daily-master-trigger'
+    name: 'fuel-os-nosdn-vpp-ha-virtual-daily-master-trigger'
     triggers:
       - timed: ''
 # NOHA Scenarios
     triggers:
       - timed: '5 18 * * *'
 - trigger:
-    name: 'fuel-os-onos-sfc-noha-virtual-daily-master-trigger'
+    name: 'fuel-os-odl-sfc-noha-virtual-daily-master-trigger'
     triggers:
-      - timed: ''  # '35 20 * * *'
+      - timed: '5 4 * * *'
 - trigger:
-    name: 'fuel-os-onos-nofeature-noha-virtual-daily-master-trigger'
+    name: 'fuel-os-odl-bgpvpn-noha-virtual-daily-master-trigger'
     triggers:
-      - timed: ''  # '5 23 * * *'
+      - timed: '5 6 * * *'
 - trigger:
     name: 'fuel-os-ovn-nofeature-noha-virtual-daily-master-trigger'
     triggers:
       - timed: '5 23 * * *'
 - trigger:
-    name: 'fuel-os-nosdn-kvm-noha-virtual-daily-master-trigger'
+    name: 'fuel-os-odl-ovs-noha-virtual-daily-master-trigger'
     triggers:
-      - timed: ''  # '35 6 * * *'
+      - timed: '5 2 * * *'
 - trigger:
     name: 'fuel-os-nosdn-ovs-noha-virtual-daily-master-trigger'
     triggers:
       - timed: '5 9 * * *'
 - trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-virtual-daily-master-trigger'
-    triggers:
-      - timed: ''  # '30 16 * * *'
-- trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-virtual-daily-master-trigger'
+    name: 'fuel-os-nosdn-vpp-noha-virtual-daily-master-trigger'
     triggers:
-      - timed: ''  # '30 20 * * *'
+      - timed: ''
 # ----------------------------------------------
-# Triggers for job running on fuel-virtual against fraser branch
+# Triggers for job running on fuel-baremetal against fraser branch (for Dovetail)
 # ----------------------------------------------
 - trigger:
-    name: 'fuel-os-nosdn-nofeature-ha-virtual-daily-fraser-trigger'
+    name: 'fuel-os-nosdn-nofeature-ha-baremetal-daily-fraser-trigger'
     triggers:
-      - timed: ''
+      - timed: '5 13 * * 6,7'
 - trigger:
-    name: 'fuel-os-odl-nofeature-ha-virtual-daily-fraser-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-onos-sfc-ha-virtual-daily-fraser-trigger'
+    name: 'fuel-os-odl-nofeature-ha-baremetal-daily-fraser-trigger'
     triggers:
-      - timed: ''
+      - timed: '0 13 * * 6'
 - trigger:
-    name: 'fuel-os-onos-nofeature-ha-virtual-daily-fraser-trigger'
+    name: 'fuel-os-nosdn-ovs-ha-baremetal-daily-fraser-trigger'
     triggers:
-      - timed: ''
+      - timed: '0 13 * * 7'
+# ----------------------------------------------
+# Triggers for job running on fuel-virtual against gambia branch
+# ----------------------------------------------
 - trigger:
-    name: 'fuel-os-ovn-nofeature-ha-virtual-daily-fraser-trigger'
+    name: 'fuel-os-nosdn-nofeature-ha-virtual-daily-gambia-trigger'
     triggers:
       - timed: ''
 - trigger:
-    name: 'fuel-os-nosdn-kvm-ha-virtual-daily-fraser-trigger'
+    name: 'fuel-os-odl-nofeature-ha-virtual-daily-gambia-trigger'
     triggers:
       - timed: ''
 - trigger:
-    name: 'fuel-os-nosdn-ovs-ha-virtual-daily-fraser-trigger'
+    name: 'fuel-os-ovn-nofeature-ha-virtual-daily-gambia-trigger'
     triggers:
       - timed: ''
 - trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-virtual-daily-fraser-trigger'
+    name: 'fuel-os-nosdn-ovs-ha-virtual-daily-gambia-trigger'
     triggers:
       - timed: ''
 - trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-virtual-daily-fraser-trigger'
+    name: 'fuel-os-nosdn-vpp-ha-virtual-daily-gambia-trigger'
     triggers:
       - timed: ''
 # NOHA Scenarios
 - trigger:
-    name: 'fuel-os-nosdn-nofeature-noha-virtual-daily-fraser-trigger'
-    triggers:
-      - timed: ''  # '0 13 * * *'
-- trigger:
-    name: 'fuel-os-odl-nofeature-noha-virtual-daily-fraser-trigger'
+    name: 'fuel-os-nosdn-nofeature-noha-virtual-daily-gambia-trigger'
     triggers:
-      - timed: ''  # '0 18 * * *'
+      - timed: '0 13 * * *'
 - trigger:
-    name: 'fuel-os-ovn-nofeature-noha-virtual-daily-fraser-trigger'
+    name: 'fuel-os-odl-nofeature-noha-virtual-daily-gambia-trigger'
     triggers:
-      - timed: '5 23 * * *'
+      - timed: '0 18 * * *'
 - trigger:
-    name: 'fuel-os-onos-sfc-noha-virtual-daily-fraser-trigger'
+    name: 'fuel-os-odl-sfc-noha-virtual-daily-gambia-trigger'
     triggers:
-      - timed: ''  # '30 20 * * *'
+      - timed: '0 4 * * *'
 - trigger:
-    name: 'fuel-os-onos-nofeature-noha-virtual-daily-fraser-trigger'
+    name: 'fuel-os-odl-bgpvpn-noha-virtual-daily-gambia-trigger'
     triggers:
-      - timed: ''  # '0 23 * * *'
+      - timed: '0 7 * * *'
 - trigger:
-    name: 'fuel-os-nosdn-kvm-noha-virtual-daily-fraser-trigger'
+    name: 'fuel-os-ovn-nofeature-noha-virtual-daily-gambia-trigger'
     triggers:
-      - timed: ''  # '30 6 * * *'
+      - timed: '0 23 * * *'
 - trigger:
-    name: 'fuel-os-nosdn-ovs-noha-virtual-daily-fraser-trigger'
+    name: 'fuel-os-odl-ovs-noha-virtual-daily-gambia-trigger'
     triggers:
-      - timed: ''  # '0 9 * * *'
+      - timed: '0 2 * * *'
 - trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-virtual-daily-fraser-trigger'
+    name: 'fuel-os-nosdn-ovs-noha-virtual-daily-gambia-trigger'
     triggers:
-      - timed: ''  # '0 16 * * *'
+      - timed: '0 9 * * *'
 - trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-virtual-daily-fraser-trigger'
+    name: 'fuel-os-nosdn-vpp-noha-virtual-daily-gambia-trigger'
     triggers:
-      - timed: ''  # '0 20 * * *'
+      - timed: ''
 # ----------------------------------------------
 # ZTE POD1 Triggers running against master branch
 # ----------------------------------------------
     name: 'fuel-os-odl-nofeature-ha-zte-pod1-daily-master-trigger'
     triggers:
       - timed: ''
-- trigger:
-    name: 'fuel-os-onos-sfc-ha-zte-pod1-daily-master-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-onos-nofeature-ha-zte-pod1-daily-master-trigger'
-    triggers:
-      - timed: ''
 - trigger:
     name: 'fuel-os-ovn-nofeature-ha-zte-pod1-daily-master-trigger'
     triggers:
       - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-kvm-ha-zte-pod1-daily-master-trigger'
-    triggers:
-      - timed: ''
 - trigger:
     name: 'fuel-os-nosdn-ovs-ha-zte-pod1-daily-master-trigger'
     triggers:
       - timed: ''
 - trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-zte-pod1-daily-master-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-zte-pod1-daily-master-trigger'
+    name: 'fuel-os-nosdn-vpp-ha-zte-pod1-daily-master-trigger'
     triggers:
       - timed: ''
 # NOHA Scenarios
     triggers:
       - timed: ''
 - trigger:
-    name: 'fuel-os-onos-sfc-noha-zte-pod1-daily-master-trigger'
+    name: 'fuel-os-odl-sfc-noha-zte-pod1-daily-master-trigger'
     triggers:
       - timed: ''
 - trigger:
-    name: 'fuel-os-onos-nofeature-noha-zte-pod1-daily-master-trigger'
+    name: 'fuel-os-odl-bgpvpn-noha-zte-pod1-daily-master-trigger'
     triggers:
       - timed: ''
 - trigger:
     triggers:
       - timed: ''
 - trigger:
-    name: 'fuel-os-nosdn-kvm-noha-zte-pod1-daily-master-trigger'
+    name: 'fuel-os-odl-ovs-noha-zte-pod1-daily-master-trigger'
     triggers:
       - timed: ''
 - trigger:
     triggers:
       - timed: ''
 - trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-zte-pod1-daily-master-trigger'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-zte-pod1-daily-master-trigger'
+    name: 'fuel-os-nosdn-vpp-noha-zte-pod1-daily-master-trigger'
     triggers:
       - timed: ''
index 35bf3dc..8bc5fa2 100755 (executable)
@@ -1,7 +1,7 @@
 #!/bin/bash
 # SPDX-license-identifier: Apache-2.0
 ##############################################################################
-# Copyright (c) 2017 Ericsson AB, Mirantis Inc., Enea Software AB and others.
+# Copyright (c) 2018 Ericsson AB, Mirantis Inc., Enea Software AB and others.
 # All rights reserved. This program and the accompanying materials
 # are made available under the terms of the Apache License, Version 2.0
 # which accompanies this distribution, and is available at
@@ -12,83 +12,43 @@ set -o pipefail
 
 export TERM="vt220"
 
-if [[ "$BRANCH" =~ 'danube' ]]; then
-    # source the file so we get OPNFV vars
-    # shellcheck disable=SC1091
-    source latest.properties
-
-    # echo the info about artifact that is used during the deployment
-    echo "Using ${OPNFV_ARTIFACT_URL/*\/} for deployment"
-
-    # for Danube deployments (no artifact for current master or newer branches)
-    # checkout the commit that was used for building the downloaded artifact
-    # to make sure the ISO and deployment mechanism uses same versions
-    echo "Checking out ${OPNFV_GIT_SHA1}"
-    git checkout "${OPNFV_GIT_SHA1}" --quiet
-
-    # releng wants us to use nothing else but opnfv.iso for now. We comply.
-    ISO_FILE_ARG="-i file://${WORKSPACE}/opnfv.iso"
-fi
-
-# shellcheck disable=SC2153
-if [[ "${JOB_NAME}" =~ 'verify' ]]; then
-    # set simplest scenario for virtual deploys to run for verify
-    DEPLOY_SCENARIO="os-nosdn-nofeature-noha"
-fi
-
 # set deployment parameters
 export TMPDIR=${HOME}/tmpdir
 # shellcheck disable=SC2153
 LAB_NAME=${NODE_NAME/-*}
 # shellcheck disable=SC2153
 POD_NAME=${NODE_NAME/*-}
-# Armband might override LAB_CONFIG_URL, all others use the default
-LAB_CONFIG_URL=${LAB_CONFIG_URL:-'ssh://jenkins-ericsson@gerrit.opnfv.org:29418/securedlab'}
-
-# Fuel requires deploy script to be ran with sudo, Armband does not
-SUDO='sudo -E'
-if [ "${PROJECT}" = 'fuel' ]; then
-    # Fuel currently supports ericsson, intel, lf and zte labs
-    if [[ ! "${LAB_NAME}" =~ (ericsson|intel|lf|zte) ]]; then
-        echo "Unsupported/unidentified lab ${LAB_NAME}. Cannot continue!"
-        exit 1
-    fi
-else
-    SUDO=
-    # Armband currently supports arm, enea labs
-    if [[ ! "${LAB_NAME}" =~ (arm|enea) ]]; then
-        echo "Unsupported/unidentified lab ${LAB_NAME}. Cannot continue!"
-        exit 1
-    fi
+
+# define Docker tag for stable branches
+[[ "${BRANCH}" != master ]] && export MCP_DOCKER_TAG=${BRANCH##*/}
+
+# Fuel currently supports arm, enea, ericsson, intel, lf, unh and zte labs
+if [[ ! "${LAB_NAME}" =~ (arm|enea|ericsson|intel|lf|unh|zte) ]]; then
+    echo "Unsupported/unidentified lab ${LAB_NAME}. Cannot continue!"
+    exit 1
 fi
 
 echo "Using configuration for ${LAB_NAME}"
 
 # create TMPDIR if it doesn't exist, change permissions
 mkdir -p "${TMPDIR}"
-chmod a+x "${HOME}" "${TMPDIR}"
+sudo chmod a+x "${HOME}" "${TMPDIR}"
 
 cd "${WORKSPACE}" || exit 1
-if [[ "$BRANCH" =~ (danube|euphrates) ]]; then
-    if [[ "${LAB_CONFIG_URL}" =~ ^(git|ssh):// ]]; then
-        echo "Cloning securedlab repo ${BRANCH}"
-        LOCAL_CFG="${TMPDIR}/securedlab"
-        rm -rf "${LOCAL_CFG}"
-        git clone --quiet --branch "${BRANCH}" "${LAB_CONFIG_URL}" "${LOCAL_CFG}"
-        LAB_CONFIG_ARG="-b file://${LOCAL_CFG}"
-        BRIDGE_ARG="-B ${BRIDGE:-pxebr}"
-    else
-        LAB_CONFIG_ARG="-b ${LAB_CONFIG_URL}"
-    fi
-fi
 
 # log file name
 FUEL_LOG_FILENAME="${JOB_NAME}_${BUILD_NUMBER}.log.tar.gz"
 
+# Limited scope for vPOD verify jobs running on armband-virtual
+[[ ! "${JOB_NAME}" =~ verify-deploy-virtual-arm64 ]] || EXTRA_ARGS='-e'
+
+# turn on DEBUG mode
+[[ ${CI_DEBUG,,} == true ]] && EXTRA_ARGS="-D ${EXTRA_ARGS:-}"
+
 # construct the command
-DEPLOY_COMMAND="${SUDO} ${WORKSPACE}/ci/deploy.sh ${LAB_CONFIG_ARG:-} \
-    -l ${LAB_NAME} -p ${POD_NAME} -s ${DEPLOY_SCENARIO} ${ISO_FILE_ARG:-} \
-    -S ${TMPDIR} ${BRIDGE_ARG:-} \
+DEPLOY_COMMAND="${WORKSPACE}/ci/deploy.sh \
+    -l ${LAB_NAME} -p ${POD_NAME} -s ${DEPLOY_SCENARIO} \
+    -S ${TMPDIR} ${EXTRA_ARGS:-} \
     -L ${WORKSPACE}/${FUEL_LOG_FILENAME}"
 
 # log info to console
@@ -97,7 +57,6 @@ echo "--------------------------------------------------------"
 echo "Scenario: ${DEPLOY_SCENARIO}"
 echo "Lab: ${LAB_NAME}"
 echo "POD: ${POD_NAME}"
-[[ "${BRANCH}" =~ 'danube' ]] && echo "ISO: ${OPNFV_ARTIFACT_URL/*\/}"
 echo
 echo "Starting the deployment using ${INSTALLER_TYPE}. This could take some time..."
 echo "--------------------------------------------------------"
@@ -117,7 +76,7 @@ echo "Deployment is done!"
 
 # upload logs for baremetal deployments
 # work with virtual deployments is still going on, so skip that for now
-if [[ "${JOB_NAME}" =~ (baremetal-daily|baremetal-weekly) ]]; then
+if [[ "${JOB_NAME}" =~ baremetal-daily ]]; then
     echo "Uploading deployment logs"
     gsutil cp "${WORKSPACE}/${FUEL_LOG_FILENAME}" \
         "gs://${GS_URL}/logs/${FUEL_LOG_FILENAME}" > /dev/null 2>&1
diff --git a/jjb/fuel/fuel-docker-jobs.yaml b/jjb/fuel/fuel-docker-jobs.yaml
new file mode 100644 (file)
index 0000000..c06848e
--- /dev/null
@@ -0,0 +1,204 @@
+---
+##############################################
+# job configuration for docker build and push
+##############################################
+- project:
+
+    name: fuel-docker
+
+    project: fuel
+
+    stream:
+      - master:
+          branch: '{stream}'
+          disabled: false
+      - gambia:
+          branch: 'stable/{stream}'
+          disabled: false
+
+    arch_tag:
+      - 'amd64':
+          slave_label: 'opnfv-build-ubuntu'
+      - 'arm64':
+          slave_label: 'opnfv-build-ubuntu-arm'
+
+    # settings for jobs run in multijob phases
+    build-job-settings: &build-job-settings
+      current-parameters: false
+      git-revision: true
+      node-parameters: false
+      predefined-parameters: |
+        PUSH_IMAGE=$PUSH_IMAGE
+        COMMIT_ID=$COMMIT_ID
+        GERRIT_REFNAME=$GERRIT_REFNAME
+      kill-phase-on: FAILURE
+      abort-all-jobs: false
+
+    manifest-job-settings: &manifest-job-settings
+      current-parameters: false
+      git-revision: true
+      node-parameters: false
+      predefined-parameters:
+        GERRIT_REFNAME=$GERRIT_REFNAME
+      kill-phase-on: FAILURE
+      abort-all-jobs: false
+
+    jobs:
+      - "fuel-docker-{stream}"
+      - "fuel-docker-build-{arch_tag}-{stream}"
+      - "fuel-docker-manifest-{stream}"
+
+########################
+# job templates
+########################
+- job-template:
+    name: 'fuel-docker-{stream}'
+
+    project-type: multijob
+
+    disabled: '{obj:disabled}'
+
+    parameters:
+      - fuel-job-parameters:
+          project: '{project}'
+          branch: '{branch}'
+          slave_label: 'opnfv-build-ubuntu'
+          arch_tag: 'amd64'
+
+    properties:
+      - throttle:
+          max-per-node: 1
+          option: 'project'
+
+    scm:
+      - git-scm
+
+    triggers:
+      - pollscm:
+          cron: "*/25 * * * *"
+      - gerrit-trigger-tag-created:
+          project: '{project}'
+
+    builders:
+      - multijob:
+          name: 'build fuel images'
+          execution-type: PARALLEL
+          projects:
+            - name: 'fuel-docker-build-amd64-{stream}'
+              <<: *build-job-settings
+            - name: 'fuel-docker-build-arm64-{stream}'
+              <<: *build-job-settings
+      - multijob:
+          name: 'publish fuel manifests'
+          condition: SUCCESSFUL
+          execution-type: PARALLEL
+          projects:
+            - name: 'fuel-docker-manifest-{stream}'
+              <<: *manifest-job-settings
+
+    publishers:
+      - email-fuel-ptl
+
+- job-template:
+    name: 'fuel-docker-build-{arch_tag}-{stream}'
+    disabled: '{obj:disabled}'
+    parameters:
+      - fuel-job-parameters:
+          project: '{project}'
+          branch: '{branch}'
+          slave_label: '{slave_label}'
+          arch_tag: '{arch_tag}'
+    scm:
+      - git-scm
+    builders:
+      - shell: |
+          #!/bin/bash -ex
+          case "{stream}" in
+          "master")
+              tag="latest" ;;
+          *)
+              tag="{stream}" ;;
+          esac
+          ./ci/build.sh $tag
+
+- job-template:
+    name: 'fuel-docker-manifest-{stream}'
+
+    parameters:
+      - project-parameter:
+          project: '{project}'
+          branch: '{branch}'
+      - label:
+          name: SLAVE_LABEL
+          default: 'opnfv-build-ubuntu'
+          description: 'Slave label on Jenkins'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
+      - string:
+          name: PROJECT
+          default: "{project}"
+          description: "Project name used to enable job conditions"
+      - string:
+          name: GIT_BASE
+          default: https://gerrit.opnfv.org/gerrit/$PROJECT
+          description: 'Git URL to use on this Jenkins Slave'
+      - string:
+          name: REPO
+          default: "opnfv"
+          description: "Repository name for fuel-docker images"
+
+
+    disabled: '{obj:disabled}'
+
+    builders:
+      - shell: |
+          #!/bin/bash -ex
+          case "{stream}" in
+          "master")
+              tag="latest" ;;
+          *)
+              tag="{stream}" ;;
+          esac
+          sudo manifest-tool push from-args \
+              --platforms linux/amd64,linux/arm64 \
+              --template $REPO/fuel:saltmaster-reclass-ARCH-$tag \
+              --target $REPO/fuel:saltmaster-reclass-$tag
+          exit $?
+
+# parameter macro
+- parameter:
+    name: fuel-job-parameters
+    parameters:
+      - project-parameter:
+          project: '{project}'
+          branch: '{branch}'
+      - label:
+          name: SLAVE_LABEL
+          default: '{slave_label}'
+          description: 'Slave label on Jenkins'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
+      - string:
+          name: GIT_BASE
+          default: https://gerrit.opnfv.org/gerrit/$PROJECT
+          description: 'Git URL to use on this Jenkins Slave'
+      - string:
+          name: PUSH_IMAGE
+          default: "true"
+          description: "To enable/disable pushing the image to Dockerhub."
+      - string:
+          name: COMMIT_ID
+          default: ""
+          description: "commit id to make a snapshot docker image"
+      - string:
+          name: GERRIT_REFNAME
+          default: ""
+          description: "Docker tag to be built, e.g. refs/tags/5.0.0, refs/tags/opnfv-5.0.0, refs/tags/5.0.RC1"
+      - string:
+          name: PROJECT
+          default: "{project}"
+          description: "Project name used to enable job conditions"
+      - string:
+          name: REPO
+          default: "opnfv"
+          description: "Repository name for fuel-docker images"
diff --git a/jjb/fuel/fuel-download-artifact.sh b/jjb/fuel/fuel-download-artifact.sh
deleted file mode 100755 (executable)
index 02ca103..0000000
+++ /dev/null
@@ -1,63 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2016 Ericsson AB and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o errexit
-set -o pipefail
-
-# disable Fuel ISO download for master branch
-[[ ! "$BRANCH" =~ (danube) ]] && exit 0
-
-# use proxy url to replace the normal URL, or googleusercontent.com will be blocked randomly
-[[ "$NODE_NAME" =~ (zte) ]] && GS_URL=${GS_BASE_PROXY%%/*}/$GS_URL
-
-# get the latest.properties file in order to get info regarding latest artifact
-echo "Downloading http://$GS_URL/latest.properties"
-curl -L -s -o $WORKSPACE/latest.properties http://$GS_URL/latest.properties
-
-# check if we got the file
-[[ -f $WORKSPACE/latest.properties ]] || exit 1
-
-# source the file so we get artifact metadata
-source $WORKSPACE/latest.properties
-
-# echo the info about artifact that is used during the deployment
-OPNFV_ARTIFACT=${OPNFV_ARTIFACT_URL/*\/}
-echo "Using $OPNFV_ARTIFACT for deployment"
-
-# check if we already have the ISO to avoid redownload
-ISOSTORE="/iso_mount/opnfv_ci/${BRANCH##*/}"
-if [[ -f "$ISOSTORE/$OPNFV_ARTIFACT" ]]; then
-    echo "ISO exists locally. Skipping the download and using the file from ISO store"
-    ln -s $ISOSTORE/$OPNFV_ARTIFACT $WORKSPACE/opnfv.iso
-    echo "--------------------------------------------------------"
-    echo
-    ls -al $WORKSPACE/opnfv.iso
-    echo
-    echo "--------------------------------------------------------"
-    echo "Done!"
-    exit 0
-fi
-
-[[ "$NODE_NAME" =~ (zte) ]] && OPNFV_ARTIFACT_URL=${GS_BASE_PROXY%%/*}/$OPNFV_ARTIFACT_URL
-
-# log info to console
-echo "Downloading the $INSTALLER_TYPE artifact using URL http://$OPNFV_ARTIFACT_URL"
-echo "This could take some time..."
-echo "--------------------------------------------------------"
-echo
-
-# download the file
-curl -L -s -o $WORKSPACE/opnfv.iso http://$OPNFV_ARTIFACT_URL > gsutil.iso.log 2>&1
-
-# list the file
-ls -al $WORKSPACE/opnfv.iso
-
-echo
-echo "--------------------------------------------------------"
-echo "Done!"
diff --git a/jjb/fuel/fuel-lab-reconfig.sh b/jjb/fuel/fuel-lab-reconfig.sh
deleted file mode 100755 (executable)
index 4b42a39..0000000
+++ /dev/null
@@ -1,80 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2016 Ericsson AB and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o errexit
-set -o nounset
-set -o pipefail
-
-# check to see if ucs login info file exists
-if [ -e ~/.ssh/ucs_creds ];then
-    source ~/.ssh/ucs_creds
-else
-    echo "Unable to find UCS credentials for LF lab reconfiguration...Exiting"
-    exit 1
-fi
-
-# clone releng
-echo "Cloning releng repo..."
-if ! GIT_SSL_NO_VERIFY=true git clone https://gerrit.opnfv.org/gerrit/releng; then
-    echo "Unable to clone releng repo...Exiting"
-    exit 1
-fi
-
-# log info to console
-echo "Starting the lab reconfiguration for $INSTALLER_TYPE..."
-echo "--------------------------------------------------------"
-echo
-
-# create venv
-$WORKSPACE/releng/utils/lab-reconfiguration/create_venv.sh
-
-# disable nounset because 'activate' script contains unbound variable(s)
-set +o nounset
-# enter venv
-source $WORKSPACE/releng/utils/lab-reconfiguration/venv/bin/activate
-# set nounset back again
-set -o nounset
-
-# verify we are in venv
-if [[ ! $(which python | grep venv) ]]; then
-    echo "Unable to activate venv...Exiting"
-    exit 1
-fi
-
-python $WORKSPACE/releng/utils/lab-reconfiguration/reconfigUcsNet.py -i $ucs_host -u $ucs_user -p $ucs_password -f $WORKSPACE/releng/utils/lab-reconfiguration/fuel.yaml
-
-# while undergoing reboot
-sleep 30
-
-# check to see if slave is back up
-ping_counter=0
-ping_flag=0
-while [ "$ping_counter" -lt 20 ]; do
-    if [[ $(ping -c 5 172.30.10.72) ]]; then
-        ping_flag=1
-        break
-    fi
-    ((ping_counter++))
-    sleep 10
-done
-
-if [ "$ping_flag" -eq 1 ]; then
-    echo "Slave is pingable, now wait 180 seconds for services to start"
-    sleep 180
-else
-    echo "Slave did not come back up after reboot: please check lf-pod2"
-    exit 1
-fi
-
-set +o nounset
-deactivate
-
-echo
-echo "--------------------------------------------------------"
-echo "Done!"
diff --git a/jjb/fuel/fuel-project-jobs.yaml b/jjb/fuel/fuel-project-jobs.yaml
deleted file mode 100644 (file)
index fc49f63..0000000
+++ /dev/null
@@ -1,80 +0,0 @@
----
-########################
-# Job configuration for fuel
-########################
-- project:
-    name: fuel-project-jobs
-
-    project: 'fuel'
-
-    installer: 'fuel'
-
-    stream:
-      - master:
-          branch: '{stream}'
-          gs-pathname: ''
-          disabled: false
-      - fraser:
-          branch: 'stable/{stream}'
-          gs-pathname: '/{stream}'
-          disabled: false
-
-    jobs:
-      - 'fuel-deploy-generic-daily-{stream}'
-
-########################
-# job templates
-########################
-- job-template:
-    name: 'fuel-deploy-generic-daily-{stream}'
-
-    concurrent: true
-
-    disabled: '{obj:disabled}'
-
-    properties:
-      - logrotate-default
-      - throttle:
-          enabled: true
-          max-per-node: 1
-          option: 'project'
-      - build-blocker:
-          use-build-blocker: true
-          blocking-jobs:
-            - 'fuel-os-.*?-daily-.*'
-            - 'fuel-deploy-generic-daily-.*'
-          block-level: 'NODE'
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - '{installer}-defaults':
-          gs-pathname: '{gs-pathname}'
-      - string:
-          name: GIT_BASE
-          default: https://gerrit.opnfv.org/gerrit/$PROJECT
-          description: 'Git URL to use on this Jenkins Slave'
-      - string:
-          name: DEPLOY_SCENARIO
-          default: 'os-odl-nofeature-ha'
-      - node:
-          name: SLAVE_NAME
-          description: 'Slave name on Jenkins'
-          allowed-slaves:
-            - ericsson-pod2
-            - lf-pod2
-            - ericsson-pod1
-          default-slaves:
-            - ericsson-pod2
-
-    scm:
-      - git-scm
-
-    wrappers:
-      - build-name:
-          name: '$BUILD_NUMBER - POD: $NODE_NAME Scenario: $DEPLOY_SCENARIO'
-
-    builders:
-      - shell:
-          !include-raw-escape: ./fuel-deploy.sh
diff --git a/jjb/fuel/fuel-rtd-jobs.yaml b/jjb/fuel/fuel-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..d40ef98
--- /dev/null
@@ -0,0 +1,20 @@
+---
+- project:
+    name: fuel-rtd
+    project: fuel
+    project-name: fuel
+
+    project-pattern: 'fuel'
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-fuel/47205/'
+    rtd-token: '38f40bf6c08fd4bccb930871bc29b08404cf98b0'
+
+    stream:
+      - master:
+          branch: '{stream}'
+          disabled: false
+      - gambia:
+          branch: 'stable/{stream}'
+          disabled: false
+
+    jobs:
+      - '{project-name}-rtd-jobs'
diff --git a/jjb/fuel/fuel-set-scenario.sh b/jjb/fuel/fuel-set-scenario.sh
new file mode 100755 (executable)
index 0000000..4c8ed73
--- /dev/null
@@ -0,0 +1,54 @@
+#!/bin/bash
+# SPDX-license-identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2018 SUSE, Mirantis Inc., Enea Software AB and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+set -o pipefail
+set -x
+
+#----------------------------------------------------------------------
+# This script is used by CI and executed by Jenkins jobs.
+# You are not supposed to use this script manually if you don't know
+# what you are doing.
+#----------------------------------------------------------------------
+
+# This function allows developers to specify the impacted scenario by
+# requesting a RE-check via a gerrit change comment under a specific format.
+#
+# Patterns to be searched in change comment:
+#   recheck: <scenario-name>
+#   reverify: <scenario-name>
+# Examples:
+#   recheck: os-odl-ovs-noha
+#   reverify: os-nosdn-nofeature-ha
+
+function set_scenario() {
+    # process gerrit event comment text (if present)
+    DEPLOY_SCENARIO=$(echo "${GERRIT_EVENT_COMMENT_TEXT}" | \
+                      grep -Po '(?!:(recheck|reverify):\s*)([-\w]+ha)')
+    if [ -z "${DEPLOY_SCENARIO}" ]; then
+        if [[ "$JOB_NAME" =~ baremetal ]]; then
+            DEPLOY_SCENARIO='os-nosdn-nofeature-ha'
+        else
+            DEPLOY_SCENARIO='os-nosdn-nofeature-noha'
+        fi
+    fi
+    # save the scenario name into a Java properties file
+    # so it can be injected into downstream jobs via envInject
+    echo "Recording the scenario '${DEPLOY_SCENARIO}' for downstream jobs"
+    echo "DEPLOY_SCENARIO=${DEPLOY_SCENARIO}" > "$WORK_DIRECTORY/scenario.properties"
+}
+
+# ensure GERRIT vars are set
+[ -n "${GERRIT_CHANGE_NUMBER}" ] || exit 1
+GERRIT_EVENT_COMMENT_TEXT="${GERRIT_EVENT_COMMENT_TEXT:-''}"
+
+# this directory is where the temporary properties file will be stored
+WORK_DIRECTORY=/tmp/$GERRIT_CHANGE_NUMBER
+/bin/rm -rf "$WORK_DIRECTORY" && mkdir -p "$WORK_DIRECTORY"
+
+set_scenario
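
For illustration only, a quick sketch of the comment parsing in set_scenario above; the comment text is an assumed example and, like the script itself, it relies on GNU grep with PCRE support:

    GERRIT_EVENT_COMMENT_TEXT='recheck: os-odl-ovs-noha'
    echo "${GERRIT_EVENT_COMMENT_TEXT}" | \
        grep -Po '(?!:(recheck|reverify):\s*)([-\w]+ha)'
    # prints: os-odl-ovs-noha
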
index bef6a50..5695ef8 100644 (file)
           branch: '{stream}'
           gs-pathname: ''
           disabled: false
-      - fraser:
+      - gambia:
           branch: 'stable/{stream}'
           gs-pathname: '/{stream}'
           disabled: false
     #####################################
+    # cluster architectures
+    #####################################
+    arch_tag:
+      - 'amd64':
+          slave-label: 'fuel'
+          functest-suite-label: 'fuel'
+      - 'arm64':
+          slave-label: 'armband'
+          functest-suite-label: 'fuel-armband'
+    #####################################
+    # cluster types
+    #####################################
+    type:
+      - 'virtual'
+      - 'baremetal'
+    #####################################
     # patch verification phases
     #####################################
     phase:
-      - 'deploy-virtual':
-          slave-label: 'fuel-virtual'
+      - 'docker-build'
+      - 'deploy'
     #####################################
     # jobs
     #####################################
     jobs:
-      - 'fuel-verify-{stream}'
-      - 'fuel-verify-{phase}-{stream}'
+      - 'fuel-verify-{type}-{arch_tag}-{stream}'
+      - 'fuel-verify-{phase}-{type}-{arch_tag}-{stream}'
 #####################################
 # job templates
 #####################################
 - job-template:
-    name: 'fuel-verify-{stream}'
+    name: 'fuel-verify-{type}-{arch_tag}-{stream}'
 
     project-type: multijob
 
@@ -50,9 +66,9 @@
       - build-blocker:
           use-build-blocker: true
           blocking-jobs:
-            - 'fuel-os-.*?-virtual-daily-.*'
+            - 'fuel-os-.*?-daily-.*'
             - 'fuel-verify-.*'
-          block-level: 'NODE'
+          blocking-level: 'NODE'
 
     scm:
       - git-scm-gerrit
           fail: true
 
     triggers:
-      - gerrit:
-          server-name: 'gerrit.opnfv.org'
-          trigger-on:
-            - patchset-created-event:
-                exclude-drafts: 'false'
-                exclude-trivial-rebase: 'false'
-                exclude-no-code-change: 'false'
-            - draft-published-event
-            - comment-added-contains-event:
-                comment-contains-value: 'recheck'
-            - comment-added-contains-event:
-                comment-contains-value: 'reverify'
-          projects:
-            - project-compare-type: 'ANT'
-              project-pattern: '{project}'
-              branches:
-                - branch-compare-type: 'ANT'
-                  branch-pattern: '**/{branch}'
-              file-paths:
-                - compare-type: ANT
-                  pattern: 'ci/**'
-                - compare-type: ANT
-                  pattern: 'mcp/**'
-              disable-strict-forbidden-file-verification: 'true'
-              forbidden-file-paths:
-                - compare-type: ANT
-                  pattern: 'docs/**'
-          readable-message: true
+      - 'fuel-verify-{type}-{arch_tag}-trigger':
+          project: '{project}'
+          branch: '{branch}'
 
     parameters:
       - project-parameter:
           project: '{project}'
           branch: '{branch}'
-      - 'fuel-virtual-defaults':
+      - '{slave-label}-{type}-defaults':
           installer: '{installer}'
       - '{installer}-defaults':
           gs-pathname: '{gs-pathname}'
-      - string:
-          name: DEPLOY_SCENARIO
-          default: 'os-nosdn-nofeature-ha'
 
     builders:
+      - 'fuel-verify-set-scenario-macro'
+      - inject:
+          properties-file: "/tmp/$GERRIT_CHANGE_NUMBER/scenario.properties"
       - description-setter:
-          description: "Built on $NODE_NAME"
+          description: "Scenario: $DEPLOY_SCENARIO | Node: $NODE_NAME"
       - multijob:
-          name: deploy-virtual
+          name: docker-build
           condition: SUCCESSFUL
           projects:
-            - name: 'fuel-verify-deploy-virtual-{stream}'
+            - name: 'fuel-verify-docker-build-{type}-{arch_tag}-{stream}'
               current-parameters: false
               predefined-parameters: |
                 BRANCH=$BRANCH
                 GERRIT_REFSPEC=$GERRIT_REFSPEC
                 GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
                 GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-              node-parameters: false
+              node-parameters: true
               kill-phase-on: FAILURE
               abort-all-job: true
 
       - multijob:
-          name: smoke-test
+          name: deploy
           condition: SUCCESSFUL
           projects:
-            # Use Functest job definition from jjb/functest/functest-daily-jobs
-            - name: 'functest-fuel-virtual-suite-{stream}'
+            - name: 'fuel-verify-deploy-{type}-{arch_tag}-{stream}'
               current-parameters: false
               predefined-parameters: |
-                FUNCTEST_MODE=tier
-                FUNCTEST_TIER=healthcheck
+                MCP_DOCKER_TAG={arch_tag}-verify
                 DEPLOY_SCENARIO=$DEPLOY_SCENARIO
                 BRANCH=$BRANCH
                 GERRIT_REFSPEC=$GERRIT_REFSPEC
                 GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
                 GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
               node-parameters: true
-              kill-phase-on: NEVER
+              kill-phase-on: FAILURE
               abort-all-job: true
-            - name: 'functest-fuel-virtual-suite-{stream}'
+
+      - multijob:
+          name: smoke-test
+          condition: SUCCESSFUL
+          projects:
+            # Use Functest job definition from jjb/functest/functest-daily-jobs
+            - name: 'functest-{functest-suite-label}-{type}-suite-{stream}'
               current-parameters: false
               predefined-parameters: |
-                FUNCTEST_MODE=testcase
-                FUNCTEST_SUITE_NAME=vping_ssh
+                FUNCTEST_MODE=tier
+                FUNCTEST_TIER=healthcheck
                 DEPLOY_SCENARIO=$DEPLOY_SCENARIO
                 BRANCH=$BRANCH
                 GERRIT_REFSPEC=$GERRIT_REFSPEC
                 GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
                 GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
               node-parameters: true
+              enable-condition: "def m = ! ('$NODE_LABELS' =~ /armband-virtual/)"
               kill-phase-on: NEVER
               abort-all-job: true
 
 - job-template:
-    name: 'fuel-verify-{phase}-{stream}'
+    name: 'fuel-verify-{phase}-{type}-{arch_tag}-{stream}'
 
     disabled: '{obj:disabled}'
 
       - build-blocker:
           use-build-blocker: true
           blocking-jobs:
+            - 'fuel-verify-docker-build-.*'
             - 'fuel-verify-deploy-.*'
-          block-level: 'NODE'
+          blocking-level: 'NODE'
 
     scm:
       - git-scm-gerrit
       - project-parameter:
           project: '{project}'
           branch: '{branch}'
-      - '{slave-label}-defaults'
-      - 'fuel-virtual-defaults':
+      - '{slave-label}-{type}-defaults':
           installer: '{installer}'
       - '{installer}-defaults':
           gs-pathname: '{gs-pathname}'
       - description-setter:
           description: "Built on $NODE_NAME"
       - '{project}-verify-{phase}-macro'
+########################
+# trigger macros
+########################
+- trigger:
+    name: 'fuel-verify-virtual-amd64-trigger'
+    triggers:
+      - gerrit: &fuel_verify_virtual_amd64_trigger
+          server-name: 'gerrit.opnfv.org'
+          trigger-on:
+            - patchset-created-event:
+                exclude-drafts: 'false'
+                exclude-trivial-rebase: 'false'
+                exclude-no-code-change: 'false'
+            - draft-published-event
+            - comment-added-contains-event:
+                comment-contains-value: '(recheck|reverify)(\s|$|:\s*[-\w]+-noha)'
+          projects:
+            - project-compare-type: 'ANT'
+              project-pattern: '{project}'
+              branches:
+                - branch-compare-type: 'ANT'
+                  branch-pattern: '**/{branch}'
+              file-paths:
+                - compare-type: ANT
+                  pattern: 'ci/**'
+                - compare-type: ANT
+                  pattern: 'mcp/**'
+              disable-strict-forbidden-file-verification: 'true'
+              forbidden-file-paths:
+                - compare-type: ANT
+                  pattern: 'docs/**'
+          readable-message: true
+- trigger:
+    name: 'fuel-verify-virtual-arm64-trigger'
+    triggers:
+      - gerrit:
+          <<: *fuel_verify_virtual_amd64_trigger
+- trigger:
+    name: 'fuel-verify-baremetal-amd64-trigger'
+    triggers:
+      - gerrit: &fuel_verify_baremetal_amd64_trigger
+          <<: *fuel_verify_virtual_amd64_trigger
+          trigger-on:
+            - comment-added-contains-event:
+                comment-contains-value: '(recheck|reverify):\s*[-\w]+-ha'
+- trigger:
+    name: 'fuel-verify-baremetal-arm64-trigger'
+    triggers:
+      - gerrit:
+          <<: *fuel_verify_baremetal_amd64_trigger
+          skip-vote:
+            successful: true
+            failed: true
+            unstable: true
+            notbuilt: true
 #####################################
 # builder macros
 #####################################
 - builder:
-    name: 'fuel-verify-deploy-virtual-macro'
+    name: 'fuel-verify-set-scenario-macro'
+    builders:
+      - shell:
+          !include-raw: ./fuel-set-scenario.sh
+- builder:
+    name: 'fuel-verify-deploy-macro'
     builders:
       - shell:
           !include-raw: ./fuel-deploy.sh
+- builder:
+    name: 'fuel-verify-docker-build-macro'
+    builders:
+      - shell: |
+          #!/bin/bash -ex
+          sudo -E ./ci/build.sh 'verify' ''
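
For illustration only: the comment-contains-value patterns above decide which verify flavour re-runs. Under the assumption that grep -P is an adequate stand-in for Gerrit's regex matching, the virtual triggers fire on a bare 'recheck'/'reverify' or a noha scenario, while the baremetal triggers require an HA scenario to be named:

    echo 'recheck: os-odl-ovs-noha' | \
        grep -qP '(recheck|reverify)(\s|$|:\s*[-\w]+-noha)' && echo 'virtual trigger matches'
    echo 'reverify: os-nosdn-nofeature-ha' | \
        grep -qP '(recheck|reverify):\s*[-\w]+-ha' && echo 'baremetal trigger matches'
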
diff --git a/jjb/fuel/fuel-weekly-jobs.yaml b/jjb/fuel/fuel-weekly-jobs.yaml
deleted file mode 100644 (file)
index aca625d..0000000
+++ /dev/null
@@ -1,191 +0,0 @@
----
-# jenkins job templates for Fuel
-- project:
-
-    name: fuel-weekly
-
-    project: fuel
-
-    installer: fuel
-
-    # -------------------------------
-    # BRANCH ANCHORS
-    # -------------------------------
-    master: &master
-      stream: master
-      branch: '{stream}'
-      disabled: false
-      gs-pathname: ''
-    fraser: &fraser
-      stream: fraser
-      branch: 'stable/{stream}'
-      disabled: false
-      gs-pathname: '/{stream}'
-    # -------------------------------
-    # POD, INSTALLER, AND BRANCH MAPPING
-    # -------------------------------
-    #        CI PODs
-    # -------------------------------
-    pod:
-      - baremetal:
-          slave-label: fuel-baremetal
-          <<: *master
-      - virtual:
-          slave-label: fuel-virtual
-          <<: *master
-      - baremetal:
-          slave-label: fuel-baremetal
-          <<: *fraser
-      - virtual:
-          slave-label: fuel-virtual
-          <<: *fraser
-    # -------------------------------
-    #       scenarios
-    # -------------------------------
-    scenario:
-      # HA scenarios
-      - 'os-nosdn-nofeature-ha':
-          auto-trigger-name: 'weekly-trigger-disabled'
-
-    jobs:
-      - 'fuel-{scenario}-{pod}-weekly-{stream}'
-      - 'fuel-deploy-{pod}-weekly-{stream}'
-
-########################
-# job templates
-########################
-- job-template:
-    name: 'fuel-{scenario}-{pod}-weekly-{stream}'
-
-    disabled: '{obj:disabled}'
-
-    concurrent: false
-
-    properties:
-      - logrotate-default
-      - throttle:
-          enabled: true
-          max-total: 4
-          max-per-node: 1
-          option: 'project'
-      - build-blocker:
-          use-build-blocker: true
-          blocking-jobs:
-            - 'fuel-os-.*?-{pod}-daily-.*'
-            - 'fuel-os-.*?-{pod}-weekly-.*'
-            - 'fuel-verify-.*'
-          block-level: 'NODE'
-
-    wrappers:
-      - build-name:
-          name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
-
-    triggers:
-      - '{auto-trigger-name}'
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - '{installer}-defaults':
-          gs-pathname: '{gs-pathname}'
-      - '{slave-label}-defaults':
-          installer: '{installer}'
-      - string:
-          name: DEPLOY_SCENARIO
-          default: '{scenario}'
-
-    builders:
-      - description-setter:
-          description: "Built on $NODE_NAME"
-      - trigger-builds:
-          - project: 'fuel-deploy-{pod}-weekly-{stream}'
-            current-parameters: false
-            predefined-parameters:
-              DEPLOY_SCENARIO={scenario}
-            same-node: true
-            block: true
-      - trigger-builds:
-          - project: 'functest-fuel-{pod}-weekly-{stream}'
-            current-parameters: false
-            predefined-parameters:
-              DEPLOY_SCENARIO={scenario}
-            same-node: true
-            block: true
-            block-thresholds:
-              build-step-failure-threshold: 'never'
-              failure-threshold: 'never'
-              unstable-threshold: 'FAILURE'
-
-    publishers:
-      - email:
-          recipients: peter.barabas@ericsson.com fzhadaev@mirantis.com
-      - email-jenkins-admins-on-failure
-
-- job-template:
-    name: 'fuel-deploy-{pod}-weekly-{stream}'
-
-    disabled: '{obj:disabled}'
-
-    concurrent: true
-
-    properties:
-      - logrotate-default
-      - throttle:
-          enabled: true
-          max-total: 4
-          max-per-node: 1
-          option: 'project'
-      - build-blocker:
-          use-build-blocker: true
-          blocking-jobs:
-            - 'fuel-deploy-{pod}-daily-.*'
-            - 'fuel-deploy-generic-daily-.*'
-            - 'fuel-deploy-{pod}-weekly-.*'
-            - 'fuel-deploy-generic-weekly-.*'
-          block-level: 'NODE'
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - '{installer}-defaults':
-          gs-pathname: '{gs-pathname}'
-      - '{slave-label}-defaults':
-          installer: '{installer}'
-      - testapi-parameter
-      - string:
-          name: DEPLOY_SCENARIO
-          default: 'os-odl-nofeature-ha'
-
-    scm:
-      - git-scm
-
-    wrappers:
-      - build-name:
-          name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
-
-    builders:
-      - description-setter:
-          description: "Built on $NODE_NAME"
-      - track-begin-timestamp
-      - shell:
-          !include-raw-escape: ./fuel-deploy.sh
-
-    publishers:
-      - email:
-          recipients: peter.barabas@ericsson.com fzhadaev@mirantis.com
-      - email-jenkins-admins-on-failure
-      - report-provision-result
-
-########################
-# trigger macros
-########################
-# ----------------------------------------------
-# Triggers for job running on fuel-baremetal against master branch
-# ----------------------------------------------
-# HA Scenarios
-- trigger:
-    name: 'fuel-os-nosdn-nofeature-ha-baremetal-weekly-master-trigger'
-    triggers:
-      - timed: ''
index 1df9a55..42bf60b 100755 (executable)
@@ -4,15 +4,17 @@ set -e
 set +u
 set +o pipefail
 
+REPO=${REPO:-opnfv}
 CI_LOOP=${CI_LOOP:-daily}
 TEST_DB_URL=http://testresults.opnfv.org/test/api/v1/results
 ENERGY_RECORDER_API_URL=http://energy.opnfv.fr/resources
+DOCKER_TAG=${DOCKER_TAG:-$([[ ${BRANCH##*/} == "master" ]] && echo "latest" || echo ${BRANCH##*/})}
 
 check_os_deployment() {
-    FUNCTEST_IMAGE=opnfv/functest-healthcheck:${DOCKER_TAG}
+    FUNCTEST_IMAGE=${REPO}/functest-healthcheck:${DOCKER_TAG}
     echo "Functest: Pulling Functest Docker image ${FUNCTEST_IMAGE} ..."
     docker pull ${FUNCTEST_IMAGE}>/dev/null
-    cmd="docker run --rm --privileged=true ${volumes} ${FUNCTEST_IMAGE} check_deployment"
+    cmd="docker run --rm ${volumes} ${FUNCTEST_IMAGE} check_deployment"
     echo "Checking deployment, CMD: ${cmd}"
     eval ${cmd}
     ret_value=$?
@@ -25,19 +27,15 @@ check_os_deployment() {
 
 }
 
-
 run_tiers() {
     tiers=$1
     cmd_opt="run_tests -r -t all"
     [[ $BUILD_TAG =~ "suite" ]] && cmd_opt="run_tests -t all"
-    ret_val_file="${HOME}/opnfv/functest/results/${BRANCH##*/}/return_value"
-    echo 0 > ${ret_val_file}
-
     for tier in ${tiers[@]}; do
-        FUNCTEST_IMAGE=opnfv/functest-${tier}:${DOCKER_TAG}
+        FUNCTEST_IMAGE=${REPO}/functest-${tier}:${DOCKER_TAG}
         echo "Functest: Pulling Functest Docker image ${FUNCTEST_IMAGE} ..."
         docker pull ${FUNCTEST_IMAGE}>/dev/null
-        cmd="docker run --rm  --privileged=true ${envs} ${volumes} ${TESTCASE_OPTIONS} ${FUNCTEST_IMAGE} /bin/bash -c '${cmd_opt}'"
+        cmd="docker run --rm  ${envs} ${volumes} ${TESTCASE_OPTIONS} ${FUNCTEST_IMAGE} /bin/bash -c '${cmd_opt}'"
         echo "Running Functest tier '${tier}'. CMD: ${cmd}"
         eval ${cmd}
         ret_value=$?
@@ -45,6 +43,7 @@ run_tiers() {
             echo ${ret_value} > ${ret_val_file}
             if [ ${tier} == 'healthcheck' ]; then
                 echo "Healthcheck tier failed. Exiting Functest..."
+                skip_tests=1
                 break
             fi
         fi
@@ -54,22 +53,20 @@ run_tiers() {
 run_test() {
     test_name=$1
     cmd_opt="run_tests -t ${test_name}"
-    ret_val_file="${HOME}/opnfv/functest/results/${BRANCH##*/}/return_value"
-    echo 0 > ${ret_val_file}
     # Determine which Functest image should be used for the test case
     case ${test_name} in
-        connection_check|api_check|snaps_health_check)
-            FUNCTEST_IMAGE=opnfv/functest-healthcheck:${DOCKER_TAG} ;;
-        vping_ssh|vping_userdata|tempest_smoke_serial|rally_sanity|refstack_defcore|odl|odl_netvirt|snaps_smoke)
-            FUNCTEST_IMAGE=opnfv/functest-smoke:${DOCKER_TAG} ;;
-        tempest_full_parallel|rally_full)
-            FUNCTEST_IMAGE=opnfv/functest-components:${DOCKER_TAG} ;;
-        cloudify_ims|orchestra_openims|orchestra_clearwaterims|vyos_vrouter)
-            FUNCTEST_IMAGE=opnfv/functest-vnf:${DOCKER_TAG} ;;
-        promise|doctor-notification|bgpvpn|functest-odl-sfc|domino-multinode|barometercollectd|fds)
-            FUNCTEST_IMAGE=opnfv/functest-features:${DOCKER_TAG} ;;
-        parser-basics)
-            FUNCTEST_IMAGE=opnfv/functest-parser:${DOCKER_TAG} ;;
+        connection_check|tenantnetwork1|tenantnetwork2|vmready1|vmready2|singlevm1|singlevm2|vping_ssh|vping_userdata|cinder_test|odl|api_check|snaps_health_check)
+            FUNCTEST_IMAGE=${REPO}/functest-healthcheck:${DOCKER_TAG} ;;
+        tempest_smoke_serial|tempest_smoke|neutron-tempest-plugin-api|rally_sanity|refstack_defcore|patrole|snaps_smoke|neutron_trunk|networking-bgpvpn|networking-sfc|barbican)
+            FUNCTEST_IMAGE=${REPO}/functest-smoke:${DOCKER_TAG} ;;
+        shaker|vmtp)
+            FUNCTEST_IMAGE=${REPO}/functest-benchmarking:${DOCKER_TAG} ;;
+        tempest_full_parallel|tempest_full|tempest_scenario|rally_full)
+            FUNCTEST_IMAGE=${REPO}/functest-components:${DOCKER_TAG} ;;
+        cloudify|cloudify_ims|heat_ims|vyos_vrouter|juju_epc)
+            FUNCTEST_IMAGE=${REPO}/functest-vnf:${DOCKER_TAG} ;;
+        doctor-notification|bgpvpn|functest-odl-sfc|barometercollectd|fds|vgpu|stor4nfv_os)
+            FUNCTEST_IMAGE=${REPO}/functest-features:${DOCKER_TAG} ;;
         *)
             echo "Unkown test case $test_name"
             exit 1
@@ -77,7 +74,7 @@ run_test() {
     esac
     echo "Functest: Pulling Functest Docker image ${FUNCTEST_IMAGE} ..."
     docker pull ${FUNCTEST_IMAGE}>/dev/null
-    cmd="docker run --rm --privileged=true ${envs} ${volumes} ${TESTCASE_OPTIONS} ${FUNCTEST_IMAGE} /bin/bash -c '${cmd_opt}'"
+    cmd="docker run --rm ${envs} ${volumes} ${TESTCASE_OPTIONS} ${FUNCTEST_IMAGE} /bin/bash -c '${cmd_opt}'"
     echo "Running Functest test case '${test_name}'. CMD: ${cmd}"
     eval ${cmd}
     ret_value=$?
@@ -92,7 +89,6 @@ FUNCTEST_DIR=/home/opnfv/functest
 DEPLOY_TYPE=baremetal
 [[ $BUILD_TAG =~ "virtual" ]] && DEPLOY_TYPE=virt
 HOST_ARCH=$(uname -m)
-DOCKER_TAG=`[[ ${BRANCH##*/} == "master" ]] && echo "latest" || echo ${BRANCH##*/}`
 
 # Prepare OpenStack credentials volume
 rc_file=${HOME}/opnfv-openrc.sh
@@ -108,11 +104,6 @@ fi
 
 rc_file_vol="-v ${rc_file}:${FUNCTEST_DIR}/conf/env_file"
 
-# Set iptables rule to allow forwarding return traffic for container
-if ! sudo iptables -C FORWARD -j RETURN 2> ${redirect} || ! sudo iptables -L FORWARD | awk 'NR==3' | grep RETURN 2> ${redirect}; then
-    sudo iptables -I FORWARD -j RETURN
-fi
-
 echo "Functest: Start Docker and prepare environment"
 
 echo "Functest: Download images that will be used by test cases"
@@ -141,13 +132,17 @@ envs="-e INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP} \
 
 ssh_options="-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
 
-
 if [ "${INSTALLER_TYPE}" == 'fuel' ]; then
     COMPUTE_ARCH=$(ssh -l ubuntu ${INSTALLER_IP} -i ${SSH_KEY} ${ssh_options} \
         "sudo salt 'cmp*' grains.get cpuarch --out yaml | awk '{print \$2; exit}'")
+    IMAGE_PROPERTIES="hw_disk_bus:scsi,hw_scsi_model:virtio-scsi"
     envs="${envs} -e POD_ARCH=${COMPUTE_ARCH}"
 fi
 
+if [[ ${INSTALLER_TYPE} == 'fuel' && ${DEPLOY_SCENARIO} == 'os-nosdn-nofeature-noha' ]]; then
+    libvirt_vol="-v ${ssh_key}:${FUNCTEST_DIR}/conf/libvirt_key"
+    envs="${envs} -e LIBVIRT_USER=ubuntu -e LIBVIRT_KEY_PATH=${FUNCTEST_DIR}/conf/libvirt_key"
+fi
 
 if [[ ${INSTALLER_TYPE} == 'compass' && ${DEPLOY_SCENARIO} =~ 'sfc' ]]; then
     ssh_key="/tmp/id_rsa"
@@ -159,8 +154,108 @@ if [[ ${INSTALLER_TYPE} == 'compass' && ${DEPLOY_SCENARIO} =~ 'sfc' ]]; then
     envs="${envs} -e EXTERNAL_NETWORK=${EXTERNAL_NETWORK}"
 fi
 
+if [[ ${INSTALLER_TYPE} == 'compass' ]] || [[ ${DEPLOY_SCENARIO} == *"odl"* ]]; then
+      envs="${envs} -e SDN_CONTROLLER_RESTCONFPORT=8080"
+fi
+
+if [[ ${DEPLOY_SCENARIO} == *"ovs"* ]] || [[ ${DEPLOY_SCENARIO} == *"fdio"* ]]; then
+    if [[ -n ${IMAGE_PROPERTIES} ]]; then
+        IMAGE_PROPERTIES="${IMAGE_PROPERTIES},hw_mem_page_size:large"
+    else
+        IMAGE_PROPERTIES="hw_mem_page_size:large"
+    fi
+    FLAVOR_EXTRA_SPECS="hw:mem_page_size:large"
+fi
+
+if [[ -n ${IMAGE_PROPERTIES} ]] || [[ -n ${FLAVOR_EXTRA_SPECS} ]]; then
+    envs="${envs} -e IMAGE_PROPERTIES=${IMAGE_PROPERTIES} -e FLAVOR_EXTRA_SPECS=${FLAVOR_EXTRA_SPECS}"
+fi
+
+tempest_conf_yaml=$(mktemp)
+case ${INSTALLER_TYPE} in
+apex)
+    cat << EOF > "${tempest_conf_yaml}"
+---
+compute-feature-enabled:
+    shelve: false
+    vnc_console: true
+identity-feature-enabled:
+    api_v2: false
+    api_v2_admin: false
+image-feature-enabled:
+    api_v2: true
+    api_v1: false
+volume:
+    storage_protocol: ceph
+volume-feature-enabled:
+    backup: false
+EOF
+    ;;
+compass)
+    cat << EOF > "${tempest_conf_yaml}"
+---
+compute-feature-enabled:
+    shelve: false
+    vnc_console: false
+    spice_console: true
+identity-feature-enabled:
+    api_v2: false
+    api_v2_admin: false
+image-feature-enabled:
+    api_v2: true
+    api_v1: false
+volume:
+    storage_protocol: ceph
+volume-feature-enabled:
+    backup: false
+EOF
+    ;;
+fuel)
+    cat << EOF > "${tempest_conf_yaml}"
+---
+compute-feature-enabled:
+    shelve: false
+    vnc_console: false
+    spice_console: true
+identity-feature-enabled:
+    api_v2: false
+    api_v2_admin: false
+image-feature-enabled:
+    api_v2: true
+    api_v1: false
+volume:
+    storage_protocol: iSCSI
+volume-feature-enabled:
+    backup: false
+EOF
+    ;;
+*)
+    cat << EOF > "${tempest_conf_yaml}"
+---
+compute-feature-enabled:
+    shelve: false
+    vnc_console: false
+identity-feature-enabled:
+    api_v2: false
+    api_v2_admin: false
+image-feature-enabled:
+    api_v2: true
+    api_v1: false
+volume:
+    storage_protocol: iSCSI
+volume-feature-enabled:
+    backup: false
+EOF
+    ;;
+esac
+echo "tempest_conf.yaml:" && cat "${tempest_conf_yaml}"
 
-volumes="${images_vol} ${results_vol} ${sshkey_vol} ${userconfig_vol} ${rc_file_vol} ${cacert_file_vol}"
+volumes="${images_vol} ${results_vol} ${sshkey_vol} ${libvirt_vol} \
+    ${userconfig_vol} ${rc_file_vol} ${cacert_file_vol} \
+    -v ${tempest_conf_yaml}:/usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf.yaml"
+
+ret_val_file="${HOME}/opnfv/functest/results/${BRANCH##*/}/return_value"
+echo 0 > ${ret_val_file}
 
 set +e
 
@@ -172,10 +267,27 @@ elif [ ${FUNCTEST_MODE} == 'tier' ]; then
     tiers=(${FUNCTEST_TIER})
     run_tiers ${tiers}
 else
+    tests=(tempest_full tempest_scenario)
+    skip_tests=0
     if [ ${DEPLOY_TYPE} == 'baremetal' ] && [ "${HOST_ARCH}" != "aarch64" ]; then
-        tiers=(healthcheck smoke features vnf parser)
+        if [[ ${BRANCH} == "stable/fraser" ]]; then
+            tiers=(healthcheck smoke features vnf parser)
+            tests=(tempest_full_parallel)
+        else
+            tiers=(healthcheck smoke benchmarking features vnf)
+        fi
     else
-        tiers=(healthcheck smoke features parser)
+        if [[ ${BRANCH} == "stable/fraser" ]]; then
+            tiers=(healthcheck smoke features parser)
+            tests=(tempest_full_parallel)
+        else
+            tiers=(healthcheck smoke benchmarking features)
+        fi
     fi
     run_tiers ${tiers}
+    if [ ${skip_tests} -eq 0 ]; then
+        for test in "${tests[@]}"; do
+            run_test "$test"
+        done
+    fi
 fi
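
For illustration only, a small sketch of how the reworked image selection above resolves; REPO, BRANCH and the test name are assumed example values:

    REPO=opnfv
    BRANCH='stable/gambia'
    # same default as in the script: 'latest' on master, branch name otherwise
    DOCKER_TAG=${DOCKER_TAG:-$([[ ${BRANCH##*/} == "master" ]] && echo "latest" || echo ${BRANCH##*/})}
    test_name=vmtp
    case ${test_name} in
        shaker|vmtp) FUNCTEST_IMAGE=${REPO}/functest-benchmarking:${DOCKER_TAG} ;;
    esac
    echo "${FUNCTEST_IMAGE}"   # opnfv/functest-benchmarking:gambia
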
index ae226b6..34aee8a 100644 (file)
       stream: fraser
       branch: 'stable/{stream}'
       gs-pathname: '/{stream}'
+    gambia: &gambia
+      stream: gambia
+      branch: 'stable/{stream}'
+      gs-pathname: '/{stream}'
+    hunter: &hunter
+      stream: hunter
+      branch: 'stable/{stream}'
+      gs-pathname: '/{stream}'
     # -------------------------------
     # POD, INSTALLER, AND BRANCH MAPPING
     # -------------------------------
           slave-label: fuel-virtual
           installer: fuel
           <<: *master
+      - baremetal:
+          slave-label: fuel-baremetal
+          installer: fuel
+          <<: *gambia
+      - virtual:
+          slave-label: fuel-virtual
+          installer: fuel
+          <<: *gambia
       - baremetal:
           slave-label: fuel-baremetal
           installer: fuel
           slave-label: compass-virtual
           installer: compass
           <<: *master
+      - arm-virtual:
+          slave-label: compass-arm-virtual
+          installer: compass
+          <<: *master
       - baremetal:
           slave-label: compass-baremetal
           installer: compass
           slave-label: compass-virtual
           installer: compass
           <<: *fraser
+      - arm-virtual:
+          slave-label: compass-arm-virtual
+          installer: compass
+          <<: *fraser
+      - baremetal:
+          slave-label: compass-baremetal
+          installer: compass
+          <<: *gambia
+      - virtual:
+          slave-label: compass-virtual
+          installer: compass
+          <<: *gambia
+      - arm-virtual:
+          slave-label: compass-arm-virtual
+          installer: compass
+          <<: *gambia
       # apex CI PODs
       - virtual:
           slave-label: apex-virtual-master
           slave-label: apex-baremetal-master
           installer: apex
           <<: *master
+      - virtual:
+          slave-label: apex-virtual-master
+          installer: apex
+          <<: *gambia
+      - baremetal:
+          slave-label: apex-baremetal-master
+          installer: apex
+          <<: *gambia
       - virtual:
           slave-label: apex-virtual-fraser
           installer: apex
           slave-label: armband-virtual
           installer: fuel
           <<: *master
+      - armband-baremetal:
+          slave-label: armband-baremetal
+          installer: fuel
+          <<: *gambia
+      - armband-virtual:
+          slave-label: armband-virtual
+          installer: fuel
+          <<: *gambia
       - armband-baremetal:
           slave-label: armband-baremetal
           installer: fuel
       - 'suite':
           job-timeout: 60
       - 'daily':
-          job-timeout: 360
-      - 'arm-daily':
-          job-timeout: 480
+          job-timeout: 600
 
     jobs:
       - 'functest-{installer}-{pod}-{testsuite}-{stream}'
       - throttle:
           enabled: true
           max-per-node: 1
+          max-total: 10
           option: 'project'
 
     wrappers:
       - 'functest-{testsuite}-parameter'
       - string:
           name: DEPLOY_SCENARIO
-          default: 'os-odl_l2-nofeature-ha'
+          default: 'os-nosdn-nofeature-noha'
+      - string:
+          name: DOCKER_TAG
+          default: ''
       - string:
           name: CLEAN_DOCKER_IMAGES
           default: 'false'
           default: 'daily'
           description: "Daily suite name to run"
 
-- parameter:
-    name: functest-arm-daily-parameter
-    parameters:
-      - string:
-          name: FUNCTEST_MODE
-          default: 'arm-daily'
-          description: "Daily suite name (Aarch64) to run"
-
 - parameter:
     name: functest-suite-parameter
     parameters:
             - 'connection_check'
             - 'api_check'
             - 'snaps_health_check'
-            - 'vping_userdata'
             - 'vping_ssh'
-            - 'tempest_smoke_serial'
+            - 'vping_userdata'
+            - 'cinder_test'
+            - 'tempest_smoke'
             - 'rally_sanity'
+            - 'refstack_defcore'
+            - 'patrole'
             - 'odl'
-            - 'odl_netvirt'
             - 'snaps_smoke'
-            - 'refstack_defcore'
-            - 'promise'
-            - 'doctor'
+            - 'shaker'
+            - 'vmtp'
+            - 'neutron_trunk'
+            - 'doctor-notification'
             - 'bgpvpn'
-            - 'parser'
-            - 'security_scan'
-            - 'tempest_full_parallel'
+            - 'functest-odl-sfc'
+            - 'barometercollectd'
+            - 'fds'
+            - 'tempest_full'
             - 'rally_full'
             - 'cloudify_ims'
-            - 'cloudify_vrouter'
-            - 'orchestra_openims'
-            - 'orchestra_clearwaterims'
+            - 'vyos_vrouter'
+            - 'juju_epc'
+            - 'parser'
           default: 'connection_check'
       - choice:
           name: FUNCTEST_TIER
           choices:
             - 'healthcheck'
             - 'smoke'
+            - 'benchmarking'
             - 'features'
             - 'components'
             - 'vnf'
+            - 'parser'
           default: 'healthcheck'
       - string:
           name: TESTCASE_OPTIONS
           name: RC_FILE_PATH
           default: ''
           description: "Path to the OS credentials file if given"
+      - string:
+          name: REPO
+          default: "opnfv"
+          description: "Repository name for functest images"
 ########################
 # trigger macros
 ########################
       - 'functest-store-results'
       - 'functest-exit'
 
-- builder:
-    name: functest-arm-daily-builder
-    builders:
-      - 'functest-cleanup'
-      - 'functest-arm-daily'
-      - 'functest-store-results'
-      - 'functest-exit'
-
 - builder:
     name: functest-suite-builder
     builders:
                     - ../../utils/fetch_k8_conf.sh
                     - ./functest-k8.sh
 
-# yamllint enable rule:indentation
-- builder:
-    name: functest-arm-daily
-    builders:
-      # yamllint disable rule:indentation
-      - shell:
-          !include-raw:
-              - ./functest-env-presetup.sh
-              - ../../utils/fetch_os_creds.sh
-              - ./functest-alpine.sh
-
 # yamllint enable rule:indentation
 - builder:
     name: functest-store-results
index 50c7c38..19967e9 100644 (file)
       - master:
           branch: '{stream}'
           disabled: false
+      - hunter:
+          branch: 'stable/{stream}'
+          disabled: false
+      - gambia:
+          branch: 'stable/{stream}'
+          disabled: false
       - fraser:
           branch: 'stable/{stream}'
           disabled: false
     # yamllint disable rule:key-duplicates
     image:
       - 'core'
+      - 'tempest'
       - 'healthcheck'
       - 'features'
       - 'components'
       - 'parser'
       - 'smoke'
+      - 'benchmarking'
       - 'vnf'
 
+    exclude:
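+      # parser is only built for fraser; the tempest and benchmarking images do not exist on fraser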
+      - stream: 'master'
+        image: 'parser'
+      - stream: 'hunter'
+        image: 'parser'
+      - stream: 'gambia'
+        image: 'parser'
+      - stream: 'fraser'
+        image: 'tempest'
+      - stream: 'fraser'
+        image: 'benchmarking'
+
     # settings for jobs run in multijob phases
     build-job-settings: &build-job-settings
       current-parameters: false
           projects:
             - name: 'functest-core-docker-manifest-{stream}'
               <<: *manifest-job-settings
+      - multijob:
+          name: 'build functest-tempest images'
+          execution-type: PARALLEL
+          projects:
+            - name: 'functest-tempest-docker-build-amd64-{stream}'
+              <<: *build-job-settings
+            - name: 'functest-tempest-docker-build-arm64-{stream}'
+              <<: *build-job-settings
+      - multijob:
+          name: 'publish functest-tempest manifests'
+          execution-type: PARALLEL
+          projects:
+            - name: 'functest-tempest-docker-manifest-{stream}'
+              <<: *manifest-job-settings
       - multijob:
           name: 'build all functest images'
           condition: SUCCESSFUL
               <<: *build-job-settings
             - name: 'functest-smoke-docker-build-arm64-{stream}'
               <<: *build-job-settings
+            - name: 'functest-benchmarking-docker-build-amd64-{stream}'
+              <<: *build-job-settings
+            - name: 'functest-benchmarking-docker-build-arm64-{stream}'
+              <<: *build-job-settings
             - name: 'functest-vnf-docker-build-amd64-{stream}'
               <<: *build-job-settings
             - name: 'functest-vnf-docker-build-arm64-{stream}'
               <<: *manifest-job-settings
             - name: 'functest-smoke-docker-manifest-{stream}'
               <<: *manifest-job-settings
+            - name: 'functest-benchmarking-docker-manifest-{stream}'
+              <<: *manifest-job-settings
             - name: 'functest-vnf-docker-manifest-{stream}'
               <<: *manifest-job-settings
 
           name: SLAVE_LABEL
           default: 'opnfv-build-ubuntu'
           description: 'Slave label on Jenkins'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: PROJECT
           default: "{project}"
           name: SLAVE_LABEL
           default: '{slave_label}'
           description: 'Slave label on Jenkins'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: GIT_BASE
           default: https://gerrit.opnfv.org/gerrit/$PROJECT
index 323b325..510670b 100755 (executable)
@@ -1,31 +1,34 @@
 #!/usr/bin/env bash
 set -o errexit
-set -o nounset
 set -o pipefail
 
 # Fetch INSTALLER_IP for APEX deployments
 if [[ ${INSTALLER_TYPE} == 'apex' ]]; then
-    echo "Gathering IP information for Apex installer VM"
-    ssh_options="-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
-    if sudo virsh list | grep undercloud; then
-        echo "Installer VM detected"
-        undercloud_mac=$(sudo virsh domiflist undercloud | grep default | \
+    if [ -n "$RC_FILE_PATH" ]; then
+        echo "RC_FILE_PATH is set: ${RC_FILE_PATH}...skipping detecting UC IP"
+    else
+        echo "Gathering IP information for Apex installer VM"
+        ssh_options="-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
+        if sudo virsh list | grep undercloud; then
+            echo "Installer VM detected"
+            undercloud_mac=$(sudo virsh domiflist undercloud | grep default | \
                       grep -Eo "[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+")
-        export INSTALLER_IP=$(/usr/sbin/arp -e | grep ${undercloud_mac} | awk {'print $1'})
-        export sshkey_vol="-v /root/.ssh/id_rsa:/root/.ssh/id_rsa"
-        sudo scp $ssh_options root@${INSTALLER_IP}:/home/stack/stackrc ${HOME}/stackrc
-        export stackrc_vol="-v ${HOME}/stackrc:/home/opnfv/functest/conf/stackrc"
-
-        if sudo iptables -C FORWARD -o virbr0 -j REJECT --reject-with icmp-port-unreachable 2> ${redirect}; then
-            sudo iptables -D FORWARD -o virbr0 -j REJECT --reject-with icmp-port-unreachable
-        fi
-        if sudo iptables -C FORWARD -i virbr0 -j REJECT --reject-with icmp-port-unreachable 2> ${redirect}; then
-          sudo iptables -D FORWARD -i virbr0 -j REJECT --reject-with icmp-port-unreachable
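+            # Resolve the undercloud IP from its MAC address via the host ARP table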
+            export INSTALLER_IP=$(/usr/sbin/arp -e | grep ${undercloud_mac} | awk {'print $1'})
+            export sshkey_vol="-v /root/.ssh/id_rsa:/root/.ssh/id_rsa"
+            sudo scp $ssh_options root@${INSTALLER_IP}:/home/stack/stackrc ${HOME}/stackrc
+            export stackrc_vol="-v ${HOME}/stackrc:/home/opnfv/functest/conf/stackrc"
+
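+            # Drop libvirt's REJECT rules on virbr0 so the Functest container can reach the undercloud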
+            if sudo iptables -C FORWARD -o virbr0 -j REJECT --reject-with icmp-port-unreachable 2> ${redirect}; then
+                sudo iptables -D FORWARD -o virbr0 -j REJECT --reject-with icmp-port-unreachable
+            fi
+            if sudo iptables -C FORWARD -i virbr0 -j REJECT --reject-with icmp-port-unreachable 2> ${redirect}; then
+                sudo iptables -D FORWARD -i virbr0 -j REJECT --reject-with icmp-port-unreachable
+            fi
+            echo "Installer ip is ${INSTALLER_IP}"
+        else
+            echo "No available installer VM exists and no credentials provided...exiting"
+            exit 1
         fi
-        echo "Installer ip is ${INSTALLER_IP}"
-    else
-        echo "No available installer VM exists and no credentials provided...exiting"
-        exit 1
     fi
 
 elif [[ ${INSTALLER_TYPE} == 'daisy' ]]; then
index 2085251..c328f5d 100644 (file)
       - master:
           branch: '{stream}'
           disabled: false
+      - hunter:
+          branch: 'stable/{stream}'
+          disabled: false
+      - gambia:
+          branch: 'stable/{stream}'
+          disabled: false
       - fraser:
           branch: 'stable/{stream}'
           disabled: false
     arch_tag:
       - 'amd64':
           slave_label: 'opnfv-build-ubuntu'
+      - 'arm64':
+          slave_label: 'opnfv-build-ubuntu-arm'
 
     # yamllint disable rule:key-duplicates
     image:
       - 'core'
       - 'healthcheck'
-      - 'smoke'
       - 'features'
+      - 'smoke'
 
     # settings for jobs run in multijob phases
     build-job-settings: &build-job-settings
@@ -53,6 +61,7 @@
     jobs:
       - "functest-kubernetes-docker-{stream}"
       - "functest-kubernetes-{image}-docker-build-{arch_tag}-{stream}"
+      - "functest-kubernetes-{image}-docker-manifest-{stream}"
 
 ########################
 # job templates
           projects:
             - name: 'functest-kubernetes-core-docker-build-amd64-{stream}'
               <<: *build-job-settings
+            - name: 'functest-kubernetes-core-docker-build-arm64-{stream}'
+              <<: *build-job-settings
       - multijob:
-          name: 'build functest-kubernetes-[healthcheck,features] image'
+          name: 'publish functest-kubernetes-core manifests'
+          execution-type: PARALLEL
+          projects:
+            - name: 'functest-kubernetes-core-docker-manifest-{stream}'
+              <<: *manifest-job-settings
+      - multijob:
+          name: 'build functest-kubernetes-healthcheck images'
           execution-type: PARALLEL
           projects:
             - name: 'functest-kubernetes-healthcheck-docker-build-amd64-{stream}'
               <<: *build-job-settings
-            - name: 'functest-kubernetes-features-docker-build-amd64-{stream}'
+            - name: 'functest-kubernetes-healthcheck-docker-build-arm64-{stream}'
               <<: *build-job-settings
       - multijob:
-          name: 'build functest-kubernetes-smoke image'
+          name: 'publish functest-kubernetes-healthcheck manifests'
+          execution-type: PARALLEL
+          projects:
+            - name: 'functest-kubernetes-healthcheck-docker-manifest-{stream}'
+              <<: *manifest-job-settings
+      - multijob:
+          name: 'build all functest-kubernetes images'
+          condition: SUCCESSFUL
           execution-type: PARALLEL
           projects:
+            - name: 'functest-kubernetes-features-docker-build-amd64-{stream}'
+              <<: *build-job-settings
+            - name: 'functest-kubernetes-features-docker-build-arm64-{stream}'
+              <<: *build-job-settings
             - name: 'functest-kubernetes-smoke-docker-build-amd64-{stream}'
               <<: *build-job-settings
+            - name: 'functest-kubernetes-smoke-docker-build-arm64-{stream}'
+              <<: *build-job-settings
+      - multijob:
+          name: 'publish all manifests'
+          condition: SUCCESSFUL
+          execution-type: PARALLEL
+          projects:
+            - name: 'functest-kubernetes-features-docker-manifest-{stream}'
+              <<: *manifest-job-settings
+            - name: 'functest-kubernetes-smoke-docker-manifest-{stream}'
+              <<: *manifest-job-settings
 
     publishers:
       - 'functest-kubernetes-amd64-recipients'
+      - 'functest-kubernetes-arm64-recipients'
 
 - job-template:
     name: 'functest-kubernetes-{image}-docker-build-{arch_tag}-{stream}'
     builders:
       - shell: |
           #!/bin/bash -ex
-          sudo amd64_dirs=docker/{image} bash ./build.sh
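+          # Build only the directory matching the target arch; the other *_dirs variable is
+          # left empty so build.sh skips it (assumed build.sh behaviour)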
+          case "{arch_tag}" in
+          "arm64")
+              sudo amd64_dirs= arm64_dirs=docker/{image} bash ./build.sh ;;
+          *)
+              sudo amd64_dirs=docker/{image} arm64_dirs= bash ./build.sh ;;
+          esac
+          exit $?
+
+- job-template:
+    name: 'functest-kubernetes-{image}-docker-manifest-{stream}'
+
+    parameters:
+      - project-parameter:
+          project: '{project}'
+          branch: '{branch}'
+      - label:
+          name: SLAVE_LABEL
+          default: 'opnfv-build-ubuntu'
+          description: 'Slave label on Jenkins'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
+      - string:
+          name: PROJECT
+          default: "{project}"
+          description: "Project name used to enable job conditions"
+      - string:
+          name: GIT_BASE
+          default: https://gerrit.opnfv.org/gerrit/$PROJECT
+          description: 'Git URL to use on this Jenkins Slave'
+      - string:
+          name: REPO
+          default: "opnfv"
+          description: "Repository name for functest-kubernetes images"
+
+    disabled: '{obj:disabled}'
+
+    builders:
+      - shell: |
+          #!/bin/bash -ex
+          case "{stream}" in
+          "master")
+              tag="latest" ;;
+          *)
+              tag="{stream}" ;;
+          esac
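+          # manifest-tool pushes a single multi-arch manifest referencing the amd64 and arm64 images built earlier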
+          sudo manifest-tool push from-args \
+              --platforms linux/amd64,linux/arm64 \
+              --template $REPO/functest-kubernetes-{image}:ARCH-$tag \
+              --target $REPO/functest-kubernetes-{image}:$tag
           exit $?
 
 # parameter macro
           name: SLAVE_LABEL
           default: '{slave_label}'
           description: 'Slave label on Jenkins'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: GIT_BASE
           default: https://gerrit.opnfv.org/gerrit/$PROJECT
       - string:
           name: REPO
           default: "opnfv"
-          description: "Repository name for functest images"
+          description: "Repository name for functest-kubernetes images"
+
+# publisher macros
+- publisher:
+    name: 'functest-kubernetes-arm64-recipients'
+    publishers:
+      - email:
+          recipients: >
+            cristina.pauna@enea.com
+            alexandru.avadanii@enea.com
+            delia.popescu@enea.com
 
 - publisher:
     name: 'functest-kubernetes-amd64-recipients'
index 7f8dd8d..46b77ce 100644 (file)
           branch: '{stream}'
           gs-pathname: ''
           disabled: false
+      - hunter: &hunter
+          branch: 'stable/{stream}'
+          gs-pathname: '/{stream}'
+          disabled: false
+      - gambia: &gambia
+          branch: 'stable/{stream}'
+          gs-pathname: '/{stream}'
+          disabled: false
       - fraser: &fraser
           branch: 'stable/{stream}'
           gs-pathname: '/{stream}'
index 9a12305..e9a65b4 100644 (file)
           branch: '{stream}'
           gs-pathname: ''
           disabled: false
+      - hunter:
+          branch: 'stable/{stream}'
+          gs-pathname: ''
+          disabled: false
+      - gambia:
+          branch: 'stable/{stream}'
+          gs-pathname: ''
+          disabled: false
       - fraser: &fraser
           branch: 'stable/{stream}'
           gs-pathname: '/{stream}'
     wrappers:
       - ssh-agent-wrapper
       - build-timeout:
-          timeout: 30
+          timeout: 60
 
     parameters:
       - project-parameter:
diff --git a/jjb/functest/functest-rtd-jobs.yaml b/jjb/functest/functest-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..785bd35
--- /dev/null
@@ -0,0 +1,21 @@
+---
+- project:
+    name: functest-rtd
+    project: functest
+    project-name: functest
+
+    gerrit-skip-vote: true
+    project-pattern: 'functest'
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-functest/47369/'
+    rtd-token: '26f9131bd5c337928ba8b431a289f6850b330504'
+
+    stream:
+      - master:
+          branch: '{stream}'
+          disabled: false
+      - gambia:
+          branch: 'stable/{stream}'
+          disabled: false
+
+    jobs:
+      - '{project-name}-rtd-jobs'
diff --git a/jjb/functest/functest-virtual.yaml b/jjb/functest/functest-virtual.yaml
new file mode 100644 (file)
index 0000000..c383d7c
--- /dev/null
@@ -0,0 +1,353 @@
+---
+- functest-defaultparameters: &functest-defaultparameters
+    name: 'functest-defaultparameters'
+    repo: opnfv
+    tag:
+      - gambia
+      - hunter
+      - latest
+
+- job-template:
+    name: '{repo}-functest-{container}-{tag}-pull'
+    parameters:
+      - functest-virtual-defaults
+    builders:
+      - shell: sudo docker pull {repo}/functest-{container}:{tag}
+
+- project:
+    name: repo-functest-container-tag-pull
+    <<: *functest-defaultparameters
+    container:
+      - healthcheck
+      - smoke
+      - benchmarking
+      - components
+      - vnf
+      - features
+    jobs:
+      - '{repo}-functest-{container}-{tag}-pull'
+
+- functest-pulljobs: &functest-pulljobs
+    name: 'functest-pulljobs'
+    projects:
+      - name: '{repo}-functest-healthcheck-{tag}-pull'
+      - name: '{repo}-functest-smoke-{tag}-pull'
+      - name: '{repo}-functest-benchmarking-{tag}-pull'
+      - name: '{repo}-functest-components-{tag}-pull'
+      - name: '{repo}-functest-vnf-{tag}-pull'
+      - name: '{repo}-functest-features-{tag}-pull'
+
+- functest-buildparameters: &functest-buildparameters
+    name: 'functest-buildparameters'
+    parameters:
+      - functest-virtual-defaults
+      - string:
+          name: branch
+          default: master
+
+- job-template:
+    name: '{repo}-functest-{container}-{test}-{tag}-run'
+    parameters:
+      - functest-virtual-defaults
+    builders:
+      - shell: |
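+          # Run a single test case in the tagged Functest container; host paths under
+          # /home/opnfv/functest provide the results dir, credentials, images and blacklists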
+          sudo docker run --rm \
+            -e EXTERNAL_NETWORK=public1 \
+            -v /home/opnfv/functest/results/$JOB_NAME-$BUILD_ID:\
+          /home/opnfv/functest/results \
+            -v /home/opnfv/functest/openstack.creds:\
+          /home/opnfv/functest/conf/env_file \
+            -v /home/opnfv/functest/images:/home/opnfv/functest/images \
+            -v /home/opnfv/functest/tempest_blacklist.yaml:\
+          /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/\
+          tempest/custom_tests/blacklist.yaml \
+            -v /home/opnfv/functest/rally_blacklist.yaml:\
+          /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/\
+          rally/blacklist.yaml \
+            {repo}/functest-{container}:{tag} run_tests -t {test}
+
+- project:
+    name: repo-functest-healthcheck-test-tag-run
+    <<: *functest-defaultparameters
+    container: healthcheck
+    test:
+      - connection_check
+      - tenantnetwork1
+      - tenantnetwork2
+      - vmready1
+      - vmready2
+      - singlevm1
+      - singlevm2
+      - vping_ssh
+      - vping_userdata
+      - cinder_test
+      - odl
+      - api_check
+      - snaps_health_check
+    jobs:
+      - '{repo}-functest-{container}-{test}-{tag}-run'
+
+- functest-healthcheckjobs: &functest-healthcheckjobs
+    name: 'functest-healthcheckjobs'
+    projects:
+      - name: '{repo}-functest-healthcheck-connection_check-{tag}-run'
+      - name: '{repo}-functest-healthcheck-tenantnetwork1-{tag}-run'
+      - name: '{repo}-functest-healthcheck-tenantnetwork2-{tag}-run'
+      - name: '{repo}-functest-healthcheck-vmready1-{tag}-run'
+      - name: '{repo}-functest-healthcheck-vmready2-{tag}-run'
+      - name: '{repo}-functest-healthcheck-singlevm1-{tag}-run'
+      - name: '{repo}-functest-healthcheck-singlevm2-{tag}-run'
+      - name: '{repo}-functest-healthcheck-vping_ssh-{tag}-run'
+      - name: '{repo}-functest-healthcheck-vping_userdata-{tag}-run'
+      - name: '{repo}-functest-healthcheck-cinder_test-{tag}-run'
+      - name: '{repo}-functest-healthcheck-odl-{tag}-run'
+      - name: '{repo}-functest-healthcheck-api_check-{tag}-run'
+      - name: '{repo}-functest-healthcheck-snaps_health_check-{tag}-run'
+
+- project:
+    name: repo-functest-smoke-test-tag-run
+    <<: *functest-defaultparameters
+    container: smoke
+    test:
+      - tempest_smoke
+      - neutron-tempest-plugin-api
+      - rally_sanity
+      - rally_jobs
+      - refstack_defcore
+      - patrole
+      - snaps_smoke
+      - neutron_trunk
+      - networking-bgpvpn
+      - networking-sfc
+      - barbican
+    jobs:
+      - '{repo}-functest-{container}-{test}-{tag}-run'
+
+- functest-smokejobs: &functest-smokejobs
+    name: 'functest-smokejobs'
+    projects:
+      - name: '{repo}-functest-smoke-tempest_smoke-{tag}-run'
+      - name: '{repo}-functest-smoke-neutron-tempest-plugin-api-{tag}-run'
+      - name: '{repo}-functest-smoke-rally_sanity-{tag}-run'
+      - name: '{repo}-functest-smoke-rally_jobs-{tag}-run'
+      - name: '{repo}-functest-smoke-refstack_defcore-{tag}-run'
+      - name: '{repo}-functest-smoke-patrole-{tag}-run'
+      - name: '{repo}-functest-smoke-snaps_smoke-{tag}-run'
+      - name: '{repo}-functest-smoke-neutron_trunk-{tag}-run'
+      - name: '{repo}-functest-smoke-networking-bgpvpn-{tag}-run'
+      - name: '{repo}-functest-smoke-networking-sfc-{tag}-run'
+      - name: '{repo}-functest-smoke-barbican-{tag}-run'
+
+- project:
+    name: repo-functest-benchmarking-test-tag-run
+    <<: *functest-defaultparameters
+    container: benchmarking
+    test:
+      - vmtp
+      - shaker
+    jobs:
+      - '{repo}-functest-{container}-{test}-{tag}-run'
+
+- functest-benchmarkingjobs: &functest-benchmarkingjobs
+    name: 'functest-benchmarkingjobs'
+    projects:
+      - name: '{repo}-functest-benchmarking-vmtp-{tag}-run'
+      - name: '{repo}-functest-benchmarking-shaker-{tag}-run'
+
+- project:
+    name: repo-functest-components-test-tag-run
+    <<: *functest-defaultparameters
+    container: components
+    test:
+      - tempest_full
+      - tempest_scenario
+    jobs:
+      - '{repo}-functest-{container}-{test}-{tag}-run'
+
+- functest-componentsjobs: &functest-componentsjobs
+    name: 'functest-componentsjobs'
+    projects:
+      - name: '{repo}-functest-components-tempest_full-{tag}-run'
+      - name: '{repo}-functest-components-tempest_scenario-{tag}-run'
+
+- project:
+    name: repo-functest-vnf-test-tag-run
+    <<: *functest-defaultparameters
+    container: vnf
+    test:
+      - cloudify
+      - cloudify_ims
+      - heat_ims
+      - vyos_vrouter
+      - juju_epc
+    jobs:
+      - '{repo}-functest-{container}-{test}-{tag}-run'
+
+- functest-vnfjobs: &functest-vnfjobs
+    name: 'functest-vnfjobs'
+    projects:
+      - name: '{repo}-functest-vnf-cloudify-{tag}-run'
+      - name: '{repo}-functest-vnf-cloudify_ims-{tag}-run'
+      - name: '{repo}-functest-vnf-heat_ims-{tag}-run'
+      - name: '{repo}-functest-vnf-vyos_vrouter-{tag}-run'
+      - name: '{repo}-functest-vnf-juju_epc-{tag}-run'
+
+- project:
+    name: repo-functest-features-test-tag-run
+    <<: *functest-defaultparameters
+    container: features
+    test:
+      - doctor-notification
+      - bgpvpn
+      - functest-odl-sfc
+      - barometercollectd
+      - vgpu
+      - stor4nfv_os
+    jobs:
+      - '{repo}-functest-{container}-{test}-{tag}-run'
+
+- functest-featuresjobs: &functest-featuresjobs
+    name: 'functest-featuresjobs'
+    projects:
+      - name: '{repo}-functest-features-doctor-notification-{tag}-run'
+      - name: '{repo}-functest-features-bgpvpn-{tag}-run'
+      - name: '{repo}-functest-features-functest-odl-sfc-{tag}-run'
+      - name: '{repo}-functest-features-barometercollectd-{tag}-run'
+      - name: '{repo}-functest-features-vgpu-{tag}-run'
+      - name: '{repo}-functest-features-stor4nfv_os-{tag}-run'
+
+- job-template:
+    name: '{repo}-functest-{tag}-daily'
+    <<: *functest-defaultparameters
+    project-type: multijob
+    parameters:
+      - functest-virtual-defaults
+    builders:
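+      # multijob phases: pull the tagged containers, then run each test tier in turn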
+      - multijob:
+          name: pull containers
+          <<: *functest-pulljobs
+      - multijob:
+          name: healthcheck
+          <<: *functest-healthcheckjobs
+      - multijob:
+          name: smoke
+          <<: *functest-smokejobs
+      - multijob:
+          name: benchmarking
+          <<: *functest-benchmarkingjobs
+      - multijob:
+          name: components
+          <<: *functest-componentsjobs
+      - multijob:
+          name: vnf
+          <<: *functest-vnfjobs
+      - multijob:
+          name: features
+          <<: *functest-featuresjobs
+
+- project:
+    name: repo-functest-tag-daily
+    <<: *functest-defaultparameters
+    jobs:
+      - '{repo}-functest-{tag}-daily'
+
+- scm:
+    name: functest-gerrit
+    scm:
+      - git:
+          url: https://gerrit.opnfv.org/gerrit/functest
+          refspec: '+refs/changes/*:refs/changes/*'
+          branches:
+            - ${branch}
+
+- job-template:
+    name: '{repo}-functest-{container}-{tag}-build'
+    <<: *functest-buildparameters
+    scm:
+      - functest-gerrit
+    builders:
+      - shell: |
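+          # Build the image from its docker/ subdirectory; the BRANCH build-arg is skipped
+          # for components (assumption: that Dockerfile pins its own branch)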
+          case "{container}" in
+          "components")
+              args="" ;;
+          *)
+              args="--build-arg BRANCH=${{branch}}" ;;
+          esac
+          cd docker/$(echo {container} |cut -d\- -f 2)
+          sudo docker build \
+            ${{args}} --pull=false --no-cache --force-rm=true \
+            -t {repo}/functest-{container}:{tag} .
+
+- project:
+    name: repo-functest-container-tag-build
+    <<: *functest-defaultparameters
+    container:
+      - core
+      - tempest
+      - healthcheck
+      - smoke
+      - benchmarking
+      - components
+      - vnf
+      - features
+    jobs:
+      - '{repo}-functest-{container}-{tag}-build'
+
+- functest-projectparameters: &functest-projectparameters
+    name: 'functest-projectparameters'
+    current-parameters: true
+
+- job-template:
+    name: '{repo}-functest-{tag}-gate'
+    project-type: multijob
+    <<: *functest-buildparameters
+    builders:
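+      # rebuild functest-core and functest-tempest first (the other images build on top of
+      # them), then the remaining containers, then re-run the test tiers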
+      - multijob:
+          name: build functest-core
+          projects:
+            - name: '{repo}-functest-core-{tag}-build'
+              <<: *functest-projectparameters
+      - multijob:
+          name: build functest-tempest
+          projects:
+            - name: '{repo}-functest-tempest-{tag}-build'
+              <<: *functest-projectparameters
+      - multijob:
+          name: build all remaining containers
+          projects:
+            - name: '{repo}-functest-healthcheck-{tag}-build'
+              <<: *functest-projectparameters
+            - name: '{repo}-functest-smoke-{tag}-build'
+              <<: *functest-projectparameters
+            - name: '{repo}-functest-benchmarking-{tag}-build'
+              <<: *functest-projectparameters
+            - name: '{repo}-functest-components-{tag}-build'
+              <<: *functest-projectparameters
+            - name: '{repo}-functest-vnf-{tag}-build'
+              <<: *functest-projectparameters
+            - name: '{repo}-functest-features-{tag}-build'
+              <<: *functest-projectparameters
+      - multijob:
+          name: healthcheck
+          <<: *functest-healthcheckjobs
+      - multijob:
+          name: smoke
+          <<: *functest-smokejobs
+      - multijob:
+          name: benchmarking
+          <<: *functest-benchmarkingjobs
+      - multijob:
+          name: components
+          <<: *functest-componentsjobs
+      - multijob:
+          name: vnf
+          <<: *functest-vnfjobs
+      - multijob:
+          name: features
+          <<: *functest-featuresjobs
+
+- project:
+    name: repo-functest-tag-gate
+    <<: *functest-defaultparameters
+    jobs:
+      - '{repo}-functest-{tag}-gate'
index 251c395..88416bb 100644 (file)
       - master:
           branch: '{stream}'
           disabled: false
+      - hunter:
+          branch: 'stable/{stream}'
+          disabled: false
+      - gambia:
+          branch: 'stable/{stream}'
+          disabled: false
       - fraser:
           branch: 'stable/{stream}'
           disabled: false
           name: SLAVE_LABEL
           default: 'opnfv-build-ubuntu'
           description: 'Slave label on Jenkins'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: PROJECT
           default: "{project}"
           name: SLAVE_LABEL
           default: '{slave_label}'
           description: 'Slave label on Jenkins'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: GIT_BASE
           default: https://gerrit.opnfv.org/gerrit/$PROJECT
index e109387..239ae9f 100644 (file)
           branch: '{stream}'
           gs-pathname: ''
           disabled: false
+      - hunter: &hunter
+          branch: 'stable/{stream}'
+          gs-pathname: '/{stream}'
+          disabled: false
+      - gambia: &gambia
+          branch: 'stable/{stream}'
+          gs-pathname: '/{stream}'
+          disabled: false
       - fraser: &fraser
           branch: 'stable/{stream}'
           gs-pathname: '/{stream}'
diff --git a/jjb/global-jjb b/jjb/global-jjb
deleted file mode 160000 (submodule)
index 9b24045..0000000
+++ /dev/null
@@ -1 +0,0 @@
-Subproject commit 9b240453b91f3c4b0844ea1c593721a1c421caa2
diff --git a/jjb/global-jjb/jjb b/jjb/global-jjb/jjb
new file mode 120000 (symlink)
index 0000000..23c884f
--- /dev/null
@@ -0,0 +1 @@
+../../global-jjb/jjb
\ No newline at end of file
diff --git a/jjb/global-jjb/shell b/jjb/global-jjb/shell
new file mode 120000 (symlink)
index 0000000..d37c43f
--- /dev/null
@@ -0,0 +1 @@
+../../global-jjb/shell
\ No newline at end of file
index a8b9cff..d2c2711 100644 (file)
@@ -13,7 +13,7 @@
           branch: '{stream}'
           gs-pathname: ''
           disabled: false
-      - fraser:
+      - gambia:
           branch: 'stable/{stream}'
           gs-pathname: '/{stream}'
           disabled: false
index 2e94767..e584107 100644 (file)
 
     node: master
 
+    # Defaults for global-jjb jobs
+    build-timeout: 60
+    build-node: 'opnfv-build'
+    gerrit-server-name: 'gerrit.opnfv.org'
+    jenkins-ssh-credential: 'd42411ac011ad6f3dd2e1fa34eaa5d87f910eb2e'
+    lftools-version: '<1.0.0'
+
     properties:
       - logrotate-default
 
index b282cff..fe24ed5 100644 (file)
       - shell: |
           #!/bin/bash
           # Install python package
-          sudo pip install "flake8==2.6.2"
+          sudo -H pip install "flake8==2.6.2"
 
           echo "Checking python code..."
           for f in $(egrep '\.py$' modified_files)
       - shell: |
           #!/bin/bash
           # sudo Install python packages
-          sudo pip install "yamllint==1.8.2"
+          sudo -H pip install "yamllint==1.8.2"
 
           echo "Checking yaml file..."
           for f in $(egrep '\.ya?ml$' modified_files)
       - email-ext:
           <<: *email_ptl_defaults
           recipients: >
-            gelkinbard@mirantis.com
+            Alexandru.Avadanii@enea.com
 
 - publisher:
     name: 'email-functest-ptl'
       - email-ext:
           <<: *email_ptl_defaults
           recipients: >
-            fatih.degirmenci@ericsson.com
+            tbramwell@linuxfoundation.org
 - publisher:
     name: 'email-releng-anteater-ptl'
     <<: *email_releng_ptl_defaults
     <<: *email_releng_ptl_defaults
 - publisher:
     name: 'email-releng-xci-ptl'
-    <<: *email_releng_ptl_defaults
+    publishers:
+      - email-ext:
+          <<: *email_ptl_defaults
+          recipients: >
+            fatih.degirmenci@ericsson.com
 
 - publisher:
     name: 'email-samplevnf-ptl'
index 9d8bed4..30ecd2a 100644 (file)
       - label:
           name: SLAVE_LABEL
           default: 'apex-baremetal-master'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
+      - string:
+          name: GIT_BASE
+          default: https://gerrit.opnfv.org/gerrit/$PROJECT
+          description: 'Git URL to use on this Jenkins Slave'
+      - string:
+          name: SSH_KEY
+          default: /root/.ssh/id_rsa
+          description: 'SSH key to use for Apex'
+
+- parameter:
+    name: 'apex-baremetal-gambia-defaults'
+    parameters:
+      - label:
+          name: SLAVE_LABEL
+          default: 'apex-baremetal-master'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: GIT_BASE
           default: https://gerrit.opnfv.org/gerrit/$PROJECT
@@ -25,6 +44,8 @@
       - label:
           name: SLAVE_LABEL
           default: 'apex-baremetal-master'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: GIT_BASE
           default: https://gerrit.opnfv.org/gerrit/$PROJECT
@@ -40,6 +61,8 @@
       - label:
           name: SLAVE_LABEL
           default: 'apex-baremetal-master'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: GIT_BASE
           default: https://gerrit.opnfv.org/gerrit/$PROJECT
@@ -55,6 +78,8 @@
       - label:
           name: SLAVE_LABEL
           default: 'apex-baremetal-danube'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: GIT_BASE
           default: https://gerrit.opnfv.org/gerrit/$PROJECT
       - label:
           name: SLAVE_LABEL
           default: 'apex-virtual-master'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
+      - string:
+          name: GIT_BASE
+          default: https://gerrit.opnfv.org/gerrit/$PROJECT
+          description: 'Git URL to use on this Jenkins Slave'
+      - string:
+          name: SSH_KEY
+          default: /root/.ssh/id_rsa
+          description: 'SSH key to use for Apex'
+
+- parameter:
+    name: 'apex-virtual-gambia-defaults'
+    parameters:
+      - label:
+          name: SLAVE_LABEL
+          default: 'apex-virtual-master'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: GIT_BASE
           default: https://gerrit.opnfv.org/gerrit/$PROJECT
       - label:
           name: SLAVE_LABEL
           default: 'apex-virtual-master'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: GIT_BASE
           default: https://gerrit.opnfv.org/gerrit/$PROJECT
       - label:
           name: SLAVE_LABEL
           default: 'apex-virtual-master'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: GIT_BASE
           default: https://gerrit.opnfv.org/gerrit/$PROJECT
       - label:
           name: SLAVE_LABEL
           default: 'apex-virtual-danube'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: GIT_BASE
           default: https://gerrit.opnfv.org/gerrit/$PROJECT
       - label:
           name: SLAVE_LABEL
           default: 'apex-baremetal'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: GIT_BASE
           default: https://gerrit.opnfv.org/gerrit/$PROJECT
       - label:
           name: SLAVE_LABEL
           default: 'compass-baremetal'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: GIT_BASE
           default: https://gerrit.opnfv.org/gerrit/$PROJECT
       - label:
           name: SLAVE_LABEL
           default: 'compass-baremetal-master'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: GIT_BASE
           default: https://gerrit.opnfv.org/gerrit/$PROJECT
       - label:
           name: SLAVE_LABEL
           default: 'compass-baremetal-branch'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: GIT_BASE
           default: https://gerrit.opnfv.org/gerrit/$PROJECT
       - label:
           name: SLAVE_LABEL
           default: 'fuel-baremetal'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: GIT_BASE
           default: https://gerrit.opnfv.org/gerrit/$PROJECT
       - label:
           name: SLAVE_LABEL
           default: 'armband-baremetal'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: GIT_BASE
           default: https://gerrit.opnfv.org/gerrit/$PROJECT
           description: 'Git URL to use on this Jenkins Slave'
+
+- parameter:
+    name: 'auto-baremetal-defaults'
+    parameters:
+      - label:
+          name: SLAVE_LABEL
+          default: 'auto-baremetal'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
-          name: LAB_CONFIG_URL
-          default: ssh://jenkins-enea@gerrit.opnfv.org:29418/securedlab
-          description: 'Base URI to the configuration directory'
+          name: GIT_BASE
+          default: https://gerrit.opnfv.org/gerrit/$PROJECT
+          description: 'Git URL to use on this Jenkins Slave'
 
 - parameter:
     name: 'joid-baremetal-defaults'
       - label:
           name: SLAVE_LABEL
           default: 'joid-baremetal'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: GIT_BASE
           default: https://gerrit.opnfv.org/gerrit/$PROJECT
       - label:
           name: SLAVE_LABEL
           default: 'daisy-baremetal'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: INSTALLER_IP
           default: '10.20.7.3'
       - label:
           name: SLAVE_LABEL
           default: 'apex-virtual'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: GIT_BASE
           default: https://gerrit.opnfv.org/gerrit/$PROJECT
       - label:
           name: SLAVE_LABEL
           default: 'compass-virtual'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
+      - string:
+          name: GIT_BASE
+          default: https://gerrit.opnfv.org/gerrit/$PROJECT
+          description: 'Git URL to use on this Jenkins Slave'
+
+- parameter:
+    name: 'compass-arm-virtual-defaults'
+    parameters:
+      - label:
+          name: SLAVE_LABEL
+          default: 'compass-arm-virtual'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: GIT_BASE
           default: https://gerrit.opnfv.org/gerrit/$PROJECT
       - label:
           name: SLAVE_LABEL
           default: 'compass-virtual-master'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: GIT_BASE
           default: https://gerrit.opnfv.org/gerrit/$PROJECT
       - label:
           name: SLAVE_LABEL
           default: 'compass-virtual-branch'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: GIT_BASE
           default: https://gerrit.opnfv.org/gerrit/$PROJECT
       - label:
           name: SLAVE_LABEL
           default: 'fuel-virtual'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: GIT_BASE
           default: https://gerrit.opnfv.org/gerrit/$PROJECT
       - label:
           name: SLAVE_LABEL
           default: 'armband-virtual'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: GIT_BASE
           default: https://gerrit.opnfv.org/gerrit/$PROJECT
           description: 'Git URL to use on this Jenkins Slave'
-      - string:
-          name: LAB_CONFIG_URL
-          default: ssh://jenkins-enea@gerrit.opnfv.org:29418/securedlab
-          description: 'Base URI to the configuration directory'
 
 - parameter:
     name: 'joid-virtual-defaults'
       - label:
           name: SLAVE_LABEL
           default: 'joid-virtual'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: GIT_BASE
           default: https://gerrit.opnfv.org/gerrit/$PROJECT
       - label:
           name: SLAVE_LABEL
           default: 'daisy-virtual'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: INSTALLER_IP
           default: '10.20.11.2'
           default: https://gerrit.opnfv.org/gerrit/$PROJECT
           description: 'Git URL to use on this Jenkins Slave'
 
+- parameter:
+    name: 'functest-virtual-defaults'
+    parameters:
+      - label:
+          name: SLAVE_LABEL
+          default: 'functest-virtual'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
+
 #####################################################
 # Parameters for build slaves
 #####################################################
       - label:
           name: SLAVE_LABEL
           default: 'opnfv-build-centos'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: GIT_BASE
           default: https://gerrit.opnfv.org/gerrit/$PROJECT
           name: SLAVE_LABEL
           default: 'opnfv-build-ubuntu'
           description: 'Slave label on Jenkins'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: GIT_BASE
           default: https://gerrit.opnfv.org/gerrit/$PROJECT
     parameters:
       - label:
           name: SLAVE_LABEL
-          default: 'pharos-dashboard'
+          default: 'pharos-dashboard2'
           description: 'Slave label on Jenkins'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: GIT_BASE
           default: https://gerrit.opnfv.org/gerrit/$PROJECT
           name: SLAVE_LABEL
           default: 'opnfv-build'
           description: 'Slave label on Jenkins'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
+      - string:
+          name: GIT_BASE
+          default: https://gerrit.opnfv.org/gerrit/$PROJECT
+          description: 'Git URL to use on this Jenkins Slave'
+      - string:
+          name: BUILD_DIRECTORY
+          default: $WORKSPACE/build_output
+          description: "Directory where the build artifact will be located upon the completion of the build."
+
+- parameter:
+    name: 'lf-build2-defaults'
+    parameters:
+      - label:
+          name: SLAVE_LABEL
+          default: 'lf-build2'
+          description: 'Slave label on Jenkins'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: GIT_BASE
           default: https://gerrit.opnfv.org/gerrit/$PROJECT
           name: SLAVE_LABEL
           default: 'ericsson-build3'
           description: 'Slave label on Jenkins'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: GIT_BASE
           default: https://gerrit.opnfv.org/gerrit/$PROJECT
           description: 'Git URL to use on this Jenkins Slave'
 
+- parameter:
+    name: 'ericsson-build4-defaults'
+    parameters:
+      - label:
+          name: SLAVE_LABEL
+          default: 'ericsson-build4'
+          description: 'Slave label on Jenkins'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
+      - string:
+          name: GIT_BASE
+          default: https://gerrit.opnfv.org/gerrit/$PROJECT
+          description: 'Git URL to use on this Jenkins Slave'
+      - string:
+          name: BUILD_DIRECTORY
+          default: $WORKSPACE/build_output
+          description: "Directory where the build artifact will be located upon the completion of the build."
+
 - parameter:
     name: 'huawei-build-defaults'
     parameters:
           name: SLAVE_LABEL
           default: 'opnfv-build-ubuntu-arm'
           description: 'Slave label on Jenkins'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: GIT_BASE
           default: https://gerrit.opnfv.org/gerrit/$PROJECT
       - label:
           name: SLAVE_LABEL
           default: 'huawei-test'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: GIT_BASE
           default: https://gerrit.opnfv.org/gerrit/$PROJECT
       - label:
           name: SLAVE_LABEL
           default: 'huawei-test'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: GIT_BASE
           default: https://gerrit.opnfv.org/gerrit/$PROJECT
       - label:
           name: SLAVE_LABEL
           default: 'huawei-virtual5'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: GIT_BASE
           default: https://gerrit.opnfv.org/gerrit/$PROJECT
       - label:
           name: SLAVE_LABEL
           default: 'ericsson-virtual5'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: GIT_BASE
           default: https://git.opendaylight.org/gerrit/p/$PROJECT.git
       - label:
           name: SLAVE_LABEL
           default: 'ericsson-virtual12'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: GIT_BASE
           default: https://gerrit.opnfv.org/gerrit/$PROJECT
       - label:
           name: SLAVE_LABEL
           default: 'ericsson-virtual13'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: GIT_BASE
           default: https://gerrit.opnfv.org/gerrit/$PROJECT
       - label:
           name: SLAVE_LABEL
           default: 'ericsson-virtual-pod1bl01'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: GIT_BASE
           default: https://gerrit.opnfv.org/gerrit/$PROJECT
       - label:
           name: SLAVE_LABEL
           default: 'odl-netvirt-virtual'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: GIT_BASE
           default: https://gerrit.opnfv.org/gerrit/$PROJECT
       - label:
           name: SLAVE_LABEL
           default: 'odl-netvirt-virtual-intel'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: GIT_BASE
           default: https://gerrit.opnfv.org/gerrit/$PROJECT
           name: SLAVE_LABEL
           default: 'sandbox-baremetal'
           description: 'Slave label on Jenkins'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: GIT_BASE
           default: https://gerrit.opnfv.org/gerrit/$PROJECT
           name: SLAVE_LABEL
           default: 'sandbox-virtual'
           description: 'Slave label on Jenkins'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: GIT_BASE
           default: https://gerrit.opnfv.org/gerrit/$PROJECT
           name: SLAVE_LABEL
           default: 'dummy-pod1'
           description: 'Slave label on Jenkins'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: GIT_BASE
           default: https://gerrit.opnfv.org/gerrit/$PROJECT
diff --git a/jjb/ipv6/ipv6-rtd-jobs.yaml b/jjb/ipv6/ipv6-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..edaa1c3
--- /dev/null
@@ -0,0 +1,20 @@
+---
+- project:
+    name: ipv6-rtd
+    project: ipv6
+    project-name: ipv6
+
+    project-pattern: 'ipv6'
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-ipv6/47370/'
+    rtd-token: 'e7abb4e2c3f3f0dfc1a8feefe39b27f4a4f9b98a'
+
+    stream:
+      - master:
+          branch: '{stream}'
+          disabled: false
+      - gambia:
+          branch: 'stable/{stream}'
+          disabled: false
+
+    jobs:
+      - '{project-name}-rtd-jobs'
index 2719c72..1766d61 100644 (file)
     fraser: &fraser
       stream: fraser
       branch: 'stable/{stream}'
-      disabled: false
+      disabled: true
       gs-pathname: '/{stream}'
     master: &master
       stream: master
       branch: '{stream}'
-      disabled: false
+      disabled: true
       gs-pathname: ''
     # -------------------------------
     # POD, INSTALLER, AND BRANCH MAPPING
@@ -97,7 +97,7 @@
           use-build-blocker: true
           blocking-jobs:
             - 'joid-(os|k8)-.*?-{pod}-daily-.*'
-          block-level: 'NODE'
+          blocking-level: 'NODE'
 
     wrappers:
       - build-name:
           use-build-blocker: true
           blocking-jobs:
             - 'joid-deploy-{pod}-daily-.*'
-          block-level: 'NODE'
+          blocking-level: 'NODE'
 
     wrappers:
       - build-name:
diff --git a/jjb/joid/joid-rtd-jobs.yaml b/jjb/joid/joid-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..14d171c
--- /dev/null
@@ -0,0 +1,12 @@
+---
+- project:
+    name: joid-rtd
+    project: joid
+    project-name: joid
+
+    project-pattern: 'joid'
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-joid/47371/'
+    rtd-token: '44a666161d99c9f53e7f69e56f1704694502015b'
+
+    jobs:
+      - '{project-name}-rtd-jobs'
index 2d1c7c9..d7cdec6 100644 (file)
       - master:
           branch: '{stream}'
           gs-pathname: ''
-          disabled: false
+          disabled: true
       - fraser: &fraser
           branch: 'stable/{stream}'
           gs-pathname: '/{stream}'
-          disabled: false
+          disabled: true
     #####################################
     # patch verification phases
     #####################################
@@ -57,7 +57,7 @@
           blocking-jobs:
             - 'joid-verify-master'
             - 'joid-verify-danube'
-          block-level: 'NODE'
+          blocking-level: 'NODE'
 
     scm:
       - git-scm-gerrit
           blocking-jobs:
             - 'joid-verify-deploy-.*'
             - 'joid-verify-test-.*'
-          block-level: 'NODE'
+          blocking-level: 'NODE'
 
     scm:
       - git-scm-gerrit
diff --git a/jjb/kvmfornfv/kvmfornfv-rtd-jobs.yaml b/jjb/kvmfornfv/kvmfornfv-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..7d0b925
--- /dev/null
@@ -0,0 +1,13 @@
+---
+- project:
+    name: kvmfornfv-rtd
+    project: kvmfornfv
+    project-name: kvmfornfv
+
+    gerrit-skip-vote: true
+    project-pattern: 'kvmfornfv'
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-kvmfornfv/47372/'
+    rtd-token: '32ae6f0ad54181a27fd38d99821a021f5087554a'
+
+    jobs:
+      - '{project-name}-rtd-jobs'
diff --git a/jjb/moon/moon-rtd-jobs.yaml b/jjb/moon/moon-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..9d5f3c3
--- /dev/null
@@ -0,0 +1,13 @@
+---
+- project:
+    name: moon-rtd
+    project: moon
+    project-name: moon
+
+    gerrit-skip-vote: true
+    project-pattern: 'moon'
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-moon/47373/'
+    rtd-token: 'f4dc698744431a8688a58746f84fc5d7bee694b2'
+
+    jobs:
+      - '{project-name}-rtd-jobs'
diff --git a/jjb/multisite/multisite-rtd-jobs.yaml b/jjb/multisite/multisite-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..4e7f9e5
--- /dev/null
@@ -0,0 +1,12 @@
+---
+- project:
+    name: multisite-rtd
+    project: multisite
+    project-name: multisite
+
+    project-pattern: 'multisite'
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-multisite/47374/'
+    rtd-token: '5d5b5c20e10df51eec38593b76d96cd22d4f1a96'
+
+    jobs:
+      - '{project-name}-rtd-jobs'
diff --git a/jjb/netready/netready-rtd-jobs.yaml b/jjb/netready/netready-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..ce46b33
--- /dev/null
@@ -0,0 +1,13 @@
+---
+- project:
+    name: netready-rtd
+    project: netready
+    project-name: netready
+
+    gerrit-skip-vote: true
+    project-pattern: 'netready'
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-netready/47375/'
+    rtd-token: '3defd5720db87d5fd6487702e2778053b9279c28'
+
+    jobs:
+      - '{project-name}-rtd-jobs'
diff --git a/jjb/nfvbench/nfvbench-rtd-jobs.yaml b/jjb/nfvbench/nfvbench-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..5ff9483
--- /dev/null
@@ -0,0 +1,20 @@
+---
+- project:
+    name: nfvbench-rtd
+    project: nfvbench
+    project-name: nfvbench
+
+    project-pattern: 'nfvbench'
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-nfvbench/47376/'
+    rtd-token: '7d77b15615ffe7906f1f20e245c80dc0a0f97e47'
+
+    stream:
+      - master:
+          branch: '{stream}'
+          disabled: false
+      - gambia:
+          branch: 'stable/{stream}'
+          disabled: false
+
+    jobs:
+      - '{project-name}-rtd-jobs'
index 44e85d2..ec6776f 100644 (file)
@@ -14,7 +14,7 @@
           gs-pathname: ''
           docker-tag: 'latest'
           disabled: false
-      - fraser: &fraser
+      - gambia:
           branch: 'stable/{stream}'
           gs-pathname: '/{stream}'
           docker-tag: 'stable'
diff --git a/jjb/octopus/octopus-rtd-jobs.yaml b/jjb/octopus/octopus-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..068ff3b
--- /dev/null
@@ -0,0 +1,13 @@
+---
+- project:
+    name: octopus-rtd
+    project: octopus
+    project-name: octopus
+
+    gerrit-skip-vote: true
+    project-pattern: 'octopus'
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-octopus/47377/'
+    rtd-token: 'b3a1784dc55db91f38ea54cb181a0e4551221349'
+
+    jobs:
+      - '{project-name}-rtd-jobs'
diff --git a/jjb/onosfw/onosfw-rtd-jobs.yaml b/jjb/onosfw/onosfw-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..a7a5e73
--- /dev/null
@@ -0,0 +1,13 @@
+---
+- project:
+    name: onosfw-rtd
+    project: onosfw
+    project-name: onosfw
+
+    gerrit-skip-vote: true
+    project-pattern: 'onosfw'
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-onosfw/47378/'
+    rtd-token: '1ad406bcdf2d627e2e18fbcd6605f3456b05bb3d'
+
+    jobs:
+      - '{project-name}-rtd-jobs'
diff --git a/jjb/openci/create-ane.sh b/jjb/openci/create-ane.sh
deleted file mode 100755 (executable)
index 8a4da8f..0000000
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2018 Ericsson AB and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o errexit
-set -o nounset
-set -o pipefail
-
-# This script creates ArtifactPublishedEvent
-# The JMS Messaging Plugin doesn't handle the newlines well so the eventBody is
-# constructed on a single line. This is something that needs to be fixed properly
-
-cat << EOF > $WORKSPACE/event.properties
-type=$PUBLISH_EVENT_TYPE
-origin=$PUBLISH_EVENT_ORIGIN
-eventBody="{ 'type': '$PUBLISH_EVENT_TYPE', 'id': '$(uuidgen)', 'time': '$(date -u +%Y-%m-%d_%H:%M:%SUTC)', 'origin': '$PUBLISH_EVENT_ORIGIN', 'buildUrl': '$BUILD_URL', 'branch': 'master', 'artifactLocation': '$ARTIFACT_LOCATION', 'confidenceLevel': { $CONFIDENCE_LEVEL } }"
-EOF
-echo "Constructed $PUBLISH_EVENT_TYPE"
-echo "--------------------------------------------"
-cat $WORKSPACE/event.properties
-echo "--------------------------------------------"
diff --git a/jjb/openci/create-ape.sh b/jjb/openci/create-ape.sh
new file mode 100755 (executable)
index 0000000..7c9b46c
--- /dev/null
@@ -0,0 +1,47 @@
+#!/bin/bash
+# SPDX-license-identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2018 Ericsson AB and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+set -o errexit
+set -o nounset
+set -o pipefail
+
+# workaround for https://github.com/pypa/virtualenv/issues/1029
+export PS1=${PS1:-}
+
+# This script creates ArtifactPublishedEvent
+
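+# Install the openci_publish helper from the OpenCI prototypes repo into a throw-away virtualenv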
+git clone https://gitlab.openci.io/openci/prototypes.git
+cd prototypes/federated-cicd
+virtualenv openci_publish
+cd openci_publish
+source bin/activate
+python setup.py install
+
+# generate event body
+cat <<EOF > ./json_body.txt
+{
+    "type": "$PUBLISH_EVENT_TYPE",
+    "id": "$(uuidgen)",
+    "time": "$(date -u +%Y-%m-%d_%H:%M:%SUTC)",
+    "buildUrl": "$BUILD_URL",
+    "branch": "master",
+    "origin": "$PUBLISH_EVENT_ORIGIN",
+    "artifactLocation": "$ARTIFACT_LOCATION",
+    "confidenceLevel": "$CONFIDENCE_LEVEL"
+}
+EOF
+
+echo "Constructed $PUBLISH_EVENT_TYPE"
+echo "--------------------------------------------"
+cat  ./json_body.txt
+echo "--------------------------------------------"
+
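+# publish the event to the OpenCI ActiveMQ broker (openci.prototype destination)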
+python openci_publish -H 129.192.69.55 -U ${ACTIVEMQ_USER} -p ${ACTIVEMQ_PASSWORD} -n openci.prototype -B ./json_body.txt
+
+deactivate
index 410db50..9780119 100755 (executable)
@@ -11,17 +11,37 @@ set -o errexit
 set -o nounset
 set -o pipefail
 
+export PS1=${PS1:-}
+
 # This script creates CompositionDefinedEvent
-# The JMS Messaging Plugin doesn't handle the newlines well so the eventBody is
-# constructed on a single line. This is something that needs to be fixed properly
 
-cat << EOF > $WORKSPACE/event.properties
-type=$PUBLISH_EVENT_TYPE
-origin=$PUBLISH_EVENT_ORIGIN
-scenario=$DEPLOY_SCENARIO
-eventBody="{ 'type': '$PUBLISH_EVENT_TYPE', 'id': '$(uuidgen)', 'time': '$(date -u +%Y-%m-%d_%H:%M:%SUTC)', 'origin': '$PUBLISH_EVENT_ORIGIN', 'buildUrl': '$BUILD_URL', 'branch': 'master', 'compositionName': '$DEPLOY_SCENARIO', 'compositionMetadataUrl': '$SCENARIO_METADATA_LOCATION' }"
+git clone https://gitlab.openci.io/openci/prototypes.git
+cd prototypes/federated-cicd
+virtualenv openci_publish
+cd openci_publish
+source bin/activate
+python setup.py install
+
+# generate event body
+cat <<EOF > ./json_body.txt
+{
+    "type": "$PUBLISH_EVENT_TYPE",
+    "id": "$(uuidgen)",
+    "time": "$(date -u +%Y-%m-%d_%H:%M:%SUTC)",
+    "buildUrl": "$BUILD_URL",
+    "branch": "master",
+    "origin": "$PUBLISH_EVENT_ORIGIN",
+    "scenario": "$DEPLOY_SCENARIO",
+    "compositionName": "$DEPLOY_SCENARIO",
+    "compositionMetadataUrl": "$SCENARIO_METADATA_LOCATION"
+}
 EOF
+
 echo "Constructed $PUBLISH_EVENT_TYPE"
 echo "--------------------------------------------"
-cat $WORKSPACE/event.properties
+cat  ./json_body.txt
 echo "--------------------------------------------"
+
+python openci_publish -H 129.192.69.55 -U ${ACTIVEMQ_USER} -p ${ACTIVEMQ_PASSWORD} -n openci.prototype -B ./json_body.txt
+
+deactivate
index 5e8ee10..2ece019 100755 (executable)
@@ -11,17 +11,38 @@ set -o errexit
 set -o nounset
 set -o pipefail
 
+export PS1=${PS1:-}
+
 # This script creates ConfidenceLevelModifiedEvent
-# The JMS Messaging Plugin doesn't handle the newlines well so the eventBody is
-# constructed on a single line. This is something that needs to be fixed properly
 
-cat << EOF > $WORKSPACE/event.properties
-type=$PUBLISH_EVENT_TYPE
-origin=$PUBLISH_EVENT_ORIGIN
-scenario=$DEPLOY_SCENARIO
-eventBody="{ 'type': '$PUBLISH_EVENT_TYPE', 'id': '$(uuidgen)', 'time': '$(date -u +%Y-%m-%d_%H:%M:%SUTC)', 'origin': '$PUBLISH_EVENT_ORIGIN', 'buildUrl': '$BUILD_URL', 'branch': 'master', 'compositionName': '$DEPLOY_SCENARIO', 'compositionMetadataUrl': '$SCENARIO_METADATA_LOCATION', 'confidenceLevel': { $CONFIDENCE_LEVEL } }"
+git clone https://gitlab.openci.io/openci/prototypes.git
+cd prototypes/federated-cicd
+virtualenv openci_publish
+cd openci_publish
+source bin/activate
+python setup.py install
+
+# generate event body
+cat <<EOF > ./json_body.txt
+{
+    "type": "$PUBLISH_EVENT_TYPE",
+    "id": "$(uuidgen)",
+    "time": "$(date -u +%Y-%m-%d_%H:%M:%SUTC)",
+    "buildUrl": "$BUILD_URL",
+    "branch": "master",
+    "origin": "$PUBLISH_EVENT_ORIGIN",
+    "scenario": "$DEPLOY_SCENARIO",
+    "compositionName": "$DEPLOY_SCENARIO",
+    "compositionMetadataUrl": "$SCENARIO_METADATA_LOCATION",
+    "confidenceLevel": "$CONFIDENCE_LEVEL"
+}
 EOF
+
 echo "Constructed $PUBLISH_EVENT_TYPE"
 echo "--------------------------------------------"
-cat $WORKSPACE/event.properties
+cat ./json_body.txt
 echo "--------------------------------------------"
+
+python openci_publish -H 129.192.69.55 -U ${ACTIVEMQ_USER} -p ${ACTIVEMQ_PASSWORD} -n openci.prototype -B ./json_body.txt
+
+deactivate
index d80fead..bdaca57 100644 (file)
 
     builders:
       - shell:
-          !include-raw-escape: ./create-ane.sh
-      - inject:
-          properties-file: "$WORKSPACE/event.properties"
-
-    publishers:
-      - jms-messaging:
-          provider-name: openci.activemq
-          msg-type: Custom
-          msg-props: |
-            type=$type
-            origin=$origin
-          msg-content:
-            $eventBody
+          !include-raw-escape: ./create-ape.sh
 
 # This job gets triggered by a ConfidenceLevelModifiedEvent published
 # by OPNFV jobs so ODL can promote the autorelease artifact even further.
     triggers:
       - jms-messaging:
           provider-name: openci.activemq
-          selector: CI_TYPE = 'custom'
-          checks:
-            - field: origin
-              expected-value: 'OPNFV'
-            - field: type
-              expected-value: 'ConfidenceLevelModifiedEvent'
-            - field: scenario
-              expected-value: 'os-odl-nofeature'
+          selector: |
+            JMSType = 'ConfidenceLevelModifiedEvent' and JMSOrigin = 'OPNFV' and JMSScenario = 'os-odl-nofeature-ha'
 
     builders:
       - shell: |
index 28c3e69..88589d8 100644 (file)
 
     builders:
       - shell:
-          !include-raw-escape: ./create-ane.sh
-      - inject:
-          properties-file: "$WORKSPACE/event.properties"
-
-    publishers:
-      - jms-messaging:
-          provider-name: openci.activemq
-          msg-type: Custom
-          msg-props: |
-            type=$type
-            origin=$origin
-          msg-content:
-            $eventBody
+          !include-raw-escape: ./create-ape.sh
 
 # This job gets triggered by a ConfidenceLevelModifiedEvent published
 # by OPNFV jobs so ONAP can promote the autorelease artifact even further.
     triggers:
       - jms-messaging:
           provider-name: openci.activemq
-          selector: CI_TYPE = 'custom'
-          checks:
-            - field: origin
-              expected-value: 'OPNFV'
-            - field: type
-              expected-value: 'ConfidenceLevelModifiedEvent'
-            - field: scenario
-              expected-value: 'k8-nosdn-onap'
+          selector: |
+            JMSType = 'ConfidenceLevelModifiedEvent' and JMSOrigin = 'OPNFV' and JMSScenario = 'k8-nosdn-onap-ha'
 
     builders:
       - shell: |
index cb15014..020171b 100644 (file)
           name: SCENARIO_METADATA_LOCATION
           default: https://url/to/scenario/metadata/on/opnfv/artifact/repo/$BUILD_NUMBER
           description: 'The location of the scenario metadata'
-      - 'opnfv-build-defaults'
+      - label:
+          name: SLAVE_LABEL
+          default: 'xci-virtual'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
+
+    wrappers:
+      - credentials-binding:
+          - username-password-separated:
+              credential-id: openci-connect-activemq
+              username: ACTIVEMQ_USER
+              password: ACTIVEMQ_PASSWORD
+      - workspace-cleanup
 
     triggers:
       - jms-messaging:
           provider-name: openci.activemq
-          selector: CI_TYPE = 'custom'
-          checks:
-            - field: origin
-              expected-value: '{origin}'
-            - field: type
-              expected-value: 'ArtifactPublishedEvent'
+          selector: JMSType = 'ArtifactPublishedEvent' and JMSOrigin = '{origin}'
 
     builders:
       - shell: |
           echo "----------------------------------"
       - shell:
           !include-raw-escape: ./create-cde.sh
-      - inject:
-          properties-file: "$WORKSPACE/event.properties"
-
-    publishers:
-      - jms-messaging:
-          provider-name: openci.activemq
-          msg-type: Custom
-          msg-props: |
-            type=$type
-            origin=$origin
-            scenario=$scenario
-          msg-content:
-            $eventBody
 
 - job-template:
     name: 'openci-opnfv-{scenario}-test-daily-{stream}'
           name: CONFIDENCE_LEVEL
           default: "'opnfvdaily': 'SUCCESS'"
           description: 'The confidence level the published artifact gained'
-      - 'opnfv-build-defaults'
+      - label:
+          name: SLAVE_LABEL
+          default: 'xci-virtual'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
+
+    wrappers:
+      - credentials-binding:
+          - username-password-separated:
+              credential-id: openci-connect-activemq
+              username: ACTIVEMQ_USER
+              password: ACTIVEMQ_PASSWORD
+      - workspace-cleanup
 
     triggers:
       - jms-messaging:
           provider-name: openci.activemq
-          selector: CI_TYPE = 'custom'
-          checks:
-            - field: origin
-              expected-value: 'OPNFV'
-            - field: type
-              expected-value: 'CompositionDefinedEvent'
-            - field: scenario
-              expected-value: '{scenario}'
+          selector: JMSType = 'CompositionDefinedEvent' and JMSOrigin = 'OPNFV' and JMSScenario = '{scenario}'
 
     builders:
       - shell: |
           echo "----------------------------------"
       - shell:
           !include-raw-escape: ./create-clme.sh
-      - inject:
-          properties-file: "$WORKSPACE/event.properties"
-
-    publishers:
-      - jms-messaging:
-          provider-name: openci.activemq
-          msg-type: Custom
-          msg-props: |
-            type=$type
-            origin=$origin
-            scenario=$scenario
-          msg-content:
-            $eventBody
diff --git a/jjb/opera/opera-rtd-jobs.yaml b/jjb/opera/opera-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..b923c9f
--- /dev/null
@@ -0,0 +1,13 @@
+---
+- project:
+    name: opera-rtd
+    project: opera
+    project-name: opera
+
+    gerrit-skip-vote: true
+    project-pattern: 'opera'
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-opera/47379/'
+    rtd-token: '4dc678d2d96adf6376b4f8f9aafdbbeb24dbb006'
+
+    jobs:
+      - '{project-name}-rtd-jobs'
index ece8569..a8d29e4 100644 (file)
@@ -8,12 +8,12 @@
     stream:
       - master:
           branch: 'master'
+      - gambia:
+          branch: 'stable/{stream}'
       - fraser:
           branch: 'stable/{stream}'
       - danube:
           branch: 'stable/{stream}'
-      - euphrates:
-          branch: 'stable/{stream}'
 
     project: 'opnfvdocs'
     rtdproject: 'opnfv'
@@ -29,6 +29,8 @@
           name: SLAVE_LABEL
           default: 'lf-build1'
           description: 'Slave label on Jenkins'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - project-parameter:
           project: '{project}'
           branch: '{branch}'
@@ -60,6 +62,8 @@
           name: SLAVE_LABEL
           default: 'lf-build2'
           description: 'Slave label on Jenkins'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - project-parameter:
           project: '{project}'
           branch: '{branch}'
     triggers:
       - gerrit-trigger-patchset-created:
           server: 'gerrit.opnfv.org'
-          project: '**'
+          project: 'opnfvdocs'
           branch: '{branch}'
-          files: 'docs/**/*.*'
+          files: 'docs/**'
 
     builders:
       - shell: |
-          if [ "$GERRIT_PROJECT" != "opnfvdocs" ]; then
-              cd docs/submodules/$GERRIT_PROJECT
-              git fetch origin $GERRIT_REFSPEC && git checkout FETCH_HEAD
-          else
-              git fetch origin $GERRIT_REFSPEC && git checkout FETCH_HEAD
-          fi
-      - shell: |
-          sudo pip install virtualenv
+          sudo -H pip install virtualenv
           virtualenv $WORKSPACE/venv
           . $WORKSPACE/venv/bin/activate
           pip install --upgrade pip
index 5f08f69..a974573 100644 (file)
           branch: '{stream}'
           gs-pathname: ''
           disabled: false
-      - fraser: &fraser
+      - gambia:
           branch: 'stable/{stream}'
           gs-pathname: '/{stream}'
           disabled: false
-      - euphrates:
+      - fraser:
           branch: 'stable/{stream}'
           gs-pathname: '/{stream}'
           disabled: false
diff --git a/jjb/opnfvtsc/opnfvtsc-rtd-jobs.yaml b/jjb/opnfvtsc/opnfvtsc-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..b1923f4
--- /dev/null
@@ -0,0 +1,13 @@
+---
+- project:
+    name: opnfvtsc-rtd
+    project: opnfvtsc
+    project-name: opnfvtsc
+
+    gerrit-skip-vote: true
+    project-pattern: 'opnfvtsc'
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-opnfvtsc/47400/'
+    rtd-token: 'b8cbc26c46f1b1bd98adbf8c4488787a58d68fdd'
+
+    jobs:
+      - '{project-name}-rtd-jobs'
index 7c2deae..94f9d8b 100644 (file)
@@ -14,7 +14,7 @@
       stream: master
       branch: '{stream}'
       gs-pathname: ''
-      disabled: false
+      disabled: true
 
     # ------------------------------------------------------
     # POD, INSTALLER, AND BRANCH MAPPING
diff --git a/jjb/orchestra/orchestra-rtd-jobs.yaml b/jjb/orchestra/orchestra-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..d1279eb
--- /dev/null
@@ -0,0 +1,13 @@
+---
+- project:
+    name: orchestra-rtd
+    project: orchestra
+    project-name: orchestra
+
+    gerrit-skip-vote: true
+    project-pattern: 'orchestra'
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-orchestra/47380/'
+    rtd-token: 'c7f8cb2949a592bf6040c648552b0afa61f7da15'
+
+    jobs:
+      - '{project-name}-rtd-jobs'
diff --git a/jjb/ovn4nfv/golang-make-test.sh b/jjb/ovn4nfv/golang-make-test.sh
new file mode 100644 (file)
index 0000000..7ed4632
--- /dev/null
@@ -0,0 +1,25 @@
+#!/bin/bash
+# SPDX-license-identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2018 Intel Corporation.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+set -o errexit
+set -o pipefail
+
+source /etc/os-release || source /usr/lib/os-release
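+# Install Go from the distro packages (Ubuntu/Debian only; other distros are assumed to already provide Go)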
+case ${ID,,} in
+    ubuntu|debian)
+    sudo add-apt-repository -y ppa:longsleep/golang-backports
+    sudo apt-get update
+    sudo apt-get install -y build-essential golang-go
+    sudo apt-get -y clean && sudo apt-get -y autoremove
+    ;;
+esac
+
+echo "Running unit tests in Go ${golang_version} ..."
+cd $WORKSPACE
+make test
index ca4cfee..034d6d6 100644 (file)
@@ -8,7 +8,7 @@
       stream: master
       branch: '{stream}'
       gs-pathname: ''
-      disabled: false
+      disabled: true
 
     pod:
       - virtual:
diff --git a/jjb/ovn4nfv/ovn4nfv-k8s-plugins-project-jobs.yaml b/jjb/ovn4nfv/ovn4nfv-k8s-plugins-project-jobs.yaml
new file mode 100644 (file)
index 0000000..cd8d7e0
--- /dev/null
@@ -0,0 +1,109 @@
+---
+- project:
+    name: ovn4nfv-k8s-plugin-project-jobs
+
+    project: 'ovn4nfv-k8s-plugin'
+
+    jobs:
+      - 'ovn4nfv-k8s-plugin-verify-{stream}'
+      - 'ovn4nfv-k8s-plugin-merge-{stream}'
+
+    stream:
+      - master:
+          branch: '{stream}'
+          gs-pathname: ''
+          disabled: false
+
+################################
+# job templates
+################################
+
+- job-template:
+    name: 'ovn4nfv-k8s-plugin-verify-{stream}'
+
+    disabled: '{obj:disabled}'
+
+    parameters:
+      - project-parameter:
+          project: '{project}'
+          branch: '{branch}'
+      - opnfv-build-defaults
+
+    scm:
+      - git-scm-gerrit
+
+    triggers:
+      - gerrit:
+          server-name: 'gerrit.opnfv.org'
+          trigger-on:
+            - patchset-created-event:
+                exclude-drafts: 'false'
+                exclude-trivial-rebase: 'false'
+                exclude-no-code-change: 'false'
+            - draft-published-event
+            - comment-added-contains-event:
+                comment-contains-value: 'recheck'
+            - comment-added-contains-event:
+                comment-contains-value: 'reverify'
+          projects:
+            - project-compare-type: 'ANT'
+              project-pattern: '{project}'
+              branches:
+                - branch-compare-type: 'ANT'
+                  branch-pattern: '**/{branch}'
+
+    wrappers:
+      - ssh-agent-wrapper
+      - timeout:
+          timeout: 30
+          fail: true
+
+    builders:
+      - ovn4nfv-k8s-plugin-unit-tests-golang
+
+- job-template:
+    name: 'ovn4nfv-k8s-plugin-merge-{stream}'
+
+    disabled: '{obj:disabled}'
+
+    parameters:
+      - project-parameter:
+          project: '{project}'
+          branch: '{branch}'
+      - opnfv-build-defaults
+
+    scm:
+      - git-scm
+
+    triggers:
+      - gerrit:
+          server-name: 'gerrit.opnfv.org'
+          trigger-on:
+            - change-merged-event
+            - comment-added-contains-event:
+                comment-contains-value: 'remerge'
+          projects:
+            - project-compare-type: 'ANT'
+              project-pattern: '{project}'
+              branches:
+                - branch-compare-type: 'ANT'
+                  branch-pattern: '**/{branch}'
+
+    wrappers:
+      - ssh-agent-wrapper
+      - timeout:
+          timeout: 30
+          fail: true
+
+    builders:
+      - ovn4nfv-k8s-plugin-unit-tests-golang
+
+################################
+# job builders
+################################
+
+- builder:
+    name: ovn4nfv-k8s-plugin-unit-tests-golang
+    builders:
+      - shell: |
+          !include-raw: ./golang-make-test.sh
index 2ce4b6d..8fbd75b 100644 (file)
@@ -9,7 +9,11 @@
           branch: '{stream}'
           gs-pathname: ''
           disabled: false
-      - fraser: &fraser
+      - gambia: &gambia
+          branch: 'stable/{stream}'
+          gs-pathname: '/{stream}'
+          disabled: false
+      - fraser:
           branch: 'stable/{stream}'
           gs-pathname: '/{stream}'
           disabled: false
diff --git a/jjb/ovn4nfv/ovn4nfv-rtd-jobs.yaml b/jjb/ovn4nfv/ovn4nfv-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..984ec4f
--- /dev/null
@@ -0,0 +1,20 @@
+---
+- project:
+    name: ovn4nfv-rtd
+    project: ovn4nfv
+    project-name: ovn4nfv
+
+    project-pattern: 'ovn4nfv'
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-ovn4nfv/47381/'
+    rtd-token: 'f131200fd878a5c443f18c134c3bfda122538bce'
+
+    stream:
+      - master:
+          branch: '{stream}'
+          disabled: false
+      - gambia:
+          branch: 'stable/{stream}'
+          disabled: false
+
+    jobs:
+      - '{project-name}-rtd-jobs'
diff --git a/jjb/ovno/ovno-rtd-jobs.yaml b/jjb/ovno/ovno-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..c5d661d
--- /dev/null
@@ -0,0 +1,13 @@
+---
+- project:
+    name: ovno-rtd
+    project: ovno
+    project-name: ovno
+
+    gerrit-skip-vote: true
+    project-pattern: 'ovno'
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-ovno/47382/'
+    rtd-token: 'd393a62c6ee0b06979d0bb28f0b43e88208ce2c1'
+
+    jobs:
+      - '{project-name}-rtd-jobs'
diff --git a/jjb/ovsnfv/ovsnfv-rtd-jobs.yaml b/jjb/ovsnfv/ovsnfv-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..ecbceca
--- /dev/null
@@ -0,0 +1,13 @@
+---
+- project:
+    name: ovsnfv-rtd
+    project: ovsnfv
+    project-name: ovsnfv
+
+    gerrit-skip-vote: true
+    project-pattern: 'ovsnfv'
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-ovsnfv/47383/'
+    rtd-token: '1faa400aa3da4eca3c6018f8ed0e48a33fa66a00'
+
+    jobs:
+      - '{project-name}-rtd-jobs'
diff --git a/jjb/parser/parser-rtd-jobs.yaml b/jjb/parser/parser-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..df880f2
--- /dev/null
@@ -0,0 +1,12 @@
+---
+- project:
+    name: parser-rtd
+    project: parser
+    project-name: parser
+
+    project-pattern: 'parser'
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-parser/47384/'
+    rtd-token: '10223af1183fc8bdc191932045d82a0c1d7c874a'
+
+    jobs:
+      - '{project-name}-rtd-jobs'
diff --git a/jjb/pharos/pharos-rtd-jobs.yaml b/jjb/pharos/pharos-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..60bcd8a
--- /dev/null
@@ -0,0 +1,12 @@
+---
+- project:
+    name: pharos-rtd
+    project: pharos
+    project-name: pharos
+
+    project-pattern: 'pharos'
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-pharos/47385/'
+    rtd-token: '12cb789478d0c3577fb62c610232b3113d3a16ad'
+
+    jobs:
+      - '{project-name}-rtd-jobs'
diff --git a/jjb/pharos/pharos-tools.yml b/jjb/pharos/pharos-tools.yml
new file mode 100644 (file)
index 0000000..ac5d195
--- /dev/null
@@ -0,0 +1,51 @@
+---
+- project:
+    name: pharos-dashboard-jobs
+
+    project: 'pharos-tools'
+
+    disabled: false
+
+    jobs:
+      - '{project}-verify-basic'
+      - 'pharos-dashboard-backup'
+      - 'pharos-dashboard-deploy'
+
+- job-template:
+    name: 'pharos-dashboard-backup'
+
+    disabled: '{obj:disabled}'
+
+    parameters:
+      - 'pharos-dashboard-defaults'
+
+    triggers:
+      - timed: '@daily'
+
+    builders:
+      - shell:
+          !include-raw: shell/backup-dashboard.sh
+
+- job-template:
+    name: 'pharos-dashboard-deploy'
+
+    disabled: '{obj:disabled}'
+
+    parameters:
+      - project-parameter:
+          project: '{project}'
+          branch: 'master'
+      - 'pharos-dashboard-defaults'
+
+    scm:
+      - git-scm-gerrit
+
+    triggers:
+      - gerrit-trigger-change-merged:
+          branch: 'master'
+          project: '{project}'
+          files: 'dashboard/docker-compose.yml'
+
+    builders:
+      - shell:
+          !include-raw: shell/deploy-dashboard.sh
index c1bb1ba..0b51196 100644 (file)
@@ -4,85 +4,8 @@
 
     project:
       - '{name}'
-      - '{name}-tools'
 
     disabled: false
 
     jobs:
       - '{project}-verify-basic'
-      - 'backup-pharos-dashboard'
-      - 'deploy-pharos-dashboard':
-          disabled: true
-
-- job-template:
-    name: 'backup-pharos-dashboard'
-
-    disabled: '{obj:disabled}'
-
-    parameters:
-      - 'pharos-dashboard-defaults'
-
-    triggers:
-      - timed: '@daily'
-
-    builders:
-      - shell: |
-          BACKUP_DIR=$HOME/backups/
-          TMP_DIR=$HOME/tmp/
-          mkdir -p $BACKUP_DIR
-          echo "-- $(date +%Y%m%d) --"
-          echo "Backing up Pharos Dashboard data..."
-          sudo docker run --rm \
-            -v pharos-data:/pharos-data:ro \
-            -v $TMP_DIR:/backup \
-            alpine \
-            tar -czf /backup/pharos-dashboard-db-$(date +%Y%m%d).tar.gz -C /pharos-data ./
-          sudo mv $TMP_DIR/pharos-dashboard-db-$(date +%Y%m%d).tar.gz $BACKUP_DIR
-          sudo chown $USER:$USER $BACKUP_DIR/pharos-dashboard-db-$(date +%Y%m%d).tar.gz
-          echo "...complete"
-
-- job-template:
-    name: 'deploy-pharos-dashboard'
-
-    disabled: '{obj:disabled}'
-
-    parameters:
-      - project-parameter:
-          project: 'pharos-tools'
-          branch: 'master'
-      - 'pharos-dashboard-defaults'
-
-    scm:
-      - git:
-          choosing-strategy: 'gerrit'
-          refspec: '$GERRIT_REFSPEC'
-          branches:
-            - 'origin/$BRANCH'
-          timeout: 15
-          credentials-id: '$SSH_CREDENTIAL_ID'
-          url: '$GIT_BASE'
-          skip-tag: true
-          wipe-workspace: false
-
-    triggers:
-      - gerrit:
-          server-name: 'gerrit.opnfv.org'
-          trigger-on:
-            - ref-updated-event
-            - comment-added-contains-event:
-                comment-contains-value: '^redeploy$'
-          projects:
-            - project-compare-type: 'ANT'
-              project-pattern: 'pharos-tools'
-              branches:
-                - branch-compare-type: 'ANT'
-                  branch-pattern: '**/master'
-
-    builders:
-      - shell: |
-          cp $HOME/config.env $WORKSPACE/dashboard
-          cp $HOME/rsa.pub $WORKSPACE/dashboard
-          cp $HOME/rsa.pem $WORKSPACE/dashboard
-          cd $WORKSPACE/dashboard
-          sudo docker-compose build
-          sudo docker-compose up -d
diff --git a/jjb/pharos/shell/backup-dashboard.sh b/jjb/pharos/shell/backup-dashboard.sh
new file mode 100644 (file)
index 0000000..4c1c015
--- /dev/null
@@ -0,0 +1,28 @@
+#!/bin/bash -eux
+##############################################################################
+# Copyright (c) 2018 Linux Foundation and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+BACKUP_DIR=$HOME/backups
+DATE=$(date +%Y%m%d)
+TAR_FILE=pharos-dashboard-db-$DATE.tar.gz
+
+mkdir -p $BACKUP_DIR
+echo "-- $DATE --"
+echo "--> Backing up Pharos Dashboard"
+
+docker run --rm \
+  -v pharos-data:/pharos-data:ro \
+  -v $BACKUP_DIR:/backup \
+  alpine \
+  tar -czf /backup/$TAR_FILE -C /pharos-data ./
+
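+# Upload the archive to the opnfv-backups bucket; the local copy is removed only if the upload succeeds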
+/usr/local/bin/gsutil cp $BACKUP_DIR/$TAR_FILE \
+  gs://opnfv-backups/pharos-dashboard/ && rm $BACKUP_DIR/$TAR_FILE
+
+echo "--> Pharos dashboard backup complete"
diff --git a/jjb/pharos/shell/deploy-dashboard.sh b/jjb/pharos/shell/deploy-dashboard.sh
new file mode 100644 (file)
index 0000000..46a79c3
--- /dev/null
@@ -0,0 +1,19 @@
+#!/bin/bash -eux
+##############################################################################
+# Copyright (c) 2018 Linux Foundation and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+cp $HOME/config.env $WORKSPACE/dashboard
+cd $WORKSPACE/dashboard
+
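+# Pull the latest dashboard images and (re)start the stack in detached mode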
+docker-compose pull
+docker-compose up -d
+
+# Copy JIRA keys into web container
+WEB_CONTAINER="$(docker ps --filter 'name=dg01' -q)"
+docker cp $HOME/rsa.pub $WEB_CONTAINER:/pharos_dashboard/account/
+docker cp $HOME/rsa.pem $WEB_CONTAINER:/pharos_dashboard/account/
diff --git a/jjb/promise/promise-rtd-jobs.yaml b/jjb/promise/promise-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..d627326
--- /dev/null
@@ -0,0 +1,20 @@
+---
+- project:
+    name: promise-rtd
+    project: promise
+    project-name: promise
+
+    project-pattern: 'promise'
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-promise/47386/'
+    rtd-token: 'e689330ddc9401d83dd26efbe26a55a6ecd33a0d'
+
+    stream:
+      - master:
+          branch: '{stream}'
+          disabled: false
+      - gambia:
+          branch: 'stable/{stream}'
+          disabled: false
+
+    jobs:
+      - '{project-name}-rtd-jobs'
index 4e79c05..9f72954 100644 (file)
@@ -12,7 +12,7 @@
       - master:
           branch: '{stream}'
           gs-pathname: ''
-          disabled: false
+          disabled: true
 
     pod:
       - zte-virtual5:
diff --git a/jjb/qtip/qtip-rtd-jobs.yaml b/jjb/qtip/qtip-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..4af5997
--- /dev/null
@@ -0,0 +1,13 @@
+---
+- project:
+    name: qtip-rtd
+    project: qtip
+    project-name: qtip
+
+    gerrit-skip-vote: true
+    project-pattern: 'qtip'
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-qtip/47387/'
+    rtd-token: 'eb407b024fbe12c416d012f48db866caf5fbab7c'
+
+    jobs:
+      - '{project-name}-rtd-jobs'
index 0fc623c..f7d66df 100644 (file)
       branch: '{stream}'
       gs-pathname: ''
       docker-tag: latest
+      disabled: false
     fraser: &fraser
       stream: fraser
       branch: 'stable/{stream}'
       gs-pathname: '{stream}'
       docker-tag: 'stable'
+      disabled: false
 
     # -------------------------------
     # JOB VARIABLES
       - compute:
           installer: apex
           pod: zte-virtual5
+          disabled: true
           sut: node
           <<: *master
       - storage:
           installer: apex
           pod: zte-virtual5
+          disabled: true
           sut: ''
           <<: *master
       - network:
           installer: apex
           pod: zte-virtual5
+          disabled: true
           sut: ''
           <<: *master
       - compute:
       - compute:
           installer: apex
           pod: zte-virtual5
+          disabled: true
           sut: node
           <<: *fraser
       - storage:
           installer: apex
           pod: zte-virtual5
+          disabled: true
           sut: ''
           <<: *fraser
       - compute:
@@ -72,7 +79,7 @@
 ################################
 - job-template:
     name: 'qtip-{qpi}-{installer}-{pod}-{stream}'
-    disabled: false
+    disabled: '{obj:disabled}'
 
     parameters:
       - project-parameter:
index 5fbb37c..2eb2a20 100644 (file)
       - euphrates:
           branch: 'stable/{stream}'
           disabled: true
+      - gambia:
+          branch: 'stable/{stream}'
+          disabled: false
 
     arch_tag:
       - 'amd64':
           slave_label: 'opnfv-build-ubuntu'
+          docker_file: 'Dockerfile'
+      - 'arm64':
+          slave_label: 'opnfv-build-ubuntu-arm'
+          docker_file: 'Dockerfile-arm64'
 
     # yamllint disable rule:key-duplicates
     image:
         PUSH_IMAGE=$PUSH_IMAGE
         COMMIT_ID=$COMMIT_ID
         GERRIT_REFNAME=$GERRIT_REFNAME
-        DOCKERFILE=$DOCKERFILE
+      kill-phase-on: FAILURE
+      abort-all-jobs: false
+
+    manifest-job-settings: &manifest-job-settings
+      current-parameters: false
+      git-revision: true
+      node-parameters: false
+      predefined-parameters:
+        GERRIT_REFNAME=$GERRIT_REFNAME
       kill-phase-on: FAILURE
       abort-all-jobs: false
 
@@ -47,6 +62,7 @@
     jobs:
       - "compass-docker-{stream}"
       - "compass-{image}-build-{arch_tag}-{stream}"
+      - "compass-{image}-manifest-{stream}"
 
 ########################
 # job templates
@@ -63,6 +79,7 @@
           project: '{project}'
           branch: '{branch}'
           slave_label: 'opnfv-build-ubuntu'
+          docker_file: 'Dockerfile'
           arch_tag: 'amd64'
 
     properties:
           projects:
             - name: 'compass-tasks-base-build-amd64-{stream}'
               <<: *build-job-settings
+            - name: 'compass-tasks-base-build-arm64-{stream}'
+              <<: *build-job-settings
+      - multijob:
+          name: 'publish compass-tasks-base manifests'
+          execution-type: PARALLEL
+          projects:
+            - name: 'compass-tasks-base-manifest-{stream}'
+              <<: *manifest-job-settings
       - multijob:
           name: 'build all compass images'
           condition: SUCCESSFUL
               <<: *build-job-settings
             - name: 'compass-tasks-osa-build-amd64-{stream}'
               <<: *build-job-settings
+            - name: 'compass-cobbler-build-arm64-{stream}'
+              <<: *build-job-settings
+            - name: 'compass-db-build-arm64-{stream}'
+              <<: *build-job-settings
+            - name: 'compass-deck-build-arm64-{stream}'
+              <<: *build-job-settings
+            - name: 'compass-tasks-build-arm64-{stream}'
+              <<: *build-job-settings
+            - name: 'compass-tasks-k8s-build-arm64-{stream}'
+              <<: *build-job-settings
+            - name: 'compass-tasks-osa-build-arm64-{stream}'
+              <<: *build-job-settings
+      - multijob:
+          name: 'publish all manifests'
+          execution-type: PARALLEL
+          projects:
+            - name: 'compass-cobbler-manifest-{stream}'
+              <<: *manifest-job-settings
+            - name: 'compass-db-manifest-{stream}'
+              <<: *manifest-job-settings
+            - name: 'compass-deck-manifest-{stream}'
+              <<: *manifest-job-settings
+            - name: 'compass-tasks-manifest-{stream}'
+              <<: *manifest-job-settings
+            - name: 'compass-tasks-k8s-manifest-{stream}'
+              <<: *manifest-job-settings
+            - name: 'compass-tasks-osa-manifest-{stream}'
+              <<: *manifest-job-settings
 
     publishers:
       - 'compass-amd64-recipients'
+      - 'compass-arm64-recipients'
 
 - job-template:
     name: 'compass-{image}-build-{arch_tag}-{stream}'
           project: '{project}'
           branch: '{branch}'
           slave_label: '{slave_label}'
+          docker_file: '{docker_file}'
           arch_tag: '{arch_tag}'
       - string:
           name: DOCKER_REPO_NAME
       - shell:
           !include-raw-escape: ./opnfv-docker.sh
 
+- job-template:
+    name: 'compass-{image}-manifest-{stream}'
+    disabled: '{obj:disabled}'
+    parameters:
+      - compass-job-parameters:
+          project: '{project}'
+          branch: '{branch}'
+          slave_label: 'opnfv-build-ubuntu'
+          docker_file: 'Dockerfile'
+          arch_tag: 'amd64'
+    builders:
+      - shell: |
+          #!/bin/bash -ex
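+          # Combine the per-arch images (ARCH-latest / ARCH-stable) into a single multi-arch manifest tag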
+          case "{stream}" in
+          "master")
+              sudo manifest-tool push from-args \
+                  --platforms linux/amd64,linux/arm64 \
+                  --template opnfv/compass-{image}:ARCH-latest \
+                  --target opnfv/compass-{image}:latest ;;
+          *)
+              sudo manifest-tool push from-args \
+                  --platforms linux/amd64,linux/arm64 \
+                  --template opnfv/compass-{image}:ARCH-stable \
+                  --target opnfv/compass-{image}:{stream} ;;
+          esac
+          exit $?
+
 # parameter macro
 - parameter:
     name: compass-job-parameters
           name: SLAVE_LABEL
           default: '{slave_label}'
           description: 'Slave label on Jenkins'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: GIT_BASE
           default: https://gerrit.opnfv.org/gerrit/$PROJECT
           description: "Docker tag to be built, e.g. refs/tags/5.0.0, refs/tags/opnfv-5.0.0, refs/tags/5.0.RC1"
       - string:
           name: DOCKERFILE
-          default: "Dockerfile"
+          default: '{docker_file}'
           description: "Dockerfile to use for creating the image."
       - string:
           name: ARCH_TAG
-          default: ""
+          default: "{arch_tag}"
           description: "If set, this value will be added to the docker image tag as a prefix"
       - string:
           name: PROJECT
     publishers:
       - email:
           recipients: >
-            chigang@huawei.com
-            huangxiangyui5@huawei.com
-            xueyifei@huawei.com
+            huangxiangyu5@huawei.com
             wutianwei1@huawei.com
+
+- publisher:
+    name: 'compass-arm64-recipients'
+    publishers:
+      - email:
+          recipients: >
+            yibo.cai@arm.com
index 9066b41..7e87c5f 100644 (file)
       stream: fraser
       branch: 'stable/{stream}'
       disabled: false
+    gambia: &gambia
+      stream: gambia
+      branch: 'stable/{stream}'
+      disabled: false
     dovetail-arm-receivers: &dovetail-arm-receivers
       receivers: >
           cristina.pauna@enea.com
           dockerdir: 'docker/storperf-master'
           docker_repo_name: 'opnfv/storperf-master'
           arch_tag: 'aarch64'
-          <<: *fraser
+          <<: *gambia
           <<: *storperf-arm-receivers
       - 'storperf-graphite':
           project: 'storperf'
           dockerdir: 'docker/storperf-graphite'
           docker_repo_name: 'opnfv/storperf-graphite'
           arch_tag: 'aarch64'
-          <<: *fraser
+          <<: *gambia
           <<: *storperf-arm-receivers
       - 'storperf-httpfrontend':
           project: 'storperf'
           dockerdir: 'docker/storperf-httpfrontend'
           docker_repo_name: 'opnfv/storperf-httpfrontend'
           arch_tag: 'aarch64'
-          <<: *fraser
+          <<: *gambia
           <<: *storperf-arm-receivers
       - 'storperf-reporting':
           project: 'storperf'
           dockerdir: 'docker/storperf-reporting'
           docker_repo_name: 'opnfv/storperf-reporting'
           arch_tag: 'aarch64'
-          <<: *fraser
+          <<: *gambia
           <<: *storperf-arm-receivers
       - 'storperf-swaggerui':
           project: 'storperf'
           dockerdir: 'docker/storperf-swaggerui'
           docker_repo_name: 'opnfv/storperf-swaggerui'
           arch_tag: 'aarch64'
-          <<: *fraser
+          <<: *gambia
           <<: *storperf-arm-receivers
       - 'yardstick':
           project: 'yardstick'
-          <<: *fraser
+          <<: *gambia
           <<: *yardstick-arm-receivers
 
     # yamllint enable rule:key-duplicates
index 70baf16..e647641 100644 (file)
@@ -20,12 +20,20 @@ echo
 function remove_containers_images()
 {
     # Remove previous running containers if exist
-    if [[ -n "$(docker ps -a | grep $DOCKER_REPO_NAME)" ]]; then
+    #
+    # $ docker ps -a
+    # CONTAINER ID        IMAGE                            COMMAND      ...
+    # 6a796ed40b8e        opnfv/compass-tasks:latest       "/bin/bash"  ...
+    # 99fcb59f4787        opnfv/compass-tasks-base:latest  "/bin/bash"  ...
+    # cc5eee16b995        opnfv/compass-tasks-k8s          "/bin/bash"  ...
+    #
+    # Match the image name bounded by a leading space and a trailing space or colon (tag)
+    if [[ -n "$(docker ps -a | grep " $DOCKER_REPO_NAME[ :]")" ]]; then
         echo "Removing existing $DOCKER_REPO_NAME containers..."
-        docker ps -a | grep $DOCKER_REPO_NAME | awk '{print $1}' | xargs docker rm -f
+        docker ps -a | grep " $DOCKER_REPO_NAME[ :]" | awk '{print $1}' | xargs docker rm -f
         t=60
         # Wait max 60 sec for containers to be removed
-        while [[ $t -gt 0 ]] && [[ -n "$(docker ps| grep $DOCKER_REPO_NAME)" ]]; do
+        while [[ $t -gt 0 ]] && [[ -n "$(docker ps| grep " $DOCKER_REPO_NAME[ :]")" ]]; do
             sleep 1
             let t=t-1
         done
@@ -33,12 +41,20 @@ function remove_containers_images()
 
 
     # Remove existing images if exist
-    if [[ -n "$(docker images | grep $DOCKER_REPO_NAME)" ]]; then
+    #
+    # $ docker images
+    # REPOSITORY                    TAG                 IMAGE ID        ...
+    # opnfv/compass-tasks           latest              6501569fd328    ...
+    # opnfv/compass-tasks-base      latest              8764fe29c434    ...
+    # opnfv/compass-tasks-k8s       latest              61094cac9e65    ...
+    #
+    # Match the image name anchored at the start of the line and followed by a space
+    if [[ -n "$(docker images | grep "^$DOCKER_REPO_NAME ")" ]]; then
         echo "Docker images to remove:"
-        docker images | head -1 && docker images | grep $DOCKER_REPO_NAME
-        image_ids=($(docker images | grep $DOCKER_REPO_NAME | awk '{print $3}'))
+        docker images | head -1 && docker images | grep "^$DOCKER_REPO_NAME "
+        image_ids=($(docker images | grep "^$DOCKER_REPO_NAME " | awk '{print $3}'))
         for id in "${image_ids[@]}"; do
-            if [[ -n "$(docker images|grep $DOCKER_REPO_NAME|grep $id)" ]]; then
+            if [[ -n "$(docker images|grep "^$DOCKER_REPO_NAME "|grep $id)" ]]; then
                 echo "Removing docker image $DOCKER_REPO_NAME:$id..."
                 docker rmi -f $id
             fi
index b7d1ce6..17235b3 100644 (file)
       stream: fraser
       branch: 'stable/{stream}'
       disabled: false
+    gambia: &gambia
+      stream: gambia
+      branch: 'stable/{stream}'
+      disabled: false
+    pharos-tools-receivers: &pharos_tools_receivers
+      receivers: >
+          pberberian@iol.unh.edu
+          sbergeron@iol.unh.edu
     storperf-receivers: &storperf-receivers
       receivers: >
           mark.beierl@emc.com
+    yardstick-receivers: &yardstick-receivers
+      receivers: >
+          limingjiang@huawei.com
+          rexlee8776@gmail.com
+          emma.l.foley@intel.com
     other-receivers: &other-receivers
       receivers: ''
 
           dockerdir: 'docker/barometer-collectd'
           <<: *master
           <<: *other-receivers
+      - 'barometer-dma':
+          project: 'barometer'
+          dockerdir: 'docker/barometer-dma'
+          <<: *master
+          <<: *other-receivers
       - 'barometer-grafana':
           project: 'barometer'
           dockerdir: 'docker/barometer-grafana'
           dockerfile: 'Dockerfile'
           <<: *master
           <<: *other-receivers
+      - 'clover-controller':
+          project: 'clover'
+          dockerdir: 'clover/controller'
+          dockerfile: 'docker/Dockerfile'
+          <<: *master
+          <<: *other-receivers
+      - 'clover-collector':
+          project: 'clover'
+          dockerdir: 'clover/collector'
+          dockerfile: 'docker/Dockerfile'
+          <<: *master
+          <<: *other-receivers
+      - 'clover-jmeter-master':
+          project: 'clover'
+          dockerdir: 'clover/tools/jmeter'
+          dockerfile: 'jmeter-master/Dockerfile'
+          <<: *master
+          <<: *other-receivers
+      - 'clover-jmeter-slave':
+          project: 'clover'
+          dockerdir: 'clover/tools/jmeter'
+          dockerfile: 'jmeter-slave/Dockerfile'
+          <<: *master
+          <<: *other-receivers
+      - 'clover-sample-app':
+          project: 'clover'
+          dockerdir: 'samples/scenarios/sample_app'
+          dockerfile: 'Dockerfile'
+          <<: *master
+          <<: *other-receivers
+      - 'clover-ns-modsecurity-crs':
+          project: 'clover'
+          dockerdir: 'samples/services/modsecurity/docker'
+          dockerfile: 'Dockerfile'
+          <<: *master
+          <<: *other-receivers
+      - 'clover-spark':
+          project: 'clover'
+          dockerdir: 'clover/spark/docker/clover-spark'
+          dockerfile: 'Dockerfile'
+          <<: *master
+          <<: *other-receivers
+      - 'clover-spark-submit':
+          project: 'clover'
+          dockerdir: 'clover/spark/docker/spark-submit'
+          dockerfile: 'Dockerfile'
+          <<: *master
+          <<: *other-receivers
+      - 'clover-clovisor':
+          project: 'clover'
+          dockerdir: 'clover/clovisor'
+          dockerfile: 'Dockerfile'
+          <<: *master
+          <<: *other-receivers
       - 'cperf':
           project: 'cperf'
           <<: *master
           project: 'dovetail'
           <<: *master
           <<: *other-receivers
+      - 'dovetail-webportal-api':
+          project: 'dovetail-webportal'
+          dockerfile: 'Dockerfile.api'
+          <<: *master
+          <<: *other-receivers
+      - 'dovetail-webportal-web':
+          project: 'dovetail-webportal'
+          dockerfile: 'Dockerfile.web'
+          <<: *master
+          <<: *other-receivers
       - 'nfvbench':
           project: 'nfvbench'
           <<: *master
           <<: *other-receivers
+      - 'pharos-tools-laas-dashboard':
+          project: 'pharos-tools'
+          dockerdir: 'dashboard'
+          dockerfile: 'web/Dockerfile'
+          <<: *master
+          <<: *pharos_tools_receivers
+      - 'pharos-tools-laas-celery':
+          project: 'pharos-tools'
+          dockerdir: 'dashboard'
+          dockerfile: 'worker/Dockerfile'
+          <<: *master
+          <<: *pharos_tools_receivers
       - 'qtip':
           project: 'qtip'
           dockerdir: '.'
       - 'yardstick':
           project: 'yardstick'
           <<: *master
-          <<: *other-receivers
+          <<: *yardstick-receivers
+      - 'yardstick-image-k8s':
+          project: 'yardstick'
+          dockerdir: 'docker/k8s'
+          <<: *master
+          <<: *yardstick-receivers
+
       # projects with jobs for euphrates
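+      # projects with jobs for gambia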
+      - 'barometer-dma':
+          project: 'barometer'
+          dockerdir: 'docker/barometer-dma'
+          <<: *gambia
+          <<: *other-receivers
+      - 'barometer-collectd':
+          project: 'barometer'
+          dockerdir: 'docker/barometer-collectd'
+          <<: *gambia
+          <<: *other-receivers
+      - 'barometer-grafana':
+          project: 'barometer'
+          dockerdir: 'docker/barometer-grafana'
+          <<: *gambia
+          <<: *other-receivers
+      - 'barometer-influxdb':
+          project: 'barometer'
+          dockerdir: 'docker/barometer-influxdb'
+          <<: *gambia
+          <<: *other-receivers
+      - 'barometer-kafka':
+          project: 'barometer'
+          dockerdir: 'docker/barometer-kafka'
+          <<: *gambia
+          <<: *other-receivers
+      - 'barometer-ves':
+          project: 'barometer'
+          dockerdir: 'docker/barometer-ves'
+          <<: *gambia
+          <<: *other-receivers
+      - 'barometer-snmp':
+          project: 'barometer'
+          dockerdir: 'docker/barometer-snmp'
+          <<: *gambia
+          <<: *other-receivers
       - 'bottlenecks':
           project: 'bottlenecks'
           <<: *euphrates
           project: 'storperf'
           dockerdir: 'docker/storperf-master'
           arch_tag: 'x86_64'
-          <<: *fraser
+          <<: *gambia
           <<: *storperf-receivers
       - 'storperf-graphite':
           project: 'storperf'
           dockerdir: 'docker/storperf-graphite'
           arch_tag: 'x86_64'
-          <<: *fraser
+          <<: *gambia
           <<: *storperf-receivers
       - 'storperf-httpfrontend':
           project: 'storperf'
           dockerdir: 'docker/storperf-httpfrontend'
           arch_tag: 'x86_64'
-          <<: *fraser
+          <<: *gambia
           <<: *storperf-receivers
       - 'storperf-reporting':
           project: 'storperf'
           dockerdir: 'docker/storperf-reporting'
           arch_tag: 'x86_64'
-          <<: *fraser
+          <<: *gambia
           <<: *storperf-receivers
       - 'storperf-swaggerui':
           project: 'storperf'
           dockerdir: 'docker/storperf-swaggerui'
           arch_tag: 'x86_64'
-          <<: *fraser
+          <<: *gambia
           <<: *storperf-receivers
       - 'yardstick':
           project: 'yardstick'
-          <<: *fraser
-          <<: *other-receivers
-      # projects with jobs for danube
-      - 'dovetail':
-          project: 'dovetail'
-          <<: *danube
-          <<: *other-receivers
+          <<: *gambia
+          <<: *yardstick-receivers
       # projects with jobs for fraser
       - 'bottlenecks':
           project: 'bottlenecks'
           dockerfile: 'Dockerfile'
           <<: *fraser
           <<: *other-receivers
+      - 'dovetail':
+          project: 'dovetail'
+          <<: *fraser
+          <<: *other-receivers
     # yamllint enable rule:key-duplicates
     jobs:
       - "{dockerrepo}-docker-build-push-{stream}"
index 6fa0aef..b7ff3ba 100644 (file)
@@ -29,7 +29,7 @@ for PROJECT in "${PROJECT_LIST[@]}"; do
     git clone "https://gerrit.opnfv.org/gerrit/$PROJECT.git" $CLONE_PATH/$PROJECT
   else
     pushd "$CLONE_PATH/$PROJECT" &>/dev/null
-    git pull -f
+    git pull -f origin master
     popd &> /dev/null
   fi
 
index 19fb4b5..05ffd51 100644 (file)
@@ -28,7 +28,6 @@
             - arm-build3
             - arm-build4
             - arm-build5
-            - arm-build6
             - ericsson-build3
             - ericsson-build4
             - lf-build2
@@ -41,6 +40,7 @@
       # yamllint disable rule:line-length
       - shell: |
           #!/bin/bash
+          sudo systemctl restart docker
           (docker ps -q; docker ps -aq) | sort | uniq -u | xargs --no-run-if-empty docker rm
           docker images -f dangling=true -q | xargs --no-run-if-empty docker rmi
 
@@ -61,7 +61,7 @@
           name: SLAVE_NAME
           description: Where to create the archive
           default-slaves:
-            - master
+            - lf-build2
           allowed-multiselect: false
           ignore-offline-nodes: true
 
@@ -82,6 +82,7 @@
           reply-to: >
             helpdesk@opnfv.org
           recipients: >
+            tbramwell@linuxfoundation.org
             swinslow@linuxfoundation.org
 
 
           default-slaves:
             - lf-build1
             - lf-build2
+            - lf-build3
+            - ericsson-build3
             - ericsson-build4
           allowed-multiselect: true
           ignore-offline-nodes: true
       - timed: '@midnight'
 
     builders:
+      - description-setter:
+          description: '$NODE_NAME'
       - install-ansible
       - run-ansible-build-server-playbook
 
           sudo apt-get -y install ansible
           fi
 
+
 - builder:
     name: run-ansible-build-server-playbook
     builders:
diff --git a/jjb/releng/releng-ci-jobs.yaml b/jjb/releng/releng-ci-jobs.yaml
deleted file mode 100644 (file)
index f9a2bc2..0000000
+++ /dev/null
@@ -1,15 +0,0 @@
----
-- project:
-    name: releng-builder-jobs
-    project: 'releng'
-    project-name: 'releng'
-
-    build-timeout: 60
-    build-node: 'lf-build1'
-    gerrit-server-name: 'gerrit.opnfv.org'
-    jenkins-ssh-credential: 'd42411ac011ad6f3dd2e1fa34eaa5d87f910eb2e'
-    jjb-version: '2.0.3'
-    lftools-version: '<1.0.0'
-
-    jobs:
-      - '{project-name}-ci-jobs'
index 92be0e6..663ff19 100644 (file)
@@ -26,7 +26,20 @@ STREAM=${STREAM:-'nostream'}
 RELEASE_FILES=$(git diff HEAD^1 --name-only -- "releases/$STREAM")
 
 for release_file in $RELEASE_FILES; do
-    python releases/scripts/create_branch.py -f $release_file
+
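+    # For each (repo, branch, ref) entry in the release file, create the branch in Gerrit if it does not already exist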
+    while read -r repo branch ref; do
+
+        echo "$repo" "$branch" "$ref"
+        branches="$(git ls-remote "https://gerrit.opnfv.org/gerrit/$repo.git" "refs/heads/$branch")"
+
+        if ! [ -z "$branches" ]; then
+            echo "refs/heads/$branch already exists at $ref ($branches)"
+        else
+            ssh -n -f -p 29418 gerrit.opnfv.org gerrit create-branch "$repo" "$branch" "$ref"
+        fi
+
+    done < <(python releases/scripts/repos.py -b -f "$release_file")
+
     python releases/scripts/create_jobs.py -f $release_file
     NEW_FILES=$(git status --porcelain --untracked=no | cut -c4-)
     if [ -n "$NEW_FILES" ]; then
similarity index 51%
rename from jjb/releng/releng-release-verify.sh
rename to jjb/releng/releng-release-create-venv.sh
index c1262e2..de585fd 100644 (file)
@@ -7,21 +7,12 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
-set -xe
+set -e -o pipefail
+echo "---> Create virtualenv"
 
-# Activate virtualenv, supressing shellcheck warning
-# shellcheck source=/dev/null
-. $WORKSPACE/venv/bin/activate
+sudo -H pip install virtualenv
+virtualenv $WORKSPACE/venv
+# shellcheck source=$WORKSPACE/venv/bin/activate disable=SC1091
+source $WORKSPACE/venv/bin/activate
+pip install --upgrade pip
 pip install -r releases/scripts/requirements.txt
-
-STREAM=${STREAM:-'nostream'}
-RELEASE_FILES=$(git diff HEAD^1 --name-only -- "releases/$STREAM")
-
-# TODO: The create_branch.py should be refactored so it can be used here
-# to verify the commit exists that is being added, along with
-# jjb/<project>
-for release_file in $RELEASE_FILES; do
-    python releases/scripts/verify_schema.py \
-    -s releases/schema.yaml \
-    -y $release_file
-done
index e31320b..3136d78 100644 (file)
 - project:
     name: releng-release-jobs
 
+    build-node: 'opnfv-build'
+
     stream:
       - fraser
+      - gambia
+      - hunter
 
     jobs:
       - 'releng-release-{stream}-verify'
@@ -23,6 +27,7 @@
     name: 'releng-release-{stream}-verify'
 
     parameters:
+      - '{build-node}-defaults'
       - stream-parameter:
           stream: '{stream}'
       - project-parameter:
@@ -59,9 +64,9 @@
                   pattern: 'releases/scripts/verify_schema.py'
 
     builders:
-      - create-virtualenv
-      - shell:
-          !include-raw-escape: releng-release-verify.sh
+      - shell: !include-raw-escape:
+          - releng-release-create-venv.sh
+          - releng-release-tagging.sh
 
     publishers:
       - email-jenkins-admins-on-failure
       - project-parameter:
           project: '{project}'
           branch: 'master'
+      # Override GIT_URL so we can send patches back to Gerrit and
+      # modify repos
+      - string:
+          name: GIT_URL
+          default: ssh://$USER@gerrit.opnfv.org:29418/
+          description: 'Git URL to use on this Jenkins Slave'
 
     scm:
       - git-scm-gerrit
           files: 'releases/{stream}/*'
 
     builders:
-      - create-virtualenv
-      - shell:
-          !include-raw-escape: releng-release-create-branch.sh
+      - shell: !include-raw-escape:
+          - releng-release-create-venv.sh
+          - releng-release-tagging.sh
+          - releng-release-create-branch.sh
 
     publishers:
       - email-jenkins-admins-on-failure
           name: STREAM
           default: '{stream}'
           description: "OPNFV Stable Stream"
-
-- builder:
-    name: create-virtualenv
-    builders:
-      - shell: |
-          #!/bin/bash
-          sudo pip install virtualenv
-          virtualenv $WORKSPACE/venv
-          . $WORKSPACE/venv/bin/activate
-          pip install --upgrade pip
diff --git a/jjb/releng/releng-release-tagging.sh b/jjb/releng/releng-release-tagging.sh
new file mode 100644 (file)
index 0000000..f8cf9c8
--- /dev/null
@@ -0,0 +1,70 @@
+#!/bin/bash
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2018 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+set -e -o pipefail
+
+GIT_URL=${GIT_URL:-https://gerrit.opnfv.org/gerrit}
+STREAM=${STREAM:-'nostream'}
+RELEASE_FILES=$(git diff HEAD^1 --name-only -- "releases/$STREAM")
+
+echo "--> Verifying $RELEASE_FILES."
+for release_file in $RELEASE_FILES; do
+    # Verify the release file schema
+    python releases/scripts/verify_schema.py \
+    -s releases/schema.yaml \
+    -y $release_file
+
+      # Verify that each repo's tag exists and that its ref is a commit on the stable branch
+    while read -r repo tag ref
+    do
+      echo "--> Cloning $repo"
+      if [ ! -d $repo ]; then
+          git clone $GIT_URL/$repo.git $repo
+      fi
+      pushd $repo &> /dev/null
+
+      echo "--> Checking for tag: $tag"
+      if ! (git tag -l | grep $tag &> /dev/null); then
+          echo "$tag does not exist"
+          TAG_EXISTS=false
+      else
+          git cat-file commit $tag
+          TAG_EXISTS=true
+      fi
+
+      echo "--> Checking if $ref is on stable/$STREAM"
+      if ! (git branch -a --contains $ref | grep "stable/$STREAM"); then
+          echo "--> ERROR: $ref for $repo is not on stable/$STREAM!"
+          # If the tag exists but is on the wrong ref, there's nothing
+          # we can do. But if the tag neither exists nor is on the
+          # correct branch we need to fail the verification.
+          if [ $TAG_EXISTS = false ]; then
+              exit 1
+          fi
+      else
+          if [[ $TAG_EXISTS = false && "$JOB_NAME" =~ "merge" ]]; then
+              # If the tag doesn't exist and we're in a merge job,
+              # everything has been verified up to this point and we
+              # are ready to create the tag.
+              git config --global user.name "jenkins-ci"
+              git config --global user.email "jenkins-opnfv-ci@opnfv.org"
+              echo "--> Creating $tag tag for $repo at $ref"
+              git tag -am "$tag" $tag $ref
+              echo "--> Pushing tag"
+              git push origin $tag
+          else
+              # For non-merge jobs just output the ref info.
+              git show -s --format="%h %s %d" $ref
+          fi
+      fi
+
+      popd &> /dev/null
+      echo "--> Done verifing $repo"
+    done < <(python releases/scripts/repos.py -f $release_file)
+done
diff --git a/jjb/releng/releng-rtd-jobs.yaml b/jjb/releng/releng-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..f7c9603
--- /dev/null
@@ -0,0 +1,16 @@
+---
+- project:
+    name: releng-builder-jobs
+    project: 'releng'
+    project-name: 'releng'
+    jjb-version: '2.5.0'
+
+    build-timeout: 60
+
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-releng/38594/'
+    rtd-token: '291c6a0109493b4457e566d06141212452c65784'
+    project-pattern: 'releng'
+
+    jobs:
+      - '{project-name}-ci-jobs'
+      - '{project-name}-rtd-jobs'
index 01fbe0c..3a9bead 100644 (file)
@@ -28,6 +28,8 @@
           name: SLAVE_LABEL
           default: 'testresults'
           description: 'Slave label on Jenkins'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - project-parameter:
           project: 'releng-testresults'
           branch: 'master'
diff --git a/jjb/samplevnf/samplevnf-rtd-jobs.yaml b/jjb/samplevnf/samplevnf-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..9c09c82
--- /dev/null
@@ -0,0 +1,13 @@
+---
+- project:
+    name: samplevnf-rtd
+    project: samplevnf
+    project-name: samplevnf
+
+    gerrit-skip-vote: true
+    project-pattern: 'samplevnf'
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-samplevnf/47390/'
+    rtd-token: '133bd533bd10428c549c17967469b25e847f42f4'
+
+    jobs:
+      - '{project-name}-rtd-jobs'
diff --git a/jjb/sdnvpn/sdnvpn-rtd-jobs.yaml b/jjb/sdnvpn/sdnvpn-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..a01544f
--- /dev/null
@@ -0,0 +1,20 @@
+---
+- project:
+    name: sdnvpn-rtd
+    project: sdnvpn
+    project-name: sdnvpn
+
+    project-pattern: 'sdnvpn'
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-sdnvpn/47391/'
+    rtd-token: '1efdc48a9819be55a28137937674f1f744d02fe0'
+
+    stream:
+      - master:
+          branch: '{stream}'
+          disabled: false
+      - gambia:
+          branch: 'stable/{stream}'
+          disabled: false
+
+    jobs:
+      - '{project-name}-rtd-jobs'
diff --git a/jjb/securedlab/check-jinja2.yaml b/jjb/securedlab/check-jinja2.yaml
deleted file mode 100644 (file)
index f7a7941..0000000
+++ /dev/null
@@ -1,85 +0,0 @@
----
-########################
-# Job configuration to validate jinja2 files
-########################
-- project:
-
-    name: validate-templates
-
-    project: 'securedlab'
-
-    jobs:
-      - 'validate-jinja2-templates-{stream}'
-
-    stream:
-      - master:
-          branch: '{stream}'
-          disabled: false
-      - fraser:
-          branch: 'stable/{stream}'
-          disabled: false
-
-########################
-# job templates
-########################
-
-- job-template:
-    name: 'validate-jinja2-templates-{stream}'
-
-    disabled: '{obj:disabled}'
-
-    concurrent: true
-
-    parameters:
-      - project-parameter:
-          project: $GERRIT_PROJECT
-          branch: '{branch}'
-      - node:
-          name: SLAVE_NAME
-          description: Slave to execute jnija template test
-          default-slaves:
-            - lf-build1
-          allowed-multiselect: true
-          ignore-offline-nodes: true
-
-    scm:
-      - git-scm-gerrit
-
-    triggers:
-      - gerrit:
-          server-name: 'gerrit.opnfv.org'
-          trigger-on:
-            - patchset-created-event:
-                exclude-drafts: 'false'
-                exclude-trivial-rebase: 'false'
-                exclude-no-code-change: 'false'
-            - draft-published-event
-            - comment-added-contains-event:
-                comment-contains-value: 'recheck'
-            - comment-added-contains-event:
-                comment-contains-value: 'reverify'
-          projects:
-            - project-compare-type: 'REG_EXP'
-              project-pattern: '{project}'
-              branches:
-                - branch-compare-type: 'ANT'
-                  branch-pattern: '**/{branch}'
-              file-paths:
-                - compare-type: ANT
-                  pattern: '**/*.yaml'
-                - compare-type: ANT
-                  pattern: 'check-jinja2.sh'
-          skip-vote:
-            successful: true
-            failed: true
-            unstable: true
-            notbuilt: true
-
-    builders:
-      - check-jinja
-
-- builder:
-    name: check-jinja
-    builders:
-      - shell: |
-          $WORKSPACE/check-jinja2.sh
index fea7924..31f7aa6 100644 (file)
           branch: '{stream}'
           gs-pathname: ''
           disabled: false
-      - fraser: &fraser
+      - gambia: &gambia
           branch: 'stable/{stream}'
           gs-pathname: '/{stream}'
           disabled: false
+      - fraser:
+          branch: 'stable/{stream}'
+          gs-pathname: '/{stream}'
+          disabled: true
 
 - job-template:
     name: 'sfc-verify-{stream}'
                   pattern: 'docs/**|.gitignore'
 
     builders:
-      - sfc-unit-tests
+      - sfc-unit-tests-and-docs
+
+    publishers:
+      - sfc-unit-tests-and-docs-publisher
 
 ################################
 # job builders
 ################################
 
 - builder:
-    name: sfc-unit-tests
+    name: sfc-unit-tests-and-docs
     builders:
       - shell: |
-          cd $WORKSPACE && yamllint $(git ls-tree -r HEAD --name-only  | egrep 'yml$|yaml$' | egrep -v 'scenarios')
+          #!/bin/bash
+          virtualenv /var/tmp/sfc
+          source /var/tmp/sfc/bin/activate
+          pip install tox
+          cd $WORKSPACE && tox
+
+################################
+# job publishers
+################################
+- publisher:
+    name: 'sfc-unit-tests-and-docs-publisher'
+    publishers:
+      - junit:
+          results: nosetests.xml
+      - cobertura:
+          report-file: "coverage.xml"
+          only-stable: "true"
+          fail-no-reports: "true"
+          health-auto-update: "true"
+          stability-auto-update: "true"
+          zoom-coverage-chart: "true"
+          targets:
+            - files:
+                healthy: 0
+                unhealthy: 0
+                failing: 0
+            - method:
+                healthy: 0
+                unhealthy: 0
+                failing: 0
+      - email-jenkins-admins-on-failure
diff --git a/jjb/sfc/sfc-rtd-jobs.yaml b/jjb/sfc/sfc-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..f7d4c9b
--- /dev/null
@@ -0,0 +1,20 @@
+---
+- project:
+    name: sfc-rtd
+    project: sfc
+    project-name: sfc
+
+    project-pattern: 'sfc'
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-sfc/47392/'
+    rtd-token: 'bc4419f4dded5c816071b042ac32c03ac6108700'
+
+    stream:
+      - master:
+          branch: '{stream}'
+          disabled: false
+      - gambia:
+          branch: 'stable/{stream}'
+          disabled: false
+
+    jobs:
+      - '{project-name}-rtd-jobs'
diff --git a/jjb/snaps/snaps-rtd-jobs.yaml b/jjb/snaps/snaps-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..2159c1a
--- /dev/null
@@ -0,0 +1,12 @@
+---
+- project:
+    name: snaps-rtd
+    project: snaps
+    project-name: snaps
+
+    project-pattern: 'snaps'
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-snaps/47393/'
+    rtd-token: '8fa2d732997534df1e91a87d6dc3ee60bb56508b'
+
+    jobs:
+      - '{project-name}-rtd-jobs'
diff --git a/jjb/stor4nfv/stor4nfv-rtd-jobs.yaml b/jjb/stor4nfv/stor4nfv-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..0e03e7b
--- /dev/null
@@ -0,0 +1,20 @@
+---
+- project:
+    name: stor4nfv-rtd
+    project: stor4nfv
+    project-name: stor4nfv
+
+    project-pattern: 'stor4nfv'
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-stor4nfv/47394/'
+    rtd-token: '9c189b44cf08de75dc06253558cc86ed93982cbb'
+
+    stream:
+      - master:
+          branch: '{stream}'
+          disabled: false
+      - gambia:
+          branch: 'stable/{stream}'
+          disabled: false
+
+    jobs:
+      - '{project-name}-rtd-jobs'
index fe6aebb..65e1d32 100644 (file)
@@ -15,8 +15,8 @@
       branch: '{stream}'
       gs-pathname: ''
       docker-tag: 'latest'
-    fraser: &fraser
-      stream: fraser
+    gambia: &gambia
+      stream: gambia
       branch: 'stable/{stream}'
       gs-pathname: '/{stream}'
       disabled: false
@@ -69,7 +69,7 @@
       - baremetal:
           slave-label: apex-baremetal-master
           installer: apex
-          <<: *fraser
+          <<: *gambia
     ## armband CI PODs
     # - armband-baremetal:
     #     slave-label: armband-baremetal
diff --git a/jjb/storperf/storperf-rtd-jobs.yaml b/jjb/storperf/storperf-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..e8ee7b9
--- /dev/null
@@ -0,0 +1,12 @@
+---
+- project:
+    name: storperf-rtd
+    project: storperf
+    project-name: storperf
+
+    project-pattern: 'storperf'
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-storperf/47395/'
+    rtd-token: '8a5c04ef90e5f32edc4e805a5018763cd25e9afc'
+
+    jobs:
+      - '{project-name}-rtd-jobs'
index 4d6b174..2acedd7 100644 (file)
@@ -13,7 +13,7 @@
           gs-pathname: ''
           disabled: false
           docker-tag: 'latest'
-      - fraser: &fraser
+      - gambia: &gambia
           branch: 'stable/{stream}'
           gs-pathname: '/{stream}'
           disabled: false
index fe57847..993094c 100644 (file)
@@ -13,7 +13,7 @@
           gs-pathname: ''
           disabled: false
           docker-tag: 'latest'
-      - fraser: &fraser
+      - gambia: &gambia
           branch: 'stable/{stream}'
           gs-pathname: '/{stream}'
           disabled: false
diff --git a/jjb/ves/ves-rtd-jobs.yaml b/jjb/ves/ves-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..67b611b
--- /dev/null
@@ -0,0 +1,13 @@
+---
+- project:
+    name: ves-rtd
+    project: ves
+    project-name: ves
+
+    gerrit-skip-vote: true
+    project-pattern: 'ves'
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-ves/47396/'
+    rtd-token: 'ea5026fc44841e7721529b95a9ebc1b29950e2ce'
+
+    jobs:
+      - '{project-name}-rtd-jobs'
diff --git a/jjb/vnf_forwarding_graph/vnf-forwarding-graph-rtd-jobs.yaml b/jjb/vnf_forwarding_graph/vnf-forwarding-graph-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..69895d1
--- /dev/null
@@ -0,0 +1,13 @@
+---
+- project:
+    name: vnf-forwarding-graph-rtd
+    project: vnf-forwarding-graph
+    project-name: vnf-forwarding-graph
+
+    gerrit-skip-vote: true
+    project-pattern: 'vnf-forwarding-graph'
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-vnf-forwarding-graph/47397/'
+    rtd-token: '5c6fd3d0f49fb88ea4230d6666d94b59fef86e7f'
+
+    jobs:
+      - '{project-name}-rtd-jobs'
diff --git a/jjb/vswitchperf/vswitchperf-rtd-jobs.yaml b/jjb/vswitchperf/vswitchperf-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..9981240
--- /dev/null
@@ -0,0 +1,21 @@
+---
+- project:
+    name: vswitchperf-rtd
+    project: vswitchperf
+    project-name: vswitchperf
+
+    gerrit-skip-vote: true
+    project-pattern: 'vswitchperf'
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-vswitchperf/47398/'
+    rtd-token: '47bbe5675e6cc5a6207fcc9b4db8dac03c27d9de'
+
+    stream:
+      - master:
+          branch: '{stream}'
+          disabled: false
+      - gambia:
+          branch: 'stable/{stream}'
+          disabled: false
+
+    jobs:
+      - '{project-name}-rtd-jobs'
index ba0742f..9e987fe 100644 (file)
           branch: '{stream}'
           gs-pathname: ''
           disabled: false
-      - fraser: &fraser
+      - gambia: &gambia
+          branch: 'stable/{stream}'
+          gs-pathname: '/{stream}'
+          disabled: false
+      - fraser:
           branch: 'stable/{stream}'
           gs-pathname: '/{stream}'
           disabled: false
@@ -34,7 +38,7 @@
             - 'vswitchperf-verify-.*'
             - 'vswitchperf-merge-.*'
             - 'vswitchperf-daily-.*'
-          block-level: 'NODE'
+          blocking-level: 'NODE'
 
     parameters:
       - project-parameter:
@@ -75,7 +79,7 @@
             - 'vswitchperf-verify-.*'
             - 'vswitchperf-merge-.*'
             - 'vswitchperf-daily-.*'
-          block-level: 'NODE'
+          blocking-level: 'NODE'
 
     parameters:
       - project-parameter:
             - 'vswitchperf-verify-.*'
             - 'vswitchperf-merge-.*'
             - 'vswitchperf-daily-.*'
-          block-level: 'NODE'
+          blocking-level: 'NODE'
 
     parameters:
       - project-parameter:
index fbe2e20..4c61be1 100644 (file)
@@ -82,7 +82,7 @@
             - '^xci-functest.*'
             - '^bifrost-.*periodic.*'
             - '^osa-.*periodic.*'
-          block-level: 'NODE'
+          blocking-level: 'NODE'
       - logrotate-default
 
     parameters:
       - label:
           name: SLAVE_LABEL
           default: '{slave-label}'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: CI_LOOP
           default: 'periodic'
index 4ca7835..f895cf6 100644 (file)
@@ -80,7 +80,7 @@
             - xci-verify-virtual
           option: category
 
-          block-level: 'NODE'
+          blocking-level: 'NODE'
 
     parameters:
       - string:
       - label:
           name: SLAVE_LABEL
           default: 'xci-virtual'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: CI_LOOP
           default: 'verify'
index 6c2d165..b731bf5 100644 (file)
@@ -58,7 +58,7 @@
             - 'bifrost-periodic-{distro}-.*'
             - 'xci-osa-verify-{distro}-.*'
             - 'xci-osa-periodic-{distro}-.*'
-          block-level: 'NODE'
+          blocking-level: 'NODE'
       - throttle:
           max-per-node: 2
           max-total: 10
@@ -82,6 +82,8 @@
       - label:
           name: SLAVE_LABEL
           default: 'xci-virtual'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: OPENSTACK_OSA_VERSION
           default: 'master'
           blocking-jobs:
             - '.*-bifrost-verify-.*'
             - '.*-bifrost-periodic-.*'
-          block-level: 'NODE'
+          blocking-level: 'NODE'
 
     parameters:
       - project-parameter:
index c1132ab..66cbb83 100644 (file)
@@ -1,15 +1,9 @@
 ---
-# -------------------------------
-# These jobs run on a daily basis and deploy OpenStack
-# using the pinned versions of opnfv/releng, openstack/bifrost
-# and openstack/openstack-ansible. Due to this, there is no
-# version/branch is set/passed to jobs and instead the versions
-# are checked out based on what is configured.
-# -------------------------------
 - project:
     project: 'releng-xci'
 
     name: 'xci-daily'
+
     # -------------------------------
     # Branch Anchors
     # -------------------------------
       stream: master
       opnfv-releng-version: master
       gs-pathname: ''
+
     # -------------------------------
     # Scenarios
     # -------------------------------
     scenario:
-      - 'os-nosdn-nofeature-ha':
-          auto-trigger-name: 'daily-trigger-disabled'
-          xci-flavor: 'ha'
       - 'os-nosdn-nofeature-noha':
-          auto-trigger-name: 'daily-trigger-disabled'
-          xci-flavor: 'noha'
-      - 'os-odl-sfc-ha':
-          auto-trigger-name: 'daily-trigger-disabled'
-          xci-flavor: 'ha'
+          deploy-scenario: 'os-nosdn-nofeature'
+          installer-type: 'osa'
+          xci-flavor: 'mini'
+      - 'os-nosdn-osm-noha':
+          deploy-scenario: 'os-nosdn-osm'
+          installer-type: 'osa'
+          xci-flavor: 'mini'
+      - 'os-odl-nofeature-noha':
+          deploy-scenario: 'os-odl-nofeature'
+          installer-type: 'osa'
+          xci-flavor: 'mini'
+      - 'os-odl-bgpvpn-noha':
+          deploy-scenario: 'os-odl-bgpvpn'
+          installer-type: 'osa'
+          xci-flavor: 'mini'
       - 'os-odl-sfc-noha':
-          auto-trigger-name: 'daily-trigger-disabled'
-          xci-flavor: 'noha'
+          deploy-scenario: 'os-odl-sfc'
+          installer-type: 'osa'
+          xci-flavor: 'mini'
+      - 'k8-nosdn-nofeature-noha':
+          deploy-scenario: 'k8-nosdn-nofeature'
+          installer-type: 'kubespray'
+          xci-flavor: 'mini'
+      - 'k8-canal-nofeature-noha':
+          deploy-scenario: 'k8-canal-nofeature'
+          installer-type: 'kubespray'
+          xci-flavor: 'mini'
+      - 'k8-calico-nofeature-noha':
+          deploy-scenario: 'k8-calico-nofeature'
+          installer-type: 'kubespray'
+          xci-flavor: 'mini'
+      - 'k8-contiv-nofeature-noha':
+          deploy-scenario: 'k8-contiv-nofeature'
+          installer-type: 'kubespray'
+          xci-flavor: 'mini'
+      - 'k8-flannel-nofeature-noha':
+          deploy-scenario: 'k8-flannel-nofeature'
+          installer-type: 'kubespray'
+          xci-flavor: 'mini'
+      - 'k8-nosdn-istio-noha':
+          deploy-scenario: 'k8-nosdn-istio'
+          installer-type: 'kubespray'
+          xci-flavor: 'mini'
+
     # -------------------------------
     # XCI PODs
     # -------------------------------
     pod:
       - virtual:
           <<: *master
+
     # -------------------------------
     # Supported Distros
     # -------------------------------
     distro:
-      - 'xenial':
+      - 'ubuntu':
           disabled: false
-          slave-label: xci-xenial-virtual
-          dib-os-release: 'xenial'
-          dib-os-element: 'ubuntu-minimal'
-          # yamllint disable rule:line-length
-          dib-os-packages: 'vlan,vim,less,bridge-utils,sudo,language-pack-en,iputils-ping,rsyslog,curl,python,debootstrap,ifenslave,ifenslave-2.6,lsof,lvm2,tcpdump,nfs-kernel-server,chrony,iptabls'
-          # yamllint enable rule:line-length
-          extra-dib-elements: 'openssh-server'
-      - 'centos7':
-          disabled: true
-          slave-label: xci-centos7-virtual
-          dib-os-release: '7'
-          dib-os-element: 'centos7'
-          dib-os-packages: 'vim,less,bridge-utils,iputils,rsyslog,curl'
-          extra-dib-elements: 'openssh-server'
-      - 'suse':
+          slave-label: xci-virtual
+      - 'centos':
           disabled: true
-          slave-label: xci-suse-virtual
-          dib-os-release: '42.3'
-          dib-os-element: 'opensuse-minimal'
-          dib-os-packages: 'vim,less,bridge-utils,iputils,rsyslog,curl'
-          extra-dib-elements: 'openssh-server'
+          slave-label: xci-virtual
+      - 'opensuse':
+          disabled: false
+          slave-label: xci-virtual
 
     # -------------------------------
     #        Phases
@@ -73,6 +88,8 @@
     phase:
       - 'deploy'
       - 'functest'
+      - 'yardstick'
+
     # -------------------------------
     # jobs
     # -------------------------------
       - build-blocker:
           use-build-blocker: true
           blocking-jobs:
+            - '^xci-verify.*'
+            - '^xci-merge.*'
             - '^xci-os.*'
+            - '^xci-k8.*'
             - '^xci-deploy.*'
             - '^xci-functest.*'
             - '^bifrost-.*periodic.*'
             - '^osa-.*periodic.*'
-          block-level: 'NODE'
+          blocking-level: 'NODE'
       - logrotate-default
 
+    triggers:
+      - timed: '@midnight'
+
     parameters:
       - string:
           name: DEPLOY_SCENARIO
-          default: '{scenario}'
+          default: '{deploy-scenario}'
+      - string:
+          name: INSTALLER_TYPE
+          default: '{installer-type}'
       - string:
           name: XCI_FLAVOR
           default: '{xci-flavor}'
       - label:
           name: SLAVE_LABEL
           default: '{slave-label}'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
+      - string:
+          name: XCI_DISTRO
+          default: '{distro}'
+      - string:
+          name: FUNCTEST_VERSION
+          default: 'hunter'
+      - string:
+          name: FUNCTEST_MODE
+          default: 'tier'
+      - string:
+          name: FUNCTEST_SUITE_NAME
+          default: 'smoke'
       - string:
           name: CI_LOOP
           default: 'daily'
-
-    triggers:
-      - '{auto-trigger-name}'
+      - string:
+          name: GIT_BASE
+          default: https://gerrit.opnfv.org/gerrit/$PROJECT
+          description: 'Git URL to use on this Jenkins Slave'
 
     wrappers:
+      - ssh-agent-wrapper
+      - build-timeout:
+          timeout: 240
       - fix-workspace-permissions
 
     builders:
       - description-setter:
-          description: "Built on $NODE_NAME"
+          description: "Scenario: $DEPLOY_SCENARIO | Node: $NODE_NAME"
       - trigger-builds:
           - project: 'xci-deploy-{pod}-{distro}-daily-{stream}'
             current-parameters: false
             predefined-parameters: |
               DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+              INSTALLER_TYPE=$INSTALLER_TYPE
               XCI_FLAVOR=$XCI_FLAVOR
               CI_LOOP=$CI_LOOP
+              XCI_DISTRO=$XCI_DISTRO
+              FUNCTEST_VERSION=$FUNCTEST_VERSION
+              FUNCTEST_MODE=$FUNCTEST_MODE
+              FUNCTEST_SUITE_NAME=$FUNCTEST_SUITE_NAME
             same-node: true
             block: true
       - trigger-builds:
             current-parameters: false
             predefined-parameters: |
               DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+              INSTALLER_TYPE=$INSTALLER_TYPE
+              XCI_FLAVOR=$XCI_FLAVOR
+              CI_LOOP=$CI_LOOP
+              XCI_DISTRO=$XCI_DISTRO
+              FUNCTEST_VERSION=$FUNCTEST_VERSION
+              FUNCTEST_MODE=$FUNCTEST_MODE
+              FUNCTEST_SUITE_NAME=$FUNCTEST_SUITE_NAME
+            same-node: true
+            block: true
+            block-thresholds:
+              build-step-failure-threshold: 'never'
+              failure-threshold: 'never'
+              unstable-threshold: 'FAILURE'
+      - trigger-builds:
+          - project: 'xci-yardstick-{pod}-{distro}-daily-{stream}'
+            current-parameters: false
+            predefined-parameters: |
+              DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+              INSTALLER_TYPE=$INSTALLER_TYPE
               XCI_FLAVOR=$XCI_FLAVOR
               CI_LOOP=$CI_LOOP
+              XCI_DISTRO=$XCI_DISTRO
             same-node: true
             block: true
             block-thresholds:
 
     publishers:
       # yamllint disable rule:line-length
-      - email:
-          recipients: fatih.degirmenci@ericsson.com yroblamo@redhat.com mchandras@suse.de jack.morgan@intel.com julienjut@gmail.com
       # yamllint enable rule:line-length
       - email-jenkins-admins-on-failure
-      - postbuildscript:
-          builders:
-            - role: BOTH
-              build-on:
-                - ABORTED
-                - FAILURE
-                - NOT_BUILT
-                - SUCCESS
-                - UNSTABLE
-              build-steps:
-                - shell: !include-raw: ./xci-cleanup.sh
-          mark-unstable-if-failed: true
 
 - job-template:
     name: 'xci-{phase}-{pod}-{distro}-daily-{stream}'
 
-    disabled: '{obj:disabled}'
+    disabled: false
 
-    concurrent: false
+    concurrent: true
 
     properties:
       - build-blocker:
           use-build-blocker: true
           blocking-jobs:
+            - '^xci-verify.*'
+            - '^xci-merge.*'
             - '^xci-deploy.*'
             - '^xci-functest.*'
             - '^bifrost-.*periodic.*'
             - '^osa-.*periodic.*'
-          block-level: 'NODE'
+          blocking-level: 'NODE'
+      - throttle:
+          enabled: true
+          max-per-node: 1
+          option: 'project'
       - logrotate-default
 
     wrappers:
+      - ssh-agent-wrapper
+      - build-timeout:
+          timeout: 240
       - fix-workspace-permissions
 
     scm:
       - project-parameter:
           project: '{project}'
           branch: '{opnfv-releng-version}'
-      - string:
-          name: GIT_BASE
-          default: https://gerrit.opnfv.org/gerrit/$PROJECT
       - string:
           name: DEPLOY_SCENARIO
-          default: 'os-nosdn-nofeature-ha'
-      - string:
-          name: XCI_FLAVOR
-          default: 'ha'
-      - string:
-          name: DISTRO
-          default: '{distro}'
+          default: 'os-nosdn-nofeature-noha'
       - string:
-          name: DIB_OS_RELEASE
-          default: '{dib-os-release}'
-      - string:
-          name: DIB_OS_ELEMENT
-          default: '{dib-os-element}'
-      - string:
-          name: DIB_OS_PACKAGES
-          default: '{dib-os-packages}'
+          name: INSTALLER_TYPE
+          default: 'osa'
       - string:
-          name: EXTRA_DIB_ELEMENTS
-          default: '{extra-dib-elements}'
+          name: XCI_FLAVOR
+          default: 'mini'
       - string:
-          name: CLEAN_DIB_IMAGES
-          default: 'true'
+          name: XCI_DISTRO
+          default: 'ubuntu'
       - label:
           name: SLAVE_LABEL
           default: '{slave-label}'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
-          name: CI_LOOP
-          default: 'daily'
-      - string:
-          name: INSTALLER_TYPE
-          default: 'osa'
+          name: FUNCTEST_VERSION
+          default: 'hunter'
       - string:
           name: FUNCTEST_MODE
-          default: 'daily'
+          default: 'tier'
       - string:
           name: FUNCTEST_SUITE_NAME
+          default: 'smoke'
+      - string:
+          name: CI_LOOP
           default: 'daily'
-          description: "Daily suite name to run"
+      - string:
+          name: GIT_BASE
+          default: https://gerrit.opnfv.org/gerrit/$PROJECT
 
     builders:
       - description-setter:
-          description: "Built on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
-      - 'xci-{phase}-builder'
+          description: "Scenario: $DEPLOY_SCENARIO | Node: $NODE_NAME"
+      - 'xci-daily-{phase}-macro'
 
 # --------------------------
 # builder macros
 # --------------------------
+# These need to be properly fixed once the basic deployment, functest, and
+# yardstick jobs work outside of the clean VM.
+# One of the ugly fixes is moving the test preparation step into the
+# deployment job itself, since test preparation needs some of the
+# artifacts produced by the deployment job. With the clean VM this was
+# not an issue because everything lived on the same VM; once things move
+# out of the clean VM, the jobs run in separate workspaces.
+#
+# Apart from that, the scripts used for the verify jobs will need to be
+# updated so that they can also be used by jobs that don't use the
+# clean VM.
 - builder:
-    name: xci-deploy-builder
+    name: 'xci-daily-deploy-macro'
     builders:
-      - shell:
-          !include-raw: ./xci-deploy.sh
+      - shell: |
+          #!/bin/bash
+          set -o errexit
+          set -o pipefail
+
+          echo "Cleaning the leftovers from the earlier run"
+          echo "---------------------------------------------------------------------------------"
+          for vm in $(sudo virsh list --all --name | grep -v xci_vm); do
+              echo "Removing $vm"
+              sudo virsh destroy $vm > /dev/null 2>&1 || true
+              sudo virsh undefine $vm > /dev/null 2>&1 || true
+              sudo killall -r vbmc > /dev/null 2>&1 || true
+              sudo rm -rf /root/.vbmc > /dev/null 2>&1 || true
+          done
+          echo "---------------------------------------------------------------------------------"
+
+          cd $WORKSPACE/xci && ./xci-deploy.sh
+
+          echo "Prepare OPNFV VM for Tests"
+          echo "---------------------------------------------------------------------------------"
+          export XCI_PATH=$WORKSPACE
+          export XCI_VENV=${XCI_PATH}/venv
+          source $XCI_VENV/bin/activate
+          while read var; do
+              declare -x "\${var}" 2>/dev/null
+              echo $var
+          done < ${XCI_PATH}/.cache/xci.env && cd ${XCI_PATH}/xci && \
+          ansible-playbook -i playbooks/dynamic_inventory.py playbooks/prepare-tests.yml
+          ssh root@192.168.122.2 "/root/prepare-tests.sh"
+          echo "---------------------------------------------------------------------------------"
 
 - builder:
-    name: xci-functest-builder
+    name: 'xci-daily-functest-macro'
     builders:
       - shell: |
           #!/bin/bash
+          set -o pipefail
+
+          ssh root@192.168.122.2 "/root/run-functest.sh"
+          functest_exit=$?
+
+          case ${DEPLOY_SCENARIO[0]} in
+              os-*)
+                  FUNCTEST_LOG=/root/functest-results/functest.log
+                  ;;
+              k8-*)
+                  FUNCTEST_LOG=/root/functest-results/functest-kubernetes.log
+                  ;;
+              *)
+                  echo "Unable to determine the installer. Exiting!"
+                  exit $functest_exit
+                  ;;
+          esac
 
-          echo "Hello World!"
+          echo "Functest log"
+          echo "---------------------------------------------------------------------------------"
+          ssh root@192.168.122.2 "cat $FUNCTEST_LOG"
+          echo "---------------------------------------------------------------------------------"
+          exit ${functest_exit}
+- builder:
+    name: 'xci-daily-yardstick-macro'
+    builders:
+      - shell: |
+          #!/bin/bash
+          set -o errexit
+          set -o pipefail
 
-# this will be enabled once the xci is prepared
-# - builder:
-#    name: xci-functest-builder
-#    builders:
-#        - shell:
-#            !include-raw:
-#                - ../../utils/fetch_os_creds.sh
-#                - ../functest/functest-alpine.sh
+          ssh root@192.168.122.2 "/root/run-yardstick.sh"
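For reference, the 'xci-{phase}-{pod}-{distro}-daily-{stream}' template above is expanded by JJB into one job per phase/pod/distro combination. A rough Python sketch of the resulting job names, using only the values defined in this file (the master stream, the virtual pod, and the ubuntu/centos/opensuse distros):

    # Illustrative expansion of the JJB template names used above.
    phases = ['deploy', 'functest', 'yardstick']
    pods = ['virtual']
    distros = ['ubuntu', 'centos', 'opensuse']  # the centos entry is disabled above
    for phase in phases:
        for pod in pods:
            for distro in distros:
                print('xci-{}-{}-{}-daily-master'.format(phase, pod, distro))
    # e.g. xci-deploy-virtual-ubuntu-daily-master,
    #      xci-yardstick-virtual-opensuse-daily-master, ...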
index cb438ad..c468be6 100644 (file)
       - label:
           name: SLAVE_LABEL
           default: 'xci-virtual'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: CI_LOOP
           default: 'merge'
           blocking-jobs:
             - 'xci-verify-{distro}-.*'
             - 'xci-merge-{distro}-.*'
+            - 'xci-os.*'
+            - 'xci-k8.*'
             - 'openstack-bifrost-verify-{distro}-.*'
             - 'xci-osa-verify-{distro}-.*'
             - 'xci-osa-periodic-{distro}-.*'
-          block-level: 'NODE'
+          blocking-level: 'NODE'
       - throttle:
           max-per-node: 1
           max-total: 3
       - label:
           name: SLAVE_LABEL
           default: 'xci-virtual'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: CI_LOOP
           default: 'merge'
+      - string:
+          name: FUNCTEST_VERSION
+          default: 'hunter'
       - string:
           name: FUNCTEST_MODE
           default: 'tier'
                 GERRIT_REFSPEC=$GERRIT_REFSPEC
                 GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
                 GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+                FUNCTEST_VERSION=$FUNCTEST_VERSION
+                FUNCTEST_MODE=$FUNCTEST_MODE
+                FUNCTEST_SUITE_NAME=$FUNCTEST_SUITE_NAME
               node-parameters: true
               kill-phase-on: '{kill-phase-on}'
               abort-all-job: '{abort-all-job}'
                 GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
                 GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
                 CI_LOOP=$CI_LOOP
+                FUNCTEST_VERSION=$FUNCTEST_VERSION
                 FUNCTEST_MODE=$FUNCTEST_MODE
                 FUNCTEST_SUITE_NAME=$FUNCTEST_SUITE_NAME
               node-parameters: true
             - '.*-bifrost-periodic-.*'
             - 'osa-verify-.*'
             - 'osa-periodic-.*'
-          block-level: 'NODE'
+          blocking-level: 'NODE'
 
     parameters:
       - string:
       - string:
           name: CI_LOOP
           default: 'merge'
+      - string:
+          name: FUNCTEST_VERSION
+          default: 'hunter'
       - string:
           name: FUNCTEST_MODE
           default: 'tier'
diff --git a/jjb/xci/xci-rtd-jobs.yaml b/jjb/xci/xci-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..d21aad5
--- /dev/null
@@ -0,0 +1,13 @@
+---
+- project:
+    name: releng-xci-rtd
+    project: releng-xci
+    project-name: releng-xci
+
+    gerrit-skip-vote: true
+    project-pattern: 'releng-xci'
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-releng-xci/47389/'
+    rtd-token: '7dbc774ea9a625296d2e973aff3e79af26b15d4d'
+
+    jobs:
+      - '{project-name}-rtd-jobs'
index f46abe0..992194c 100755 (executable)
@@ -47,18 +47,22 @@ fi
 export XCI_PATH=/home/devuser/releng-xci
 export XCI_VENV=${XCI_PATH}/venv
 
-ssh -F $HOME/.ssh/${DISTRO}-xci-vm-config ${DISTRO}_xci_vm "source $XCI_VENV/bin/activate; while read var; do declare -x \"\${var}\" 2>/dev/null; done < ${XCI_PATH}/.cache/xci.env && cd releng-xci/xci && ansible-playbook -i playbooks/dynamic_inventory.py playbooks/prepare-functest.yml"
-echo "Running functest"
+ssh -F $HOME/.ssh/${DISTRO}-xci-vm-config ${DISTRO}_xci_vm "source $XCI_VENV/bin/activate; \
+    while read var; do declare -x \"\${var}\" 2>/dev/null; done < ${XCI_PATH}/.cache/xci.env && \
+    cd releng-xci/xci && ansible-playbook -i playbooks/dynamic_inventory.py playbooks/prepare-tests.yml"
+echo "Prepare OPNFV VM for Tests"
+ssh -F $HOME/.ssh/${DISTRO}-xci-vm-config ${DISTRO}_xci_vm_opnfv "/root/prepare-tests.sh"
+echo "Running Functest"
 ssh -F $HOME/.ssh/${DISTRO}-xci-vm-config ${DISTRO}_xci_vm_opnfv "/root/run-functest.sh"
 # Record exit code
 functest_exit=$?
 
 case ${DEPLOY_SCENARIO[0]} in
     os-*)
-        FUNCTEST_LOG=/root/results/functest.log
+        FUNCTEST_LOG=/root/functest-results/functest.log
         ;;
     k8-*)
-        FUNCTEST_LOG=/root/results/functest-kubernetes.log
+        FUNCTEST_LOG=/root/functest-results/functest-kubernetes.log
         ;;
     *)
         echo "Unable to determine the installer. Exiting!"
index 7bc45f1..59c1ebf 100755 (executable)
@@ -8,6 +8,7 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 set -o pipefail
+set -x
 
 #----------------------------------------------------------------------
 # This script is used by CI and executed by Jenkins jobs.
@@ -166,9 +167,10 @@ WORK_DIRECTORY=/tmp/$GERRIT_CHANGE_NUMBER/$DISTRO
 
 if [[ $GERRIT_PROJECT == "releng-xci" ]]; then
     determine_default_scenario
+else
+    determine_scenario
 fi
 override_scenario
-determine_scenario
 
 # ensure single scenario is impacted
     if [[ $(IFS=$'\n' echo ${DEPLOY_SCENARIO[@]} | wc -w) != 1 ]]; then
index 2230cd5..9edf555 100644 (file)
@@ -20,7 +20,7 @@
           unstable: false
           notbuilt: false
       - centos:
-          disabled: false
+          disabled: true
           successful: true
           failed: true
           unstable: true
           blocking-jobs:
             - 'xci-verify-{distro}-.*'
             - 'xci-merge-{distro}-.*'
+            - 'xci-os-.*'
+            - 'xci-k8-.*'
             - 'openstack-bifrost-verify-{distro}-.*'
             - 'xci-osa-verify-{distro}-.*'
             - 'xci-osa-periodic-{distro}-.*'
-          block-level: 'NODE'
+          blocking-level: 'NODE'
       - throttle:
           max-per-node: 2
           max-total: 10
       - label:
           name: SLAVE_LABEL
           default: 'xci-virtual'
+          all-nodes: false
+          node-eligibility: 'ignore-offline'
       - string:
           name: DISTRO
           default: '{distro}'
       - string:
           name: CI_LOOP
           default: 'verify'
+      - string:
+          name: FUNCTEST_VERSION
+          default: 'hunter'
       - string:
           name: FUNCTEST_MODE
           default: 'tier'
           description: 'Git URL to use on this Jenkins Slave'
 
     builders:
+      - 'xci-verify-clean-vm-macro'
       - 'xci-verify-set-scenario-macro'
       - inject:
           properties-file: "/tmp/$GERRIT_CHANGE_NUMBER/$DISTRO/scenario.properties"
                 GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
                 GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
                 CI_LOOP=$CI_LOOP
+                FUNCTEST_VERSION=$FUNCTEST_VERSION
+                FUNCTEST_MODE=$FUNCTEST_MODE
+                FUNCTEST_SUITE_NAME=$FUNCTEST_SUITE_NAME
               node-parameters: true
               kill-phase-on: FAILURE
               abort-all-job: true
                 GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
                 GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
                 CI_LOOP=$CI_LOOP
+                FUNCTEST_VERSION=$FUNCTEST_VERSION
                 FUNCTEST_MODE=$FUNCTEST_MODE
                 FUNCTEST_SUITE_NAME=$FUNCTEST_SUITE_NAME
               node-parameters: true
             - role: BOTH
               build-on:
                 - ABORTED
-                - FAILURE
                 - NOT_BUILT
                 - SUCCESS
                 - UNSTABLE
             - '.*-bifrost-periodic-.*'
             - 'osa-verify-.*'
             - 'osa-periodic-.*'
-          block-level: 'NODE'
+          blocking-level: 'NODE'
 
     parameters:
       - string:
       - string:
           name: CI_LOOP
           default: 'verify'
+      - string:
+          name: FUNCTEST_VERSION
+          default: 'hunter'
       - string:
           name: FUNCTEST_MODE
           default: 'tier'
 # -------------------------------
 # builder macros
 # -------------------------------
+- builder:
+    name: 'xci-verify-clean-vm-macro'
+    builders:
+      - shell:
+          !include-raw: ./xci-cleanup.sh
+
 - builder:
     name: 'xci-verify-set-scenario-macro'
     builders:
index 4df3f52..1c622ac 100644 (file)
       branch: '{stream}'
       gs-pathname: ''
       docker-tag: 'latest'
+    gambia: &gambia
+      stream: gambia
+      branch: 'stable/{stream}'
+      gs-pathname: '{stream}'
+      docker-tag: 'stable'
+    # Temporary fraser entry, until all fraser-tied PODs below migrate to gambia
     fraser: &fraser
       stream: fraser
       branch: 'stable/{stream}'
       gs-pathname: '{stream}'
-      docker-tag: 'stable'
+      # In the meantime, the 'stable' docker image will carry gambia contents, and
+      # there is no implemented way to build a docker image with a 'fraser' tag.
+      # Available docker image tag options, as of 2018-10-03:
+      # - 'opnfv-6.2.1', built on 2018-07-02, is rather old.
+      # - 'ovp-2.0.0', built on 2018-09-21, is a good fit despite its name: it
+      #   was built using SHA1 6c10a2d9cc3fe10e0bdd73a0985ab767b9479afc, which
+      #   is the same as used for the most recent 'stable' docker image carrying
+      #   fraser contents, built on 2018-09-13 and used so far by the fraser-
+      #   tied PODs below.
+      # The alternative would be to create a new 'opnfv-6.2.2' git tag, but that
+      # would just build another image using the same SHA1 as for 'ovp-2.0.0'.
+      docker-tag: 'ovp-2.0.0'
     # -------------------------------
     # POD, INSTALLER, AND BRANCH MAPPING
     # -------------------------------
           installer: apex
           auto-trigger-name: 'daily-trigger-disabled'
           <<: *master
+      - virtual:
+          slave-label: apex-virtual-master
+          installer: apex
+          auto-trigger-name: 'daily-trigger-disabled'
+          <<: *gambia
+      - baremetal:
+          slave-label: apex-baremetal-master
+          installer: apex
+          auto-trigger-name: 'daily-trigger-disabled'
+          <<: *gambia
       - virtual:
           slave-label: apex-virtual-fraser
           installer: apex
           slave-label: fuel-baremetal
           installer: fuel
           auto-trigger-name: 'daily-trigger-disabled'
-          <<: *fraser
+          <<: *gambia
       - virtual:
           slave-label: fuel-virtual
           installer: fuel
           auto-trigger-name: 'daily-trigger-disabled'
-          <<: *fraser
+          <<: *gambia
       # armband CI PODs
       - armband-baremetal:
           slave-label: armband-baremetal
           slave-label: armband-baremetal
           installer: fuel
           auto-trigger-name: 'daily-trigger-disabled'
-          <<: *fraser
+          <<: *gambia
       - armband-virtual:
           slave-label: armband-virtual
           installer: fuel
           auto-trigger-name: 'daily-trigger-disabled'
-          <<: *fraser
+          <<: *gambia
       # joid CI PODs
       - baremetal:
           slave-label: joid-baremetal
           installer: compass
           auto-trigger-name: 'daily-trigger-disabled'
           <<: *fraser
+      - baremetal:
+          slave-label: compass-baremetal
+          installer: compass
+          auto-trigger-name: 'daily-trigger-disabled'
+          <<: *gambia
+      - virtual:
+          slave-label: compass-virtual
+          installer: compass
+          auto-trigger-name: 'daily-trigger-disabled'
+          <<: *gambia
       # daisy CI PODs
       - baremetal:
           slave-label: daisy-baremetal
           slave-label: '{pod}'
           installer: fuel
           auto-trigger-name: 'daily-trigger-disabled'
-          <<: *fraser
+          <<: *gambia
       - zte-pod2:
           slave-label: '{pod}'
           installer: daisy
 
     publishers:
       - email:
-          recipients: jean.gaoliang@huawei.com limingjiang@huawei.com ross.b.brattain@intel.com
+          recipients: jean.gaoliang@huawei.com limingjiang@huawei.com
       - email-jenkins-admins-on-failure
 
 ########################
index 783c64e..58d590c 100755 (executable)
@@ -41,10 +41,6 @@ elif [[ ${INSTALLER_TYPE} == 'fuel' ]]; then
     cacert_file_vol="-v ${HOME}/os_cacert:/etc/ssl/certs/mcp_os_cacert"
     sshkey="-v ${SSH_KEY}:/root/.ssh/mcp.rsa"
 fi
-# Set iptables rule to allow forwarding return traffic for container
-if ! sudo iptables -C FORWARD -j RETURN 2> ${redirect} || ! sudo iptables -L FORWARD | awk 'NR==3' | grep RETURN 2> ${redirect}; then
-    sudo iptables -I FORWARD -j RETURN
-fi
 
 opts="--privileged=true --rm"
 envs="-e INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP} \
index 56e825e..ace2e02 100644 (file)
           branch: '{stream}'
           gs-pathname: ''
           disabled: false
-      - fraser: &fraser
+      - gambia: &gambia
+          branch: 'stable/{stream}'
+          gs-pathname: '/{stream}'
+          disabled: false
+      - fraser:
           branch: 'stable/{stream}'
           gs-pathname: '/{stream}'
           disabled: false
@@ -35,7 +39,7 @@
       - project-parameter:
           project: '{project}'
           branch: '{branch}'
-      - 'opnfv-build-ubuntu-defaults'
+      - 'ericsson-build4-defaults'
 
     scm:
       - git-scm-gerrit
@@ -83,7 +87,7 @@
       - project-parameter:
           project: '{project}'
           branch: '{branch}'
-      - 'opnfv-build-ubuntu-defaults'
+      - 'ericsson-build4-defaults'
       - string:
           name: GS_URL
           default: '$GS_BASE{gs-pathname}'
diff --git a/jjb/yardstick/yardstick-rtd-jobs.yaml b/jjb/yardstick/yardstick-rtd-jobs.yaml
new file mode 100644 (file)
index 0000000..a18a655
--- /dev/null
@@ -0,0 +1,20 @@
+---
+- project:
+    name: yardstick-rtd
+    project: yardstick
+    project-name: yardstick
+
+    project-pattern: 'yardstick'
+    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-yardstick/47399/'
+    rtd-token: '6aa883824f3917c7db5ffa1fe9168817fb5feb68'
+
+    stream:
+      - master:
+          branch: '{stream}'
+          disabled: false
+      - gambia:
+          branch: 'stable/{stream}'
+          disabled: false
+
+    jobs:
+      - '{project-name}-rtd-jobs'
index 52d9b56..1e54321 100644 (file)
@@ -23,15 +23,17 @@ print(handler.get_deployment_info())
 
 
 print("########## FUEL ##########")
+# NOTE: If you get traces containing <paramiko.ecdsakey.ECDSAKey object [...]>
+# make sure 10.20.0.2 is not already in ~/.ssh/known_hosts with another sig
 handler = factory.Factory.get_handler('fuel',
                                       '10.20.0.2',
-                                      'root',
-                                      installer_pwd='r00tme')
+                                      'ubuntu',
+                                      pkey_file='/var/lib/opnfv/mcp.rsa')
 
 print(handler.get_deployment_info())
 
-print("List of nodes in cluster 4:")
-nodes = handler.get_nodes({'cluster': '4'})
+print("List of nodes in cluster")
+nodes = handler.get_nodes()
 for node in nodes:
     print(node)
 
index 1fd8d44..cd2fc36 100644 (file)
@@ -42,7 +42,7 @@ class Factory(object):
         elif installer.lower() == "fuel":
             return fuel_adapter.FuelAdapter(installer_ip=installer_ip,
                                             installer_user=installer_user,
-                                            installer_pwd=installer_pwd)
+                                            pkey_file=pkey_file)
         elif installer.lower() == "compass":
             return compass_adapter.ContainerizedCompassAdapter(
                 installer_ip=installer_ip,
index a217767..a57168d 100644 (file)
@@ -1,5 +1,5 @@
 ##############################################################################
-# Copyright (c) 2017 Ericsson AB and others.
+# Copyright (c) 2018 Ericsson AB and others.
 # Author: Jose Lausuch (jose.lausuch@ericsson.com)
 #         George Paraskevopoulos (geopar@intracom-telecom.com)
 # All rights reserved. This program and the accompanying materials
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+'''
+    This module implements the Fuel@OPNFV adapter
 
+    - the host executing this module needs connectivity to a cluster via:
+      * the mcpcontrol network (usually 10.20.0.0/24, created by the installer);
+      * the PXE/admin network;
+      Both are always reachable from an OPNFV Pharos jumpserver.
+    - key-based SSH auth is used throughout the cluster, without proxying
+      cluster node access via the Salt master (as old Fuel@OPNFV used to);
+'''
+
+from yaml import safe_load, YAMLError
 
 from opnfv.deployment import manager
 from opnfv.utils import opnfv_logger as logger
 from opnfv.utils import ssh_utils
 
-logger = logger.Logger(__name__).getLogger()
+LOGGER = logger.Logger(__name__).getLogger()
 
 
 class FuelAdapter(manager.DeploymentHandler):
+    '''
+        This class extends the generic handler with Fuel@OPNFV specifics
+    '''
 
-    def __init__(self, installer_ip, installer_user, installer_pwd):
+    def __init__(self, installer_ip, installer_user, pkey_file):
         super(FuelAdapter, self).__init__(installer='fuel',
                                           installer_ip=installer_ip,
                                           installer_user=installer_user,
-                                          installer_pwd=installer_pwd,
-                                          pkey_file=None)
-
-    def _get_clusters(self):
-        environments = []
-        output = self.runcmd_fuel_env()
-        lines = output.rsplit('\n')
-        if len(lines) < 2:
-            logger.info("No environments found in the deployment.")
-            return None
-        else:
-            fields = lines[0].rsplit(' | ')
-
-            index_id = -1
-            index_status = -1
-            index_name = -1
-            index_release_id = -1
-
-            for i in range(len(fields)):
-                if "id" in fields[i]:
-                    index_id = i
-                elif "status" in fields[i]:
-                    index_status = i
-                elif "name" in fields[i]:
-                    index_name = i
-                elif "release_id" in fields[i]:
-                    index_release_id = i
-
-            # order env info
-            for i in range(2, len(lines)):
-                fields = lines[i].rsplit(' | ')
-                dict = {"id": fields[index_id].strip(),
-                        "status": fields[index_status].strip(),
-                        "name": fields[index_name].strip(),
-                        "release_id": fields[index_release_id].strip()}
-                environments.append(dict)
-
-        return environments
+                                          installer_pwd=None,
+                                          pkey_file=pkey_file)
 
     def get_nodes(self, options=None):
-
-        if options and options['cluster'] and len(self.nodes) > 0:
-            n = []
-            for node in self.nodes:
-                if str(node.info['cluster']) == str(options['cluster']):
-                    n.append(node)
-            return n
-
+        '''
+            Generates a list of all the nodes in the deployment
+        '''
+        # Unlike old Fuel@OPNFV, we don't keep track of different clusters
+        # explicitly; they are only distinguished by their domain names.
+        # For simplicity, we assume a single cluster per Salt master node.
         try:
             # if we have retrieved previously all the nodes, don't do it again
             # This fails the first time when the constructor calls this method
             # therefore the try/except
             if len(self.nodes) > 0:
                 return self.nodes
+        # pylint: disable=bare-except
         except:
             pass
 
+        # Manager roles to reclass properties mapping
+        _map = {
+            'salt:master:enabled': manager.Role.INSTALLER,
+            'maas:region:enabled': manager.Role.INSTALLER,
+            'nova:controller:enabled': manager.Role.CONTROLLER,
+            'nova:compute:enabled': manager.Role.COMPUTE,
+            'opendaylight:server:enabled': manager.Role.ODL,
+        }
         nodes = []
-        cmd = 'fuel node'
+        cmd = ("sudo salt '*' pillar.item {} --out yaml --static 2>/dev/null"
+               .format(' '.join(_map.keys() + ['_param:pxe_admin_address'])))
+        # Sample output (for one node):
+        #   cmp001.mcp-ovs-noha.local:
+        #     _param:pxe_admin_address: 192.168.11.34
+        #     maas:region:enabled: ''
+        #     nova:compute:enabled: true
+        #     nova:controller:enabled: ''
+        #     opendaylight:server:enabled: ''
+        #     retcode: 0
+        #     salt:master:enabled: ''
         output = self.installer_node.run_cmd(cmd)
-        lines = output.rsplit('\n')
-        if len(lines) < 2:
-            logger.info("No nodes found in the deployment.")
+        if output.startswith('No minions matched the target'):
+            LOGGER.info('No nodes found in the deployment.')
             return nodes
 
-        # get fields indexes
-        fields = lines[0].rsplit(' | ')
-
-        index_id = -1
-        index_status = -1
-        index_name = -1
-        index_cluster = -1
-        index_ip = -1
-        index_mac = -1
-        index_roles = -1
-        index_online = -1
-
-        for i in range(len(fields)):
-            if "group_id" in fields[i]:
-                break
-            elif "id" in fields[i]:
-                index_id = i
-            elif "status" in fields[i]:
-                index_status = i
-            elif "name" in fields[i]:
-                index_name = i
-            elif "cluster" in fields[i]:
-                index_cluster = i
-            elif "ip" in fields[i]:
-                index_ip = i
-            elif "mac" in fields[i]:
-                index_mac = i
-            elif "roles " in fields[i] and "pending_roles" not in fields[i]:
-                index_roles = i
-            elif "online" in fields[i]:
-                index_online = i
-
-        # order nodes info
-        for i in range(2, len(lines)):
-            fields = lines[i].rsplit(' | ')
-            id = fields[index_id].strip().encode()
-            ip = fields[index_ip].strip().encode()
-            status_node = fields[index_status].strip().encode().lower()
-            name = fields[index_name].strip().encode()
-            roles_all = fields[index_roles].strip().encode().lower()
-
-            roles = [x for x in [manager.Role.CONTROLLER,
-                                 manager.Role.COMPUTE,
-                                 manager.Role.ODL] if x in roles_all]
-
-            dict = {"cluster": fields[index_cluster].strip().encode(),
-                    "mac": fields[index_mac].strip().encode(),
-                    "status_node": status_node,
-                    "online": fields[index_online].strip().encode()}
-
-            ssh_client = None
-            if status_node == 'ready':
-                status = manager.NodeStatus.STATUS_OK
-                proxy = {'ip': self.installer_ip,
-                         'username': self.installer_user,
-                         'password': self.installer_pwd}
-                ssh_client = ssh_utils.get_ssh_client(hostname=ip,
-                                                      username='root',
-                                                      proxy=proxy)
-            elif 'error' in status_node:
-                status = manager.NodeStatus.STATUS_ERROR
-            elif 'off' in status_node:
-                status = manager.NodeStatus.STATUS_OFFLINE
-            elif 'discover' in status_node:
-                status = manager.NodeStatus.STATUS_UNUSED
-            else:
-                status = manager.NodeStatus.STATUS_INACTIVE
-
+        try:
+            yaml_output = safe_load(output)
+        except YAMLError as exc:
+            LOGGER.error(exc)
+        for node_name in yaml_output.keys():
+            ip_addr = yaml_output[node_name]['_param:pxe_admin_address']
+            ssh_client = ssh_utils.get_ssh_client(hostname=ip_addr,
+                                                  username='ubuntu',
+                                                  pkey_file=self.pkey_file)
             node = manager.Node(
-                id, ip, name, status, roles, ssh_client, dict)
-            if options and options['cluster']:
-                if fields[index_cluster].strip() == options['cluster']:
-                    nodes.append(node)
-            else:
-                nodes.append(node)
+                id=node_name,
+                ip=ip_addr,
+                name=node_name,
+                status=manager.NodeStatus.STATUS_OK,
+                roles=[_map[x] for x in _map if yaml_output[node_name][x]],
+                ssh_client=ssh_client)
+            nodes.append(node)
 
-        self.get_nodes_called = True
         return nodes
 
     def get_openstack_version(self):
-        cmd = 'source openrc;nova-manage version 2>/dev/null'
-        version = None
-        for node in self.nodes:
-            if node.is_controller() and node.is_active():
-                version = node.run_cmd(cmd)
-                break
-        return version
+        '''
+        Returns a string with the OpenStack version (as reported by nova-manage)
+        '''
+        cmd = ("sudo salt -C 'I@nova:controller and *01*' "
+               "cmd.run 'nova-manage version 2>/dev/null' --out yaml --static")
+        nova_version = self.installer_node.run_cmd(cmd)
+        if nova_version:
+            return nova_version.split(' ')[-1]
+        return None
 
     def get_sdn_version(self):
-        cmd = "apt-cache policy opendaylight|grep Installed"
+        '''
+        Returns a string with the SDN controller and its version, if one exists
+        '''
+        cmd = ("sudo salt -C 'I@opendaylight:server and *01*'"
+               "pkg.version opendaylight --out yaml --static")
         version = None
         for node in self.nodes:
             if manager.Role.ODL in node.roles and node.is_active():
-                odl_version = node.run_cmd(cmd)
+                odl_version = self.installer_node.run_cmd(cmd)
                 if odl_version:
                     version = 'OpenDaylight ' + odl_version.split(' ')[-1]
                     break
         return version
 
     def get_deployment_status(self):
-        cmd = "fuel env|tail -1|awk '{print $3}'"
-        result = self.installer_node.run_cmd(cmd)
-        if result is None or len(result) == 0:
-            return 'unknown'
-        elif 'operational' in result:
-            return 'active'
-        elif 'deploy' in result:
-            return 'deploying'
-        else:
-            return 'active'
+        '''
+        Returns a string of the status of the deployment
+        '''
+        # NOTE: Requires Fuel-side signaling of deployment status, stub it
+        return 'active'
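The rewritten get_nodes() derives each node's roles from the Salt pillar keys listed in _map. A hedged Python sketch of how the sample pillar output quoted in get_nodes() resolves into roles (role_map stands in for the adapter's _map, with strings instead of manager.Role members):

    # Stand-in for the adapter's _map; real values are manager.Role members.
    role_map = {
        'salt:master:enabled': 'INSTALLER',
        'maas:region:enabled': 'INSTALLER',
        'nova:controller:enabled': 'CONTROLLER',
        'nova:compute:enabled': 'COMPUTE',
        'opendaylight:server:enabled': 'ODL',
    }

    # Sample node copied from the comment in get_nodes() (pillar.item output,
    # already parsed from YAML by the adapter); empty strings are falsy, so
    # only pillar keys that are actually enabled contribute a role.
    node = {
        '_param:pxe_admin_address': '192.168.11.34',
        'maas:region:enabled': '',
        'nova:compute:enabled': True,
        'nova:controller:enabled': '',
        'opendaylight:server:enabled': '',
        'retcode': 0,
        'salt:master:enabled': '',
    }
    roles = [role for key, role in role_map.items() if node.get(key)]
    print(node['_param:pxe_admin_address'], roles)  # 192.168.11.34 ['COMPUTE']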
index 694df77..2b5aedb 100644 (file)
@@ -241,13 +241,13 @@ class Node(object):
         Returns the ovs version installed
         '''
         if self.is_active():
-            cmd = "ovs-vsctl --version|head -1| sed 's/^.*) //'"
-            return self.run_cmd(cmd)
+            cmd = "ovs-vsctl --version 2>/dev/null|head -1| sed 's/^.*) //'"
+            return self.run_cmd(cmd) or None
         return None
 
     def get_system_info(self):
         '''
-        Returns the ovs version installed
+        Returns system information
         '''
         cmd = 'grep MemTotal /proc/meminfo'
         memory = self.run_cmd(cmd).partition('MemTotal:')[-1].strip().encode()
index 141ecbd..193a10a 100644 (file)
@@ -7,9 +7,9 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 #
-# Usage example:
+# Usage example (note: Fuel actually uses key-based auth, not user/pass):
 #   from opnfv.utils.Credentials import Credentials as credentials
-#   credentials("fuel", "10.20.0.2", "root", "r00tme").fetch('./openrc')
+#   credentials("fuel", "10.20.0.2", "user", "password").fetch('./openrc')
 #
 
 import os
@@ -74,12 +74,7 @@ class Credentials(object):
         pass
 
     def __fetch_creds_fuel(self, target_path):
-        creds_file = '/root/openrc'
-        try:
-            self.handler.get_file_from_controller(creds_file, target_path)
-        except Exception as e:
-            self.logger.error(
-                "Cannot get %s from controller. %e" % (creds_file, e))
+        # TODO
         pass
 
     def __fetch_creds_joid(self, target_path):
index 175a380..2272717 100644 (file)
@@ -49,10 +49,11 @@ def get_ssh_client(hostname,
             client = paramiko.SSHClient()
         else:
             client = ProxyHopClient()
+            proxy_password = proxy.get('password', None)
             proxy_pkey_file = proxy.get('pkey_file', '/root/.ssh/id_rsa')
             client.configure_jump_host(proxy['ip'],
                                        proxy['username'],
-                                       proxy['password'],
+                                       proxy_password,
                                        proxy_pkey_file)
         if client is None:
             raise Exception('Could not connect to client')
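
With this change the proxy 'password' key becomes optional. A hedged sketch of the two dictionary shapes the code now accepts (keys taken from the hunk above; the values are placeholders, not real credentials):

    # Hypothetical proxy definitions: 'password' may now be omitted, in which
    # case proxy.get('password', None) yields None and the pkey_file is used
    # for the jump host instead.
    proxy_with_key = {
        'ip': '10.20.0.2',
        'username': 'ubuntu',
        'pkey_file': '/root/.ssh/id_rsa',
    }
    proxy_with_password = {
        'ip': '10.20.0.2',
        'username': 'ubuntu',
        'password': 'secret',
    }
    assert proxy_with_key.get('password', None) is None
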
index 0718fa3..2c51daa 100644 (file)
@@ -1,3 +1,4 @@
 paramiko>=2.0 # LGPLv2.1+
 mock>=2.0 # BSD
 requests>=2.14.2 # Apache-2.0
+pyyaml>=3.11 # MIT
index c097d06..1613f2e 100644 (file)
@@ -25,6 +25,30 @@ releases:
   - version: opnfv-6.0.0
     location:
       apex-os-net-config: e09d110d7b58d26424c28a128cdfd8c766636461
+  - version: opnfv-6.1.0
+    location:
+      apex: a1f2e922d18430e064c9c8311f01ac2a0df19414
+  - version: opnfv-6.1.0
+    location:
+      apex-tripleo-heat-templates: 7f1cc97bcd6f267b11a251de3204edf0e8ec79c0
+  - version: opnfv-6.1.0
+    location:
+      apex-puppet-tripleo: bebd15efe40498c272577c563bc113ec3849dcc6
+  - version: opnfv-6.1.0
+    location:
+      apex-os-net-config: e09d110d7b58d26424c28a128cdfd8c766636461
+  - version: opnfv-6.2.0
+    location:
+      apex: 86826823c9a8cf0f10a6a903c8256bcc7222f5d7
+  - version: opnfv-6.2.0
+    location:
+      apex-tripleo-heat-templates: 7f1cc97bcd6f267b11a251de3204edf0e8ec79c0
+  - version: opnfv-6.2.0
+    location:
+      apex-puppet-tripleo: bebd15efe40498c272577c563bc113ec3849dcc6
+  - version: opnfv-6.2.0
+    location:
+      apex-os-net-config: e09d110d7b58d26424c28a128cdfd8c766636461
 
 branches:
   - name: stable/fraser
similarity index 75%
rename from releases/fraser/armband.yml
rename to releases/fraser/armband.yaml
index 66afd10..c0d0e4f 100644 (file)
@@ -15,6 +15,15 @@ releases:
   - version: opnfv-6.0.0
     location:
       armband: 2b0ac4026a3acf3b10ab2f4c416636d80ab559f0
+  - version: opnfv-6.1.0
+    location:
+      armband: 0de669e26d14794ab117e915abaa6bf6b1dd878d
+  - version: opnfv-6.2.0
+    location:
+      armband: f776d862ce62f66e09a3c695b67b1905f9ecfae7
+  - version: opnfv-6.2.1
+    location:
+      armband: 1262ea206b51abdfd3101123173a64fca8e89bfa
 
 branches:
   - name: stable/fraser
index a86fabb..5425507 100644 (file)
@@ -8,6 +8,18 @@ releases:
     location:
       auto: 3a957a75f99686767d13f45d74b5ca8463559b7b
 
+  - version: opnfv-6.1.0
+    location:
+      auto: b718a43380376d2b19222c4203c97a95ca849ae8
+
+  - version: opnfv-6.1.1
+    location:
+      auto: 0f43563624832c5908e6fc00eb0a4bbc6cc82c33
+
+  - version: opnfv-6.2.0
+    location:
+      auto: d8634fc5ceef5e87484186c7305637f260ccb28f
+
 branches:
   - name: stable/fraser
     location:
index 91163b5..beda625 100644 (file)
@@ -8,6 +8,10 @@ releases:
     location:
       availability: c1ee3631b59009ff61450808e2bd33fe1d4b17f2
 
+  - version: opnfv-6.1.0
+    location:
+      availability: c1ee3631b59009ff61450808e2bd33fe1d4b17f2
+
 branches:
   - name: stable/fraser
     location:
index 4ad1e9d..e8d88f1 100644 (file)
@@ -8,6 +8,10 @@ releases:
     location:
       barometer: 70a568ed8eed6f1a336f1fbe9bbc6900422e0c11
 
+  - version: opnfv-6.1.0
+    location:
+      barometer: 70a568ed8eed6f1a336f1fbe9bbc6900422e0c11
+
 branches:
   - name: stable/fraser
     location:
index e72d0f6..722bb7a 100644 (file)
@@ -7,6 +7,12 @@ releases:
   - version: opnfv-6.0.0
     location:
       bottlenecks: d76736440f852baed2e10a9b92c0557aabc041a6
+  - version: opnfv-6.1.0
+    location:
+      bottlenecks: d76736440f852baed2e10a9b92c0557aabc041a6
+  - version: opnfv-6.2.0
+    location:
+      bottlenecks: 3bf15446777a60510824e4b73da358934df03b98
 
 branches:
   - name: stable/fraser
index e334910..8e3db3c 100644 (file)
@@ -3,6 +3,11 @@ project: calipso
 project-type: tools
 release-model: stable
 
+releases:
+  - version: opnfv-6.1.0
+    location:
+      calipso: 5c821a4d1462f3b0131f37995f40fc77a53468e6
+
 branches:
   - name: stable/fraser
     location:
index c731d2a..d7a87f8 100644 (file)
@@ -10,6 +10,13 @@ releases:
   - version: opnfv-6.0.1
     location:
       clover: be5f20bbe91f1ae3138a1e338eea5b45e89bda82
+  - version: opnfv-6.1.0
+    location:
+      clover: 596a3573b2622a5d314349592088b6b737f5e126
+  - version: opnfv-6.2.0
+    location:
+      clover: 596a3573b2622a5d314349592088b6b737f5e126
+
 
 branches:
   - name: stable/fraser
index 5c44336..99f7962 100644 (file)
@@ -4,6 +4,20 @@ project-type: installer
 release-model: stable
 
 releases:
+  - version: opnfv-6.2.0
+    location:
+      compass4nfv: c51e937d7a121f8a267f0522fb787adbfb1a7eb9
+  - version: opnfv-6.2.0
+    location:
+      compass-containers: 067a72e250f9eea8deed84348aef6681d305a160
+
+  - version: opnfv-6.1.0
+    location:
+      compass4nfv: 499d95cdea092ea9eb76a939b6ea832601d41b7f
+  - version: opnfv-6.1.0
+    location:
+      compass-containers: 15a7cbc925bed896c3c09a5635454c33dbffbadc
+
   - version: opnfv-6.0.0
     location:
       compass4nfv: ab73374e424a679cc42b9812e6bd39a49fcea07d
index ed42ff0..c95007f 100644 (file)
@@ -7,6 +7,9 @@ releases:
   - version: opnfv-6.0.0
     location:
       container4nfv: 38d0ffe507a137167ffdbac860de5c1a738e81a8
+  - version: opnfv-6.1.0
+    location:
+      container4nfv: 38d0ffe507a137167ffdbac860de5c1a738e81a8
 
 branches:
   - name: stable/fraser
index 28cfe7b..ee10261 100644 (file)
@@ -16,6 +16,14 @@ releases:
     location:
       doctor: cd500723c3b2de3655b2dc56ea9647f358183264
 
+  - version: opnfv-6.1.0
+    location:
+      doctor: c91aa72e7df34ec3416be3c9f73f7484a91312fb
+
+  - version: opnfv-6.2.0
+    location:
+      doctor: 7fda374ace0122200146a60c422019399884d9b7
+
 branches:
   - name: stable/fraser
     location:
index 9d08cbf..5544585 100644 (file)
@@ -3,6 +3,14 @@ project: fds
 project-type: feature
 release-model: stable
 
+releases:
+  - version: opnfv-6.1.0
+    location:
+      fds: 6eb4ed8ce9be7135ec65318b7b833f3c487498be
+  - version: opnfv-6.2.0
+    location:
+      fds: 0a8efd332d57324b1dd8b41884730f1d1cdfc2b2
+
 branches:
   - name: stable/fraser
     location:
index 226ba58..f139c67 100644 (file)
@@ -15,6 +15,15 @@ releases:
   - version: opnfv-6.0.0
     location:
       fuel: d45841926790df8313912697d31753c120e2c4aa
+  - version: opnfv-6.1.0
+    location:
+      fuel: e29220a401a5ad0607453d6a82de9b61d1c02aca
+  - version: opnfv-6.2.0
+    location:
+      fuel: 93c3c3acd269016b63e72cfdda16c8652e0b2f00
+  - version: opnfv-6.2.1
+    location:
+      fuel: 56cfed1a8981a16f2671754d8aa29f7c850a0ee0
 
 branches:
   - name: stable/fraser
index d1837b8..0466e6e 100644 (file)
@@ -10,6 +10,18 @@ releases:
   - version: opnfv-6.0.0
     location:
       functest-kubernetes: 770bd295031af2e12da3472b9ed2763bfdc4deaf
+  - version: opnfv-6.1.0
+    location:
+      functest: 7be15936ba4fd0f624200ae6b587081ab292077c
+  - version: opnfv-6.1.0
+    location:
+      functest-kubernetes: c3e48ccef64acc6ba31823e1c8d6138eebbffb8c
+  - version: opnfv-6.2.0
+    location:
+      functest: f7948517b7cfabf8405e64eb4c2057c0105b852f
+  - version: opnfv-6.2.0
+    location:
+      functest-kubernetes: d1a1e7e92053d767e8322d17370a8700562b44a0
 
 branches:
   - name: stable/fraser
index 2e92199..64dd74a 100644 (file)
@@ -15,8 +15,16 @@ releases:
   - version: opnfv-6.0.0
     location:
       ipv6: 5b914695088b39aee3886bd60be4707b7a11a1e8
+  - version: opnfv-6.1.0
+    location:
+      ipv6: 5c8ce8ff16e9dee430357cb60d1eba5a8d1961b7
+  - version: opnfv-6.2.0
+    location:
+      ipv6: e24312ba6ba61f70e9dd815fb03f7bf22dbe311d
 
 branches:
   - name: stable/fraser
     location:
       ipv6: 809dba7f52ff1571a760a01376530de23c5d97c9
+
+release-notes: http://docs.opnfv.org/en/stable-fraser/submodules/ipv6/docs/release/release-notes/release-notes.html
index 6c230c3..fcb6d37 100644 (file)
@@ -16,6 +16,10 @@ releases:
     location:
       joid: 266a4fd08bef19fca930cbbb1d45f5639b87d3c0
 
+  - version: opnfv-6.1.0
+    location:
+      joid: ff656d6b0fa60241c76709a41c023b76ba4def8f
+
 branches:
   - name: stable/fraser
     location:
index 9df1642..ba423d3 100644 (file)
@@ -7,6 +7,9 @@ releases:
   - version: opnfv-6.0.0
     location:
       nfvbench: c8402089ad8686a16ad08ce6c6e16a14c3144d64
+  - version: opnfv-6.1.0
+    location:
+      nfvbench: c8402089ad8686a16ad08ce6c6e16a14c3144d64
 
 branches:
   - name: stable/fraser
index 6e22328..a206f11 100644 (file)
@@ -7,6 +7,9 @@ releases:
   - version: opnfv-6.0.0
     location:
       opnfvdocs: 49538d56771c45d8d437ab00abaa1b52f97ac931
+  - version: opnfv-6.1.0
+    location:
+      opnfvdocs: 98570eb59bb4abda67cea7a22feec45eebc4bec2
 
 branches:
   - name: stable/fraser
index 40eac0c..3b88d79 100644 (file)
@@ -8,6 +8,9 @@ releases:
   - version: opnfv-6.0.0
     location:
       ovn4nfv: 651acb603c2f4523905e4fe5134fd8a7940e1f3d
+  - version: opnfv-6.1.0
+    location:
+      ovn4nfv: 651acb603c2f4523905e4fe5134fd8a7940e1f3d
 
 branches:
   - name: stable/fraser
index 3c30f72..11728a7 100644 (file)
@@ -7,6 +7,13 @@ releases:
   - version: opnfv-6.0.0
     location:
       samplevnf: 4685c59ec97927af559b4bf10001d0e07de34702
+  - version: opnfv-6.1.0
+    location:
+      samplevnf: 95e9ab89f6d8f9911a9fdb625e8721f3d2e20b1a
+  - version: opnfv-6.2.0
+    location:
+      samplevnf: 692702c10772e13b533c908ed3f95a430ad476ce
+
 
 branches:
   - name: stable/fraser
index 6fa8fa5..77e3390 100644 (file)
@@ -7,6 +7,12 @@ releases:
   - version: opnfv-6.0.0
     location:
       sdnvpn: be6cce375c0363f93fa6b1a099eb34fee75a6161
+  - version: opnfv-6.1.0
+    location:
+      sdnvpn: be6cce375c0363f93fa6b1a099eb34fee75a6161
+  - version: opnfv-6.2.0
+    location:
+      sdnvpn: 8e60d220cbe67c9274b32dfe8d73e1c16351a80f
 
 branches:
   - name: stable/fraser
index 83850d7..b1648a7 100644 (file)
@@ -7,6 +7,12 @@ releases:
   - version: opnfv-6.0.0
     location:
       sfc: 084446656bf2794a11de56b782f589af1f703487
+  - version: opnfv-6.1.0
+    location:
+      sfc: 7f8a775dec72e03d6cae2400b7a49d7a097c8957
+  - version: opnfv-6.2.0
+    location:
+      sfc: 297052a5aa6dd596443930e8ad0d2754b70f79ba
 
 branches:
   - name: stable/fraser
index c6c316c..953f5f3 100644 (file)
@@ -7,6 +7,12 @@ releases:
   - version: opnfv-6.0.0
     location:
       snaps: 4edc3d87392cf78c3f046217543fb76380413306
+  - version: opnfv-6.1.0
+    location:
+      snaps: 4edc3d87392cf78c3f046217543fb76380413306
+  - version: opnfv-6.2.0
+    location:
+      snaps: 7f8eee2885899f949eda99281818020301c3c841
 
 branches:
   - name: stable/fraser
index b4fd845..3259b30 100644 (file)
@@ -7,6 +7,9 @@ releases:
   - version: opnfv-6.0.0
     location:
       stor4nfv: fcedde93eb366867ed428d362e2cb8a6b7f28b3c
+  - version: opnfv-6.1.0
+    location:
+      stor4nfv: 3f654aa9b1e19eb9d84b0632a5bc1e7bde96c281
 
 branches:
   - name: stable/fraser
index 1389c7a..b0ef484 100644 (file)
@@ -7,6 +7,12 @@ releases:
   - version: opnfv-6.0.0
     location:
       storperf: 2f8c1546ee5d79f9b4c46e960a74930cfe0fe50e
+  - version: opnfv-6.1.0
+    location:
+      storperf: 8962423227e13f45ef9f7dfbf48ed4847d3f3865
+  - version: opnfv-6.2.0
+    location:
+      storperf: c3707ede35cb59d1ea3139dca6d76de1b76345af
 
 branches:
   - name: stable/fraser
index 95d8db6..f86df40 100644 (file)
@@ -7,6 +7,12 @@ releases:
   - version: opnfv-6.0.0
     location:
       vswitchperf: 998842df061caf3f90adf756c78667262525a6e0
+  - version: opnfv-6.1.0
+    location:
+      vswitchperf: 998842df061caf3f90adf756c78667262525a6e0
+  - version: opnfv-6.2.0
+    location:
+      vswitchperf: 998842df061caf3f90adf756c78667262525a6e0
 
 branches:
   - name: stable/fraser
index 59f6c10..cd87e97 100644 (file)
@@ -7,6 +7,15 @@ releases:
   - version: opnfv-6.0.0
     location:
       yardstick: a4c8f2a99f56dd4c9fbac4021706aa9186d23ed8
+  - version: opnfv-6.1.0
+    location:
+      yardstick: 7dc30d54a77249d95d2a89770393cee98ff63c8c
+  - version: opnfv-6.2.0
+    location:
+      yardstick: b07610535fc00ff02693942fca8b0a55995dc67e
+  - version: opnfv-6.2.1
+    location:
+      yardstick: a99613376daa0656bcd0b24d9c56661ff15c8916
 
 branches:
   - name: stable/fraser
diff --git a/releases/gambia/apex.yaml b/releases/gambia/apex.yaml
new file mode 100644 (file)
index 0000000..fada4cb
--- /dev/null
@@ -0,0 +1,18 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2018 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: apex
+project-type: installer
+release-model: stable
+upstream: https://wiki.openstack.org/wiki/TripleO
+
+branches:
+  - name: stable/gambia
+    location:
+      apex: 82bf9da27ea0c973068720d440e3391084e8a9d9
diff --git a/releases/gambia/armband.yaml b/releases/gambia/armband.yaml
new file mode 100644 (file)
index 0000000..2e5f65a
--- /dev/null
@@ -0,0 +1,24 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2018 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: armband
+project-type: installer
+release-model: stable
+
+releases:
+  - version: opnfv-7.0.0
+    location:
+      armband: 30027365166c204bc03043234bbc6b0dfc2506fb
+
+branches:
+  - name: stable/gambia
+    location:
+      armband: a880b5c0fe55397b73f0fcf8f8c87d523327099d
+
+release-notes: https://opnfv-armband.readthedocs.io/en/latest/release/release-notes/release-notes.html
diff --git a/releases/gambia/auto.yaml b/releases/gambia/auto.yaml
new file mode 100644 (file)
index 0000000..f53a411
--- /dev/null
@@ -0,0 +1,9 @@
+---
+project: auto
+project-type: feature
+release-model: stable
+
+branches:
+  - name: stable/gambia
+    location:
+      auto: 1a2260efe5d15f95b8fa778a9ee8023121facd7e
diff --git a/releases/gambia/availability.yaml b/releases/gambia/availability.yaml
new file mode 100644 (file)
index 0000000..ee28fec
--- /dev/null
@@ -0,0 +1,9 @@
+---
+project: availability
+project-type: feature
+release-model: stable
+
+branches:
+  - name: stable/gambia
+    location:
+      availability: 053a75d02fe1842ba3e2bc0d6b019e1cdc6a6123
diff --git a/releases/gambia/barometer.yaml b/releases/gambia/barometer.yaml
new file mode 100644 (file)
index 0000000..e5c9842
--- /dev/null
@@ -0,0 +1,9 @@
+---
+project: barometer
+project-type: feature
+release-model: stable
+
+branches:
+  - name: stable/gambia
+    location:
+      barometer: 9ac248a08de1f197bafbabbf43175b30ab19ec32
diff --git a/releases/gambia/bottlenecks.yaml b/releases/gambia/bottlenecks.yaml
new file mode 100644 (file)
index 0000000..d32d207
--- /dev/null
@@ -0,0 +1,9 @@
+---
+project: bottlenecks
+project-type: testing
+release-model: stable
+
+branches:
+  - name: stable/gambia
+    location:
+      bottlenecks: 399ec2f328f56c1f81c454ecedfb6b99eaf93c42
diff --git a/releases/gambia/clover.yaml b/releases/gambia/clover.yaml
new file mode 100644 (file)
index 0000000..260d56c
--- /dev/null
@@ -0,0 +1,16 @@
+---
+project: clover
+project-type: feature
+release-model: stable
+
+releases:
+  - version: opnfv-7.0.0
+    location:
+      clover: b0231105e304d21f454bec58c4c2905f8d8b8e5f
+
+branches:
+  - name: stable/gambia
+    location:
+      clover: ee2169ee4b8fb3539ad173fbc1557b54b2f2216f
+
+release-notes: https://opnfv-clover.readthedocs.io/en/stable-gambia/release/release-notes/release-notes.html
diff --git a/releases/gambia/compass4nfv.yaml b/releases/gambia/compass4nfv.yaml
new file mode 100644 (file)
index 0000000..e052388
--- /dev/null
@@ -0,0 +1,12 @@
+---
+project: compass4nfv
+project-type: installer
+release-model: stable
+
+branches:
+  - name: stable/gambia
+    location:
+      compass4nfv: 33f94b43639dbe37b7e8e2b5eeb4c65064207c6b
+  - name: stable/gambia
+    location:
+      compass-containers: 20e229822b31b03e1120c3e5efd4ba131261617e
diff --git a/releases/gambia/container4nfv.yaml b/releases/gambia/container4nfv.yaml
new file mode 100644 (file)
index 0000000..6522cfa
--- /dev/null
@@ -0,0 +1,9 @@
+---
+project: container4nfv
+project-type: feature
+release-model: stable
+
+branches:
+  - name: stable/gambia
+    location:
+      container4nfv: 1fa07ef952a6b0e12487901919cec52b9d9b9739
diff --git a/releases/gambia/doctor.yaml b/releases/gambia/doctor.yaml
new file mode 100644 (file)
index 0000000..3bf86a4
--- /dev/null
@@ -0,0 +1,25 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2018 Nokia Corporation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: doctor
+project-type: feature
+release-model: stable
+
+releases:
+  - version: opnfv-7.0.0
+    location:
+      doctor: 3ddc2392b0ed364eede49ff006d64df3ea456350
+  - version: opnfv-7.0.1
+    location:
+      doctor: 9250be9b9f3e3fff0c5aa827daf89f0212ce964f
+
+branches:
+  - name: stable/gambia
+    location:
+      doctor: 3ddc2392b0ed364eede49ff006d64df3ea456350
diff --git a/releases/gambia/edgecloud.yaml b/releases/gambia/edgecloud.yaml
new file mode 100644 (file)
index 0000000..db80383
--- /dev/null
@@ -0,0 +1,9 @@
+---
+project: edgecloud
+project-type: feature
+release-model: stable
+
+branches:
+  - name: stable/gambia
+    location:
+      edgecloud: 3708debe3f7ff9edd0f0a52998ebf7a799bb712f
diff --git a/releases/gambia/fuel.yaml b/releases/gambia/fuel.yaml
new file mode 100644 (file)
index 0000000..a958209
--- /dev/null
@@ -0,0 +1,24 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2018 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: fuel
+project-type: installer
+release-model: stable
+
+branches:
+  - name: stable/gambia
+    location:
+      fuel: 90a442136b0aff8380388ac0a94831d0904e3cb8
+
+releases:
+  - version: opnfv-7.0.0
+    location:
+      fuel: 326c26b9b89c23e9775ba287d9da3bea35cfc437
+
+release-notes: https://opnfv-fuel.readthedocs.io/en/latest/release/release-notes/release-notes.html
diff --git a/releases/gambia/functest.yaml b/releases/gambia/functest.yaml
new file mode 100644 (file)
index 0000000..722a35e
--- /dev/null
@@ -0,0 +1,23 @@
+---
+project: functest
+project-type: testing
+release-model: stable
+
+releases:
+  - version: opnfv-7.0.0
+    location:
+      functest: b7f9b53a34cd8ffcc9df4d2f286e55e13adb4a58
+  - version: opnfv-7.0.0
+    location:
+      functest-kubernetes: fd5c939094244458aea31ead8c6519da95266c2a
+
+branches:
+  - name: stable/gambia
+    location:
+      functest: d5fce8c12bc73c1b1547df2750563c271a3ab3f6
+  - name: stable/gambia
+    location:
+      functest-kubernetes: ca3c9cf6f57c87aee8c6f0b93f70d84e3df736de
+  - name: stable/gambia
+    location:
+      functest-xtesting: a09ece9bbe9cda52487cf5b443619fec1b3e09ca
diff --git a/releases/gambia/ipv6.yaml b/releases/gambia/ipv6.yaml
new file mode 100644 (file)
index 0000000..a566e54
--- /dev/null
@@ -0,0 +1,17 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2018 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: ipv6
+project-type: feature
+release-model: stable
+
+branches:
+  - name: stable/gambia
+    location:
+      ipv6: e36f753cf6e9ab0c02a400b6bac5c79b48268d44
diff --git a/releases/gambia/nfvbench.yaml b/releases/gambia/nfvbench.yaml
new file mode 100644 (file)
index 0000000..796ec4d
--- /dev/null
@@ -0,0 +1,9 @@
+---
+project: nfvbench
+project-type: tools
+release-model: stable
+
+branches:
+  - name: stable/gambia
+    location:
+      nfvbench: 3dfb5268ebfada08c1a2316708564e627f86da42
diff --git a/releases/gambia/opnfvdocs.yaml b/releases/gambia/opnfvdocs.yaml
new file mode 100644 (file)
index 0000000..b9cd04f
--- /dev/null
@@ -0,0 +1,9 @@
+---
+project: opnfvdocs
+project-type: infra
+release-model: stable
+
+branches:
+  - name: stable/gambia
+    location:
+      opnfvdocs: c89a5b35e81cf085bba892b230ca4516a92ffd57
diff --git a/releases/gambia/ovn4nfv.yaml b/releases/gambia/ovn4nfv.yaml
new file mode 100644 (file)
index 0000000..b5c8ad3
--- /dev/null
@@ -0,0 +1,9 @@
+---
+project: ovn4nfv
+project-type: feature
+release-model: stable
+
+branches:
+  - name: stable/gambia
+    location:
+      ovn4nfv: 9301afc8b3d9314ba19b464b8a7eb3cb7fa3bd7e
diff --git a/releases/gambia/samplevnf.yaml b/releases/gambia/samplevnf.yaml
new file mode 100644 (file)
index 0000000..6efd2d4
--- /dev/null
@@ -0,0 +1,16 @@
+---
+project: samplevnf
+project-type: feature
+release-model: stable
+
+releases:
+  - version: opnfv-7.0.0
+    location:
+      samplevnf: 66ee98f3427439a4cdd56c9f67a25aec140a5c9c
+
+branches:
+  - name: stable/gambia
+    location:
+      samplevnf: 4d59d3530d1c41734f15423142e64eb9c929c717
+
+release-notes: https://opnfv-samplevnf.readthedocs.io/en/latest/release/release-notes/release-notes.html
diff --git a/releases/gambia/sandbox.yaml b/releases/gambia/sandbox.yaml
new file mode 100644 (file)
index 0000000..16f2df2
--- /dev/null
@@ -0,0 +1,9 @@
+---
+project: sandbox
+project-type: infra
+release-model: stable
+
+branches:
+  - name: stable/gambia
+    location:
+      sandbox: c2012f5b642f17e6024db631b833414114a329d5
diff --git a/releases/gambia/sdnvpn.yaml b/releases/gambia/sdnvpn.yaml
new file mode 100644 (file)
index 0000000..c3ee8ca
--- /dev/null
@@ -0,0 +1,9 @@
+---
+project: sdnvpn
+project-type: feature
+release-model: stable
+
+branches:
+  - name: stable/gambia
+    location:
+      sdnvpn: c05105a4f9f51f7bb31cad791e65d664e5a3bc4b
diff --git a/releases/gambia/sfc.yaml b/releases/gambia/sfc.yaml
new file mode 100644 (file)
index 0000000..c32b9c8
--- /dev/null
@@ -0,0 +1,11 @@
+---
+project: sfc
+project-type: feature
+release-model: stable
+
+branches:
+  - name: stable/gambia
+    location:
+      sfc: 77bdb6a79a506f91959070dc8ff28949f2dff825
+
+release-notes: https://opnfv-sfc.readthedocs.io/en/stable-gambia/release/release-notes/
diff --git a/releases/gambia/stor4nfv.yaml b/releases/gambia/stor4nfv.yaml
new file mode 100644 (file)
index 0000000..65c327f
--- /dev/null
@@ -0,0 +1,9 @@
+---
+project: stor4nfv
+project-type: feature
+release-model: stable
+
+branches:
+  - name: stable/gambia
+    location:
+      stor4nfv: cefd1d0854ee948acc7147834146914983a11556
diff --git a/releases/gambia/storperf.yaml b/releases/gambia/storperf.yaml
new file mode 100644 (file)
index 0000000..088f738
--- /dev/null
@@ -0,0 +1,9 @@
+---
+project: storperf
+project-type: testing
+release-model: stable
+
+branches:
+  - name: stable/gambia
+    location:
+      storperf: 4aba838a07f5cd7dbd6d606c34f688e647a5d890
diff --git a/releases/gambia/vswitchperf.yaml b/releases/gambia/vswitchperf.yaml
new file mode 100644 (file)
index 0000000..8daa55e
--- /dev/null
@@ -0,0 +1,9 @@
+---
+project: vswitchperf
+project-type: testing
+release-model: stable
+
+branches:
+  - name: stable/gambia
+    location:
+      vswitchperf: 96de8654952a606d28d56c057ba871b5553e4176
diff --git a/releases/gambia/yardstick.yaml b/releases/gambia/yardstick.yaml
new file mode 100644 (file)
index 0000000..2dc5ad6
--- /dev/null
@@ -0,0 +1,16 @@
+---
+project: yardstick
+project-type: testing
+release-model: stable
+
+branches:
+  - name: stable/gambia
+    location:
+      yardstick: 497292013ef0d7e1e014d76803d4f284618b4986
+
+releases:
+  - version: opnfv-7.0.0
+    location:
+      yardstick: b9fa3eac6caf65a371f339fcbc3abc7cbe5b41cc
+
+release-notes: https://opnfv-yardstick.readthedocs.io/en/stable-gambia/release/release-notes/
diff --git a/releases/hunter/functest.yaml b/releases/hunter/functest.yaml
new file mode 100644 (file)
index 0000000..064ae05
--- /dev/null
@@ -0,0 +1,23 @@
+---
+project: functest
+project-type: testing
+release-model: stable
+
+releases:
+  - version: opnfv-8.0.0
+    location:
+      functest: a3355e22585c66823c430d6adcdef8dddacb33a1
+  - version: opnfv-8.0.0
+    location:
+      functest-kubernetes: ec2bf0e8bd1b4d3b2c4e2894820d2cbb454e36e7
+
+branches:
+  - name: stable/hunter
+    location:
+      functest: d5fce8c12bc73c1b1547df2750563c271a3ab3f6
+  - name: stable/hunter
+    location:
+      functest-kubernetes: ca3c9cf6f57c87aee8c6f0b93f70d84e3df736de
+  - name: stable/hunter
+    location:
+      functest-xtesting: a09ece9bbe9cda52487cf5b443619fec1b3e09ca
diff --git a/releases/scripts/create_branch.py b/releases/scripts/create_branch.py
deleted file mode 100644 (file)
index fa3c92d..0000000
+++ /dev/null
@@ -1,143 +0,0 @@
-#!/usr/bin/env python2
-# SPDX-License-Identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2018 The Linux Foundation and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-"""
-Create Gerrit Branchs
-"""
-
-import argparse
-
-try:
-    import ConfigParser
-except ImportError:
-    import configparser as ConfigParser
-
-import logging
-import os
-import yaml
-
-from requests.compat import quote
-from requests.exceptions import RequestException
-
-from pygerrit2.rest import GerritRestAPI
-from pygerrit2.rest.auth import HTTPDigestAuthFromNetrc, HTTPBasicAuthFromNetrc
-
-
-logging.basicConfig(level=logging.INFO)
-
-
-def quote_branch(arguments):
-    """
-    Quote is used here to escape the '/' in branch name. By
-    default '/' is listed in 'safe' characters which aren't escaped.
-    quote is not used in the data of the PUT request, as quoting for
-    arguments is handled by the request library
-    """
-    new_args = arguments.copy()
-    new_args['branch'] = quote(new_args['branch'], '')
-    return new_args
-
-
-def create_branch(api, arguments):
-    """
-    Create a branch using the Gerrit REST API
-    """
-    logger = logging.getLogger(__file__)
-
-    branch_data = """
-    {
-      "ref": "%(branch)s"
-      "revision": "%(commit)s"
-    }""" % arguments
-
-    # First verify the commit exists, otherwise the branch will be
-    # created at HEAD
-    try:
-        request = api.get("/projects/%(project)s/commits/%(commit)s" %
-                          arguments)
-        logger.debug(request)
-        logger.debug("Commit exists: %(commit)s", arguments)
-    except RequestException as err:
-        if hasattr(err, 'response') and err.response.status_code in [404]:
-            logger.warn("Commit %(commit)s for %(project)s does"
-                        " not exist. Not creating branch.", arguments)
-            logger.warn(err)
-        else:
-            logger.error("Error: %s", str(err))
-        # Skip trying to create the branch
-        return
-
-    # Try to create the branch and let us know if it already exist.
-    try:
-        request = api.put("/projects/%(project)s/branches/%(branch)s" %
-                          quote_branch(arguments), branch_data)
-        logger.info("Branch %(branch)s for %(project)s successfully created",
-                    arguments)
-    except RequestException as err:
-        if hasattr(err, 'response') and err.response.status_code in [412, 409]:
-            logger.info("Branch %(branch)s already created for %(project)s",
-                        arguments)
-            logger.info(err)
-        else:
-            logger.error("Error: %s", str(err))
-
-
-def main():
-    """Given a yamlfile that follows the release syntax, create branches
-    in Gerrit listed under branches"""
-
-    config = ConfigParser.ConfigParser()
-    config.read(os.path.join(os.path.abspath(os.path.dirname(__file__)),
-                'defaults.cfg'))
-    config.read([os.path.expanduser('~/releases.cfg'), 'releases.cfg'])
-
-    gerrit_url = config.get('gerrit', 'url')
-
-    parser = argparse.ArgumentParser()
-    parser.add_argument('--file', '-f',
-                        type=argparse.FileType('r'),
-                        required=True)
-    parser.add_argument('--basicauth', '-b', action='store_true')
-    args = parser.parse_args()
-
-    GerritAuth = HTTPDigestAuthFromNetrc
-    if args.basicauth:
-        GerritAuth = HTTPBasicAuthFromNetrc
-
-    try:
-        auth = GerritAuth(url=gerrit_url)
-    except ValueError as err:
-        logging.error("%s for %s", err, gerrit_url)
-        quit(1)
-    restapi = GerritRestAPI(url=gerrit_url, auth=auth)
-
-    project = yaml.safe_load(args.file)
-
-    create_branches(restapi, project)
-
-
-def create_branches(restapi, project):
-    """Create branches for a specific project defined in the release
-    file"""
-
-    branches = []
-    for branch in project['branches']:
-        repo, ref = next(iter(branch['location'].items()))
-        branches.append({
-            'project': repo,
-            'branch': branch['name'],
-            'commit': ref
-        })
-
-    for branch in branches:
-        create_branch(restapi, branch)
-
-
-if __name__ == "__main__":
-    main()
diff --git a/releases/scripts/release-status.sh b/releases/scripts/release-status.sh
new file mode 100755 (executable)
index 0000000..6790100
--- /dev/null
@@ -0,0 +1,25 @@
+#!/bin/bash
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2018 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+set -o pipefail
+
+TAG="${TAG:-opnfv-6.0.0}"
+RELEASE="${RELEASE:-fraser}"
+
+[ -a repos.txt ] && rm repos.txt
+
+for project in releases/$RELEASE/*; do
+    python releases/scripts/repos.py -n -f $project >> repos.txt
+done
+
+while read -r repo
+do
+    tag="$(git ls-remote "https://gerrit.opnfv.org/gerrit/$repo.git" "refs/tags/$TAG")"
+    echo "$repo $tag"
+done < repos.txt
diff --git a/releases/scripts/repos.py b/releases/scripts/repos.py
new file mode 100644 (file)
index 0000000..91c4e93
--- /dev/null
@@ -0,0 +1,121 @@
+#!/usr/bin/env python2
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2018 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+"""
+List Release Repos
+"""
+
+import argparse
+import yaml
+
+
+class Repo(object):
+    """Object representing a repo listed in the release file.
+
+    Includes eq, hash, and ne methods so set comparisons work
+    """
+
+    def __init__(self, repo=None, ref=None, version=None):
+        self.repo = repo
+        self.ref = ref
+        self.version = version
+
+    def __repr__(self):
+        if self.version:
+            return "%s %s %s" % (self.repo, self.ref, self.version)
+        elif self.ref:
+            return "%s %s" % (self.repo, self.ref)
+        return "%s" % self.repo
+
+    def __eq__(self, obj):
+        if isinstance(obj, Repo):
+            return ((self.repo == obj.repo) and
+                    (self.ref == obj.ref) and
+                    (self.version == obj.version))
+        return False
+
+    def __ne__(self, obj):
+        return (not self.__eq__(obj))
+
+    def __hash__(self):
+        return hash(self.__repr__())
+
+
+def main():
+    """Given a release yamlfile list the repos it contains"""
+
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--file', '-f',
+                        type=argparse.FileType('r'),
+                        required=True)
+    parser.add_argument('--names', '-n',
+                        action='store_true',
+                        default=False,
+                        help="Only print the names of repos, "
+                             "not their SHAs")
+    parser.add_argument('--release', '-r',
+                        type=str,
+                        help="Only print"
+                             "SHAs for the specified release")
+    parser.add_argument('--branches', '-b',
+                        action='store_true',
+                        default=False,
+                        help="Print Branch info")
+
+    args = parser.parse_args()
+
+    project = yaml.safe_load(args.file)
+
+    if args.branches:
+        list_branches(project, args)
+    else:
+        list_repos(project, args)
+
+
+def list_repos(project, args):
+    """List repositories in the project file"""
+
+    lookup = project.get('releases', [])
+
+    if 'releases' not in project:
+        exit(0)
+    repos = set()
+    for item in lookup:
+        repo, ref = next(iter(item['location'].items()))
+        if args.names:
+            repos.add(Repo(repo))
+        elif args.release and item['version'] == args.release:
+            repos.add(Repo(repo, ref))
+        elif not args.release:
+            repos.add(Repo(repo, item['version'], ref))
+    for repo in repos:
+        print(repo)
+
+
+def list_branches(project, args):
+    """List branches in the project file"""
+
+    lookup = project.get('branches', [])
+
+    if 'branches' not in project:
+        exit(0)
+    repos = set()
+    for item in lookup:
+        repo, ref = next(iter(item['location'].items()))
+        if args.names:
+            repos.add(Repo(repo))
+        elif args.release and item['name'] == args.release:
+            repos.add(Repo(repo, ref))
+        elif not args.release:
+            repos.add(Repo(repo, item['name'], ref))
+    for repo in repos:
+        print(repo)
+
+if __name__ == "__main__":
+    main()
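
The Repo class above defines __eq__, __ne__, and __hash__ so that list_repos() and list_branches() can collect entries in a set and print each repo/ref pair only once. A minimal, self-contained sketch (a stripped-down copy of the class, for illustration only) of that deduplication behaviour:

    # Duplicate (repo, ref, version) tuples collapse when gathered in a set.
    class Repo(object):
        def __init__(self, repo=None, ref=None, version=None):
            self.repo, self.ref, self.version = repo, ref, version

        def __repr__(self):
            return "%s %s %s" % (self.repo, self.ref, self.version)

        def __eq__(self, obj):
            return isinstance(obj, Repo) and repr(self) == repr(obj)

        def __ne__(self, obj):
            return not self.__eq__(obj)

        def __hash__(self):
            return hash(repr(self))

    repos = {Repo('apex', 'opnfv-6.2.0'), Repo('apex', 'opnfv-6.2.0')}
    assert len(repos) == 1
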
diff --git a/tox.ini b/tox.ini
index abdffc5..0cc9e23 100644 (file)
--- a/tox.ini
+++ b/tox.ini
@@ -15,9 +15,9 @@ setenv=
 
 [testenv:jjb]
 deps =
-  jenkins-job-builder==1.6.1
+  jenkins-job-builder==2.5.0
 commands=
-  jenkins-jobs test -o job_output -r jjb/global:{posargs:"jjb/"}
+  jenkins-jobs test -o job_output -r {posargs:"jjb/"}
 
 [testenv:docs]
 deps = -r{toxinidir}/docs/requirements.txt
index 1984b49..2b35006 100755 (executable)
@@ -31,7 +31,7 @@ do
     for index in $(gsutil ls -l gs://artifacts.opnfv.org/logs/"$project"/ |awk 'NF==1'| sed s,gs://artifacts.opnfv.org/,, )
     do
     index="$(echo ${index%/*} | sed s,/,_,g)"
-      echo "<LI><a href=\"http://artifacts.opnfv.org/${index%/*}.html\">"$index"</a></LI>" >> $OUTPUT
+      echo "<LI><a href=\"https://artifacts.opnfv.org/${index%/*}.html\">"$index"</a></LI>" >> $OUTPUT
     done
 
 done
@@ -55,7 +55,7 @@ rm -f $OUTPUT
       echo "<LI>$path</LI>" >> $OUTPUT
       echo "</UL>" >> $OUTPUT
     else
-      echo "<LI><a href=\"http://artifacts.opnfv.org/$filepath\">"$filepath"</a></LI>" >> $OUTPUT
+      echo "<LI><a href=\"https://artifacts.opnfv.org/$filepath\">"$filepath"</a></LI>" >> $OUTPUT
     fi
 done
 
@@ -85,7 +85,7 @@ rm -f $OUTPUT
           echo "<LI>$path</LI>" >> $OUTPUT
           echo "</UL>" >> $OUTPUT
         else
-          echo "<LI><a href=\"http://artifacts.opnfv.org/$filepath\">"$filepath"</a></LI>" >> $OUTPUT
+          echo "<LI><a href=\"https://artifacts.opnfv.org/$filepath\">"$filepath"</a></LI>" >> $OUTPUT
         fi
 
 done
@@ -119,7 +119,7 @@ do
               echo "<LI>$path</LI>" >> $OUTPUT
               echo "</UL>" >> $OUTPUT
             else
-              echo "<LI><a href=\"http://artifacts.opnfv.org/$filepath\">"$filepath"</a></LI>" >> $OUTPUT
+              echo "<LI><a href=\"https://artifacts.opnfv.org/$filepath\">"$filepath"</a></LI>" >> $OUTPUT
             fi
 
 
index 0fcce71..a4d3b08 100644 (file)
       pip:
         name: tox
         state: present
+    - name: install yamllint
+      pip:
+        name: yamllint
+        state: present
     - include: vars/docker-compose-CentOS.yml
       when: ansible_distribution == "CentOS"
     - include: vars/docker-compose-Ubuntu.yml
       when: ansible_distribution == "Ubuntu"
+    - name: Install manifest-tool
+      get_url:
+        url: '{{ manifest_tool_url }}/{{ manifest_tool_version }}/manifest-tool-linux-amd64'
+        dest: '{{ manifest_tool_bin_dir }}/manifest-tool'
+        mode: '755'
index 528388f..50839be 100644 (file)
     name: "{{ item }}"
     state: present
   with_items:
-    - python-pip
-    - rpm-build
+    - bc
+    - collectd
+    - doxygen
+    - facter
+    - jq
     - kernel-headers
     - libpcap-devel
-    - zlib-devel
-    - numactl-devel
-    - doxygen
-    - python-sphinx
     - libvirt-devel
-    - python-devel
+    - net-tools
+    - numactl-devel
     - openssl-devel
+    - python-devel
+    - python-pip
     - python-six
-    - net-tools
-    - bc
+    - python-sphinx
+    - rpm-build
     - sysstat
     - xmlstarlet
-    - facter
+    - zlib-devel
 
 - name: install the 'Development tools' package group
   yum:
index e91c485..bd77a4d 100644 (file)
     - sysstat
     - xmlstarlet
     - facter
+    - jq
+    - python-tox
+    - collectd-dev
+    - python3
+    - python3-dev
+    - libdpdk-dev
+    - dpdk-dev
+    - fakeroot
+    - devscripts
+    - debhelper
+    - dpkg-dev
+    - po-debconf
+    - dh-systemd
+    - dh-strip-nondeterminism
+    - autotools-dev
+    - libltdl-dev
+    - iptables-dev
+    - javahelper
+    - libatasmart-dev
+    - libcap-dev
+    - libcurl4-gnutls-dev
+    - libdbi0-dev
+    - libesmtp-dev
+    - libganglia1-dev
+    - libgcrypt11-dev
+    - libgps-dev
+    - libhiredis-dev
+    - libi2c-dev
+    - libldap2-dev
+    - liblua5.3-dev
+    - liblvm2-dev
+    - libmemcached-dev
+    - libmodbus-dev
+    - libmnl-dev
+    - libmosquitto-dev
+    - libmysqlclient-dev
+    - libnotify-dev
+    - libopenipmi-dev
+    - liboping-dev
+    - libow-dev
+    - libpcap0.8-dev
+    - libpcap-dev
+    - libperl-dev
+    - libpq-dev
+    - libprotobuf-c-dev
+    - libriemann-client-dev
+    - librdkafka-dev
+    - librabbitmq-dev
+    - librrd-dev
+    - libsensors4-dev
+    - libsigrok-dev
+    - libsnmp-dev
+    - libsnmp9-dev
+    - snmp
+    - snmp-mibs-downloader
+    - snmpd
+    - perl
+    - libtokyocabinet-dev
+    - libtokyotyrant-dev
+    - libudev-dev
+    - libupsclient-dev
+    - libvarnishapi-dev
+    - libvirt-dev
+    - libvirt-daemon
+    - libxen-dev
+    - libyajl-dev
+    - linux-libc-dev
+    - default-jdk
+    - protobuf-c-compiler
+    - openvswitch-switch
+    - mcelog
 
 - name: Add Docker apt key.
   apt_key:
index 8d83380..ff37c95 100644 (file)
@@ -21,3 +21,7 @@ docker_yum_repo_url: https://download.docker.com/linux/centos/docker-ce.repo
 # yamllint enable rule:line-length
 docker_yum_repo_enable_edge: 0
 docker_yum_repo_enable_test: 0
+
+manifest_tool_version: 'v0.7.0'
+manifest_tool_url: 'https://github.com/estesp/manifest-tool/releases/download'
+manifest_tool_bin_dir: '/usr/local/bin'
index 4bc5858..5010950 100755 (executable)
@@ -8,7 +8,6 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 set -o errexit
-set -o nounset
 set -o pipefail
 
 usage() {
@@ -149,23 +148,33 @@ if [ "$installer_type" == "fuel" ]; then
     echo $auth_url >> $dest_path
 
 elif [ "$installer_type" == "apex" ]; then
-    if ! ipcalc -c $installer_ip; then
-      installer_ip=$(sudo virsh domifaddr undercloud | grep -Eo '[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}')
-      if [ -z "$installer_ip" ] || ! $(ipcalc -c $installer_ip); then
-        echo "Unable to find valid IP for Apex undercloud: ${installer_ip}"
-        exit 1
-      fi
-    fi
-    verify_connectivity $installer_ip
+    if [ -n "$RC_FILE_PATH" ]; then
+        echo "RC_FILE_PATH is set: ${RC_FILE_PATH}. Copying RC FILE to ${dest_path}"
+        sudo cp -f ${RC_FILE_PATH} ${dest_path}
+    else
+        if ! ipcalc -c $installer_ip; then
+            installer_ip=$(sudo virsh domifaddr undercloud | grep -Eo '[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}')
+            if [ -z "$installer_ip" ] || ! $(ipcalc -c $installer_ip); then
+                echo "Unable to find valid IP for Apex undercloud: ${installer_ip}"
+                exit 1
+            fi
+        fi
+        verify_connectivity $installer_ip
 
-    # The credentials file is located in the Instack VM (192.0.2.1)
-    # NOTE: This might change for bare metal deployments
-    info "... from Instack VM $installer_ip..."
-    if [ -f /root/.ssh/id_rsa ]; then
-        chmod 600 /root/.ssh/id_rsa
-    fi
-    sudo scp $ssh_options root@$installer_ip:/home/stack/overcloudrc.v3 $dest_path
+        # The credentials file is located in the Instack VM (192.0.2.1)
+        # NOTE: This might change for bare metal deployments
+        info "... from Instack VM $installer_ip..."
+        if [ -f /root/.ssh/id_rsa ]; then
+            chmod 600 /root/.ssh/id_rsa
+        fi
 
+        if [ "${BRANCH}" == "stable/fraser" ]; then
+            rc_file=overcloudrc.v3
+        else
+            rc_file=overcloudrc
+        fi
+        sudo scp $ssh_options root@$installer_ip:/home/stack/${rc_file} $dest_path
+    fi
 elif [ "$installer_type" == "compass" ]; then
     if [ "${BRANCH}" == "stable/danube" ]; then
         verify_connectivity $installer_ip
index f45e409..94ffbaa 100755 (executable)
@@ -51,10 +51,14 @@ else
   if [ -z "$NODE_NAME" ];
     then echo "Cannot find node name"
       exit 0
-    else echo "Importing key for '$NODE_NAME'";
-     gsutil cp gs://opnfv-signing-keys/"$NODE_NAME"-subkey .
-     gpg2 --import "$NODE_NAME"-subkey
-     rm -f "$NODE_NAME"-subkey
-   fi
+  elif gsutil ls gs://opnfv-signing-keys | grep $NODE_NAME; then
+    echo "Importing key for '$NODE_NAME'"
+    gsutil cp gs://opnfv-signing-keys/"$NODE_NAME"-subkey .
+    gpg2 --import "$NODE_NAME"-subkey
+    rm -f "$NODE_NAME"-subkey
+  else
+    echo "No keys found locally or remotely for host, skipping import"
+    exit 0
+  fi
 fi
 
index a87e399..253cfa7 100644 (file)
@@ -21,15 +21,15 @@ dir_result="${HOME}/opnfv/$project/results/${branch}"
 # src: https://wiki.opnfv.org/display/INF/Hardware+Infrastructure
 # + intel-pod12 (vsperf)
 node_list=(\
-'lf-pod1' 'lf-pod2' 'intel-pod2' 'intel-pod12' \
+'lf-pod1' 'lf-pod2' \
 'lf-virtual2' 'lf-virtual3' \
-'intel-pod5' 'intel-pod6' 'intel-pod7' 'intel-pod8' 'intel-pod18' \
+'intel-pod12' 'intel-pod18' \
 'ericsson-pod1' 'ericsson-pod2' \
 'ericsson-virtual1' 'ericsson-virtual2'  'ericsson-virtual3' \
-'ericsson-virtual4' 'ericsson-virtual5' 'ericsson-virtual12' \
-'arm-pod1' 'arm-pod5' 'arm-pod6' \
-'huawei-pod1' 'huawei-pod2' 'huawei-pod3' 'huawei-pod4' 'huawei-pod5' \
-'huawei-pod6' 'huawei-pod7' 'huawei-pod12' \
+'ericsson-virtual4' 'ericsson-virtual5' \
+'arm-pod5' 'arm-pod6' \
+'huawei-pod1' 'huawei-pod2' 'huawei-pod3' 'huawei-pod4' \
+'huawei-pod6' 'huawei-pod7' 'huawei-pod12' 'huawei-pod8' \
 'huawei-virtual1' 'huawei-virtual2' 'huawei-virtual3' 'huawei-virtual4' \
 'huawei-virtual5' 'huawei-virtual8' 'huawei-virtual9' \
 'zte-pod2' 'zte-pod3' 'zte-pod9' \
@@ -64,11 +64,11 @@ if [ -d "$dir_result" ]; then
             else
                 echo "Uploading logs to artifact $project_artifact"
                 gsutil -m cp -r "$dir_result"/* gs://artifacts.opnfv.org/"$project_artifact"/ >/dev/null 2>&1
-                echo "Logs can be found in http://artifacts.opnfv.org/logs_${project}_${testbed}.html"
+                echo "Logs can be found in https://artifacts.opnfv.org/logs_${project}_${testbed}.html"
                 cd $dir_result
                 files=($(find . -name \* -print|sed 's/^\.//'|sed '/^\s*$/d'))
                 for f in ${files[@]}; do
-                    echo "http://artifacts.opnfv.org/${project_artifact}${f}"
+                    echo "https://artifacts.opnfv.org/${project_artifact}${f}"
                 done
             fi
         fi